diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml
index 5b2a2add5d..ab744149e7 100644
--- a/.mvn/extensions.xml
+++ b/.mvn/extensions.xml
@@ -27,4 +27,12 @@
maven-notifier
2.1.2
+
+
+ io.opentelemetry.contrib
+ opentelemetry-maven-extension
+ 1.24.0-alpha
+
diff --git a/.mvn/wrapper/MavenWrapperDownloader.java b/.mvn/wrapper/MavenWrapperDownloader.java
index 732313c431..84d1e60d8d 100644
--- a/.mvn/wrapper/MavenWrapperDownloader.java
+++ b/.mvn/wrapper/MavenWrapperDownloader.java
@@ -23,85 +23,50 @@
import java.net.PasswordAuthentication;
import java.net.URL;
import java.nio.file.Files;
-import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
-import java.nio.file.StandardOpenOption;
-import java.util.Properties;
public final class MavenWrapperDownloader
{
- private static final String WRAPPER_VERSION = "3.1.1";
+ private static final String WRAPPER_VERSION = "3.2.0";
private static final boolean VERBOSE = Boolean.parseBoolean( System.getenv( "MVNW_VERBOSE" ) );
- /**
- * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
- */
- private static final String DEFAULT_DOWNLOAD_URL =
- "https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/" + WRAPPER_VERSION
- + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
-
- /**
- * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to use instead of the
- * default one.
- */
- private static final String MAVEN_WRAPPER_PROPERTIES_PATH = ".mvn/wrapper/maven-wrapper.properties";
-
- /**
- * Path where the maven-wrapper.jar will be saved to.
- */
- private static final String MAVEN_WRAPPER_JAR_PATH = ".mvn/wrapper/maven-wrapper.jar";
-
- /**
- * Name of the property which should be used to override the default download url for the wrapper.
- */
- private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
-
public static void main( String[] args )
{
- if ( args.length == 0 )
- {
- System.err.println( " - ERROR projectBasedir parameter missing" );
- System.exit( 1 );
- }
+ log( "Apache Maven Wrapper Downloader " + WRAPPER_VERSION );
- log( " - Downloader started" );
- final String dir = args[0].replace( "..", "" ); // Sanitize path
- final Path projectBasedir = Paths.get( dir ).toAbsolutePath().normalize();
- if ( !Files.isDirectory( projectBasedir, LinkOption.NOFOLLOW_LINKS ) )
+ if ( args.length != 2 )
{
- System.err.println( " - ERROR projectBasedir not exists: " + projectBasedir );
+ System.err.println( " - ERROR wrapperUrl or wrapperJarPath parameter missing" );
System.exit( 1 );
}
- log( " - Using base directory: " + projectBasedir );
-
- // If the maven-wrapper.properties exists, read it and check if it contains a custom
- // wrapperUrl parameter.
- Path mavenWrapperPropertyFile = projectBasedir.resolve( MAVEN_WRAPPER_PROPERTIES_PATH );
- String url = readWrapperUrl( mavenWrapperPropertyFile );
-
try
{
- Path outputFile = projectBasedir.resolve( MAVEN_WRAPPER_JAR_PATH );
- createDirectories( outputFile.getParent() );
- downloadFileFromURL( url, outputFile );
+ log( " - Downloader started" );
+ final URL wrapperUrl = new URL( args[0] );
+ final String jarPath = args[1].replace( "..", "" ); // Sanitize path
+ final Path wrapperJarPath = Paths.get( jarPath ).toAbsolutePath().normalize();
+ downloadFileFromURL( wrapperUrl, wrapperJarPath );
log( "Done" );
- System.exit( 0 );
}
catch ( IOException e )
{
- System.err.println( "- Error downloading" );
- e.printStackTrace();
+ System.err.println( "- Error downloading: " + e.getMessage() );
+ if ( VERBOSE )
+ {
+ e.printStackTrace();
+ }
System.exit( 1 );
}
}
- private static void downloadFileFromURL( String urlString, Path destination ) throws IOException
+ private static void downloadFileFromURL( URL wrapperUrl, Path wrapperJarPath )
+ throws IOException
{
- log( " - Downloading to: " + destination );
+ log( " - Downloading to: " + wrapperJarPath );
if ( System.getenv( "MVNW_USERNAME" ) != null && System.getenv( "MVNW_PASSWORD" ) != null )
{
final String username = System.getenv( "MVNW_USERNAME" );
@@ -115,40 +80,11 @@ protected PasswordAuthentication getPasswordAuthentication()
}
} );
}
- URL website = new URL( urlString );
- try ( InputStream inStream = website.openStream() ) {
- Files.copy( inStream, destination, StandardCopyOption.REPLACE_EXISTING );
- }
- log( " - Downloader complete" );
- }
-
- private static void createDirectories(Path outputPath) throws IOException
- {
- if ( !Files.isDirectory( outputPath, LinkOption.NOFOLLOW_LINKS ) ) {
- Path createDirectories = Files.createDirectories( outputPath );
- log( " - Directories created: " + createDirectories );
- }
- }
-
- private static String readWrapperUrl( Path mavenWrapperPropertyFile )
- {
- String url = DEFAULT_DOWNLOAD_URL;
- if ( Files.exists( mavenWrapperPropertyFile, LinkOption.NOFOLLOW_LINKS ) )
+ try ( InputStream inStream = wrapperUrl.openStream() )
{
- log( " - Reading property file: " + mavenWrapperPropertyFile );
- try ( InputStream in = Files.newInputStream( mavenWrapperPropertyFile, StandardOpenOption.READ ) )
- {
- Properties mavenWrapperProperties = new Properties();
- mavenWrapperProperties.load( in );
- url = mavenWrapperProperties.getProperty( PROPERTY_NAME_WRAPPER_URL, DEFAULT_DOWNLOAD_URL );
- }
- catch ( IOException e )
- {
- System.err.println( " - ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'" );
- }
+ Files.copy( inStream, wrapperJarPath, StandardCopyOption.REPLACE_EXISTING );
}
- log( " - Downloading from: " + url );
- return url;
+ log( " - Downloader complete" );
}
private static void log( String msg )
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
index dc3affce3d..d8b2495a1e 100644
--- a/.mvn/wrapper/maven-wrapper.properties
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -6,7 +6,7 @@
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
@@ -14,5 +14,5 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.6/apache-maven-3.8.6-bin.zip
-wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.1/apache-maven-3.9.1-bin.zip
+wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar
diff --git a/README.md b/README.md
index 90fe547da0..e80fb73961 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@ Documentation is available at https://docs.dremio.com.
* JDK 8 or 11 (OpenJDK or Oracle) as the default JDK (`JAVA_HOME` set to it)
* JDK 8 (OpenJDK or Oracle) in Maven toolchain, required to run certain integration tests
-* (Optional) Maven 3.3.9 or later (using Homebrew: `brew install maven`)
+* (Optional) Maven 3.9.1 or later (using Homebrew: `brew install maven`)
Run the following commands to verify that you have the correct versions of Maven and JDK installed:
diff --git a/client/base/pom.xml b/client/base/pom.xml
index c9380e8658..be2da7417d 100644
--- a/client/base/pom.xml
+++ b/client/base/pom.xml
@@ -22,7 +22,7 @@
com.dremio.client
dremio-client-parent
- 24.0.0-202302100528110223-3a169b7c
+ 24.1.0-202306130653310132-d30779f6
dremio-client-base
diff --git a/client/base/src/main/java/com/dremio/exec/client/DremioClient.java b/client/base/src/main/java/com/dremio/exec/client/DremioClient.java
index 50c4a23d7d..1bb2da2344 100644
--- a/client/base/src/main/java/com/dremio/exec/client/DremioClient.java
+++ b/client/base/src/main/java/com/dremio/exec/client/DremioClient.java
@@ -148,6 +148,11 @@ public ServiceSet getOrCreateServiceSet(String serviceName) {
return clusterCoordinator.getOrCreateServiceSet(serviceName);
}
+ @Override
+ public void deleteServiceSet(String serviceName) {
+ clusterCoordinator.deleteServiceSet(serviceName);
+ }
+
@Override
public Iterable getServiceNames() throws Exception {
return clusterCoordinator.getServiceNames();
@@ -852,6 +857,7 @@ void cleanUpResources() {
resources.add(clusterCoordinator);
resources.add(new AutoCloseable() {
+ @Override
public void close() throws Exception {
try {
eventLoopGroup.shutdownGracefully(0, 0, TimeUnit.SECONDS).sync();
diff --git a/client/jdbc/pom.xml b/client/jdbc/pom.xml
index 035d0c4236..292ef6fba8 100644
--- a/client/jdbc/pom.xml
+++ b/client/jdbc/pom.xml
@@ -21,7 +21,7 @@
com.dremio.client
dremio-client-parent
- 24.0.0-202302100528110223-3a169b7c
+ 24.1.0-202306130653310132-d30779f6
dremio-client-jdbc
Client - JDBC Driver
diff --git a/client/jdbc/src/main/java/com/dremio/exec/vector/accessor/BitAccessor.java b/client/jdbc/src/main/java/com/dremio/exec/vector/accessor/BitAccessor.java
index e7f8b08a53..ac881bf9c4 100644
--- a/client/jdbc/src/main/java/com/dremio/exec/vector/accessor/BitAccessor.java
+++ b/client/jdbc/src/main/java/com/dremio/exec/vector/accessor/BitAccessor.java
@@ -59,6 +59,7 @@ public int getInt(int index) {
return ac.get(index);
}
+ @Override
public boolean getBoolean(int index) {
if (ac.isNull(index)) {
return false;
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioConnectionImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioConnectionImpl.java
index ec955a2177..f199b1d97d 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioConnectionImpl.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioConnectionImpl.java
@@ -94,7 +94,8 @@ protected DremioConnectionImpl(DriverImpl driver, AvaticaFactory factory,
this.client = new DremioClient(driver.getSabotConfig(), config.isDirect());
final String connect = config.getZookeeperConnectionString();
- this.client.setClientName("Dremio JDBC Driver");
+ this.client.setClientName(isDremioToDremio(info) ? "Dremio-to-Dremio" : "Dremio JDBC Driver");
+
this.client.connect(connect, info);
} catch (OutOfMemoryException e) {
throw new SQLException("Failure creating root allocator", e);
@@ -106,6 +107,10 @@ protected DremioConnectionImpl(DriverImpl driver, AvaticaFactory factory,
}
}
+ private boolean isDremioToDremio(Properties info) {
+ return Boolean.parseBoolean((String) info.get("D2D"));
+ }
+
@Override
protected AvaticaStatement lookupStatement(StatementHandle h) throws SQLException {
return super.lookupStatement(h);
@@ -159,8 +164,7 @@ public void commit() throws SQLException {
throwIfClosed();
if ( getAutoCommit() ) {
throw new JdbcApiSqlException( "Can't call commit() in auto-commit mode." );
- }
- else {
+ } else {
// (Currently not reachable.)
throw new SQLFeatureNotSupportedException(
"Connection.commit() is not supported. (Dremio is not transactional.)" );
@@ -172,8 +176,7 @@ public void rollback() throws SQLException {
throwIfClosed();
if ( getAutoCommit() ) {
throw new JdbcApiSqlException( "Can't call rollback() in auto-commit mode." );
- }
- else {
+ } else {
// (Currently not reachable.)
throw new SQLFeatureNotSupportedException(
"Connection.rollback() is not supported. (Dremio is not transactional.)" );
@@ -256,13 +259,11 @@ public void setNetworkTimeout( Executor executor, int milliseconds )
if ( null == executor ) {
throw new InvalidParameterSqlException(
"Invalid (null) \"executor\" parameter to setNetworkTimeout(...)" );
- }
- else if ( milliseconds < 0 ) {
+ } else if ( milliseconds < 0 ) {
throw new InvalidParameterSqlException(
"Invalid (negative) \"milliseconds\" parameter to"
+ " setNetworkTimeout(...) (" + milliseconds + ")" );
- }
- else {
+ } else {
if ( 0 != milliseconds ) {
throw new SQLFeatureNotSupportedException(
"Setting network timeout is not supported." );
@@ -336,8 +337,7 @@ public CallableStatement prepareCall(String sql) throws SQLException {
throwIfClosed();
try {
return super.prepareCall(sql);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -347,8 +347,7 @@ public String nativeSQL(String sql) throws SQLException {
throwIfClosed();
try {
return super.nativeSQL(sql);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -442,8 +441,7 @@ public CallableStatement prepareCall(String sql, int resultSetType,
throwIfClosed();
try {
return super.prepareCall(sql, resultSetType, resultSetConcurrency);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -453,8 +451,7 @@ public Map> getTypeMap() throws SQLException {
throwIfClosed();
try {
return super.getTypeMap();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -464,8 +461,7 @@ public void setTypeMap(Map> map) throws SQLException {
throwIfClosed();
try {
super.setTypeMap(map);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -490,8 +486,7 @@ public CallableStatement prepareCall(String sql, int resultSetType,
try {
return super.prepareCall(sql, resultSetType, resultSetConcurrency,
resultSetHoldability);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -502,8 +497,7 @@ public PreparedStatement prepareStatement(String sql,
throwIfClosed();
try {
return super.prepareStatement(sql, autoGeneratedKeys);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -514,8 +508,7 @@ public PreparedStatement prepareStatement(String sql,
throwIfClosed();
try {
return super.prepareStatement(sql, columnIndexes);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -526,8 +519,7 @@ public PreparedStatement prepareStatement(String sql,
throwIfClosed();
try {
return super.prepareStatement(sql, columnNames);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -537,8 +529,7 @@ public Clob createClob() throws SQLException {
throwIfClosed();
try {
return super.createClob();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -548,8 +539,7 @@ public Blob createBlob() throws SQLException {
throwIfClosed();
try {
return super.createBlob();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -559,8 +549,7 @@ public NClob createNClob() throws SQLException {
throwIfClosed();
try {
return super.createNClob();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -570,8 +559,7 @@ public SQLXML createSQLXML() throws SQLException {
throwIfClosed();
try {
return super.createSQLXML();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -590,8 +578,7 @@ public void setClientInfo(String name, String value) throws SQLClientInfoExcepti
}
try {
super.setClientInfo(name, value);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
SQLFeatureNotSupportedException intended =
new SQLFeatureNotSupportedException(e.getMessage(), e);
throw new SQLClientInfoException(e.getMessage(), null, intended);
@@ -607,8 +594,7 @@ public void setClientInfo(Properties properties) throws SQLClientInfoException {
}
try {
super.setClientInfo(properties);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
SQLFeatureNotSupportedException intended =
new SQLFeatureNotSupportedException(e.getMessage(), e);
throw new SQLClientInfoException(e.getMessage(), null, intended);
@@ -620,8 +606,7 @@ public String getClientInfo(String name) throws SQLException {
throwIfClosed();
try {
return super.getClientInfo(name);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -631,8 +616,7 @@ public Properties getClientInfo() throws SQLException {
throwIfClosed();
try {
return super.getClientInfo();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -642,8 +626,7 @@ public Array createArrayOf(String typeName, Object[] elements) throws SQLExcepti
throwIfClosed();
try {
return super.createArrayOf(typeName, elements);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -653,8 +636,7 @@ public Struct createStruct(String typeName, Object[] attributes) throws SQLExcep
throwIfClosed();
try {
return super.createStruct(typeName, attributes);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -676,8 +658,7 @@ public void abort(Executor executor) throws SQLException {
throwIfClosed();
try {
super.abort(executor);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioCursor.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioCursor.java
index 5ab1b2487b..031c7cf65b 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioCursor.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioCursor.java
@@ -518,8 +518,7 @@ private boolean nextRowInternally() throws SQLException {
try {
schemaChanged = currentBatchHolder.load(qrb.getHeader().getDef(),
qrb.getData());
- }
- finally {
+ } finally {
qrb.release();
}
schema = currentBatchHolder.getSchema();
@@ -533,32 +532,27 @@ private boolean nextRowInternally() throws SQLException {
}
return true;
}
- }
- catch ( UserException e ) {
+ } catch ( UserException e ) {
// A normally expected case--for any server-side error (e.g., syntax
// error in SQL statement).
// Construct SQLException with message text from the UserException.
// TODO: Map UserException error type to SQLException subclass (once
// error type is accessible, of course. :-( )
throw new SQLException( e.getMessage(), e );
- }
- catch ( TimeoutException e ) {
+ } catch ( TimeoutException e ) {
throw new SqlTimeoutException(
String.format("Cancelled after expiration of timeout of %d seconds.", statement.getQueryTimeout()),
e);
- }
- catch ( InterruptedException e ) {
+ } catch ( InterruptedException e ) {
// Not normally expected--Dremio doesn't interrupt in this area (right?)--
// but JDBC client certainly could.
throw new SQLException( "Interrupted.", e );
- }
- catch ( SchemaChangeException e ) {
+ } catch ( SchemaChangeException e ) {
// TODO: Clean: DRILL-2933: RecordBatchLoader.load(...) no longer
// throws SchemaChangeException, so check/clean catch clause.
throw new SQLException(
"Unexpected SchemaChangeException from RecordBatchLoader.load(...)" );
- }
- catch ( RuntimeException e ) {
+ } catch ( RuntimeException e ) {
throw new SQLException( "Unexpected RuntimeException: " + e.toString(), e );
}
@@ -649,14 +643,12 @@ public boolean next() throws SQLException {
if ( afterLastRow ) {
// We're already after end of rows/records--just report that after end.
return false;
- }
- else if ( returnTrueForNextCallToNext ) {
+ } else if ( returnTrueForNextCallToNext ) {
++currentRowNumber;
// We have a deferred "not after end" to report--reset and report that.
returnTrueForNextCallToNext = false;
return true;
- }
- else {
+ } else {
accessors.clearLastColumnIndexedInRow();
boolean res = nextRowInternally();
if (res) { ++ currentRowNumber; }
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioDatabaseMetaDataImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioDatabaseMetaDataImpl.java
index 9ad9c8ca13..3970732e2d 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioDatabaseMetaDataImpl.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioDatabaseMetaDataImpl.java
@@ -1277,13 +1277,11 @@ public boolean ownUpdatesAreVisible(int type) throws SQLException {
throwIfClosed();
try {
return super.ownUpdatesAreVisible(type);
- }
- catch (RuntimeException e) {
+ } catch (RuntimeException e) {
if ("todo: implement this method".equals(e.getMessage())) {
throw new SQLFeatureNotSupportedException(
"ownUpdatesAreVisible(int) is not supported", e);
- }
- else {
+ } else {
throw new SQLException(e.getMessage(), e);
}
}
@@ -1294,13 +1292,11 @@ public boolean ownDeletesAreVisible(int type) throws SQLException {
throwIfClosed();
try {
return super.ownDeletesAreVisible(type);
- }
- catch (RuntimeException e) {
+ } catch (RuntimeException e) {
if ("todo: implement this method".equals(e.getMessage())) {
throw new SQLFeatureNotSupportedException(
"ownDeletesAreVisible(int) is not supported", e);
- }
- else {
+ } else {
throw new SQLException(e.getMessage(), e);
}
}
@@ -1311,13 +1307,11 @@ public boolean ownInsertsAreVisible(int type) throws SQLException {
throwIfClosed();
try {
return super.ownInsertsAreVisible(type);
- }
- catch (RuntimeException e) {
+ } catch (RuntimeException e) {
if ("todo: implement this method".equals(e.getMessage())) {
throw new SQLFeatureNotSupportedException(
"ownInsertsAreVisible(int) is not supported", e);
- }
- else {
+ } else {
throw new SQLException(e.getMessage(), e);
}
}
@@ -1328,13 +1322,11 @@ public boolean othersUpdatesAreVisible(int type) throws SQLException {
throwIfClosed();
try {
return super.othersUpdatesAreVisible(type);
- }
- catch (RuntimeException e) {
+ } catch (RuntimeException e) {
if ("todo: implement this method".equals(e.getMessage())) {
throw new SQLFeatureNotSupportedException(
"othersUpdatesAreVisible(int) is not supported", e);
- }
- else {
+ } else {
throw new SQLException(e.getMessage(), e);
}
}
@@ -1345,13 +1337,11 @@ public boolean othersDeletesAreVisible(int type) throws SQLException {
throwIfClosed();
try {
return super.othersDeletesAreVisible(type);
- }
- catch (RuntimeException e) {
+ } catch (RuntimeException e) {
if ("todo: implement this method".equals(e.getMessage())) {
throw new SQLFeatureNotSupportedException(
"othersDeletesAreVisible(int) is not supported", e);
- }
- else {
+ } else {
throw new SQLException(e.getMessage(), e);
}
}
@@ -1362,13 +1352,11 @@ public boolean othersInsertsAreVisible(int type) throws SQLException {
throwIfClosed();
try {
return super.othersInsertsAreVisible(type);
- }
- catch (RuntimeException e) {
+ } catch (RuntimeException e) {
if ("todo: implement this method".equals(e.getMessage())) {
throw new SQLFeatureNotSupportedException(
"othersInsertsAreVisible(int) is not supported", e);
- }
- else {
+ } else {
throw new SQLException(e.getMessage(), e);
}
}
@@ -1379,13 +1367,11 @@ public boolean updatesAreDetected(int type) throws SQLException {
throwIfClosed();
try {
return super.updatesAreDetected(type);
- }
- catch (RuntimeException e) {
+ } catch (RuntimeException e) {
if ("todo: implement this method".equals(e.getMessage())) {
throw new SQLFeatureNotSupportedException(
"updatesAreDetected(int) is not supported", e);
- }
- else {
+ } else {
throw new SQLException(e.getMessage(), e);
}
}
@@ -1396,13 +1382,11 @@ public boolean deletesAreDetected(int type) throws SQLException {
throwIfClosed();
try {
return super.deletesAreDetected(type);
- }
- catch (RuntimeException e) {
+ } catch (RuntimeException e) {
if ("todo: implement this method".equals(e.getMessage())) {
throw new SQLFeatureNotSupportedException(
"deletesAreDetected(int) is not supported", e);
- }
- else {
+ } else {
throw new SQLException(e.getMessage(), e);
}
}
@@ -1413,13 +1397,11 @@ public boolean insertsAreDetected(int type) throws SQLException {
throwIfClosed();
try {
return super.insertsAreDetected(type);
- }
- catch (RuntimeException e) {
+ } catch (RuntimeException e) {
if ("todo: implement this method".equals(e.getMessage())) {
throw new SQLFeatureNotSupportedException(
"insertsAreDetected(int) is not supported", e);
- }
- else {
+ } else {
throw new SQLException(e.getMessage(), e);
}
}
@@ -1497,13 +1479,11 @@ public boolean supportsResultSetHoldability(int holdability) throws SQLException
throwIfClosed();
try {
return super.supportsResultSetHoldability(holdability);
- }
- catch (RuntimeException e) {
+ } catch (RuntimeException e) {
if ("todo: implement this method".equals(e.getMessage())) {
throw new SQLFeatureNotSupportedException(
"supportsResultSetHoldability(int) is not supported", e);
- }
- else {
+ } else {
throw new SQLException(e.getMessage(), e);
}
}
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioMetaImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioMetaImpl.java
index af39e77e5a..4b1c77b8d4 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioMetaImpl.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioMetaImpl.java
@@ -75,8 +75,7 @@ private DremioMeta getDelegate() throws SQLException {
if (connection.getConfig().isServerMetadataDisabled() ||
! connection.getClient().getSupportedMethods().containsAll(requiredMetaMethods)) {
delegate = new DremioMetaClientImpl(connection);
- }
- else {
+ } else {
delegate = new DremioMetaServerImpl(connection);
}
}
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioPreparedStatementImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioPreparedStatementImpl.java
index d0a6871dff..74d6585291 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioPreparedStatementImpl.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioPreparedStatementImpl.java
@@ -121,8 +121,7 @@ public ResultSet executeQuery(String sql) throws SQLException {
throwIfClosed();
try {
return super.executeQuery(sql);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -132,8 +131,7 @@ public long executeLargeUpdate(String sql) throws SQLException {
throwIfClosed();
try {
return super.executeLargeUpdate(sql);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -145,8 +143,7 @@ public int getMaxFieldSize() throws SQLException {
throwIfClosed();
try {
return super.getMaxFieldSize();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -156,8 +153,7 @@ public void setMaxFieldSize(int max) throws SQLException {
throwIfClosed();
try {
super.setMaxFieldSize(max);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -185,8 +181,7 @@ public void setEscapeProcessing(boolean enable) throws SQLException {
throwIfClosed();
try {
super.setEscapeProcessing(enable);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -226,8 +221,7 @@ public void setCursorName(String name) throws SQLException {
throwIfClosed();
try {
super.setCursorName(name);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -255,8 +249,7 @@ public boolean getMoreResults() throws SQLException {
throwIfClosed();
try {
return super.getMoreResults();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -290,8 +283,7 @@ public int getResultSetConcurrency() throws SQLException {
throwIfClosed();
try {
return super.getResultSetConcurrency();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -301,8 +293,7 @@ public int getResultSetType() throws SQLException {
throwIfClosed();
try {
return super.getResultSetType();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -312,8 +303,7 @@ public void addBatch(String sql) throws SQLException {
throwIfClosed();
try {
super.addBatch(sql);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -329,8 +319,7 @@ public int[] executeBatch() throws SQLException {
throwIfClosed();
try {
return super.executeBatch();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -340,8 +329,7 @@ public boolean getMoreResults(int current) throws SQLException {
throwIfClosed();
try {
return super.getMoreResults(current);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -351,8 +339,7 @@ public ResultSet getGeneratedKeys() throws SQLException {
throwIfClosed();
try {
return super.getGeneratedKeys();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -362,8 +349,7 @@ public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException
throwIfClosed();
try {
return super.executeUpdate(sql, autoGeneratedKeys);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -373,8 +359,7 @@ public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
throwIfClosed();
try {
return super.executeUpdate(sql, columnIndexes);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -384,8 +369,7 @@ public int executeUpdate(String sql, String[] columnNames) throws SQLException {
throwIfClosed();
try {
return super.executeUpdate(sql, columnNames);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -395,8 +379,7 @@ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
throwIfClosed();
try {
return super.execute(sql, autoGeneratedKeys);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -406,8 +389,7 @@ public boolean execute(String sql, int[] columnIndexes) throws SQLException {
throwIfClosed();
try {
return super.execute(sql, columnIndexes);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -417,8 +399,7 @@ public boolean execute(String sql, String[] columnNames) throws SQLException {
throwIfClosed();
try {
return super.execute(sql, columnNames);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -428,8 +409,7 @@ public int getResultSetHoldability() throws SQLException {
throwIfClosed();
try {
return super.getResultSetHoldability();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -449,8 +429,7 @@ public void setPoolable(boolean poolable) throws SQLException {
throwIfClosed();
try {
super.setPoolable(poolable);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -460,8 +439,7 @@ public boolean isPoolable() throws SQLException {
throwIfClosed();
try {
return super.isPoolable();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -489,8 +467,7 @@ public long executeLargeUpdate() throws SQLException {
throwIfClosed();
try {
return super.executeLargeUpdate();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -519,8 +496,7 @@ public void clearParameters() throws SQLException {
throwIfClosed();
try {
super.clearParameters();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -534,8 +510,7 @@ public boolean execute() throws SQLException {
throwIfClosed();
try {
return super.execute();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -545,8 +520,7 @@ public void addBatch() throws SQLException {
throwIfClosed();
try {
super.addBatch();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioResultSetImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioResultSetImpl.java
index b581ddc95c..ba7884737a 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioResultSetImpl.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioResultSetImpl.java
@@ -94,8 +94,7 @@ private void throwIfClosed() throws AlreadyClosedSqlException,
hasPendingCancelationNotification = false;
throw new ExecutionCanceledSqlException(
"SQL statement execution canceled; ResultSet now closed." );
- }
- else {
+ } else {
throw new AlreadyClosedSqlException( "ResultSet is already closed." );
}
}
@@ -376,8 +375,7 @@ public String getCursorName() throws SQLException {
throwIfClosed();
try {
return super.getCursorName();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -472,8 +470,7 @@ public boolean isLast() throws SQLException {
throwIfClosed();
try {
return super.isLast();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -483,8 +480,7 @@ public void beforeFirst() throws SQLException {
throwIfClosed();
try {
super.beforeFirst();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -494,8 +490,7 @@ public void afterLast() throws SQLException {
throwIfClosed();
try {
super.afterLast();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -505,8 +500,7 @@ public boolean first() throws SQLException {
throwIfClosed();
try {
return super.first();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -516,8 +510,7 @@ public boolean last() throws SQLException {
throwIfClosed();
try {
return super.last();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -535,8 +528,7 @@ public boolean absolute( int row ) throws SQLException {
throwIfClosed();
try {
return super.absolute( row );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -546,8 +538,7 @@ public boolean relative( int rows ) throws SQLException {
throwIfClosed();
try {
return super.relative( rows );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -557,8 +548,7 @@ public boolean previous() throws SQLException {
throwIfClosed();
try {
return super.previous();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -629,8 +619,7 @@ public void updateNull( int columnIndex ) throws SQLException {
throwIfClosed();
try {
super.updateNull( columnIndex );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -640,8 +629,7 @@ public void updateBoolean( int columnIndex, boolean x ) throws SQLException {
throwIfClosed();
try {
super.updateBoolean( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -651,8 +639,7 @@ public void updateByte( int columnIndex, byte x ) throws SQLException {
throwIfClosed();
try {
super.updateByte( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -662,8 +649,7 @@ public void updateShort( int columnIndex, short x ) throws SQLException {
throwIfClosed();
try {
super.updateShort( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -673,8 +659,7 @@ public void updateInt( int columnIndex, int x ) throws SQLException {
throwIfClosed();
try {
super.updateInt( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -684,8 +669,7 @@ public void updateLong( int columnIndex, long x ) throws SQLException {
throwIfClosed();
try {
super.updateLong( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -695,8 +679,7 @@ public void updateFloat( int columnIndex, float x ) throws SQLException {
throwIfClosed();
try {
super.updateFloat( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -706,8 +689,7 @@ public void updateDouble( int columnIndex, double x ) throws SQLException {
throwIfClosed();
try {
super.updateDouble( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -718,8 +700,7 @@ public void updateBigDecimal( int columnIndex,
throwIfClosed();
try {
super.updateBigDecimal( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -729,8 +710,7 @@ public void updateString( int columnIndex, String x ) throws SQLException {
throwIfClosed();
try {
super.updateString( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -740,8 +720,7 @@ public void updateBytes( int columnIndex, byte[] x ) throws SQLException {
throwIfClosed();
try {
super.updateBytes( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -751,8 +730,7 @@ public void updateDate( int columnIndex, Date x ) throws SQLException {
throwIfClosed();
try {
super.updateDate( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -762,8 +740,7 @@ public void updateTime( int columnIndex, Time x ) throws SQLException {
throwIfClosed();
try {
super.updateTime( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -773,8 +750,7 @@ public void updateTimestamp( int columnIndex, Timestamp x ) throws SQLException
throwIfClosed();
try {
super.updateTimestamp( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -785,8 +761,7 @@ public void updateAsciiStream( int columnIndex, InputStream x,
throwIfClosed();
try {
super.updateAsciiStream( columnIndex, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -797,8 +772,7 @@ public void updateBinaryStream( int columnIndex, InputStream x,
throwIfClosed();
try {
super.updateBinaryStream( columnIndex, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -809,8 +783,7 @@ public void updateCharacterStream( int columnIndex, Reader x,
throwIfClosed();
try {
super.updateCharacterStream( columnIndex, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -821,8 +794,7 @@ public void updateObject( int columnIndex, Object x,
throwIfClosed();
try {
super.updateObject( columnIndex, x, scaleOrLength );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -832,8 +804,7 @@ public void updateObject( int columnIndex, Object x ) throws SQLException {
throwIfClosed();
try {
super.updateObject( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -843,8 +814,7 @@ public void updateNull( String columnLabel ) throws SQLException {
throwIfClosed();
try {
super.updateNull( columnLabel );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -854,8 +824,7 @@ public void updateBoolean( String columnLabel, boolean x ) throws SQLException {
throwIfClosed();
try {
super.updateBoolean( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -865,8 +834,7 @@ public void updateByte( String columnLabel, byte x ) throws SQLException {
throwIfClosed();
try {
super.updateByte( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -876,8 +844,7 @@ public void updateShort( String columnLabel, short x ) throws SQLException {
throwIfClosed();
try {
super.updateShort( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -887,8 +854,7 @@ public void updateInt( String columnLabel, int x ) throws SQLException {
throwIfClosed();
try {
super.updateInt( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -898,8 +864,7 @@ public void updateLong( String columnLabel, long x ) throws SQLException {
throwIfClosed();
try {
super.updateLong( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -909,8 +874,7 @@ public void updateFloat( String columnLabel, float x ) throws SQLException {
throwIfClosed();
try {
super.updateFloat( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -920,8 +884,7 @@ public void updateDouble( String columnLabel, double x ) throws SQLException {
throwIfClosed();
try {
super.updateDouble( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -932,8 +895,7 @@ public void updateBigDecimal( String columnLabel,
throwIfClosed();
try {
super.updateBigDecimal( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -943,8 +905,7 @@ public void updateString( String columnLabel, String x ) throws SQLException {
throwIfClosed();
try {
super.updateString( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -954,8 +915,7 @@ public void updateBytes( String columnLabel, byte[] x ) throws SQLException {
throwIfClosed();
try {
super.updateBytes( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -965,8 +925,7 @@ public void updateDate( String columnLabel, Date x ) throws SQLException {
throwIfClosed();
try {
super.updateDate( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -976,8 +935,7 @@ public void updateTime( String columnLabel, Time x ) throws SQLException {
throwIfClosed();
try {
super.updateTime( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -987,8 +945,7 @@ public void updateTimestamp( String columnLabel, Timestamp x ) throws SQLExcepti
throwIfClosed();
try {
super.updateTimestamp( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -999,8 +956,7 @@ public void updateAsciiStream( String columnLabel, InputStream x,
throwIfClosed();
try {
super.updateAsciiStream( columnLabel, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1011,8 +967,7 @@ public void updateBinaryStream( String columnLabel, InputStream x,
throwIfClosed();
try {
super.updateBinaryStream( columnLabel, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1023,8 +978,7 @@ public void updateCharacterStream( String columnLabel, Reader reader,
throwIfClosed();
try {
super.updateCharacterStream( columnLabel, reader, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1035,8 +989,7 @@ public void updateObject( String columnLabel, Object x,
throwIfClosed();
try {
super.updateObject( columnLabel, x, scaleOrLength );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1046,8 +999,7 @@ public void updateObject( String columnLabel, Object x ) throws SQLException {
throwIfClosed();
try {
super.updateObject( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1057,8 +1009,7 @@ public void insertRow() throws SQLException {
throwIfClosed();
try {
super.insertRow();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1068,8 +1019,7 @@ public void updateRow() throws SQLException {
throwIfClosed();
try {
super.updateRow();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1079,8 +1029,7 @@ public void deleteRow() throws SQLException {
throwIfClosed();
try {
super.deleteRow();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1090,8 +1039,7 @@ public void refreshRow() throws SQLException {
throwIfClosed();
try {
super.refreshRow();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1101,8 +1049,7 @@ public void cancelRowUpdates() throws SQLException {
throwIfClosed();
try {
super.cancelRowUpdates();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1112,8 +1059,7 @@ public void moveToInsertRow() throws SQLException {
throwIfClosed();
try {
super.moveToInsertRow();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1123,8 +1069,7 @@ public void moveToCurrentRow() throws SQLException {
throwIfClosed();
try {
super.moveToCurrentRow();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1253,8 +1198,7 @@ public void updateRef( int columnIndex, Ref x ) throws SQLException {
throwIfClosed();
try {
super.updateRef( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1264,8 +1208,7 @@ public void updateRef( String columnLabel, Ref x ) throws SQLException {
throwIfClosed();
try {
super.updateRef( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1275,8 +1218,7 @@ public void updateBlob( int columnIndex, Blob x ) throws SQLException {
throwIfClosed();
try {
super.updateBlob( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1286,8 +1228,7 @@ public void updateBlob( String columnLabel, Blob x ) throws SQLException {
throwIfClosed();
try {
super.updateBlob( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1297,8 +1238,7 @@ public void updateClob( int columnIndex, Clob x ) throws SQLException {
throwIfClosed();
try {
super.updateClob( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1308,8 +1248,7 @@ public void updateClob( String columnLabel, Clob x ) throws SQLException {
throwIfClosed();
try {
super.updateClob( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1319,8 +1258,7 @@ public void updateArray( int columnIndex, Array x ) throws SQLException {
throwIfClosed();
try {
super.updateArray( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1330,8 +1268,7 @@ public void updateArray( String columnLabel, Array x ) throws SQLException {
throwIfClosed();
try {
super.updateArray( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1342,8 +1279,7 @@ public RowId getRowId( int columnIndex ) throws SQLException {
throwIfClosed();
try {
return super.getRowId( columnIndex );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1353,8 +1289,7 @@ public RowId getRowId( String columnLabel ) throws SQLException {
throwIfClosed();
try {
return super.getRowId( columnLabel );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1364,8 +1299,7 @@ public void updateRowId( int columnIndex, RowId x ) throws SQLException {
throwIfClosed();
try {
super.updateRowId( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1375,8 +1309,7 @@ public void updateRowId( String columnLabel, RowId x ) throws SQLException {
throwIfClosed();
try {
super.updateRowId( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1398,8 +1331,7 @@ public void updateNString( int columnIndex, String nString ) throws SQLException
throwIfClosed();
try {
super.updateNString( columnIndex, nString );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1410,8 +1342,7 @@ public void updateNString( String columnLabel,
throwIfClosed();
try {
super.updateNString( columnLabel, nString );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1421,8 +1352,7 @@ public void updateNClob( int columnIndex, NClob nClob ) throws SQLException {
throwIfClosed();
try {
super.updateNClob( columnIndex, nClob );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1432,8 +1362,7 @@ public void updateNClob( String columnLabel, NClob nClob ) throws SQLException {
throwIfClosed();
try {
super.updateNClob( columnLabel, nClob );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1468,8 +1397,7 @@ public void updateSQLXML( int columnIndex,
throwIfClosed();
try {
super.updateSQLXML( columnIndex, xmlObject );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1480,8 +1408,7 @@ public void updateSQLXML( String columnLabel,
throwIfClosed();
try {
super.updateSQLXML( columnLabel, xmlObject );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1516,8 +1443,7 @@ public void updateNCharacterStream( int columnIndex, Reader x,
throwIfClosed();
try {
super.updateNCharacterStream( columnIndex, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1528,8 +1454,7 @@ public void updateNCharacterStream( String columnLabel, Reader reader,
throwIfClosed();
try {
super.updateNCharacterStream( columnLabel, reader, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1540,8 +1465,7 @@ public void updateAsciiStream( int columnIndex, InputStream x,
throwIfClosed();
try {
super.updateAsciiStream( columnIndex, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1552,8 +1476,7 @@ public void updateBinaryStream( int columnIndex, InputStream x,
throwIfClosed();
try {
super.updateBinaryStream( columnIndex, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1564,8 +1487,7 @@ public void updateCharacterStream( int columnIndex, Reader x,
throwIfClosed();
try {
super.updateCharacterStream( columnIndex, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1576,8 +1498,7 @@ public void updateAsciiStream( String columnLabel, InputStream x,
throwIfClosed();
try {
super.updateAsciiStream( columnLabel, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1588,8 +1509,7 @@ public void updateBinaryStream( String columnLabel, InputStream x,
throwIfClosed();
try {
super.updateBinaryStream( columnLabel, x, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1600,8 +1520,7 @@ public void updateCharacterStream( String columnLabel, Reader reader,
throwIfClosed();
try {
super.updateCharacterStream( columnLabel, reader, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1612,8 +1531,7 @@ public void updateBlob( int columnIndex, InputStream inputStream,
throwIfClosed();
try {
super.updateBlob( columnIndex, inputStream, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1624,8 +1542,7 @@ public void updateBlob( String columnLabel, InputStream inputStream,
throwIfClosed();
try {
super.updateBlob( columnLabel, inputStream, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1636,8 +1553,7 @@ public void updateClob( int columnIndex, Reader reader,
throwIfClosed();
try {
super.updateClob( columnIndex, reader, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1648,8 +1564,7 @@ public void updateClob( String columnLabel, Reader reader,
throwIfClosed();
try {
super.updateClob( columnLabel, reader, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1660,8 +1575,7 @@ public void updateNClob( int columnIndex, Reader reader,
throwIfClosed();
try {
super.updateNClob( columnIndex, reader, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1672,8 +1586,7 @@ public void updateNClob( String columnLabel, Reader reader,
throwIfClosed();
try {
super.updateNClob( columnLabel, reader, length );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1685,8 +1598,7 @@ public void updateNCharacterStream( int columnIndex,
throwIfClosed();
try {
super.updateNCharacterStream( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1697,8 +1609,7 @@ public void updateNCharacterStream( String columnLabel,
throwIfClosed();
try {
super.updateNCharacterStream( columnLabel, reader );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1709,8 +1620,7 @@ public void updateAsciiStream( int columnIndex,
throwIfClosed();
try {
super.updateAsciiStream( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1721,8 +1631,7 @@ public void updateBinaryStream( int columnIndex,
throwIfClosed();
try {
super.updateBinaryStream( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1733,8 +1642,7 @@ public void updateCharacterStream( int columnIndex,
throwIfClosed();
try {
super.updateCharacterStream( columnIndex, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1745,8 +1653,7 @@ public void updateAsciiStream( String columnLabel,
throwIfClosed();
try {
super.updateAsciiStream( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1757,8 +1664,7 @@ public void updateBinaryStream( String columnLabel,
throwIfClosed();
try {
super.updateBinaryStream( columnLabel, x );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1769,8 +1675,7 @@ public void updateCharacterStream( String columnLabel,
throwIfClosed();
try {
super.updateCharacterStream( columnLabel, reader );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1781,8 +1686,7 @@ public void updateBlob( int columnIndex,
throwIfClosed();
try {
super.updateBlob( columnIndex, inputStream );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1793,8 +1697,7 @@ public void updateBlob( String columnLabel,
throwIfClosed();
try {
super.updateBlob( columnLabel, inputStream );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1804,8 +1707,7 @@ public void updateClob( int columnIndex, Reader reader ) throws SQLException {
throwIfClosed();
try {
super.updateClob( columnIndex, reader );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1815,8 +1717,7 @@ public void updateClob( String columnLabel, Reader reader ) throws SQLException
throwIfClosed();
try {
super.updateClob( columnLabel, reader );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1826,8 +1727,7 @@ public void updateNClob( int columnIndex, Reader reader ) throws SQLException {
throwIfClosed();
try {
super.updateNClob( columnIndex, reader );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1837,8 +1737,7 @@ public void updateNClob( String columnLabel, Reader reader ) throws SQLExceptio
throwIfClosed();
try {
super.updateNClob( columnLabel, reader );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -1878,8 +1777,7 @@ protected DremioResultSetImpl execute() throws SQLException{
if (signature.cursorFactory != null) {
// Avatica accessors have to be wrapped to match Dremio behaviour regarding exception thrown
super.execute();
- }
- else {
+ } else {
DremioCursor cursor = new DremioCursor(connection, statement, signature);
super.execute2(cursor, this.signature.columns);
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementImpl.java
index aaf66f95eb..502f7829a8 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementImpl.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementImpl.java
@@ -83,8 +83,7 @@ private SQLException unwrapIfExtra( final SQLException superMethodException ) {
final Throwable cause = superMethodException.getCause();
if ( null != cause && cause instanceof SQLException ) {
result = (SQLException) cause;
- }
- else {
+ } else {
result = superMethodException;
}
return result;
@@ -95,8 +94,7 @@ public boolean execute( String sql ) throws SQLException {
throwIfClosed();
try {
return super.execute( sql );
- }
- catch ( final SQLException possiblyExtraWrapperException ) {
+ } catch ( final SQLException possiblyExtraWrapperException ) {
throw unwrapIfExtra( possiblyExtraWrapperException );
}
}
@@ -106,8 +104,7 @@ public ResultSet executeQuery( String sql ) throws SQLException {
try {
throwIfClosed();
return super.executeQuery( sql );
- }
- catch ( final SQLException possiblyExtraWrapperException ) {
+ } catch ( final SQLException possiblyExtraWrapperException ) {
throw unwrapIfExtra( possiblyExtraWrapperException );
}
}
@@ -117,8 +114,7 @@ public long executeLargeUpdate( String sql ) throws SQLException {
throwIfClosed();
try {
return super.executeLargeUpdate( sql );
- }
- catch ( final SQLException possiblyExtraWrapperException ) {
+ } catch ( final SQLException possiblyExtraWrapperException ) {
throw unwrapIfExtra( possiblyExtraWrapperException );
}
}
@@ -128,8 +124,7 @@ public int executeUpdate( String sql, int[] columnIndexes ) throws SQLException
throwIfClosed();
try {
return super.executeUpdate( sql, columnIndexes );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -139,8 +134,7 @@ public int executeUpdate( String sql, String[] columnNames ) throws SQLException
throwIfClosed();
try {
return super.executeUpdate( sql, columnNames );
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -181,8 +175,7 @@ public int getMaxFieldSize() throws SQLException {
throwIfClosed();
try {
return super.getMaxFieldSize();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -192,8 +185,7 @@ public void setMaxFieldSize(int max) throws SQLException {
throwIfClosed();
try {
super.setMaxFieldSize(max);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -221,8 +213,7 @@ public void setEscapeProcessing(boolean enable) throws SQLException {
throwIfClosed();
try {
super.setEscapeProcessing(enable);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -250,8 +241,7 @@ public void setCursorName(String name) throws SQLException {
throwIfClosed();
try {
super.setCursorName(name);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -273,8 +263,7 @@ public boolean getMoreResults() throws SQLException {
throwIfClosed();
try {
return super.getMoreResults();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -308,8 +297,7 @@ public int getResultSetConcurrency() throws SQLException {
throwIfClosed();
try {
return super.getResultSetConcurrency();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -319,8 +307,7 @@ public int getResultSetType() throws SQLException {
throwIfClosed();
try {
return super.getResultSetType();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -330,8 +317,7 @@ public void addBatch(String sql) throws SQLException {
throwIfClosed();
try {
super.addBatch(sql);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -341,8 +327,7 @@ public void clearBatch() throws SQLException {
throwIfClosed();
try {
super.clearBatch();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -352,8 +337,7 @@ public int[] executeBatch() throws SQLException {
throwIfClosed();
try {
return super.executeBatch();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -363,8 +347,7 @@ public boolean getMoreResults(int current) throws SQLException {
throwIfClosed();
try {
return super.getMoreResults(current);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -374,8 +357,7 @@ public ResultSet getGeneratedKeys() throws SQLException {
throwIfClosed();
try {
return super.getGeneratedKeys();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -385,8 +367,7 @@ public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException
throwIfClosed();
try {
return super.executeUpdate(sql, autoGeneratedKeys);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -396,8 +377,7 @@ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
throwIfClosed();
try {
return super.execute(sql, autoGeneratedKeys);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -407,8 +387,7 @@ public boolean execute(String sql, int[] columnIndexes) throws SQLException {
throwIfClosed();
try {
return super.execute(sql, columnIndexes);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -418,8 +397,7 @@ public boolean execute(String sql, String[] columnNames) throws SQLException {
throwIfClosed();
try {
return super.execute(sql, columnNames);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -429,8 +407,7 @@ public int getResultSetHoldability() throws SQLException {
throwIfClosed();
try {
return super.getResultSetHoldability();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -440,8 +417,7 @@ public void setPoolable(boolean poolable) throws SQLException {
throwIfClosed();
try {
super.setPoolable(poolable);
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
@@ -451,8 +427,7 @@ public boolean isPoolable() throws SQLException {
throwIfClosed();
try {
return super.isPoolable();
- }
- catch (UnsupportedOperationException e) {
+ } catch (UnsupportedOperationException e) {
throw new SQLFeatureNotSupportedException(e.getMessage(), e);
}
}
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementRegistry.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementRegistry.java
index 48170f3f04..9751366716 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementRegistry.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementRegistry.java
@@ -64,8 +64,7 @@ void close() {
try {
logger.debug( "Auto-closing (via open-statements registry): " + statement );
statement.close();
- }
- catch ( SQLException e ) {
+ } catch ( SQLException e ) {
logger.error( "Error auto-closing statement " + statement + ": " + e, e );
// Otherwise ignore the error, to close which statements we can close.
}
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/SqlAccessorWrapper.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/SqlAccessorWrapper.java
index 44a0060635..a89083a705 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/SqlAccessorWrapper.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/SqlAccessorWrapper.java
@@ -67,12 +67,10 @@ private int getCurrentRecordNumber() throws SQLException {
if ( cursor.isAfterLast() ) {
throw new InvalidCursorStateSqlException(
"Result set cursor is already positioned past all rows." );
- }
- else if ( cursor.isBeforeFirst() ) {
+ } else if ( cursor.isBeforeFirst() ) {
throw new InvalidCursorStateSqlException(
"Result set cursor is positioned before all rows. Call next() first." );
- }
- else {
+ } else {
return cursor.getCurrentRecordNumber();
}
}
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/InvocationReporterImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/InvocationReporterImpl.java
index f4eab72670..12abdc8b17 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/InvocationReporterImpl.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/InvocationReporterImpl.java
@@ -141,8 +141,7 @@ private String getObjectId( final Object object )
String id;
if ( null == object ) {
id = "n/a";
- }
- else {
+ } else {
id = objectsToIdsMap.get( object );
if ( null == id ) {
++lastObjNum;
@@ -170,21 +169,18 @@ private String formatType( final Class> type ) {
try {
Class.forName( p.getName() + "." + type.getSimpleName() );
sameSimpleNameCount++;
- }
- catch ( ClassNotFoundException e ) {
+ } catch ( ClassNotFoundException e ) {
// Nothing to do.
}
}
if ( 1 == sameSimpleNameCount ) {
result = type.getSimpleName();
- }
- else {
+ } else {
// Multiple classes with same simple name, so would be ambiguous to
// abbreviate, so use fully qualified name.
result = type.getName();
}
- }
- else {
+ } else {
result = type.getName();
}
}
@@ -218,13 +214,11 @@ private String formatValue( final Object value ) {
final String result;
if ( null == value ) {
result = "null";
- }
- else {
+ } else {
final Class> rawActualType = value.getClass();
if ( String.class == rawActualType ) {
result = formatString( (String) value );
- }
- else if ( rawActualType.isArray()
+ } else if ( rawActualType.isArray()
&& ! rawActualType.getComponentType().isPrimitive() ) {
// Array of non-primitive type
@@ -244,11 +238,9 @@ else if ( rawActualType.isArray()
}
buffer.append( " }" );
result = buffer.toString();
- }
- else if ( DriverPropertyInfo.class == rawActualType ) {
+ } else if ( DriverPropertyInfo.class == rawActualType ) {
result = formatDriverPropertyInfo( (DriverPropertyInfo) value );
- }
- else if (
+ } else if (
// Is type seen and whose toString() renders value well.
rawActualType == java.lang.Boolean.class
|| rawActualType == java.lang.Byte.class
@@ -263,16 +255,14 @@ else if (
|| rawActualType == java.sql.Timestamp.class
) {
result = value.toString();
- }
- else if (
+ } else if (
// Is type seen and whose toString() has rendered value well--in cases
// seen so far.
rawActualType == java.util.Properties.class
|| rawActualType.isEnum()
) {
result = value.toString();
- }
- else if (
+ } else if (
// Is type to warn about (one case).
rawActualType == com.dremio.jdbc.DremioResultSet.class
) {
@@ -281,8 +271,7 @@ else if (
+ " (While it's a class, it can't be proxied, and some methods can't"
+ " be traced.)" );
result = value.toString();
- }
- else if (
+ } else if (
// Is type to warn about (second case).
// Note: Using strings rather than compiled-in class references to
// avoid failing when run using JDBC-all Jar, which excludes
@@ -297,8 +286,7 @@ else if (
printWarningLine( "Should " + rawActualType
+ " be appearing at JDBC interface?" );
result = value.toString();
- }
- else {
+ } else {
// Is other type--unknown whether it already formats well.
// (No handled yet: byte[].)
printWarningLine( "Unnoted type encountered in formatting (value might"
@@ -326,8 +314,7 @@ private String formatTypeAndValue( Class> declaredType, Object value ) {
// Null--show no actual type or object ID.
actualTypePart = "";
actualValuePart = formatValue( value );
- }
- else {
+ } else {
// Non-null value--show at least some representation of value.
Class> rawActualType = value.getClass();
Class> origActualType =
@@ -336,14 +323,12 @@ private String formatTypeAndValue( Class> declaredType, Object value ) {
// String--show no actual type or object ID.
actualTypePart = "";
actualValuePart = formatValue( value );
- }
- else if ( origActualType.isPrimitive() ) {
+ } else if ( origActualType.isPrimitive() ) {
// Primitive type--show no actual type or object ID.
actualTypePart = "";
// (Remember--primitive type is wrapped here.)
actualValuePart = value.toString();
- }
- else {
+ } else {
// Non-primitive, non-String value--include object ID.
final String idPrefix = " ";
if ( declaredType.isInterface()
@@ -352,13 +337,11 @@ else if ( origActualType.isPrimitive() ) {
// (because object is proxied and therefore all uses will be traced).
actualTypePart = "";
actualValuePart = idPrefix + "...";
- }
- else if ( origActualType == declaredType ) {
+ } else if ( origActualType == declaredType ) {
// Actual type is same as declared--don't show redundant actual type.
actualTypePart = "";
actualValuePart = idPrefix + formatValue( value );
- }
- else {
+ } else {
// Other--show actual type and (try to) show value.
actualTypePart = "(" + formatType( rawActualType) + ") ";
actualValuePart = idPrefix + formatValue( value );
@@ -427,8 +410,7 @@ private String formatArgs( Class>[] declaredTypes, Object[] argValues )
final String result;
if ( null == argValues ) {
result = "()";
- }
- else {
+ } else {
final StringBuilder s = new StringBuilder();
s.append( "( " );
for ( int ax = 0; ax < argValues.length; ax++ ) {
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/ProxiesManager.java b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/ProxiesManager.java
index 05b7d2cbd2..2190a8e58b 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/ProxiesManager.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/ProxiesManager.java
@@ -81,8 +81,7 @@ public INTF getProxyInstanceForOriginal( final INTF originalInstance,
if ( null != existingProxy ) {
// Repeated occurrence of original--return same proxy instance as before.
proxyInstance = existingProxy;
- }
- else {
+ } else {
// Original we haven't seen yet--create proxy instance and return that.
Class proxyReturnClass = getProxyClassForInterface( declaredType );
@@ -101,8 +100,7 @@ public INTF getProxyInstanceForOriginal( final INTF originalInstance,
.newInstance( new Object[] { callHandler } );
proxiedsToProxiesMap.put( originalInstance, newProxyInstance );
proxyInstance = newProxyInstance;
- }
- catch ( InstantiationException | IllegalAccessException
+ } catch ( InstantiationException | IllegalAccessException
| IllegalArgumentException | InvocationTargetException
| NoSuchMethodException | SecurityException e ) {
throw new RuntimeException(
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingInvocationHandler.java b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingInvocationHandler.java
index 78a0235e04..b191f56a49 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingInvocationHandler.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingInvocationHandler.java
@@ -71,8 +71,7 @@ public Object invoke( Object proxy, Method method, Object[] args )
if ( null == rawReturnedResult ) {
netReturnedResult = null;
- }
- else {
+ } else {
Class> methodReturnType = method.getReturnType();
if ( ! methodReturnType.isInterface() ) {
@@ -80,20 +79,17 @@ public Object invoke( Object proxy, Method method, Object[] args )
// instance. (We could proxy and intercept some methods, but we can't
// intercept all, so intercepting only some would be misleading.)
netReturnedResult = rawReturnedResult;
- }
- else {
+ } else {
// Get the new or existing proxying instance for the returned instance.
netReturnedResult =
proxiesManager.getProxyInstanceForOriginal( rawReturnedResult,
methodReturnType );
}
}
- }
- catch ( IllegalAccessException | IllegalArgumentException e ) {
+ } catch ( IllegalAccessException | IllegalArgumentException e ) {
throw new RuntimeException(
"Unexpected/unhandled error calling proxied method: " + e, e );
- }
- catch ( InvocationTargetException e ) {
+ } catch ( InvocationTargetException e ) {
Throwable thrownResult = e.getCause();
// Report that method threw exception:
callReporter.methodThrew( proxiedObject, proxiedInterface, method, args,
diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingProxyDriver.java b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingProxyDriver.java
index c8cef554fc..c2d56f5564 100644
--- a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingProxyDriver.java
+++ b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingProxyDriver.java
@@ -106,15 +106,13 @@ public class TracingProxyDriver implements java.sql.Driver {
final Driver proxyDriver;
try {
proxyDriver = new TracingProxyDriver();
- }
- catch ( SQLException e ) {
+ } catch ( SQLException e ) {
throw new RuntimeException(
"Error in initializing " + TracingProxyDriver.class + ": " + e, e );
}
try {
DriverManager.registerDriver( proxyDriver );
- }
- catch ( SQLException e ) {
+ } catch ( SQLException e ) {
throw new RuntimeException(
"Error in registering " + TracingProxyDriver.class + ": " + e, e );
}
@@ -154,8 +152,7 @@ private static class UrlHandler {
if ( ! "".equals( classSpec ) ) {
try {
Class.forName( classSpec);
- }
- catch ( ClassNotFoundException e ) {
+ } catch ( ClassNotFoundException e ) {
throw new ProxySetupSQLException(
"Couldn't load class \"" + classSpec + "\""
+ " (from proxy driver URL \"" + url + "\" (between second and "
@@ -172,8 +169,7 @@ private static class UrlHandler {
"DriverManager.getDriver( \"" + proxiedURL + "\" ) returned a(n) "
+ proxiedDriverForProxiedUrl.getClass().getName() + ": "
+ proxiedDriverForProxiedUrl + "." );
- }
- catch ( SQLException e ) {
+ } catch ( SQLException e ) {
final String message =
"Error getting driver from DriverManager for proxied URL \""
+ proxiedURL + "\" (from proxy driver URL \"" + url + "\""
@@ -220,8 +216,7 @@ public boolean acceptsURL( String url ) throws SQLException {
final boolean accepted;
if ( null == url || ! url.startsWith( JDBC_URL_PREFIX ) ) {
accepted = false;
- }
- else {
+ } else {
UrlHandler urlHandler = new UrlHandler( proxiesManager, url );
setProxyDriver( urlHandler.getProxyDriver(), urlHandler.getProxiedDriver() );
@@ -242,16 +237,14 @@ public Connection connect( String url, Properties info )
if ( null == url || ! url.startsWith( JDBC_URL_PREFIX ) ) {
result = null; // (Not a URL understood by this driver.)
- }
- else {
+ } else {
UrlHandler urlHandler = new UrlHandler( proxiesManager, url );
setProxyDriver( urlHandler.getProxyDriver(), urlHandler.getProxiedDriver() );
// (Call connect() through proxy so it gets traced too.)
try {
result = proxyDriver.connect( urlHandler.getProxiedUrl(), info );
- }
- catch ( SQLException e ) {
+ } catch ( SQLException e ) {
throw new ProxySetupSQLException( "Exception from proxied driver: " + e,
e );
}
diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/MultiConnectionCachingFactory.java b/client/jdbc/src/test/java/com/dremio/jdbc/MultiConnectionCachingFactory.java
index 3fdd9be3df..83aae1dfcc 100644
--- a/client/jdbc/src/test/java/com/dremio/jdbc/MultiConnectionCachingFactory.java
+++ b/client/jdbc/src/test/java/com/dremio/jdbc/MultiConnectionCachingFactory.java
@@ -56,6 +56,7 @@ public Connection getConnection(ConnectionInfo info) throws Exception {
/**
* Closes all active connections in the cache.
*/
+ @Override
public void closeConnections() throws SQLException {
for (Connection conn : cache.values()) {
conn.close();
diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/NonClosableConnection.java b/client/jdbc/src/test/java/com/dremio/jdbc/NonClosableConnection.java
index 2250e2eb7a..4584d21c1d 100644
--- a/client/jdbc/src/test/java/com/dremio/jdbc/NonClosableConnection.java
+++ b/client/jdbc/src/test/java/com/dremio/jdbc/NonClosableConnection.java
@@ -52,218 +52,272 @@ public static NonClosableConnection of(final Connection inner) {
return new NonClosableConnection(inner);
}
+ @Override
public T unwrap(Class iface) throws SQLException {
return delegate.unwrap(iface);
}
+ @Override
public boolean isWrapperFor(Class> iface) throws SQLException {
return delegate.isWrapperFor(iface);
}
+ @Override
public Statement createStatement() throws SQLException {
return delegate.createStatement();
}
+ @Override
public PreparedStatement prepareStatement(String sql) throws SQLException {
return delegate.prepareStatement(sql);
}
+ @Override
public CallableStatement prepareCall(String sql) throws SQLException {
return delegate.prepareCall(sql);
}
+ @Override
public String nativeSQL(String sql) throws SQLException {
return delegate.nativeSQL(sql);
}
+ @Override
public void setAutoCommit(boolean autoCommit) throws SQLException {
delegate.setAutoCommit(autoCommit);
}
+ @Override
public boolean getAutoCommit() throws SQLException {
return delegate.getAutoCommit();
}
+ @Override
public void commit() throws SQLException {
delegate.commit();
}
+ @Override
public void rollback() throws SQLException {
delegate.rollback();
}
+ @Override
public void close() throws SQLException {
// noop here. this instance is non-closable.
}
+ @Override
public boolean isClosed() throws SQLException {
return delegate.isClosed();
}
+ @Override
public DatabaseMetaData getMetaData() throws SQLException {
return delegate.getMetaData();
}
+ @Override
public void setReadOnly(boolean readOnly) throws SQLException {
delegate.setReadOnly(readOnly);
}
+ @Override
public boolean isReadOnly() throws SQLException {
return delegate.isReadOnly();
}
+ @Override
public void setCatalog(String catalog) throws SQLException {
delegate.setCatalog(catalog);
}
+ @Override
public String getCatalog() throws SQLException {
return delegate.getCatalog();
}
+ @Override
public void setTransactionIsolation(int level) throws SQLException {
delegate.setTransactionIsolation(level);
}
+ @Override
public int getTransactionIsolation() throws SQLException {
return delegate.getTransactionIsolation();
}
+ @Override
public SQLWarning getWarnings() throws SQLException {
return delegate.getWarnings();
}
+ @Override
public void clearWarnings() throws SQLException {
delegate.clearWarnings();
}
+ @Override
public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException {
return delegate.createStatement(resultSetType, resultSetConcurrency);
}
+ @Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
return delegate.prepareStatement(sql, resultSetType, resultSetConcurrency);
}
+ @Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
return delegate.prepareCall(sql, resultSetType, resultSetConcurrency);
}
+ @Override
public Map> getTypeMap() throws SQLException {
return delegate.getTypeMap();
}
+ @Override
public void setTypeMap(Map> map) throws SQLException {
delegate.setTypeMap(map);
}
+ @Override
public void setHoldability(int holdability) throws SQLException {
delegate.setHoldability(holdability);
}
+ @Override
public int getHoldability() throws SQLException {
return delegate.getHoldability();
}
+ @Override
public Savepoint setSavepoint() throws SQLException {
return delegate.setSavepoint();
}
+ @Override
public Savepoint setSavepoint(String name) throws SQLException {
return delegate.setSavepoint(name);
}
+ @Override
public void rollback(Savepoint savepoint) throws SQLException {
delegate.rollback(savepoint);
}
+ @Override
public void releaseSavepoint(Savepoint savepoint) throws SQLException {
delegate.releaseSavepoint(savepoint);
}
+ @Override
public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
return delegate.createStatement(resultSetType, resultSetConcurrency, resultSetHoldability);
}
+ @Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
return delegate.prepareStatement(sql, resultSetType, resultSetConcurrency, resultSetHoldability);
}
+ @Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
return delegate.prepareCall(sql, resultSetType, resultSetConcurrency, resultSetHoldability);
}
+ @Override
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
return delegate.prepareStatement(sql, autoGeneratedKeys);
}
+ @Override
public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
return delegate.prepareStatement(sql, columnIndexes);
}
+ @Override
public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
return delegate.prepareStatement(sql, columnNames);
}
+ @Override
public Clob createClob() throws SQLException {
return delegate.createClob();
}
+ @Override
public Blob createBlob() throws SQLException {
return delegate.createBlob();
}
+ @Override
public NClob createNClob() throws SQLException {
return delegate.createNClob();
}
+ @Override
public SQLXML createSQLXML() throws SQLException {
return delegate.createSQLXML();
}
+ @Override
public boolean isValid(int timeout) throws SQLException {
return delegate.isValid(timeout);
}
+ @Override
public void setClientInfo(String name, String value) throws SQLClientInfoException {
delegate.setClientInfo(name, value);
}
+ @Override
public void setClientInfo(Properties properties) throws SQLClientInfoException {
delegate.setClientInfo(properties);
}
+ @Override
public String getClientInfo(String name) throws SQLException {
return delegate.getClientInfo(name);
}
+ @Override
public Properties getClientInfo() throws SQLException {
return delegate.getClientInfo();
}
+ @Override
public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
return delegate.createArrayOf(typeName, elements);
}
+ @Override
public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
return delegate.createStruct(typeName, attributes);
}
+ @Override
public void setSchema(String schema) throws SQLException {
delegate.setSchema(schema);
}
+ @Override
public String getSchema() throws SQLException {
return delegate.getSchema();
}
+ @Override
public void abort(Executor executor) throws SQLException {
delegate.abort(executor);
}
+ @Override
public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
delegate.setNetworkTimeout(executor, milliseconds);
}
+ @Override
public int getNetworkTimeout() throws SQLException {
return delegate.getNetworkTimeout();
}
diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/impl/TypeConvertingSqlAccessorTest.java b/client/jdbc/src/test/java/com/dremio/jdbc/impl/TypeConvertingSqlAccessorTest.java
index 63df84a82f..ccf800205c 100644
--- a/client/jdbc/src/test/java/com/dremio/jdbc/impl/TypeConvertingSqlAccessorTest.java
+++ b/client/jdbc/src/test/java/com/dremio/jdbc/impl/TypeConvertingSqlAccessorTest.java
@@ -80,6 +80,7 @@ private static class TinyIntStubAccessor extends BaseStubAccessor {
super( Types.optional(MinorType.TINYINT), value );
}
+ @Override
public byte getByte( int rowOffset ) {
return (Byte) getValue();
}
@@ -92,6 +93,7 @@ private static class SmallIntStubAccessor extends BaseStubAccessor {
super(Types.optional(MinorType.SMALLINT), value);
}
+ @Override
public short getShort( int rowOffset ) {
return (Short) getValue();
}
@@ -104,6 +106,7 @@ private static class IntegerStubAccessor extends BaseStubAccessor {
super(Types.optional(MinorType.INT), value);
}
+ @Override
public int getInt( int rowOffset ) {
return (Integer) getValue();
}
@@ -117,6 +120,7 @@ private static class BigIntStubAccessor extends BaseStubAccessor {
super(Types.optional(MinorType.BIGINT), value);
}
+ @Override
public long getLong( int rowOffset ) {
return (Long) getValue();
}
@@ -129,6 +133,7 @@ private static class FloatStubAccessor extends BaseStubAccessor {
super( Types.optional(MinorType.FLOAT4), value);
}
+ @Override
public float getFloat( int rowOffset ) {
return (Float) getValue();
}
@@ -140,6 +145,7 @@ private static class DoubleStubAccessor extends BaseStubAccessor {
super(Types.optional(MinorType.FLOAT8), value);
}
+ @Override
public double getDouble( int rowOffset ) {
return (double) getValue();
}
diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/proxy/TracingProxyDriverTest.java b/client/jdbc/src/test/java/com/dremio/jdbc/proxy/TracingProxyDriverTest.java
index be1a552c23..4adfa520b8 100644
--- a/client/jdbc/src/test/java/com/dremio/jdbc/proxy/TracingProxyDriverTest.java
+++ b/client/jdbc/src/test/java/com/dremio/jdbc/proxy/TracingProxyDriverTest.java
@@ -106,8 +106,7 @@ public void testBasicReturnTrace() throws SQLException {
try {
nameThis.redirect();
proxyConnection.isClosed();
- }
- finally {
+ } finally {
nameThis.unredirect();
}
@@ -145,11 +144,9 @@ public void testBasicThrowTrace() throws SQLException {
try {
stdErrCapturer.redirect();
statement.execute( "" );
- }
- catch ( final SQLException e ) {
+ } catch ( final SQLException e ) {
// "already closed" is expected
- }
- finally {
+ } finally {
stdErrCapturer.unredirect();
}
diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2489CallsAfterCloseThrowExceptionsTest.java b/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2489CallsAfterCloseThrowExceptionsTest.java
index 086d3578fd..3bb5c5e37d 100644
--- a/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2489CallsAfterCloseThrowExceptionsTest.java
+++ b/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2489CallsAfterCloseThrowExceptionsTest.java
@@ -232,33 +232,24 @@ private static Object getDummyValueForType(Class> type) {
final Object result;
if (! type.isPrimitive()) {
result = null;
- }
- else {
+ } else {
if (type == boolean.class) {
result = false;
- }
- else if (type == byte.class) {
+ } else if (type == byte.class) {
result = (byte) 0;
- }
- else if (type == short.class) {
+ } else if (type == short.class) {
result = (short) 0;
- }
- else if (type == char.class) {
+ } else if (type == char.class) {
result = (char) 0;
- }
- else if (type == int.class) {
+ } else if (type == int.class) {
result = 0;
- }
- else if (type == long.class) {
+ } else if (type == long.class) {
result = (long) 0L;
- }
- else if (type == float.class) {
+ } else if (type == float.class) {
result = 0F;
- }
- else if (type == double.class) {
+ } else if (type == double.class) {
result = 0.0;
- }
- else {
+ } else {
fail("Test needs to be updated to handle type " + type);
result = null; // Not executed; for "final".
}
@@ -323,13 +314,11 @@ private void testOneMethod(Method method) {
if (isOkayNonthrowingMethod(method)) {
successLinesBuf.append(resultLine);
- }
- else {
+ } else {
logger.trace("Failure: " + resultLine);
failureLinesBuf.append(resultLine);
}
- }
- catch (InvocationTargetException e) {
+ } catch (InvocationTargetException e) {
final Throwable cause = e.getCause();
final String resultLine = "- " + methodLabel + " threw <" + cause + ">\n";
@@ -337,22 +326,19 @@ private void testOneMethod(Method method) {
&& normalClosedExceptionText.equals(cause.getMessage())) {
// Common good case--our preferred exception class with our message.
successLinesBuf.append(resultLine);
- }
- else if (NullPointerException.class == cause.getClass()
+ } else if (NullPointerException.class == cause.getClass()
&& (method.getName().equals("isWrapperFor")
|| method.getName().equals("unwrap"))) {
// Known good-enough case--these methods don't throw already-closed
// exception, but do throw NullPointerException because of the way
// we call them (with null) and the way Avatica code implements them.
successLinesBuf.append(resultLine);
- }
- else {
+ } else {
// Not a case that base-class code here recognizes, but subclass may
// know that it's okay.
if (isOkaySpecialCaseException(method, cause)) {
successLinesBuf.append(resultLine);
- }
- else {
+ } else {
final String badResultLine =
"- " + methodLabel + " threw <" + cause + "> instead"
+ " of " + AlreadyClosedSqlException.class.getSimpleName()
@@ -363,8 +349,7 @@ else if (NullPointerException.class == cause.getClass()
failureLinesBuf.append(badResultLine);
}
}
- }
- catch (IllegalAccessException | IllegalArgumentException e) {
+ } catch (IllegalAccessException | IllegalArgumentException e) {
fail("Unexpected exception: " + e + ", cause = " + e.getCause()
+ " from " + method);
}
@@ -422,13 +407,11 @@ protected boolean isOkayNonthrowingMethod(Method method) {
final boolean result;
if (super.isOkayNonthrowingMethod(method)) {
result = true;
- }
- else if ( method.getName().equals("beginRequest")
+ } else if ( method.getName().equals("beginRequest")
|| method.getName().equals("endRequest")) {
// TODO: New Java 9 methods not implemented in Avatica.
result = true;
- }
- else {
+ } else {
result = false;
}
return result;
@@ -439,29 +422,25 @@ protected boolean isOkaySpecialCaseException(Method method, Throwable cause) {
final boolean result;
if (super.isOkaySpecialCaseException(method, cause)) {
result = true;
- }
- else if (SQLClientInfoException.class == cause.getClass()
+ } else if (SQLClientInfoException.class == cause.getClass()
&& normalClosedExceptionText.equals(cause.getMessage())
&& ( method.getName().equals("setClientInfo")
|| method.getName().equals("getClientInfo")
)) {
// Special good case--we had to use SQLClientInfoException from those.
result = true;
- }
- else if (RuntimeException.class == cause.getClass()
+ } else if (RuntimeException.class == cause.getClass()
&& normalClosedExceptionText.equals(cause.getMessage())
&& ( method.getName().equals("getCatalog")
|| method.getName().equals("getSchema")
)) {
// Special good-enough case--we had to use RuntimeException for now.
result = true;
- }
- else if ( method.getName().equals("setShardingKeyIfValid")
+ } else if ( method.getName().equals("setShardingKeyIfValid")
|| method.getName().equals("setShardingKey")) {
// TODO: New Java 9 methods not implemented in Avatica.
result = true;
- }
- else {
+ } else {
result = false;
}
return result;
@@ -501,20 +480,17 @@ protected boolean isOkaySpecialCaseException(Method method, Throwable cause) {
final boolean result;
if (super.isOkaySpecialCaseException(method, cause)) {
result = true;
- }
- else if ( method.getName().equals("executeLargeBatch")
+ } else if ( method.getName().equals("executeLargeBatch")
|| method.getName().equals("executeLargeUpdate")) {
// TODO: New Java 8 methods not implemented in Avatica.
result = true;
- }
- else if ( method.getName().equals("enquoteLiteral")
+ } else if ( method.getName().equals("enquoteLiteral")
|| method.getName().equals("enquoteIdentifier")
|| method.getName().equals("enquoteNCharLiteral")
|| method.getName().equals("isSimpleIdentifier")) {
// TODO: New Java 9 methods not implemented in Avatica.
result = true;
- }
- else if (RuntimeException.class == cause.getClass()
+ } else if (RuntimeException.class == cause.getClass()
&& normalClosedExceptionText.equals(cause.getMessage())
&& ( method.getName().equals("getConnection")
|| method.getName().equals("getFetchDirection")
@@ -524,8 +500,7 @@ else if (RuntimeException.class == cause.getClass()
)) {
// Special good-enough case--we had to use RuntimeException for now.
result = true;
- }
- else {
+ } else {
result = false;
}
return result;
@@ -565,8 +540,7 @@ protected boolean isOkaySpecialCaseException(Method method, Throwable cause) {
final boolean result;
if (super.isOkaySpecialCaseException(method, cause)) {
result = true;
- }
- else if (RuntimeException.class == cause.getClass()
+ } else if (RuntimeException.class == cause.getClass()
&& normalClosedExceptionText.equals(cause.getMessage())
&& ( method.getName().equals("clearBatch")
|| method.getName().equals("getConnection")
@@ -577,23 +551,20 @@ else if (RuntimeException.class == cause.getClass()
)) {
// Special good-enough case--we had to use RuntimeException for now.
result = true;
- }
- else if ( method.getName().equals("setObject")
+ } else if ( method.getName().equals("setObject")
|| method.getName().equals("executeLargeUpdate")
|| method.getName().equals("executeLargeBatch")
|| method.getName().equals("getLargeMaxRows")
) {
// TODO: Java 8 methods not yet supported by Avatica.
result = true;
- }
- else if ( method.getName().equals("enquoteLiteral")
+ } else if ( method.getName().equals("enquoteLiteral")
|| method.getName().equals("enquoteIdentifier")
|| method.getName().equals("enquoteNCharLiteral")
|| method.getName().equals("isSimpleIdentifier")) {
// TODO: New Java 9 methods not implemented in Avatica.
result = true;
- }
- else {
+ } else {
result = false;
}
return result;
@@ -628,19 +599,16 @@ protected boolean isOkaySpecialCaseException(Method method, Throwable cause) {
final boolean result;
if (super.isOkaySpecialCaseException(method, cause)) {
result = true;
- }
- else if (RuntimeException.class == cause.getClass()
+ } else if (RuntimeException.class == cause.getClass()
&& normalClosedExceptionText.equals(cause.getMessage())
&& method.getName().equals("getStatement")) {
// Special good-enough case--we had to use RuntimeException for now.
result = true;
- }
- else if (SQLFeatureNotSupportedException.class == cause.getClass()
+ } else if (SQLFeatureNotSupportedException.class == cause.getClass()
&& (method.getName().equals("updateObject"))) {
// TODO: Java 8 methods not yet supported by Avatica.
result = true;
- }
- else {
+ } else {
result = false;
}
return result;
@@ -741,14 +709,12 @@ protected boolean isOkaySpecialCaseException(Method method, Throwable cause) {
final boolean result;
if (super.isOkaySpecialCaseException(method, cause)) {
result = true;
- }
- else if (RuntimeException.class == cause.getClass()
+ } else if (RuntimeException.class == cause.getClass()
&& normalClosedExceptionText.equals(cause.getMessage())
&& method.getName().equals("getResultSetHoldability")) {
// Special good-enough case--we had to use RuntimeException for now.
result = true;
- }
- else {
+ } else {
result = false;
}
return result;
diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2769UnsupportedReportsUseSqlExceptionTest.java b/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2769UnsupportedReportsUseSqlExceptionTest.java
index f5187d8285..2569b9dd60 100644
--- a/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2769UnsupportedReportsUseSqlExceptionTest.java
+++ b/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2769UnsupportedReportsUseSqlExceptionTest.java
@@ -87,15 +87,13 @@ public static void setUpConnection() throws SQLException {
try {
getConnection().prepareCall("VALUES 'CallableStatement query'");
fail("Test seems to be out of date. Was prepareCall(...) implemented?");
- }
- catch (SQLException | UnsupportedOperationException e) {
+ } catch (SQLException | UnsupportedOperationException e) {
// Expected.
}
try {
getConnection().createArrayOf("STRUCT", new Object[0]);
fail("Test seems to be out of date. Were arrays implemented?");
- }
- catch (SQLException | UnsupportedOperationException e) {
+ } catch (SQLException | UnsupportedOperationException e) {
// Expected.
}
@@ -153,33 +151,24 @@ private static Object getDummyValueForType(Class> type) {
final Object result;
if (type == String.class) {
result = "";
- }
- else if (! type.isPrimitive()) {
+ } else if (! type.isPrimitive()) {
result = null;
- }
- else {
+ } else {
if (type == boolean.class) {
result = false;
- }
- else if (type == byte.class) {
+ } else if (type == byte.class) {
result = (byte) 0;
- }
- else if (type == short.class) {
+ } else if (type == short.class) {
result = (short) 0;
- }
- else if (type == int.class) {
+ } else if (type == int.class) {
result = 0;
- }
- else if (type == long.class) {
+ } else if (type == long.class) {
result = (long) 0L;
- }
- else if (type == float.class) {
+ } else if (type == float.class) {
result = 0F;
- }
- else if (type == double.class) {
+ } else if (type == double.class) {
result = 0.0;
- }
- else {
+ } else {
fail("Test needs to be updated to handle type " + type);
result = null; // Not executed; for "final".
}
@@ -242,8 +231,7 @@ private void testOneMethod(Method method) {
final String resultLine = "- " + methodLabel + " didn't throw\n";
successLinesBuf.append(resultLine);
- }
- catch (InvocationTargetException wrapperEx) {
+ } catch (InvocationTargetException wrapperEx) {
final Throwable cause = wrapperEx.getCause();
final String resultLine = "- " + methodLabel + " threw <" + cause + ">\n";
@@ -254,28 +242,23 @@ private void testOneMethod(Method method) {
// Good case--almost any exception should be SQLException or subclass
// (but make sure not accidentally closed).
successLinesBuf.append(resultLine);
- }
- else if (NullPointerException.class == cause.getClass()
+ } else if (NullPointerException.class == cause.getClass()
&& (method.getName().equals("isWrapperFor")
|| method.getName().equals("unwrap"))) {
// Known good-enough case--these methods throw NullPointerException
// because of the way we call them (with null) and the way Avatica
// code implements them.
successLinesBuf.append(resultLine);
- }
- else if (isOkaySpecialCaseException(method, cause)) {
+ } else if (isOkaySpecialCaseException(method, cause)) {
successLinesBuf.append(resultLine);
- }
-
- else {
+ } else {
final String badResultLine =
"- " + methodLabel + " threw <" + cause + "> instead"
+ " of a " + SQLException.class.getSimpleName() + "\n";
logger.trace("Failure: " + resultLine);
failureLinesBuf.append(badResultLine);
}
- }
- catch (IllegalAccessException | IllegalArgumentException e) {
+ } catch (IllegalAccessException | IllegalArgumentException e) {
fail("Unexpected exception: " + e + ", cause = " + e.getCause()
+ " from " + method);
}
@@ -286,20 +269,18 @@ public void testMethods() {
final String methodLabel = makeLabel(method);
if ("close".equals(method.getName())) {
logger.debug("Skipping (because closes): " + methodLabel);
- }
/* Uncomment to suppress calling DatabaseMetaData.getColumns(...), which
sometimes takes about 2 minutes, and other DatabaseMetaData methods
that query, collectively taking a while too:
- else if (DatabaseMetaData.class == jdbcIntf
+ } else if (DatabaseMetaData.class == jdbcIntf
&& "getColumns".equals(method.getName())) {
logger.debug("Skipping (because really slow): " + methodLabel);
- }
- else if (DatabaseMetaData.class == jdbcIntf
+ } else if (DatabaseMetaData.class == jdbcIntf
&& ResultSet.class == method.getReturnType()) {
logger.debug("Skipping (because a bit slow): " + methodLabel);
}
*/
- else {
+ } else {
logger.debug("Testing method " + methodLabel);
testOneMethod(method);
}
diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/test/Hook.java b/client/jdbc/src/test/java/com/dremio/jdbc/test/Hook.java
index 1f96ed1328..6fcae5690a 100644
--- a/client/jdbc/src/test/java/com/dremio/jdbc/test/Hook.java
+++ b/client/jdbc/src/test/java/com/dremio/jdbc/test/Hook.java
@@ -30,6 +30,7 @@ public enum Hook {
public Closeable add(final Function handler) {
handlers.add(handler);
return new Closeable() {
+ @Override
public void close() {
remove(handler);
}
@@ -50,6 +51,7 @@ public void run(Object arg) {
/** Removes a Hook after use. */
public interface Closeable extends AutoCloseable {
+ @Override
void close(); // override, removing "throws"
}
}
diff --git a/client/pom.xml b/client/pom.xml
index a84ba1e83e..1355845ff8 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -22,7 +22,7 @@
com.dremio
dremio-parent
- 24.0.0-202302100528110223-3a169b7c
+ 24.1.0-202306130653310132-d30779f6
com.dremio.client
diff --git a/common/pom.xml b/common/pom.xml
index 302c7032e0..b9f19a29a3 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -23,7 +23,7 @@
com.dremio
dremio-parent
- 24.0.0-202302100528110223-3a169b7c
+ 24.1.0-202306130653310132-d30779f6
dremio-common
@@ -231,6 +231,10 @@
io.grpc
grpc-api
+
+ io.grpc
+ grpc-stub
+
software.amazon.awssdk
auth
diff --git a/common/src/main/java/com/dremio/common/VM.java b/common/src/main/java/com/dremio/common/VM.java
index 93679384a6..1c9082fedc 100644
--- a/common/src/main/java/com/dremio/common/VM.java
+++ b/common/src/main/java/com/dremio/common/VM.java
@@ -181,15 +181,19 @@ static long maxDirectMemory(final List inputArguments) {
case "t":
case "T":
multiplier *= 1024;
+ // fall through
case "g":
case "G":
multiplier *= 1024;
+ // fall through
case "m":
case "M":
multiplier *= 1024;
+ // fall through
case "k":
case "K":
multiplier *= 1024;
+ // fall through
default:
break;
}
diff --git a/common/src/main/java/com/dremio/common/WakeupHandler.java b/common/src/main/java/com/dremio/common/WakeupHandler.java
index cb2a5bbd8b..aad0dd8a67 100644
--- a/common/src/main/java/com/dremio/common/WakeupHandler.java
+++ b/common/src/main/java/com/dremio/common/WakeupHandler.java
@@ -20,6 +20,9 @@
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicBoolean;
+import javax.inject.Provider;
+
+import com.dremio.context.RequestContext;
import com.google.common.base.Preconditions;
/**
@@ -35,10 +38,17 @@ public class WakeupHandler {
private final Runnable manager;
private final ExecutorService executor;
+ private final Provider requestContextProvider;
public WakeupHandler(ExecutorService executor, Runnable manager) {
+ this(executor, manager, null);
+ }
+
+ public WakeupHandler(
+ ExecutorService executor, Runnable manager, Provider requestContextProvider) {
this.executor = Preconditions.checkNotNull(executor, "executor service required");
this.manager = Preconditions.checkNotNull(manager, "runnable manager required");
+ this.requestContextProvider = requestContextProvider;
}
public Future> handle(String reason) {
@@ -62,7 +72,11 @@ public void run() {
try {
wakeup.set(false);
- manager.run();
+ if (requestContextProvider != null) {
+ requestContextProvider.get().run(() -> manager.run());
+ } else {
+ manager.run();
+ }
} finally {
running.set(false);
}
diff --git a/common/src/main/java/com/dremio/common/concurrent/ContextMigratingExecutorService.java b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingExecutorService.java
index 996d5867e9..b116b74811 100644
--- a/common/src/main/java/com/dremio/common/concurrent/ContextMigratingExecutorService.java
+++ b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingExecutorService.java
@@ -57,6 +57,20 @@ public ContextMigratingExecutorService(E delegate, Tracer tracer) {
this.tracer = tracer;
}
+ public static Runnable makeContextMigratingTask(Runnable runnable, String taskName) {
+ return new ContextMigratingRunnableTask() {
+ @Override
+ public String getSpanName() {
+ return taskName;
+ }
+
+ @Override
+ public void run() {
+ runnable.run();
+ }
+ };
+ }
+
@Override
public void shutdown() {
delegate.shutdown();
diff --git a/common/src/main/java/com/dremio/common/concurrent/ContextMigratingRunnableTask.java b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingRunnableTask.java
new file mode 100644
index 0000000000..9aabc9e02e
--- /dev/null
+++ b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingRunnableTask.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.common.concurrent;
+
+/**
+ * Implements a runnable that also migrates context
+ */
+public interface ContextMigratingRunnableTask extends ContextMigratingTask, Runnable {
+}
diff --git a/common/src/main/java/com/dremio/common/concurrent/ContextMigratingTask.java b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingTask.java
index 87e5c6572f..2993b360aa 100644
--- a/common/src/main/java/com/dremio/common/concurrent/ContextMigratingTask.java
+++ b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingTask.java
@@ -24,4 +24,5 @@ public interface ContextMigratingTask {
* @return the name of the child span created while running the task
*/
String getSpanName();
+
}
diff --git a/common/src/main/java/com/dremio/common/config/SabotConfig.java b/common/src/main/java/com/dremio/common/config/SabotConfig.java
index 338e469f42..6252c18e23 100644
--- a/common/src/main/java/com/dremio/common/config/SabotConfig.java
+++ b/common/src/main/java/com/dremio/common/config/SabotConfig.java
@@ -15,6 +15,7 @@
*/
package com.dremio.common.config;
+import java.io.File;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.Collection;
@@ -30,6 +31,7 @@
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
import com.typesafe.config.Config;
+import com.typesafe.config.ConfigException;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigRenderOptions;
import com.typesafe.config.ConfigValue;
@@ -247,6 +249,26 @@ private static SabotConfig create(String overrideFileResourcePathname,
}
}
+ private static SabotConfig createFromSavedSabotConfig() {
+ final String savedSabotConfigFile = System.getProperty("com.dremio.savedSabotConfig");
+ if (savedSabotConfigFile == null) {
+ return null;
+ }
+
+ final File savedSabotConfig = new File(savedSabotConfigFile);
+ if (savedSabotConfig.exists()) {
+ try {
+ Config config = ConfigFactory.parseFile(savedSabotConfig);
+ return new SabotConfig(config);
+ } catch (ConfigException e) {
+ logger.warn("Unable to read saved SabotConfig from '{}' (proceeding to slow path): {}",
+ savedSabotConfigFile, e.toString());
+ }
+ }
+
+ return null;
+ }
+
/**
* @param overrideFileResourcePathname
* see {@link #create(String)}'s {@code overrideFileResourcePathname}
@@ -264,6 +286,11 @@ private static SabotConfig doCreate(String overrideFileResourcePathname,
? CommonConstants.CONFIG_OVERRIDE_RESOURCE_PATHNAME
: overrideFileResourcePathname;
+ final SabotConfig preCreated = createFromSavedSabotConfig();
+ if (preCreated != null) {
+ return preCreated;
+ }
+
// 1. Load defaults configuration file.
Config fallback = null;
final ClassLoader[] classLoaders = ClasspathHelper.classLoaders();
diff --git a/common/src/main/java/com/dremio/common/exceptions/GrpcExceptionUtil.java b/common/src/main/java/com/dremio/common/exceptions/GrpcExceptionUtil.java
index c37a23eb37..e7de35cc84 100644
--- a/common/src/main/java/com/dremio/common/exceptions/GrpcExceptionUtil.java
+++ b/common/src/main/java/com/dremio/common/exceptions/GrpcExceptionUtil.java
@@ -16,6 +16,7 @@
package com.dremio.common.exceptions;
+import java.security.AccessControlException;
import java.util.Optional;
import org.slf4j.Logger;
@@ -29,8 +30,10 @@
import com.google.rpc.Status;
import io.grpc.Status.Code;
+import io.grpc.StatusException;
import io.grpc.StatusRuntimeException;
import io.grpc.protobuf.StatusProto;
+import io.grpc.stub.StreamObserver;
/**
* Utility functions related to grpc errors.
@@ -109,6 +112,81 @@ public static StatusRuntimeException toStatusRuntimeException(String message, Co
.build());
}
+ /**
+ * Handles unknown {@link Throwable} by passing it in the {@link StreamObserver} as a Status* exception
+ * Only use this method after handling the throwable as accurately as possible, and when no other information about the throwable is available
+ * @param responseObserver responseObserver
+ * @param t unknown exception
+ * @param message High level description of what failed (can be found from the method name)
+ */
+ public static void fallbackHandleException(StreamObserver responseObserver, Throwable t, String message) {
+ logger.warn("Using fallback to handle unknown exception", t);
+ if (t instanceof UserException) {
+ responseObserver.onError(toStatusRuntimeException((UserException) t));
+ } else if (t instanceof StatusException) {
+ responseObserver.onError((StatusException) t);
+ } else if (t instanceof StatusRuntimeException) {
+ responseObserver.onError(statusRuntimeExceptionMapper(t));
+ } else if (t instanceof IllegalArgumentException) {
+ responseObserver.onError(io.grpc.Status.INVALID_ARGUMENT
+ .withCause(t)
+ .withDescription(message)
+ .asRuntimeException());
+ } else if (t instanceof IllegalStateException) {
+ responseObserver.onError(io.grpc.Status.INTERNAL
+ .withCause(t)
+ .withDescription(message)
+ .asRuntimeException());
+ } else if (t instanceof AccessControlException) {
+ responseObserver.onError(io.grpc.Status.PERMISSION_DENIED
+ .withCause(t)
+ .withDescription(message)
+ .asRuntimeException());
+ } else if (t instanceof RuntimeException) {
+ responseObserver.onError(io.grpc.Status.UNKNOWN
+ .withCause(t)
+ .withDescription(message)
+ .asRuntimeException()
+ );
+ } else {
+ responseObserver.onError(io.grpc.Status.UNKNOWN
+ .withCause(t)
+ .withDescription(message)
+ .asException()
+ );
+ }
+ }
+
+ private static StatusRuntimeException statusRuntimeExceptionMapper(Throwable t) {
+ if (!(t instanceof StatusRuntimeException)) {
+ return new StatusRuntimeException(
+ io.grpc.Status.UNKNOWN.withDescription(
+ "The server encountered an unexpected error. Please retry your request.")
+ .withCause(t));
+ }
+
+ StatusRuntimeException sre = (StatusRuntimeException) t;
+ // UNAVAILABLE error is shown as "UNAVAILABLE: no healthy upstream" to the user.
+ // Provide a readable error message to user.
+ if (sre.getStatus().getCode() == io.grpc.Status.Code.UNAVAILABLE) {
+ return new StatusRuntimeException(
+ io.grpc.Status.UNAVAILABLE.withDescription(
+ "The service is temporarily unavailable. Please retry your request.")
+ .withCause(t));
+ }
+ return sre;
+ }
+
+ /**
+ * Handles unknown {@link Throwable} by passing it in the {@link StreamObserver} as a Status* exception
+ * Only use this method after handling the throwable as accurately as possible, and when no other information about the throwable is available
+ * @param responseObserver responseObserver
+ * @param t unknown exception
+ */
+ public static void fallbackHandleException(StreamObserver responseObserver, Throwable t) {
+ fallbackHandleException(responseObserver, t, t.getMessage());
+ }
+
/**
* Converts the given {@link StatusRuntimeException} to a {@link UserException}, if possible.
*
diff --git a/common/src/main/java/com/dremio/common/exceptions/JsonAdditionalExceptionContext.java b/common/src/main/java/com/dremio/common/exceptions/JsonAdditionalExceptionContext.java
index 457b1e3652..d43fb373db 100644
--- a/common/src/main/java/com/dremio/common/exceptions/JsonAdditionalExceptionContext.java
+++ b/common/src/main/java/com/dremio/common/exceptions/JsonAdditionalExceptionContext.java
@@ -56,6 +56,7 @@ protected static T fromUserException(Clas
}
}
+ @Override
public ByteString toByteString() {
try {
return ProtobufByteStringSerDe.writeValue(contextMapper, this,
diff --git a/common/src/main/java/com/dremio/common/exceptions/UserException.java b/common/src/main/java/com/dremio/common/exceptions/UserException.java
index 02724e0286..a48556ef3e 100644
--- a/common/src/main/java/com/dremio/common/exceptions/UserException.java
+++ b/common/src/main/java/com/dremio/common/exceptions/UserException.java
@@ -55,6 +55,19 @@ public class UserException extends RuntimeException {
public static final String REFRESH_METADATA_FAILED_CONCURRENT_UPDATE_MSG = "Unable to refresh metadata for the dataset (due to concurrent updates). Please retry.";
+ // reasons for cancelling a query using UserException
+ public enum AttemptCompletionState {
+ SUCCESS, // attempt is successful (has UT)
+ CLIENT_CANCELLED, // cancelled by user (has UT)
+ PLANNING_TIMEOUT, // query cancelled because it exceeded planning time
+ ENGINE_TIMEOUT, // timeout waiting for an engine slot (has UT)
+ RUNTIME_EXCEEDED, // query cancelled because runtime exceeded (has UT)
+ HEAP_MONITOR_C, // query cancelled by coordinator heap monitor (has UT)
+ HEAP_MONITOR_E, // query cancelled by executor heap monitor
+ UNKNOWN, // Query cancellation reason is unknown
+ DREMIO_PB_ERROR, // DremioPB.ErrorType contains the error type
+ }
+
/**
* Creates a new INVALID_DATASET_METADATA exception builder.
*
@@ -675,6 +688,7 @@ public static final class Builder {
private ByteString rawAdditionalContext;
private boolean fixedMessage; // if true, calls to message() are a no op
+ private AttemptCompletionState attemptCompletionState = AttemptCompletionState.UNKNOWN;
/**
* Wraps an existing exception inside a user exception.
@@ -871,6 +885,11 @@ public Builder setAdditionalExceptionContext(AdditionalExceptionContext addition
return this;
}
+ public Builder attemptCompletionState(AttemptCompletionState attemptCompletionState) {
+ this.attemptCompletionState = attemptCompletionState;
+ return this;
+ }
+
/**
* builds a user exception or returns the wrapped one. If the error is a system error, the error message is logged
* to the given {@link Logger}.
@@ -981,6 +1000,8 @@ public UserException buildSilently() {
private final ByteString rawAdditionalContext;
+ private final AttemptCompletionState attemptCompletionState;
+
protected UserException(final DremioPBError.ErrorType errorType, final String message, final Throwable cause,
final ByteString rawAdditionalContext) {
super(message, cause);
@@ -988,6 +1009,7 @@ protected UserException(final DremioPBError.ErrorType errorType, final String me
this.errorType = errorType;
this.context = new UserExceptionContext();
this.rawAdditionalContext = rawAdditionalContext;
+ this.attemptCompletionState = AttemptCompletionState.UNKNOWN;
}
private UserException(final Builder builder) {
@@ -995,6 +1017,7 @@ private UserException(final Builder builder) {
this.errorType = builder.errorType;
this.context = builder.context;
this.rawAdditionalContext = builder.rawAdditionalContext;
+ this.attemptCompletionState = builder.attemptCompletionState;
}
/**
@@ -1038,6 +1061,10 @@ public String getVerboseMessage(boolean includeErrorIdAndIdentity) {
return generateMessage(includeErrorIdAndIdentity) + "\n\n" + ErrorHelper.buildCausesMessage(getCause());
}
+ public AttemptCompletionState getAttemptCompletionState() {
+ return attemptCompletionState;
+ }
+
/**
* returns or creates a DremioPBError object corresponding to this user exception.
*
diff --git a/common/src/main/java/com/dremio/common/logging/obfuscation/BlockLogLevelTurboFilter.java b/common/src/main/java/com/dremio/common/logging/obfuscation/BlockLogLevelTurboFilter.java
index 7f0279f8e7..e7267e7165 100644
--- a/common/src/main/java/com/dremio/common/logging/obfuscation/BlockLogLevelTurboFilter.java
+++ b/common/src/main/java/com/dremio/common/logging/obfuscation/BlockLogLevelTurboFilter.java
@@ -97,10 +97,12 @@ public void setDefaultLogLevelThreshold(String defaultLogLevelThreshold) {
this.defaultLogLevelThreshold = Level.toLevel(defaultLogLevelThreshold);
}
+ @Override
public void stop() {
this.start = false;
}
+ @Override
public void start() {
if (this.defaultLogLevelThreshold != null) {
super.start();
diff --git a/common/src/main/java/com/dremio/common/scanner/RunTimeScan.java b/common/src/main/java/com/dremio/common/scanner/RunTimeScan.java
index 7e8b90ef26..fa2fd19318 100644
--- a/common/src/main/java/com/dremio/common/scanner/RunTimeScan.java
+++ b/common/src/main/java/com/dremio/common/scanner/RunTimeScan.java
@@ -15,18 +15,22 @@
*/
package com.dremio.common.scanner;
+import java.io.File;
+import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.List;
import com.dremio.common.config.SabotConfig;
import com.dremio.common.scanner.persistence.ScanResult;
+import com.fasterxml.jackson.databind.ObjectMapper;
/**
* Utility to scan classpath at runtime
*
*/
public class RunTimeScan {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RunTimeScan.class);
/** result of prescan */
private static final ScanResult PRESCANNED = BuildTimeScan.load();
@@ -43,13 +47,38 @@ static Collection<URL> getNonPrescannedMarkedPaths() {
return markedPaths;
}
+ private static ScanResult createFromSavedScanResults() {
+ final String savedScanResults = System.getProperty("com.dremio.savedScanResults");
+ if (savedScanResults == null) {
+ return null;
+ }
+
+ final File scanResultsFile = new File(savedScanResults);
+ if (scanResultsFile.exists()) {
+ try {
+ return new ObjectMapper().readValue(scanResultsFile, ScanResult.class);
+ } catch (IOException e) {
+ logger.warn("Unable to read scan result from {} (proceeding to slow path): {}",
+ scanResultsFile.getName(), e.toString());
+ }
+ }
+ return null;
+ }
+
/**
* loads prescanned classpath info and scans for extra ones based on configuration.
* (unless prescan is disabled with {@see ClassPathScanner#IMPLEMENTATIONS_SCAN_CACHE}=falses)
+ * If ScanResult was generated at build time and is indicated by com.dremio.savedScanResults,
+ * then short circuit to just load that and return
* @param config to retrieve the packages to scan
* @return the scan result
*/
public static ScanResult fromPrescan(SabotConfig config) {
+ final ScanResult preCreated = createFromSavedScanResults();
+ if (preCreated != null) {
+ return preCreated;
+ }
+
List packagePrefixes = ClassPathScanner.getPackagePrefixes(config);
List scannedBaseClasses = ClassPathScanner.getScannedBaseClasses(config);
List scannedAnnotations = ClassPathScanner.getScannedAnnotations(config);
diff --git a/common/src/main/java/com/dremio/common/util/Retryer.java b/common/src/main/java/com/dremio/common/util/Retryer.java
index 813bf32d7c..06d17b10ef 100644
--- a/common/src/main/java/com/dremio/common/util/Retryer.java
+++ b/common/src/main/java/com/dremio/common/util/Retryer.java
@@ -27,19 +27,18 @@
import com.dremio.io.ExponentialBackoff;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
+import com.google.errorprone.annotations.CheckReturnValue;
/**
* Simple retrying utility
- *
- * @param
*/
@SuppressWarnings("checkstyle:FinalClass")
-public class Retryer<T> implements ExponentialBackoff {
+public class Retryer implements ExponentialBackoff {
private static final Logger logger = LoggerFactory.getLogger(Retryer.class);
public enum WaitStrategy {EXPONENTIAL, FLAT} //Can be extended
- private Set<Class<? extends Exception>> retryableExceptionClasses = new HashSet<>();
+ private final Set<Class<? extends Exception>> retryableExceptionClasses = new HashSet<>();
private WaitStrategy waitStrategy = WaitStrategy.EXPONENTIAL;
private int maxRetries = 4; // default
private int baseMillis = 250;
@@ -52,7 +51,7 @@ public enum WaitStrategy {EXPONENTIAL, FLAT} //Can be extended
private Retryer() {
}
- public T call(Callable<T> callable) {
+ public <T> T call(Callable<T> callable) {
for (int attemptNo = 1; infiniteRetries || (attemptNo <= maxRetries); attemptNo++) {
try {
return callable.call();
@@ -139,47 +138,56 @@ public int getMaxRetries() {
return maxRetries;
}
- public static class Builder<T> {
- private Retryer<T> retryer = new Retryer<>();
+ @CheckReturnValue
+ public static Builder newBuilder() {
+ return new Retryer.Builder();
+ }
+
+ public static class Builder {
+ private final Retryer retryer = new Retryer();
+
+ private Builder() {
+ // use static factory method newBuilder instead
+ }
- public Builder<T> retryIfExceptionOfType(Class<? extends Exception> clazz) {
+ public Builder retryIfExceptionOfType(Class<? extends Exception> clazz) {
Preconditions.checkState(retryer.isRetriable == retryer.isExceptionClassRetriable,
"Retryer does not support mix of exception class and exception function");
retryer.retryableExceptionClasses.add(clazz);
return this;
}
- public Builder<T> retryOnExceptionFunc(Function<Exception, Boolean> function) {
+ public Builder retryOnExceptionFunc(Function<Exception, Boolean> function) {
Preconditions.checkState(retryer.retryableExceptionClasses.isEmpty(),
"Retryer does not support mix of exception class and exception function");
retryer.isRetriable = function;
return this;
}
- public Builder<T> setWaitStrategy(WaitStrategy waitStrategy, int baseMillis, int maxMillis) {
+ public Builder setWaitStrategy(WaitStrategy waitStrategy, int baseMillis, int maxMillis) {
retryer.waitStrategy = waitStrategy;
retryer.baseMillis = baseMillis;
retryer.maxMillis = maxMillis;
return this;
}
- public Builder<T> setMaxRetries(int maxRetries) {
+ public Builder setMaxRetries(int maxRetries) {
retryer.maxRetries = maxRetries;
return this;
}
- public Builder<T> setInfiniteRetries(boolean infiniteRetries) {
+ public Builder setInfiniteRetries(boolean infiniteRetries) {
retryer.infiniteRetries = infiniteRetries;
return this;
}
- public Retryer<T> build() {
+ public Retryer build() {
return retryer;
}
}
- public Retryer<T> copy() {
- Retryer<T> copy = new Retryer<>();
+ public Retryer copy() {
+ Retryer copy = new Retryer();
copy.waitStrategy = waitStrategy;
copy.baseMillis = baseMillis;
copy.maxMillis = maxMillis;
@@ -199,7 +207,8 @@ public static class OperationFailedAfterRetriesException extends RuntimeExceptio
}
public <T> T getWrappedCause(Class<T> clazz, Function<Throwable, T> conversionFunc) {
- return clazz.isInstance(getCause()) ? (T)getCause() : conversionFunc.apply(getCause());
+ Throwable cause = getCause();
+ return clazz.isInstance(cause) ? clazz.cast(cause) : conversionFunc.apply(cause);
}
}
}
diff --git a/common/src/main/java/com/dremio/common/utils/ReservedCharacters.java b/common/src/main/java/com/dremio/common/utils/ReservedCharacters.java
new file mode 100644
index 0000000000..75af4c0d63
--- /dev/null
+++ b/common/src/main/java/com/dremio/common/utils/ReservedCharacters.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.common.utils;
+
+/**
+ * Set of characters that Dremio reserves to use and considers illegal in identifiers
+ */
+public enum ReservedCharacters {
+ INFORMATION_SEPARATOR_ONE('\u001F'); //https://www.fileformat.info/info/unicode/char/1f/index.htm
+ private char reservedChar;
+
+ ReservedCharacters(char c) {
+ this.reservedChar = c;
+ }
+
+ public char getReservedChar() {
+ return reservedChar;
+ }
+
+ public static String getInformationSeparatorOne() {
+ return String.valueOf(INFORMATION_SEPARATOR_ONE.reservedChar);
+ }
+}
diff --git a/common/src/main/java/com/dremio/context/ExecutorToken.java b/common/src/main/java/com/dremio/context/ExecutorToken.java
index a5e90d9fc2..de412ef24e 100644
--- a/common/src/main/java/com/dremio/context/ExecutorToken.java
+++ b/common/src/main/java/com/dremio/context/ExecutorToken.java
@@ -15,13 +15,25 @@
*/
package com.dremio.context;
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+
+import com.google.common.collect.ImmutableMap;
+
+import io.grpc.Metadata;
+
/**
* Request Context Holder for executor token
*/
-public class ExecutorToken {
+public class ExecutorToken implements SerializableContext {
public static final RequestContext.Key<ExecutorToken> CTX_KEY = RequestContext.newKey(
"executor_token_key");
+ // Note: Public due to usage in some interceptors which do not need deserialization.
+ public static final Metadata.Key<String> TOKEN_HEADER_KEY =
+ Metadata.Key.of("x-dremio-token-key", Metadata.ASCII_STRING_MARSHALLER);
+
private final String executorToken;
public ExecutorToken(String executorToken) {
@@ -32,4 +44,21 @@ public String getExecutorToken() {
return executorToken;
}
+ @Override
+ public void serialize(ImmutableMap.Builder<String, String> builder) {
+ builder.put(TOKEN_HEADER_KEY.name(), executorToken);
+ }
+
+ public static class Transformer implements SerializableContextTransformer {
+ @Override
+ public RequestContext deserialize(final Map<String, String> headers, RequestContext builder) {
+ if (headers.containsKey(TOKEN_HEADER_KEY.name()) && StringUtils.isNotEmpty(headers.get(TOKEN_HEADER_KEY.name()))) {
+ return builder.with(
+ ExecutorToken.CTX_KEY,
+ new ExecutorToken(headers.get(TOKEN_HEADER_KEY.name())));
+ }
+
+ return builder;
+ }
+ }
}
diff --git a/common/src/main/java/com/dremio/context/SerializableContext.java b/common/src/main/java/com/dremio/context/SerializableContext.java
new file mode 100644
index 0000000000..954365fc02
--- /dev/null
+++ b/common/src/main/java/com/dremio/context/SerializableContext.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.context;
+
+import com.google.common.collect.ImmutableMap;
+
+/**
+ * Context object that can be serialized and deserialized for transport over the network.
+ */
+public interface SerializableContext {
+ /**
+ * Serializes the contents of this context into the provided map builder.
+ */
+ void serialize(ImmutableMap.Builder<String, String> builder);
+}
diff --git a/common/src/main/java/com/dremio/context/SerializableContextTransformer.java b/common/src/main/java/com/dremio/context/SerializableContextTransformer.java
new file mode 100644
index 0000000000..f8b22613bc
--- /dev/null
+++ b/common/src/main/java/com/dremio/context/SerializableContextTransformer.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.context;
+
+import java.util.Map;
+
+import com.google.common.collect.ImmutableMap;
+
+import io.grpc.Metadata;
+
+/**
+ * Transformer for deserializing context objects from gRPC headers.
+ */
+public interface SerializableContextTransformer {
+ /**
+ * Helper for converting a set of gRPC headers into a map which this transformer can consume.
+ */
+ static Map<String, String> convert(final Metadata headers) {
+ final ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
+ headers.keys().forEach((key) -> {
+ final String value = headers.get(Metadata.Key.of(key, Metadata.ASCII_STRING_MARSHALLER));
+ if (value != null) {
+ builder.put(key, value);
+ }
+ });
+ return builder.build();
+ }
+
+ /**
+ * Constructs a context object from the provided map.
+ * If the headers are not present, the builder should be returned without modification.
+ * @param builder A RequestContext object to extend off of.
+ */
+ RequestContext deserialize(final Map<String, String> headers, RequestContext builder);
+}
diff --git a/common/src/main/java/com/dremio/context/SupportContext.java b/common/src/main/java/com/dremio/context/SupportContext.java
index 16f6c9915d..4aa3f7bbf8 100644
--- a/common/src/main/java/com/dremio/context/SupportContext.java
+++ b/common/src/main/java/com/dremio/context/SupportContext.java
@@ -16,15 +16,22 @@
package com.dremio.context;
import java.util.Arrays;
+import java.util.Map;
+
+import com.google.common.collect.ImmutableMap;
/**
* Support context.
*/
-public class SupportContext {
+public class SupportContext implements SerializableContext {
public static final RequestContext.Key<SupportContext> CTX_KEY = RequestContext.newKey("support_ctx_key");
private static final String ROLES_DELIMITER = ",";
+ private static final String SUPPORT_TICKET_HEADER_KEY = "x-dremio-support-ticket-key";
+ private static final String SUPPORT_EMAIL_HEADER_KEY = "x-dremio-support-email-key";
+ private static final String SUPPORT_ROLES_HEADER_KEY = "x-dremio-support-roles-key";
+
// Note: This refers to the UserID field held within the UserContext,
// but this constant only appears if the SupportContext is set.
public static final String SUPPORT_USER_ID = "$Dremio-Support-Super-Admin-User$";
@@ -35,7 +42,8 @@ public enum SupportRole {
BASIC_SUPPORT_ROLE("basic-support"),
BILLING_ROLE("billing"),
ORG_DELETE_ROLE("org-delete"),
- CONSISTENCY_FIXER_ROLE("consistency-fixer");
+ CONSISTENCY_FIXER_ROLE("consistency-fixer"),
+ DEBUG_ROLE("debug-role");
private final String value;
@@ -95,6 +103,10 @@ public static boolean isSupportUserWithConsistencyFixerRole() {
return isSupportUser() && isSupportUserHasRole(SupportRole.CONSISTENCY_FIXER_ROLE);
}
+ public static boolean isSupportUserWithDebugRole() {
+ return isSupportUser() && isSupportUserHasRole(SupportRole.DEBUG_ROLE);
+ }
+
public static boolean doesSupportUserHaveRole(SupportContext supportContext, SupportRole role) {
return supportContext.roles.length > 0 && Arrays.stream(supportContext.roles).anyMatch(r -> r.equals(role.value));
}
@@ -110,12 +122,37 @@ private static boolean isSupportUserHasRole(SupportRole role) {
return doesSupportUserHaveRole(RequestContext.current().get(SupportContext.CTX_KEY), role);
}
- public static String serializeSupportRoles(String[] rolesArr) {
+ @Override
+ public void serialize(ImmutableMap.Builder<String, String> builder) {
+ builder.put(SUPPORT_TICKET_HEADER_KEY, ticket);
+ builder.put(SUPPORT_EMAIL_HEADER_KEY, email);
+ builder.put(SUPPORT_ROLES_HEADER_KEY, serializeSupportRoles(roles));
+ }
+
+ private static String serializeSupportRoles(String[] rolesArr) {
return rolesArr != null ? String.join(ROLES_DELIMITER, rolesArr) : "";
}
- public static String[] deserializeSupportRoles(String rolesStr) {
- return rolesStr != null ? rolesStr.split(ROLES_DELIMITER) : new String[0];
+ public static class Transformer implements SerializableContextTransformer {
+ @Override
+ public RequestContext deserialize(final Map<String, String> headers, RequestContext builder) {
+ if (headers.containsKey(SUPPORT_TICKET_HEADER_KEY)
+ && headers.containsKey(SUPPORT_EMAIL_HEADER_KEY)
+ && headers.containsKey(SUPPORT_ROLES_HEADER_KEY))
+ {
+ return builder.with(
+ SupportContext.CTX_KEY,
+ new SupportContext(
+ headers.get(SUPPORT_TICKET_HEADER_KEY),
+ headers.get(SUPPORT_EMAIL_HEADER_KEY),
+ deserializeSupportRoles(headers.get(SUPPORT_ROLES_HEADER_KEY))));
+ }
+
+ return builder;
+ }
}
+ private static String[] deserializeSupportRoles(String rolesStr) {
+ return rolesStr != null ? rolesStr.split(ROLES_DELIMITER) : new String[0];
+ }
}
diff --git a/common/src/main/java/com/dremio/context/TenantContext.java b/common/src/main/java/com/dremio/context/TenantContext.java
index 2211480fce..7d6609132f 100644
--- a/common/src/main/java/com/dremio/context/TenantContext.java
+++ b/common/src/main/java/com/dremio/context/TenantContext.java
@@ -15,12 +15,17 @@
*/
package com.dremio.context;
+import java.util.Map;
import java.util.UUID;
+import com.google.common.collect.ImmutableMap;
+
+import io.grpc.Metadata;
+
/**
* Tenant context.
*/
-public class TenantContext {
+public class TenantContext implements SerializableContext {
public static final RequestContext.Key<TenantContext> CTX_KEY = RequestContext.newKey("tenant_ctx_key");
// The default tenant id used in product.
public static final String DEFAULT_PRODUCT_PROJECT_ID = "77a89f85-c936-4f42-ab21-2ee90e9609b8";
@@ -31,6 +36,12 @@ public class TenantContext {
public static final TenantContext DEFAULT_SERVICE_CONTEXT =
new TenantContext(DEFAULT_SERVICE_PROJECT_ID, DEFAULT_SERVICE_ORG_ID);
+ // Note: These are public for use in annotating traces.
+ public static final Metadata.Key<String> PROJECT_ID_HEADER_KEY =
+ Metadata.Key.of("x-dremio-project-id-key", Metadata.ASCII_STRING_MARSHALLER);
+ public static final Metadata.Key<String> ORG_ID_HEADER_KEY =
+ Metadata.Key.of("x-dremio-org-id-key", Metadata.ASCII_STRING_MARSHALLER);
+
private final UUID projectId;
private final UUID orgId;
@@ -47,4 +58,25 @@ public UUID getOrgId() {
return orgId;
}
+ @Override
+ public void serialize(ImmutableMap.Builder<String, String> builder) {
+ builder.put(PROJECT_ID_HEADER_KEY.name(), projectId.toString());
+ builder.put(ORG_ID_HEADER_KEY.name(), orgId.toString());
+ }
+
+ public static class Transformer implements SerializableContextTransformer {
+ @Override
+ public RequestContext deserialize(final Map<String, String> headers, RequestContext builder) {
+ if (headers.containsKey(PROJECT_ID_HEADER_KEY.name())
+ && headers.containsKey(ORG_ID_HEADER_KEY.name())) {
+ return builder.with(
+ TenantContext.CTX_KEY,
+ new TenantContext(
+ headers.get(PROJECT_ID_HEADER_KEY.name()),
+ headers.get(ORG_ID_HEADER_KEY.name())));
+ }
+
+ return builder;
+ }
+ }
}
diff --git a/common/src/main/java/com/dremio/context/UserContext.java b/common/src/main/java/com/dremio/context/UserContext.java
index 99b5417a91..ba2e8fc598 100644
--- a/common/src/main/java/com/dremio/context/UserContext.java
+++ b/common/src/main/java/com/dremio/context/UserContext.java
@@ -15,15 +15,22 @@
*/
package com.dremio.context;
+import java.util.Map;
+
+import com.google.common.collect.ImmutableMap;
+
/**
* User context.
*/
-public class UserContext {
+public class UserContext implements SerializableContext {
public static final RequestContext.Key<UserContext> CTX_KEY = RequestContext.newKey("user_ctx_key");
public static final UserContext DEFAULT_SERVICE_CONTEXT = new UserContext("77a89f85-c936-4f42-ab21-2ee90e9609b8");
// represents the Dremio System User ($dremio$)
public static final UserContext SYSTEM_USER_CONTEXT = new UserContext("678cc92c-01ed-4db3-9a28-d1f871042d9f");
+ // TODO(DX-63584): Change to private once the use in proxy handlers is removed.
+ public static final String USER_HEADER_KEY = "x-dremio-user-key";
+
private final String userId;
public UserContext(String userId) {
@@ -34,12 +41,26 @@ public String getUserId() {
return userId;
}
- public String serialize() {
- return userId;
- }
-
public static boolean isSystemUser() {
return RequestContext.current().get(UserContext.CTX_KEY) != null
&& SYSTEM_USER_CONTEXT.getUserId().equals(RequestContext.current().get(UserContext.CTX_KEY).getUserId());
}
+
+ @Override
+ public void serialize(ImmutableMap.Builder<String, String> builder) {
+ builder.put(USER_HEADER_KEY, userId);
+ }
+
+ public static class Transformer implements SerializableContextTransformer {
+ @Override
+ public RequestContext deserialize(final Map<String, String> headers, RequestContext builder) {
+ if (headers.containsKey(USER_HEADER_KEY)) {
+ return builder.with(
+ UserContext.CTX_KEY,
+ new UserContext(headers.get(USER_HEADER_KEY)));
+ }
+
+ return builder;
+ }
+ }
}
diff --git a/common/src/main/java/com/dremio/context/UsernameContext.java b/common/src/main/java/com/dremio/context/UsernameContext.java
new file mode 100644
index 0000000000..afb3755310
--- /dev/null
+++ b/common/src/main/java/com/dremio/context/UsernameContext.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.context;
+
+/**
+ * The Username of the User.
+ *
+ * TODO:
+ * Note that there is ongoing working to add the user's username to UserContext;
+ * Refer to DX-51988: Introduce username to UserContext.
+ * Once the ticket is completed, this class is unnecessary and should be removed in lieu of
+ * simply using the username in the UserContext.
+ * Refer to DX-59840: Remove UsernameContext once username is included in UserContext.
+ */
+public class UsernameContext {
+ public static final RequestContext.Key<UsernameContext> CTX_KEY = RequestContext.newKey("user_name_ctx_key");
+
+ private final String userName;
+
+ public UsernameContext(String userName) {
+ this.userName = userName;
+ }
+
+ public String getUserName() {
+ return userName;
+ }
+}
diff --git a/common/src/main/java/com/hubspot/jackson/datatype/protobuf/builtin/serializers/MessageSerializer.java b/common/src/main/java/com/hubspot/jackson/datatype/protobuf/builtin/serializers/MessageSerializer.java
deleted file mode 100644
index 829faa0c89..0000000000
--- a/common/src/main/java/com/hubspot/jackson/datatype/protobuf/builtin/serializers/MessageSerializer.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Copyright (C) 2017-2019 Dremio Corporation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.hubspot.jackson.datatype.protobuf.builtin.serializers;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import com.fasterxml.jackson.annotation.JsonInclude.Include;
-import com.fasterxml.jackson.core.JsonGenerator;
-import com.fasterxml.jackson.databind.PropertyNamingStrategy.PropertyNamingStrategyBase;
-import com.fasterxml.jackson.databind.SerializationFeature;
-import com.fasterxml.jackson.databind.SerializerProvider;
-import com.fasterxml.jackson.databind.util.NameTransformer;
-import com.google.protobuf.Descriptors.Descriptor;
-import com.google.protobuf.Descriptors.FieldDescriptor;
-import com.google.protobuf.Descriptors.FieldDescriptor.JavaType;
-import com.google.protobuf.Descriptors.FileDescriptor.Syntax;
-import com.google.protobuf.ExtensionRegistry.ExtensionInfo;
-import com.google.protobuf.GeneratedMessageV3.ExtendableMessageOrBuilder;
-import com.google.protobuf.MessageOrBuilder;
-import com.hubspot.jackson.datatype.protobuf.ExtensionRegistryWrapper;
-import com.hubspot.jackson.datatype.protobuf.PropertyNamingStrategyWrapper;
-import com.hubspot.jackson.datatype.protobuf.ProtobufJacksonConfig;
-import com.hubspot.jackson.datatype.protobuf.ProtobufSerializer;
-
-import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
-
-/**
- * A Modified MessageSerializer to add support for Unwrapped Serialization.
- *
- * This file submitted as a PR to the jackson protobuf library.
- * PR - https://github.com/HubSpot/jackson-datatype-protobuf/pull/79
- */
-public class MessageSerializer extends ProtobufSerializer<MessageOrBuilder> {
- @SuppressFBWarnings(value="SE_BAD_FIELD")
- private final ProtobufJacksonConfig config;
- private final boolean unwrappingSerializer;
- private final NameTransformer nameTransformer;
-
- /**
- * @deprecated use {@link #MessageSerializer(ProtobufJacksonConfig)} instead
- */
- @Deprecated
- public MessageSerializer(ExtensionRegistryWrapper extensionRegistry) {
- this(ProtobufJacksonConfig.builder().extensionRegistry(extensionRegistry).build());
- }
-
- public MessageSerializer(ProtobufJacksonConfig config) {
- this(config, false);
- }
-
- public MessageSerializer(ProtobufJacksonConfig config, boolean unwrappingSerializer) {
- this(config, null, unwrappingSerializer);
- }
-
- public MessageSerializer(ProtobufJacksonConfig config, NameTransformer nameTransformer, boolean unwrappingSerializer) {
- super(MessageOrBuilder.class);
- this.config = config;
- this.unwrappingSerializer = unwrappingSerializer;
- if (nameTransformer == null) {
- this.nameTransformer = NameTransformer.NOP;
- } else {
- this.nameTransformer = nameTransformer;
- }
- }
-
- @Override
- public void serialize(
- MessageOrBuilder message,
- JsonGenerator generator,
- SerializerProvider serializerProvider
- ) throws IOException {
- if (!isUnwrappingSerializer()) {
- generator.writeStartObject();
- }
-
- boolean proto3 = message.getDescriptorForType().getFile().getSyntax() == Syntax.PROTO3;
- Include include = serializerProvider.getConfig().getDefaultPropertyInclusion().getValueInclusion();
- boolean writeDefaultValues = proto3 && include != Include.NON_DEFAULT;
- boolean writeEmptyCollections = include != Include.NON_DEFAULT && include != Include.NON_EMPTY;
-
- //If NamingTransformer is provided (in case of UnwrappingSerializer), we chain it on top of
- // the namingStrategy.
- final PropertyNamingStrategyBase namingStrategy = new PropertyNamingStrategyBase() {
- @Override
- public String translate(String fieldName) {
- PropertyNamingStrategyBase configuredNamingStrategy =
- new PropertyNamingStrategyWrapper(serializerProvider.getConfig().getPropertyNamingStrategy());
- return nameTransformer.transform(configuredNamingStrategy.translate(fieldName));
- }
- };
-
-
- Descriptor descriptor = message.getDescriptorForType();
- List<FieldDescriptor> fields = new ArrayList<>(descriptor.getFields());
- if (message instanceof ExtendableMessageOrBuilder<?>) {
- for (ExtensionInfo extensionInfo : config.extensionRegistry().getExtensionsByDescriptor(descriptor)) {
- fields.add(extensionInfo.descriptor);
- }
- }
-
- for (FieldDescriptor field : fields) {
- if (field.isRepeated()) {
- List<?> valueList = (List<?>) message.getField(field);
-
- if (!valueList.isEmpty() || writeEmptyCollections) {
- if (field.isMapField()) {
- generator.writeFieldName(nameTransformer.transform(namingStrategy.translate(field.getName())));
- writeMap(field, valueList, generator, serializerProvider);
- } else if (valueList.size() == 1 && writeSingleElementArraysUnwrapped(serializerProvider)) {
- generator.writeFieldName(nameTransformer.transform(namingStrategy.translate(field.getName())));
- writeValue(field, valueList.get(0), generator, serializerProvider);
- } else {
- generator.writeArrayFieldStart(nameTransformer.transform(namingStrategy.translate(field.getName())));
- for (Object subValue : valueList) {
- writeValue(field, subValue, generator, serializerProvider);
- }
- generator.writeEndArray();
- }
- }
- } else if (message.hasField(field) || (writeDefaultValues && !supportsFieldPresence(field) && field.getContainingOneof() == null)) {
- generator.writeFieldName(nameTransformer.transform(namingStrategy.translate(field.getName())));
- writeValue(field, message.getField(field), generator, serializerProvider);
- } else if (include == Include.ALWAYS && field.getContainingOneof() == null) {
- generator.writeFieldName(nameTransformer.transform(namingStrategy.translate(field.getName())));
- generator.writeNull();
- }
- }
-
- if (!isUnwrappingSerializer()) {
- generator.writeEndObject();
- }
- }
-
- @Override
- public boolean isUnwrappingSerializer() {
- return unwrappingSerializer;
- }
-
- @Override
- public MessageSerializer unwrappingSerializer(NameTransformer nameTransformer) {
- return new MessageSerializer(config, nameTransformer, true);
- }
-
- private static boolean supportsFieldPresence(FieldDescriptor field) {
- // messages still support field presence in proto3
- return field.getJavaType() == JavaType.MESSAGE;
- }
-
- private static boolean writeEmptyArrays(SerializerProvider config) {
- return config.isEnabled(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS);
- }
-
- private static boolean writeSingleElementArraysUnwrapped(SerializerProvider config) {
- return config.isEnabled(SerializationFeature.WRITE_SINGLE_ELEM_ARRAYS_UNWRAPPED);
- }
-}
diff --git a/common/src/test/java/com/dremio/TestBlockLevel/TestBlockLevelLogging.java b/common/src/test/java/com/dremio/TestBlockLevel/TestBlockLevelLogging.java
index e2cc07adc5..f1928c2b10 100644
--- a/common/src/test/java/com/dremio/TestBlockLevel/TestBlockLevelLogging.java
+++ b/common/src/test/java/com/dremio/TestBlockLevel/TestBlockLevelLogging.java
@@ -33,18 +33,26 @@
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.read.ListAppender;
+/**
+ * expectations in this test depend on the log configuration in the logback-test.xml resource
+ */
public class TestBlockLevelLogging {
- boolean isLowestLevelPresentInLogList(List<ILoggingEvent> logsList, Level level)
+
+ private static void assertLowestLogLevel(List<ILoggingEvent> logsList, Level level)
{
- for (int i=0;i testLogFilteringUtil(ch.qos.logback.classic.Logger logger) {
+
+ public static List<ILoggingEvent> testLogFilteringUtil(org.slf4j.Logger slf4jLogger) {
+ ch.qos.logback.classic.Logger logger = (ch.qos.logback.classic.Logger) slf4jLogger;
ListAppender<ILoggingEvent> listAppender = new ListAppender<>();
listAppender.start();
logger.addAppender(listAppender);
@@ -56,86 +64,60 @@ public static List testLogFilteringUtil(ch.qos.logback.classic.Lo
List<ILoggingEvent> logsList = listAppender.list;
return logsList;
}
+
@Test
public void testAFirst() {
AFirst aFirst = new AFirst();
List logsList = aFirst.testLogFiltering();
- Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.ERROR));
- for (int i=0;i logsList = aSecond.testLogFiltering();
- Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.WARN));
- for (int i=0;i logsList = aThird.testLogFiltering();
- Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.WARN));
- for (int i=0;i logsList = bFirst.testLogFiltering();
- Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.INFO));
- for (int i=0;i logsList = bSecond.testLogFiltering();
- Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.ERROR));
- for (int i=0;i logsList = bThird.testLogFiltering();
- Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.DEBUG));
- for (int i=0;i logsList = cFirst.testLogFiltering();
- Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.DEBUG));
- for (int i=0;i logsList = cSecond.testLogFiltering();
- Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.TRACE));
- Assert.assertTrue(Level.toLevel("TRACE").isGreaterOrEqual(Level.toLevel("TRACE")));
-
- for (int i=0;i callable = () -> RequestContext.current().get(UserContext.CTX_KEY).serialize();
+ Callable<String> callable = () -> RequestContext.current().get(UserContext.CTX_KEY).getUserId();
Future<String> future = RequestContext.empty()
.with(UserContext.CTX_KEY, new UserContext(testUser))
.call(() -> pool.submit(callable));
@@ -161,7 +161,7 @@ public void testContextWithRunnable() throws Exception {
final String testUser = "testUser2";
final Pointer<String> foundUser = new Pointer<>();
- Runnable runnable = () -> foundUser.value = RequestContext.current().get(UserContext.CTX_KEY).serialize();
+ Runnable runnable = () -> foundUser.value = RequestContext.current().get(UserContext.CTX_KEY).getUserId();
Future<?> future = RequestContext.empty()
.with(UserContext.CTX_KEY, new UserContext(testUser))
.call(() -> pool.submit(runnable));
diff --git a/common/src/test/java/com/dremio/common/logging/TestStructuredLogging.java b/common/src/test/java/com/dremio/common/logging/TestStructuredLogging.java
index 2426908426..baba650e16 100644
--- a/common/src/test/java/com/dremio/common/logging/TestStructuredLogging.java
+++ b/common/src/test/java/com/dremio/common/logging/TestStructuredLogging.java
@@ -49,6 +49,7 @@
public class TestStructuredLogging extends DremioTest {
private static final String LOGGER_NAME = "STRUCTURED-LOG-TEST";
private LoggerContext localLoggerContext;
+ @SuppressWarnings("Slf4jLoggerShouldBeFinal")
private Logger logger;
@Rule
public TemporaryFolder tempLogFolder = new TemporaryFolder();
diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/First/AFirst.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/First/AFirst.java
index d495cde26c..75e7214c44 100644
--- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/First/AFirst.java
+++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/First/AFirst.java
@@ -19,14 +19,13 @@
import com.dremio.TestBlockLevel.TestBlockLevelLogging;
-import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
/**
* class for testing custom log filtering
*/
public class AFirst {
- private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(AFirst.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AFirst.class);
public List testLogFiltering() {
return TestBlockLevelLogging.testLogFilteringUtil(logger);
diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Second/ASecond.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Second/ASecond.java
index 80a936b081..4183bde283 100644
--- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Second/ASecond.java
+++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Second/ASecond.java
@@ -19,14 +19,13 @@
import com.dremio.TestBlockLevel.TestBlockLevelLogging;
-import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
/**
* class for testing custom log filtering
*/
public class ASecond {
- private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(ASecond.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ASecond.class);
public List testLogFiltering() {
return TestBlockLevelLogging.testLogFilteringUtil(logger);
diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Third/AThird.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Third/AThird.java
index 0d8ce7b3ac..55a5eacd5d 100644
--- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Third/AThird.java
+++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Third/AThird.java
@@ -19,14 +19,13 @@
import com.dremio.TestBlockLevel.TestBlockLevelLogging;
-import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
/**
* class for testing custom log filtering
*/
public class AThird {
- private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(AThird.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AThird.class);
public List testLogFiltering() {
return TestBlockLevelLogging.testLogFilteringUtil(logger);
diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/First/BFirst.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/First/BFirst.java
index e1222a3712..7ad9f1fedf 100644
--- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/First/BFirst.java
+++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/First/BFirst.java
@@ -19,18 +19,15 @@
import com.dremio.TestBlockLevel.TestBlockLevelLogging;
-import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
/**
* class for testing custom log filtering
*/
public class BFirst {
-
- private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(BFirst.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BFirst.class);
public List testLogFiltering() {
return TestBlockLevelLogging.testLogFilteringUtil(logger);
-
}
}
diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Second/BSecond.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Second/BSecond.java
index 83074f25de..2c140e994e 100644
--- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Second/BSecond.java
+++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Second/BSecond.java
@@ -19,14 +19,13 @@
import com.dremio.TestBlockLevel.TestBlockLevelLogging;
-import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
/**
*class for testing custom log filtering
*/
public class BSecond {
- private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(BSecond.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BSecond.class);
public List testLogFiltering() {
return TestBlockLevelLogging.testLogFilteringUtil(logger);
diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Third/BThird.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Third/BThird.java
index 0e81bb6bd1..d3db7f1702 100644
--- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Third/BThird.java
+++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Third/BThird.java
@@ -19,15 +19,13 @@
import com.dremio.TestBlockLevel.TestBlockLevelLogging;
-import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
/**
* class for testing custom log filtering
*/
public class BThird {
-
- private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(BThird.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BThird.class);
public List testLogFiltering() {
return TestBlockLevelLogging.testLogFilteringUtil(logger);
diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/CFirst.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/CFirst.java
index 27152ce298..2a67572f8d 100644
--- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/CFirst.java
+++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/CFirst.java
@@ -19,14 +19,14 @@
import com.dremio.TestBlockLevel.TestBlockLevelLogging;
-import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
/*
*class for testing custom log filtering
*/
public class CFirst {
- private static final ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(CFirst.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CFirst.class);
+
public List testLogFiltering() {
return TestBlockLevelLogging.testLogFilteringUtil(logger);
}
diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/Second/CSecond.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/Second/CSecond.java
index c78b225bd6..3822b81ba9 100644
--- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/Second/CSecond.java
+++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/Second/CSecond.java
@@ -20,11 +20,10 @@
import com.dremio.TestBlockLevel.TestBlockLevelLogging;
-import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
public class CSecond {
- private static final ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(CSecond.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CSecond.class);
public List testLogFiltering() {
return TestBlockLevelLogging.testLogFilteringUtil(logger);
}
diff --git a/common/src/test/java/com/dremio/common/util/TestRetryer.java b/common/src/test/java/com/dremio/common/util/TestRetryer.java
index 16749b74e3..7d946a73af 100644
--- a/common/src/test/java/com/dremio/common/util/TestRetryer.java
+++ b/common/src/test/java/com/dremio/common/util/TestRetryer.java
@@ -38,7 +38,7 @@ public class TestRetryer {
@Test
public void testMaxRetries() {
- Retryer retryer = new Retryer.Builder()
+ Retryer retryer = Retryer.newBuilder()
.setWaitStrategy(Retryer.WaitStrategy.FLAT, 1, 1)
.retryIfExceptionOfType(RuntimeException.class)
.setMaxRetries(MAX_RETRIES).build();
@@ -57,7 +57,7 @@ public void testMaxRetries() {
@Test
public void testNoRetryAfterSuccess() {
- Retryer retryer = new Retryer.Builder()
+ Retryer retryer = Retryer.newBuilder()
.setWaitStrategy(Retryer.WaitStrategy.FLAT, 1, 1)
.retryIfExceptionOfType(RuntimeException.class)
.setMaxRetries(MAX_RETRIES).build();
@@ -81,7 +81,7 @@ public void testNoRetryAfterSuccess() {
public void testFlatWaitStrategy() {
final int expectedWait = 100;
- Retryer retryer = spy(new Retryer.Builder()
+ Retryer retryer = spy(Retryer.newBuilder()
.setWaitStrategy(Retryer.WaitStrategy.FLAT, expectedWait, expectedWait)
.retryIfExceptionOfType(RuntimeException.class)
.setMaxRetries(MAX_RETRIES).build());
@@ -100,7 +100,7 @@ public void testFlatWaitStrategy() {
@Test(expected = RuntimeException.class)
public void testRetryIfException() {
- Retryer retryer = new Retryer.Builder()
+ Retryer retryer = Retryer.newBuilder()
.setWaitStrategy(Retryer.WaitStrategy.FLAT, 1, 1)
.retryIfExceptionOfType(IOException.class)
.retryIfExceptionOfType(SQLException.class)
@@ -127,7 +127,7 @@ public void testRetryIfException() {
@Test(expected = RuntimeException.class)
public void testRetryIfExceptionFunc() {
- Retryer retryer = new Retryer.Builder()
+ Retryer retryer = Retryer.newBuilder()
.setWaitStrategy(Retryer.WaitStrategy.FLAT, 1, 1)
.retryOnExceptionFunc(ex -> ex instanceof IOException || ex instanceof SQLException)
.setMaxRetries(MAX_RETRIES).build();
diff --git a/common/src/test/java/com/dremio/test/GoldenFileMetaTests.java b/common/src/test/java/com/dremio/test/GoldenFileMetaTests.java
index 7087553048..c60156b236 100644
--- a/common/src/test/java/com/dremio/test/GoldenFileMetaTests.java
+++ b/common/src/test/java/com/dremio/test/GoldenFileMetaTests.java
@@ -19,6 +19,7 @@
import java.nio.file.Paths;
import org.junit.Assert;
+import org.junit.ComparisonFailure;
import org.junit.Test;
/**
@@ -27,7 +28,7 @@
public final class GoldenFileMetaTests {
@Test
public void testSuccessScenario() {
- new GoldenFileTestBuilder(input -> input.left + input.right)
+ GoldenFileTestBuilder.create(input -> input.left + input.right)
.add("3 plus 5", new Input(3, 5))
.add("5 plus 8", new Input(5, 8))
.runTests();
@@ -35,7 +36,7 @@ public void testSuccessScenario() {
@Test
public void testExpectedExceptionScenario() {
- new GoldenFileTestBuilder<>(GoldenFileMetaTests::addWithException)
+ GoldenFileTestBuilder.create(GoldenFileMetaTests::addWithException)
.allowExceptions()
.add("3 plus 5", new Input(3, 5))
.add("5 plus 8", new Input(5, 8))
@@ -45,7 +46,7 @@ public void testExpectedExceptionScenario() {
@Test
public void testUnexpectedExceptionScenario() {
try {
- new GoldenFileTestBuilder<>(GoldenFileMetaTests::addWithException)
+ GoldenFileTestBuilder.create(GoldenFileMetaTests::addWithException)
.add("3 plus 5", new Input(3, 5))
.runTests();
Assert.fail();
@@ -54,11 +55,28 @@ public void testUnexpectedExceptionScenario() {
}
}
+ @Test
+ public void testIgnoreScenario() {
+ GoldenFileTestBuilder.create(input -> input.left + input.right)
+ .add("Correct Output And Ignore = false", new Input(3, 5))
+ .addButIgnore("Correct Output And Ignore = true", new Input(3, 5))
+ .addButIgnore("Incorrect Output And Ignore = true", new Input(3, 5))
+ .runTests();
+ }
+
+ @Test(expected = ComparisonFailure.class)
+ public void testIncorrectOutput() {
+ GoldenFileTestBuilder.create(input -> input.left + input.right)
+ .allowExceptions()
+ .add("Incorrect Output And Ignore = false", new Input(3, 5))
+ .runTests();
+ }
+
@SuppressWarnings("AssertionFailureIgnored")
@Test
public void testFirstRun() {
try {
- new GoldenFileTestBuilder<>((Integer i) -> i)
+ GoldenFileTestBuilder.create((Integer i) -> i)
.add("Example Test", 1)
.runTests();
Assert.fail();
@@ -81,7 +99,7 @@ public void testFirstRun() {
@Test
public void testNotCasesAdded() {
try {
- new GoldenFileTestBuilder<>((Integer i) -> i)
+ GoldenFileTestBuilder.create((Integer i) -> i)
.runTests();
Assert.fail();
} catch (IllegalStateException error) {
diff --git a/common/src/test/java/com/dremio/test/GoldenFileTestBuilder.java b/common/src/test/java/com/dremio/test/GoldenFileTestBuilder.java
index 2de54f0667..cd06ee0a1e 100644
--- a/common/src/test/java/com/dremio/test/GoldenFileTestBuilder.java
+++ b/common/src/test/java/com/dremio/test/GoldenFileTestBuilder.java
@@ -17,8 +17,6 @@
import java.io.File;
import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringWriter;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileAlreadyExistsException;
@@ -28,8 +26,10 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
+import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
+import java.util.Map;
import java.util.function.Function;
import org.apache.commons.lang3.tuple.Pair;
@@ -37,6 +37,7 @@
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.LoaderOptions;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
@@ -67,37 +68,36 @@
/**
* Class generating golden files used for baseline / data-driven testing
*/
-public final class GoldenFileTestBuilder {
+public final class GoldenFileTestBuilder {
private static final Logger LOGGER = LoggerFactory.getLogger(GoldenFileTestBuilder.class);
private static final Path LICENSE_HEADER_PATH = Paths.get(Resources.getResource("goldenfiles/header.txt").getPath());
- private static final ObjectMapper objectMapper = getObjectMapper();
+ private static final ObjectMapper objectMapper = createObjectMapper();
+ private final List> descriptionAndInputs = new ArrayList<>();
private final ThrowingFunction executeTestFunction;
- private final List> descriptionAndInputs;
- private boolean allowExceptions;
- private boolean showFullStackTrace;
+ private Function exceptionSerializer;
+ private Function inputSerializer;
+ private boolean allowUnorderedMatch;
- public GoldenFileTestBuilder(ThrowingFunction executeTestFunction) {
+ public GoldenFileTestBuilder(
+ ThrowingFunction executeTestFunction,
+ Function inputSerializer) {
this.executeTestFunction = executeTestFunction;
- this.descriptionAndInputs = new ArrayList<>();
+ this.inputSerializer = inputSerializer;
+ this.allowUnorderedMatch = false;
}
- public GoldenFileTestBuilder allowExceptions() {
- this.allowExceptions = true;
+ public GoldenFileTestBuilder add(String description, I input) {
+ this.descriptionAndInputs.add(new DescriptionAndInput(description, input, false));
return this;
}
- public GoldenFileTestBuilder showFullStackTrace() {
- this.showFullStackTrace = true;
+ public GoldenFileTestBuilder addButIgnore(String description, I input) {
+ this.descriptionAndInputs.add(new DescriptionAndInput(description, input, true));
return this;
}
- public GoldenFileTestBuilder add(String description, I input) {
- this.descriptionAndInputs.add(new DescriptionAndInput(description, input));
- return this;
- }
-
- public GoldenFileTestBuilder addListByRule(List list, Function> rule) {
+ public GoldenFileTestBuilder addListByRule(List list, Function> rule) {
for (T item : list) {
Pair output = rule.apply(item);
String description = output.getLeft();
@@ -108,40 +108,54 @@ public GoldenFileTestBuilder addListByRule(List list, Function setExceptionSerializer(Function exceptionSerializer) {
+ this.exceptionSerializer = exceptionSerializer;
+ return this;
+ }
+
+ public GoldenFileTestBuilder allowExceptions() {
+ return setExceptionSerializer(GoldenFileTestBuilder::defaultExceptionSerializer);
+ }
+
+ public GoldenFileTestBuilder allowUnorderedMatch() {
+ allowUnorderedMatch = true;
+ return this;
+ }
+
public void runTests() {
try {
Preconditions.checkState(!descriptionAndInputs.isEmpty(), "No test cases found.");
// Generate the Input and Output pairs
- List> actualInputAndOutputList = new ArrayList<>();
- for (DescriptionAndInput descriptionAndInput : this.descriptionAndInputs) {
- InputAndOutput inputAndOutput;
+ List> actualInputAndOutputList = new ArrayList<>();
+ for (DescriptionAndInput descriptionAndInput : descriptionAndInputs) {
+ InputAndOutput inputAndOutput;
+ I_W inputForSerialization = inputSerializer.apply(descriptionAndInput.input);
+
try {
inputAndOutput = InputAndOutput.createSuccess(
descriptionAndInput.description,
- descriptionAndInput.input,
- this.executeTestFunction.apply(descriptionAndInput.input));
+ inputForSerialization,
+ executeTestFunction.apply(descriptionAndInput.input));
} catch (Throwable t) {
- if (this.allowExceptions) {
- inputAndOutput = InputAndOutput.createFailure(
- descriptionAndInput.description,
- descriptionAndInput.input,
- t,
- this.showFullStackTrace);
- } else {
+ if (exceptionSerializer == null) {
throw new RuntimeException(t);
}
+
+ inputAndOutput = InputAndOutput.createFailure(
+ descriptionAndInput.description,
+ inputForSerialization,
+ t,
+ exceptionSerializer);
}
actualInputAndOutputList.add(inputAndOutput);
}
-
-
// Write the actual values, so user's can diff with the expected and overwrite the golden file if the change is acceptable.
Path goldenFileActualPath = getGoldenFileActualPath();
writeActualGoldenFile(goldenFileActualPath, actualInputAndOutputList);
- List> expectedInputAndOutputList = readExpectedFile();
+ List> expectedInputAndOutputList = readExpectedFile();
// Assert equality
assertGoldenFilesAreEqual(expectedInputAndOutputList, actualInputAndOutputList);
@@ -150,40 +164,43 @@ public void runTests() {
}
}
- private List> readExpectedFile() {
+ private List> readExpectedFile() {
String path = goldenFileResource();
try {
return objectMapper.readValue(
Resources.getResource(path),
- new TypeReference>>(){});
+ new TypeReference>>(){});
} catch(IllegalArgumentException|IOException ex) {
LOGGER.error("Exception while read expected file", ex);
return ImmutableList.of(); //Return empty list so file is generated for the first run.
}
}
- public static String findFileName() {
- Pair callingClassAndMethod = GoldenFileTestBuilder.findCallingTestClassAndMethod();
-
+ public String findFileName() {
+ Pair callingClassAndMethod = findCallingTestClassAndMethod();
return callingClassAndMethod.getLeft() + "." + callingClassAndMethod.getRight();
}
- private static Pair findCallingTestClassAndMethod() {
+ private Pair findCallingTestClassAndMethod() {
StackTraceElement[] stElements = Thread.currentThread().getStackTrace();
- for (int i=1; i clazz = Class.forName(ste.getClassName());
- for(Method method : clazz.getMethods()) {
- if(method.getName().equals(ste.getMethodName())
+ for (Method method : clazz.getMethods()) {
+ if (method.getName().equals(ste.getMethodName())
&& method.getDeclaredAnnotation(Test.class) != null) {
String[] classNamespaceTokens = ste.getClassName().split("\\.");
- return Pair.of(classNamespaceTokens[classNamespaceTokens.length - 1], ste.getMethodName());
+ String testClassName = classNamespaceTokens[classNamespaceTokens.length - 1];
+ String methodName = ste.getMethodName();
+
+ return Pair.of(testClassName, methodName);
}
}
} catch (ClassNotFoundException e) {
@@ -193,19 +210,19 @@ private static Pair findCallingTestClassAndMethod() {
throw new RuntimeException("No @Test method found");
}
- private static Path getGoldenFileActualPath() throws IOException {
+ private Path getGoldenFileActualPath() throws IOException {
return Paths.get("target","goldenfiles", "actual", findFileName() + ".yaml");
}
- public static String goldenFileResource() {
+ public String goldenFileResource() {
return "goldenfiles/expected/" + findFileName() + ".yaml";
}
- public static String inputFileResource() {
+ public String inputFileResource() {
return "goldenfiles/input/" + findFileName() + ".yaml";
}
- private static String messageToFix() {
+ private String messageToFix() {
try {
String actualPath = getGoldenFileActualPath().toString();
String goldenPath = "src/test/resources/" + goldenFileResource();
@@ -219,6 +236,10 @@ private static String messageToFix() {
}
}
+ public static GoldenFileTestBuilder create(ThrowingFunction executeTestFunction) {
+ return new GoldenFileTestBuilder<>(executeTestFunction, i -> i);
+ }
+
private static void writeActualGoldenFile(
Path goldenFileActualPath,
List> actualInputAndOutputList) throws IOException {
@@ -245,41 +266,82 @@ private static void writeActualGoldenFile(
Files.write(goldenFileActualPath, fileContentWithLicence.getBytes(StandardCharsets.UTF_8));
}
- private static void assertGoldenFilesAreEqual(
- List> expectedInputAndOutputList,
- List> actualInputAndOutputList) throws JsonProcessingException {
+ private void assertGoldenFilesAreEqual(
+ List> expectedInputAndOutputList,
+ List> actualInputAndOutputList) throws JsonProcessingException {
String messageToFix = messageToFix();
Assert.assertEquals(messageToFix, expectedInputAndOutputList.size(), actualInputAndOutputList.size());
for (int i = 0; i < expectedInputAndOutputList.size(); i++) {
InputAndOutput expectedInputAndOutput = expectedInputAndOutputList.get(i);
InputAndOutput actualInputAndOutput = actualInputAndOutputList.get(i);
+ DescriptionAndInput descriptionAndInput = descriptionAndInputs.get(i);
+
+ if (!descriptionAndInput.ignore) {
+ Assert.assertEquals(
+ "Descriptions differ,\n" + messageToFix,
+ expectedInputAndOutput.description,
+ actualInputAndOutput.description);
+ String expectedInputString = objectMapper.writeValueAsString(expectedInputAndOutput.input);
+ String actualInputString = objectMapper.writeValueAsString(actualInputAndOutput.input);
+ Assert.assertEquals(
+ "Inputs for baseline differ,\n" + messageToFix,
+ expectedInputString,
+ actualInputString);
+
+ Assert.assertEquals(
+ "Exception Message for baselines differ, \n" + messageToFix + " with input " + expectedInputString,
+ expectedInputAndOutput.exceptionMessage,
+ actualInputAndOutput.exceptionMessage);
+
+ String expectedOutputString = objectMapper.writeValueAsString(expectedInputAndOutput.output);
+ String actualOutputString = objectMapper.writeValueAsString(actualInputAndOutput.output);
+ if (!expectedOutputString.equals(actualOutputString)) {
+ if (allowUnorderedMatch) {
+ if (!isPermutation(expectedInputString, actualInputString)) {
+ Assert.assertEquals(
+ "Outputs for baselines differ,\n" + messageToFix + " with input " + expectedInputString,
+ expectedOutputString,
+ actualOutputString);
+ }
+ } else {
+ Assert.assertEquals(
+ "Outputs for baselines differ,\n" + messageToFix + " with input " + expectedInputString,
+ expectedOutputString,
+ actualOutputString);
+ }
+ }
- Assert.assertEquals(
- "Descriptions differ,\n" + messageToFix,
- expectedInputAndOutput.description,
- actualInputAndOutput.description);
- String expectedInputString = objectMapper.writeValueAsString(expectedInputAndOutput.input);
- String actualInputString = objectMapper.writeValueAsString(actualInputAndOutput.input);
- Assert.assertEquals(
- "Inputs for baseline differ,\n" + messageToFix,
- expectedInputString,
- actualInputString);
-
- String expectedOutputString = objectMapper.writeValueAsString(expectedInputAndOutput.output);
- String actualOutputString = objectMapper.writeValueAsString(actualInputAndOutput.output);
- Assert.assertEquals(
- "Outputs for baselines differ,\n" + messageToFix + " with input " + expectedInputString,
- expectedOutputString,
- actualOutputString);
-
- Assert.assertEquals(
- "Exceptions for baselines differ,\n" + messageToFix+ " with input " + expectedInputString,
- expectedInputAndOutput.exceptionMessage,
- actualInputAndOutput.exceptionMessage);
+ Assert.assertEquals(
+ "Exceptions for baselines differ,\n" + messageToFix+ " with input " + expectedInputString,
+ expectedInputAndOutput.exceptionMessage,
+ actualInputAndOutput.exceptionMessage);
+ }
}
}
+ public static boolean isPermutation(String str1, String str2) {
+ if (str1.length() != str2.length()) {
+ return false;
+ }
+
+ Map map1 = new HashMap<>();
+ Map map2 = new HashMap<>();
+
+ for (int i = 0; i < str1.length(); i++) {
+ char c1 = str1.charAt(i);
+ char c2 = str2.charAt(i);
+ map1.put(c1, map1.getOrDefault(c1, 0) + 1);
+ map2.put(c2, map2.getOrDefault(c2, 0) + 1);
+ }
+
+ return map1.equals(map2);
+ }
+
+ private static String defaultExceptionSerializer(Throwable throwable) {
+ return throwable.getMessage();
+ }
+
@FunctionalInterface
public interface ThrowingFunction {
R apply(T t) throws Exception;
@@ -288,13 +350,15 @@ public interface ThrowingFunction {
private static final class DescriptionAndInput {
private final String description;
private final I input;
+ private final boolean ignore;
- private DescriptionAndInput(String description, I input) {
+ private DescriptionAndInput(String description, I input, boolean ignore) {
assert description != null;
assert input != null;
this.description = description;
this.input = input;
+ this.ignore = ignore;
}
}
@@ -307,14 +371,14 @@ public static final class InputAndOutput {
public final O output;
@JsonInclude(JsonInclude.Include.NON_NULL)
- public final String exceptionMessage;
+ public final MultiLineString exceptionMessage;
@JsonCreator
private InputAndOutput(
@JsonProperty("description") String description,
@JsonProperty("input") I input,
@JsonProperty("output") O output,
- @JsonProperty("exceptionMessage") String exceptionMessage) {
+ @JsonProperty("exceptionMessage") MultiLineString exceptionMessage) {
this.description = description;
this.input = input;
this.output = output;
@@ -325,24 +389,16 @@ public static InputAndOutput createSuccess(String description, I input, O
return new InputAndOutput(description, input, output, null);
}
- public static InputAndOutput createFailure(String description, I input, Throwable throwable, boolean showFullStackTrace) {
- String exceptionMessage;
- if (showFullStackTrace) {
- StringWriter sw = new StringWriter();
- PrintWriter pw = new PrintWriter(sw);
- throwable.printStackTrace(pw);
-
- exceptionMessage = sw.toString().replace("\t", "");
- } else {
- exceptionMessage = throwable.getMessage();
-
- }
-
- if(exceptionMessage == null) {
- exceptionMessage = throwable.toString();
- }
-
- return new InputAndOutput(description, input, null, exceptionMessage);
+ public static InputAndOutput createFailure(
+ String description, I input,
+ Throwable throwable,
+ Function exceptionSerializer) {
+ String exceptionMessage = exceptionSerializer.apply(throwable);
+ return new InputAndOutput(
+ description,
+ input,
+ null,
+ MultiLineString.create(exceptionMessage));
}
}
@@ -471,12 +527,16 @@ public void serialize(
}
}
- private static ObjectMapper getObjectMapper(){
+ private static ObjectMapper createObjectMapper(){
+ LoaderOptions loaderOptions = new LoaderOptions();
+ loaderOptions.setCodePointLimit(10 * 1024 * 1024); // Set loader option to load a file as large as 10 MB
return new ObjectMapper(
- new YAMLFactory()
+ YAMLFactory.builder()
+ .loaderOptions(loaderOptions)
.disable(YAMLGenerator.Feature.SPLIT_LINES)
.disable(YAMLGenerator.Feature.CANONICAL_OUTPUT)
- .enable(YAMLGenerator.Feature.INDENT_ARRAYS))
+ .enable(YAMLGenerator.Feature.INDENT_ARRAYS)
+ .build())
.enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS)
.enable(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY)
.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
@@ -505,7 +565,7 @@ public MultiLineString deserialize(JsonParser jp, DeserializationContext ctxt)
} else if (node.isArray()) {
List lines = new ArrayList<>();
Iterator iterator = node.iterator();
- while(iterator.hasNext()) {
+ while (iterator.hasNext()) {
JsonNode element = iterator.next();
lines.add(element.asText());
}
diff --git a/common/src/test/java/org/joda/time/chrono/DayOfWeekFromSundayDateTimeFieldTest.java b/common/src/test/java/org/joda/time/chrono/DayOfWeekFromSundayDateTimeFieldTest.java
index e0c92d5d10..ec6b753d83 100644
--- a/common/src/test/java/org/joda/time/chrono/DayOfWeekFromSundayDateTimeFieldTest.java
+++ b/common/src/test/java/org/joda/time/chrono/DayOfWeekFromSundayDateTimeFieldTest.java
@@ -39,29 +39,29 @@ public void get() {
@Test
public void getAsText() {
- assertTrue("Sunday".equalsIgnoreCase(instance.getAsText(1526173261000L)));
- assertTrue("Monday".equalsIgnoreCase(instance.getAsText(1526259661000L)));
- assertTrue("Saturday".equalsIgnoreCase(instance.getAsText(1526086861000L)));
+ assertTrue("Sunday".equalsIgnoreCase(instance.getAsText(1526173261000L, Locale.US)));
+ assertTrue("Monday".equalsIgnoreCase(instance.getAsText(1526259661000L, Locale.US)));
+ assertTrue("Saturday".equalsIgnoreCase(instance.getAsText(1526086861000L, Locale.US)));
}
@Test
public void getAsShortText() {
- assertTrue("Sun".equalsIgnoreCase(instance.getAsShortText(1526173261000L)));
- assertTrue("Mon".equalsIgnoreCase(instance.getAsShortText(1526259661000L)));
- assertTrue("Sat".equalsIgnoreCase(instance.getAsShortText(1526086861000L)));
+ assertTrue("Sun".equalsIgnoreCase(instance.getAsShortText(1526173261000L, Locale.US)));
+ assertTrue("Mon".equalsIgnoreCase(instance.getAsShortText(1526259661000L, Locale.US)));
+ assertTrue("Sat".equalsIgnoreCase(instance.getAsShortText(1526086861000L, Locale.US)));
}
@Test
public void getAsTextFieldValue() {
- assertTrue("Sunday".equalsIgnoreCase(instance.getAsText(1, Locale.getDefault())));
- assertTrue("Monday".equalsIgnoreCase(instance.getAsText(2, Locale.getDefault())));
- assertTrue("Saturday".equalsIgnoreCase(instance.getAsText(7, Locale.getDefault())));
+ assertTrue("Sunday".equalsIgnoreCase(instance.getAsText(1, Locale.US)));
+ assertTrue("Monday".equalsIgnoreCase(instance.getAsText(2, Locale.US)));
+ assertTrue("Saturday".equalsIgnoreCase(instance.getAsText(7, Locale.US)));
}
@Test
public void getAsShortTextFieldValue() {
- assertTrue("Sun".equalsIgnoreCase(instance.getAsShortText(1, Locale.getDefault())));
- assertTrue("Mon".equalsIgnoreCase(instance.getAsShortText(2, Locale.getDefault())));
- assertTrue("Sat".equalsIgnoreCase(instance.getAsShortText(7, Locale.getDefault())));
+ assertTrue("Sun".equalsIgnoreCase(instance.getAsShortText(1, Locale.US)));
+ assertTrue("Mon".equalsIgnoreCase(instance.getAsShortText(2, Locale.US)));
+ assertTrue("Sat".equalsIgnoreCase(instance.getAsShortText(7, Locale.US)));
}
}
diff --git a/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIgnoreScenario.yaml b/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIgnoreScenario.yaml
new file mode 100644
index 0000000000..61b8e0fdea
--- /dev/null
+++ b/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIgnoreScenario.yaml
@@ -0,0 +1,36 @@
+#
+# Copyright (C) 2017-2019 Dremio Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+---
+ -
+ description: "Correct Output And Ignore = false"
+ input:
+ left: 3
+ right: 5
+ output: 8
+ -
+ description: "Correct Output And Ignore = true"
+ input:
+ left: 3
+ right: 5
+ output: 8
+ -
+ description: "Incorrect Output And Ignore = true"
+ input:
+ left: 3
+ right: 5
+ output: "Intentionally Incorrect Output"
diff --git a/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIncorrectOutput.yaml b/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIncorrectOutput.yaml
new file mode 100644
index 0000000000..fc29bf79c2
--- /dev/null
+++ b/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIncorrectOutput.yaml
@@ -0,0 +1,24 @@
+#
+# Copyright (C) 2017-2019 Dremio Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+---
+ -
+ description: "Incorrect Output And Ignore = false"
+ input:
+ left: 3
+ right: 5
+ output: "Intentionally Incorrect Output"
diff --git a/common/src/test/resources/logback-test.xml b/common/src/test/resources/logback-test.xml
index ab31612bd6..4a1f32d9cb 100644
--- a/common/src/test/resources/logback-test.xml
+++ b/common/src/test/resources/logback-test.xml
@@ -27,7 +27,6 @@
com.dremio.common.logging.obfuscation.TestBlockLevel.B.Second,error
com.dremio.common.logging.obfuscation.TestBlockLevel.B.Third,debug
com.dremio.common.logging.obfuscation.TestBlockLevel.C.Second,trace
-
diff --git a/connector/pom.xml b/connector/pom.xml
index 3ac0607ecd..f5bee18938 100644
--- a/connector/pom.xml
+++ b/connector/pom.xml
@@ -23,7 +23,7 @@
com.dremio
dremio-parent
- 24.0.0-202302100528110223-3a169b7c
+ 24.1.0-202306130653310132-d30779f6
dremio-connector
diff --git a/connector/src/main/java/com/dremio/connector/metadata/extensions/SupportsIcebergMetadata.java b/connector/src/main/java/com/dremio/connector/metadata/extensions/SupportsIcebergMetadata.java
index 270b2337b3..c7a75628b4 100644
--- a/connector/src/main/java/com/dremio/connector/metadata/extensions/SupportsIcebergMetadata.java
+++ b/connector/src/main/java/com/dremio/connector/metadata/extensions/SupportsIcebergMetadata.java
@@ -55,14 +55,27 @@ public interface SupportsIcebergMetadata {
/**
* Provides statistics for number of position/equality delete records.
*
- * @return a DatasetStats instance with the count of delete records.
+ * @return a DatasetStats instance with the count of total delete records.
*/
DatasetStats getDeleteStats();
+ /**
+ * Provides statistics for number of equality delete records.
+ *
+ * @return a DatasetStats instance with the count of equality delete records.
+ */
+ DatasetStats getEqualityDeleteStats();
+
/**
* Provides statistics for number of delete files.
*
* @return a DatasetStats instance with the count of delete files.
*/
DatasetStats getDeleteManifestStats();
+
+ /**
+ * modification time for the snapshot
+ */
+ long getMtime();
+
}
diff --git a/connector/src/test/java/com/dremio/connector/sample/SampleHandleImpl.java b/connector/src/test/java/com/dremio/connector/sample/SampleHandleImpl.java
index ac5f832a95..511492ac59 100644
--- a/connector/src/test/java/com/dremio/connector/sample/SampleHandleImpl.java
+++ b/connector/src/test/java/com/dremio/connector/sample/SampleHandleImpl.java
@@ -118,6 +118,7 @@ List getPartitionChunks() {
return partitionChunks;
}
+ @Override
public List getPartitionColumns() {
return datasetMetadata == null ?
partitionColumns :
diff --git a/contrib/hive2-exec-shade/pom.xml b/contrib/hive2-exec-shade/pom.xml
index 95ba95c5e0..4d4f1b5833 100644
--- a/contrib/hive2-exec-shade/pom.xml
+++ b/contrib/hive2-exec-shade/pom.xml
@@ -22,7 +22,7 @@
com.dremio.contrib
dremio-contrib-parent
- 24.0.0-202302100528110223-3a169b7c
+ 24.1.0-202306130653310132-d30779f6
dremio-hive2-exec-shaded
@@ -124,7 +124,6 @@
org.apache.avro
avro
- compile
@@ -135,7 +134,6 @@
org.apache.hive
hive-exec
${hive.version}
- compile
org.apache.hive
@@ -201,6 +199,10 @@
org.apache.calcite.avatica
*
+
+ org.pentaho
+ pentaho-aggdesigner-algorithm
+
@@ -224,6 +226,11 @@
org.apache.hadoop
hadoop-common
+
+
+ com.google.protobuf
+ protobuf-java
+
@@ -256,9 +263,9 @@
org.apache.commons:commons-lang3
commons-codec:commons-codec
com.google.guava:guava
+ com.google.protobuf:protobuf-java
- com.google.protobuf:protobuf-java
com.fasterxml.jackson.*:*
@@ -279,6 +286,9 @@
org/apache/thrift/**
org/apache/calcite/**
org/slf4j/**
+
+ com/google/protobuf/**
+ META-INF/maven/com.google.protobuf/**
diff --git a/contrib/hive3-exec-shade/pom.xml b/contrib/hive3-exec-shade/pom.xml
index d565f2debb..b760a7b62e 100644
--- a/contrib/hive3-exec-shade/pom.xml
+++ b/contrib/hive3-exec-shade/pom.xml
@@ -22,7 +22,7 @@
com.dremio.contrib
dremio-contrib-parent
- 24.0.0-202302100528110223-3a169b7c
+ 24.1.0-202306130653310132-d30779f6
dremio-hive3-exec-shaded
@@ -40,7 +40,6 @@
org.apache.hive
hive-exec
${hive.version}
- compile
org.apache.logging.log4j
@@ -108,6 +107,11 @@
+
+
+ com.google.protobuf
+ protobuf-java
+
@@ -131,6 +135,7 @@
com.fasterxml.jackson.core:jackson-annotations
com.fasterxml.jackson.core:jackson-core
com.fasterxml.jackson.core:jackson-databind
+ com.google.protobuf:protobuf-java
false
@@ -147,6 +152,9 @@
org/apache/hadoop/hive/llap/**
org/apache/thrift/**
+
+ com/google/protobuf/**
+ META-INF/maven/com.google.protobuf/**
diff --git a/contrib/maprfs-shade/pom.xml b/contrib/maprfs-shade/pom.xml
index ba7c2e1b27..59c4a7b2c4 100644
--- a/contrib/maprfs-shade/pom.xml
+++ b/contrib/maprfs-shade/pom.xml
@@ -22,7 +22,7 @@
com.dremio.contrib
dremio-contrib-parent
- 24.0.0-202302100528110223-3a169b7c
+ 24.1.0-202306130653310132-d30779f6
dremio-maprfs-shaded
diff --git a/contrib/pom.xml b/contrib/pom.xml
index d8f60d550e..e92da6a237 100644
--- a/contrib/pom.xml
+++ b/contrib/pom.xml
@@ -22,7 +22,7 @@
com.dremio
dremio-parent
- 24.0.0-202302100528110223-3a169b7c
+ 24.1.0-202306130653310132-d30779f6
com.dremio.contrib
diff --git a/dac/backend/pom.xml b/dac/backend/pom.xml
index 3e9a37e1af..9fe3bde8b3 100644
--- a/dac/backend/pom.xml
+++ b/dac/backend/pom.xml
@@ -22,7 +22,7 @@
com.dremio
dremio-dac-parent
- 24.0.0-202302100528110223-3a169b7c
+ 24.1.0-202306130653310132-d30779f6
dremio-dac-backend
@@ -73,6 +73,12 @@
proto
${project.version}
+
+ com.dremio.sabot
+ dremio-sabot-serializer
+ ${project.version}
+ test
+
com.dremio.services
dremio-services-datastore
@@ -182,18 +188,11 @@
com.dremio.services
dremio-services-execselector
-
+
org.apache.curator
curator-test
- ${curator-test.version}
test
-
-
- log4j
- log4j
-
-
com.dremio.services
@@ -261,16 +260,6 @@
org.apache.zookeeper
zookeeper
-
-
- org.slf4j
- slf4j-log4j12
-
-
- log4j
- log4j
-
-
org.apache.commons
@@ -280,7 +269,6 @@
com.dremio.services
dremio-services-configuration
${project.version}
- compile
com.dremio.services
@@ -398,7 +386,6 @@
com.dremio.services
dremio-services-jobtelemetry-server
${project.version}
- compile
com.dremio.services
@@ -419,6 +406,11 @@
com.dremio.services
dremio-services-nessie-grpc-client
+
+ com.dremio.services
+ dremio-services-nessie-proxy
+ ${project.version}
+
com.dremio.services
dremio-services-orphanagecleaner
@@ -433,13 +425,11 @@
com.dremio.services
dremio-services-userpreferences
${project.version}
- compile
com.dremio.services
dremio-services-autocomplete
${project.version}
- compile
com.dremio.services
@@ -456,6 +446,34 @@
dremio-ce-jdbc-plugin
${project.version}
+
+ io.findify
+ s3mock_2.12
+ test
+
+
+ org.projectnessie.nessie
+ nessie-compatibility-common
+ test
+
+
+ org.projectnessie.nessie
+ nessie-versioned-tests
+ test
+
+
+ org.projectnessie.nessie
+ nessie-jaxrs-tests
+ test
+
+
+ com.dremio.services
+ dremio-services-nessie-proxy
+ tests
+ test-jar
+ ${project.version}
+ test
+
@@ -693,7 +711,27 @@
+
+ maven-failsafe-plugin
+
+
+
+ integration-test
+ verify
+
+
+
+
+
+
+ ${project.basedir}/src/test/resources
+
+
+ ${project.basedir}/src/test/resources-nessie
+ true
+
+
@@ -711,12 +749,65 @@
com/dremio/dac/service/admin/TestKVStoreReportService.java
+
+ com/dremio/dac/explore/TestAccelerationSettingsFromAPI.java
+ com/dremio/dac/explore/TestDatasetResource.java
+ com/dremio/dac/resource/TestNessieSourceResource.java
+ com/dremio/dac/resource/TestNessieSourceApi.java
+ com/dremio/dac/service/TestCatalogServiceHelperForVersioned.java
+ com/dremio/dac/service/source/TestSourceService.java
+ com/dremio/exec/catalog/dataplane/ITDataplanePlugin.java
+ com/dremio/exec/catalog/dataplane/ITDatasetVersionContextTestCases.java
+ com/dremio/exec/planner/sql/handlers/TestShowTagsHandler.java
+ com/dremio/exec/planner/sql/handlers/TestDropTagHandler.java
+ com/dremio/exec/planner/sql/handlers/TestShowLogsHandler.java
+ com/dremio/exec/planner/sql/handlers/TestUseVersionHandler.java
+ com/dremio/exec/planner/sql/handlers/TestCreateBranchHandler.java
+ com/dremio/exec/planner/sql/handlers/TestAssignBranchHandler.java
+ com/dremio/exec/planner/sql/handlers/TestCreateTagHandler.java
+ com/dremio/exec/planner/sql/handlers/TestMergeBranchHandler.java
+ com/dremio/exec/planner/sql/handlers/TestShowBranchesHandler.java
+ com/dremio/exec/planner/sql/handlers/TestShowTablesHandler.java
+ com/dremio/exec/planner/sql/handlers/TestShowViewsHandler.java
+ com/dremio/exec/planner/sql/handlers/TestDropBranchHandler.java
+ com/dremio/exec/planner/sql/handlers/TestAssignTagHandler.java
+ com/dremio/exec/planner/sql/handlers/TestCreateFolderHandler.java
+ com/dremio/dac/service/datasets/ITTestDatasetMutatorForVersionedViews.java
+
+
+ default-testCompile
+ test-compile
+
+
+ com/dremio/exec/catalog/dataplane/*.java
+
+
+
+ testCompile
+
+
+
+
+ notmapr
+
+
+ !mapr
+
+
+
+
+ com.dremio.plugins
+ dremio-dataplane-plugin
+ ${project.version}
+ test
+
+
+
-
diff --git a/dac/backend/src/main/java/com/dremio/dac/admin/LocalAdmin.java b/dac/backend/src/main/java/com/dremio/dac/admin/LocalAdmin.java
index d7a3fea7b9..70bb6d4573 100644
--- a/dac/backend/src/main/java/com/dremio/dac/admin/LocalAdmin.java
+++ b/dac/backend/src/main/java/com/dremio/dac/admin/LocalAdmin.java
@@ -113,9 +113,9 @@ public void backup(String path, String binaryStr, String includeProfilesStr) thr
final FileSystem fs = HadoopFileSystem.get(backupDir, new Configuration());
BackupRestoreUtil.checkOrCreateDirectory(fs, backupDir);
BackupRestoreUtil.BackupOptions options = new BackupRestoreUtil.BackupOptions(path,
- Boolean.parseBoolean(binaryStr), Boolean.parseBoolean(includeProfilesStr));
+ Boolean.parseBoolean(binaryStr), Boolean.parseBoolean(includeProfilesStr), "");
BackupRestoreUtil.BackupStats backupStats = BackupRestoreUtil.createBackup(fs, options,
- getKVStoreProvider().unwrap(LocalKVStoreProvider.class), LocalAdmin.getInstance().getHomeFileTool().getConf(),
+ getKVStoreProvider().unwrap(LocalKVStoreProvider.class), LocalAdmin.getInstance().getHomeFileTool().getConfForBackup(),
null);
System.out.println(format("Backup created at %s, dremio tables %d, uploaded files %d",
backupStats.getBackupPath(), backupStats.getTables(), backupStats.getFiles()));
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/CatalogEntity.java b/dac/backend/src/main/java/com/dremio/dac/api/CatalogEntity.java
index d26150191d..77a4f3aaba 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/CatalogEntity.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/CatalogEntity.java
@@ -20,7 +20,7 @@
import com.fasterxml.jackson.annotation.JsonTypeInfo;
/**
- * Represents a entity in the Dremio catalog
+ * Represents an entity in the Dremio catalog
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.EXTERNAL_PROPERTY, property = "entityType", visible = true)
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/CatalogItem.java b/dac/backend/src/main/java/com/dremio/dac/api/CatalogItem.java
index 4febf527cd..edcf46ff03 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/CatalogItem.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/CatalogItem.java
@@ -100,6 +100,10 @@ public static CatalogItem fromSourceConfig(SourceConfig sourceConfig) {
return fromSourceConfig(sourceConfig, null);
}
+ public static CatalogItem fromSource(Source source) {
+ return fromSourceConfig(source.getSourceConfig(), null);
+ }
+
public static CatalogItem fromHomeConfig(HomeConfig homeConfig) {
return new Builder()
.setId(homeConfig.getId().getId())
@@ -126,7 +130,7 @@ private static CatalogItem fromSpaceConfig(SpaceConfig spaceConfig, Collaboratio
private static CatalogItem fromFunctionConfig(FunctionConfig functionConfig, CollaborationTag tags) {
return new Builder()
.setId(functionConfig.getId().getId())
- .setPath(Lists.newArrayList(functionConfig.getName()))
+ .setPath(Lists.newArrayList(functionConfig.getFullPathList()))
.setTag(String.valueOf(functionConfig.getTag()))
.setType(CatalogItemType.CONTAINER)
.setContainerType(ContainerSubType.FUNCTION)
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/CatalogResource.java b/dac/backend/src/main/java/com/dremio/dac/api/CatalogResource.java
index 2608fe2047..3215281e17 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/CatalogResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/CatalogResource.java
@@ -141,54 +141,27 @@ public void refreshCatalogItem(@PathParam("id") String id) {
}
}
- @POST
- @Path("/{id}/metadata/refresh")
- public MetadataRefreshResponse refreshCatalogItemMetadata(@PathParam("id") String id,
- @QueryParam("deleteWhenMissing") Boolean delete,
- @QueryParam("forceUpdate") Boolean force,
- @QueryParam("autoPromotion") Boolean promotion) {
- try {
- boolean changed = false;
- boolean deleted = false;
- switch(catalogServiceHelper.refreshCatalogItemMetadata(id, delete, force, promotion)) {
- case CHANGED:
- changed = true;
- break;
- case UNCHANGED:
- break;
- case DELETED:
- changed = true;
- deleted = true;
- break;
- default:
- throw new IllegalStateException();
- }
-
- return new MetadataRefreshResponse(changed, deleted);
- } catch (IllegalArgumentException e) {
- throw new NotFoundException(e.getMessage());
- } catch (UnsupportedOperationException e) {
- throw new BadRequestException(e.getMessage());
- }
- }
-
@GET
@Path("/by-path/{segment:.*}")
public CatalogEntity getCatalogItemByPath(
- @PathParam("segment") List segments,
- @QueryParam("include") final List include,
- @QueryParam("exclude") final List exclude
- ) throws NamespaceException, BadRequestException {
+ @PathParam("segment") List segments,
+ @QueryParam("include") final List include,
+ @QueryParam("exclude") final List exclude,
+ @QueryParam("versionType") final String versionType,
+ @QueryParam("versionValue") final String versionValue)
+ throws NamespaceException, BadRequestException {
List pathList = new ArrayList<>();
for (PathSegment segment : segments) {
- // with query parameters we may get a empty final segment
+ // with query parameters we may get an empty final segment
if (!segment.getPath().isEmpty()) {
pathList.add(segment.getPath());
}
}
- Optional entity = catalogServiceHelper.getCatalogEntityByPath(pathList, include, exclude);
+ final Optional entity =
+ catalogServiceHelper.getCatalogEntityByPath(
+ pathList, include, exclude, versionType, versionValue);
if (!entity.isPresent()) {
throw new NotFoundException(String.format("Could not find entity with path [%s]", pathList));
@@ -200,9 +173,7 @@ public CatalogEntity getCatalogItemByPath(
@GET
@Path("/search")
public ResponseList search(@QueryParam("query") String query) throws NamespaceException {
- ResponseList catalogItems = new ResponseList<>(catalogServiceHelper.search(query));
-
- return catalogItems;
+ return new ResponseList<>(catalogServiceHelper.search(query));
}
/**
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/CollaborationResource.java b/dac/backend/src/main/java/com/dremio/dac/api/CollaborationResource.java
index 2715a8a503..d6993d2a62 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/CollaborationResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/CollaborationResource.java
@@ -23,7 +23,6 @@
import javax.inject.Inject;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
-import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
@@ -57,12 +56,7 @@ public CollaborationResource(CollaborationHelper collaborationHelper) {
@Path("/tag")
public Tags getTagsForEntity(@PathParam("id") String id) throws NamespaceException {
Optional tags = collaborationHelper.getTags(id);
-
- if (!tags.isPresent()) {
- throw new NotFoundException(String.format("Entity [%s] does not have any tags set.", id));
- }
-
- return tags.get();
+ return tags.orElseGet(() -> new Tags(null, null));
}
@POST
@@ -77,12 +71,7 @@ public Tags setTagsForEntity(@PathParam("id") String id, Tags tags) throws Names
@Path("/wiki")
public Wiki getWikiForEntity(@PathParam("id") String id) throws NamespaceException {
Optional wiki = collaborationHelper.getWiki(id);
-
- if (!wiki.isPresent()) {
- throw new NotFoundException(String.format("Entity [%s] does not have a wiki set.", id));
- }
-
- return wiki.get();
+ return wiki.orElseGet(() -> new Wiki("", null));
}
@POST
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/Dataset.java b/dac/backend/src/main/java/com/dremio/dac/api/Dataset.java
index c1d91c1ebc..27ec11ae3a 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/Dataset.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/Dataset.java
@@ -100,6 +100,7 @@ public Dataset(
this(id, type, path, null, createdAt, tag, accelerationRefreshPolicy, sql, sqlContext, format, approximateStatisticsAllowed);
}
+ @Override
public String getId() {
return id;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/SourceResource.java b/dac/backend/src/main/java/com/dremio/dac/api/DepreciatedSourceResource.java
similarity index 88%
rename from dac/backend/src/main/java/com/dremio/dac/api/SourceResource.java
rename to dac/backend/src/main/java/com/dremio/dac/api/DepreciatedSourceResource.java
index c90d26889b..782fb894e3 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/SourceResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/DepreciatedSourceResource.java
@@ -16,7 +16,8 @@
package com.dremio.dac.api;
import static com.dremio.dac.server.UIOptions.ALLOW_HIVE_SOURCE;
-import static com.dremio.exec.store.jdbc.JdbcPluginOptions.JDBC_DB2_ENABLED;
+import static com.dremio.exec.store.DataplanePluginOptions.NESSIE_PLUGIN_ENABLED;
+import static com.dremio.exec.store.jdbc.JdbcPluginOptions.JDBC_OPENSEARCH_ENABLED;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import java.util.List;
@@ -35,9 +36,6 @@
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import com.dremio.common.exceptions.ExecutionSetupException;
import com.dremio.dac.annotations.APIResource;
import com.dremio.dac.annotations.Secured;
@@ -56,16 +54,15 @@
import com.google.common.annotations.VisibleForTesting;
/**
- * Resource for information about sources.
+ * Depreciated resource for information about sources.
*/
@APIResource
@Secured
@Path("/source")
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
-public class SourceResource {
- private static final Logger logger = LoggerFactory.getLogger(SourceResource.class);
-
+@Deprecated
+public class DepreciatedSourceResource {
/**
* 1.5 changed _type to entityType, this class provides backwards compatibility
*/
@@ -106,7 +103,7 @@ public String getDeprecatedEntityType() {
protected final SabotContext sabotContext;
@Inject
- public SourceResource(SourceService sourceService, SabotContext sabotContext) {
+ public DepreciatedSourceResource(SourceService sourceService, SabotContext sabotContext) {
this.sourceService = sourceService;
this.sabotContext = sabotContext;
}
@@ -188,27 +185,39 @@ public ResponseList getSourceTypes() {
final ConnectionReader connectionReader = sabotContext.getConnectionReaderProvider().get();
final ResponseList types = new ResponseList<>();
- final boolean showHive = sabotContext.getOptionManager().getOption(ALLOW_HIVE_SOURCE);
- final boolean showDb2 = sabotContext.getOptionManager().getOption(JDBC_DB2_ENABLED);
-
for(Class extends ConnectionConf, ?>> input : connectionReader.getAllConnectionConfs().values()) {
// We can't use isInternal as it's not a static method, instead we only show listable sources.
if (isListable(input)) {
String sourceType = input.getAnnotation(SourceType.class).value();
- if ((!showHive && "HIVE".equals(sourceType)) ||
- (!showDb2 && "DB2".equals(sourceType))) {
- continue;
- }
-
- if (sabotContext.getSourceVerifierProvider().get().isSourceSupported(sourceType, sabotContext.getSystemOptionManager())) {
- types.add(SourceTypeTemplate.fromSourceClass(input, false));
- }
+ if (isSourceTypeVisible(sourceType) &&
+ sabotContext
+ .getSourceVerifierProvider()
+ .get()
+ .isSourceSupported(sourceType, sabotContext.getSystemOptionManager())) {
+ types.add(SourceTypeTemplate.fromSourceClass(input, false));
+ }
}
}
return types;
}
+ private boolean isSourceTypeVisible(String sourceType) {
+ if ("HIVE".equals(sourceType)) {
+ return sabotContext.getOptionManager().getOption(ALLOW_HIVE_SOURCE);
+ }
+
+ if ("OPENSEARCH".equals(sourceType)) {
+ return sabotContext.getOptionManager().getOption(JDBC_OPENSEARCH_ENABLED);
+ }
+
+ if ("NESSIE".equals(sourceType)) {
+ return sabotContext.getOptionManager().getOption(NESSIE_PLUGIN_ENABLED);
+ }
+
+ return true;
+ }
+
// Returns the specified source type with all its properties expanded
@GET
@RolesAllowed({"admin", "user"})
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/File.java b/dac/backend/src/main/java/com/dremio/dac/api/File.java
index e3cd8246f5..2849fcfecb 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/File.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/File.java
@@ -35,6 +35,7 @@ public File(@JsonProperty("id") String id, @JsonProperty("path") List pa
this.path = path;
}
+ @Override
public String getId() {
return id;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/Folder.java b/dac/backend/src/main/java/com/dremio/dac/api/Folder.java
index 77ba94ca8e..7a109707cc 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/Folder.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/Folder.java
@@ -49,6 +49,7 @@ public String getName() {
return Iterables.getLast(getPath());
}
+ @Override
public String getId() {
return id;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/Home.java b/dac/backend/src/main/java/com/dremio/dac/api/Home.java
index 5ca148b3f4..047c77b2f1 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/Home.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/Home.java
@@ -42,6 +42,7 @@ public Home(
this.children = children;
}
+ @Override
public String getId() {
return id;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/ScriptResource.java b/dac/backend/src/main/java/com/dremio/dac/api/ScriptResource.java
index 16fd9a7550..f3503a50f9 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/ScriptResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/ScriptResource.java
@@ -19,7 +19,6 @@
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import java.util.List;
-import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import javax.annotation.security.RolesAllowed;
@@ -41,13 +40,12 @@
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
-import com.dremio.context.RequestContext;
-import com.dremio.context.UserContext;
import com.dremio.dac.annotations.RestResource;
import com.dremio.dac.annotations.Secured;
import com.dremio.dac.model.scripts.PaginatedResponse;
import com.dremio.dac.model.scripts.ScriptData;
import com.dremio.service.script.DuplicateScriptNameException;
+import com.dremio.service.script.MaxScriptsLimitReachedException;
import com.dremio.service.script.ScriptNotAccessible;
import com.dremio.service.script.ScriptNotFoundException;
import com.dremio.service.script.ScriptService;
@@ -66,7 +64,6 @@ public class ScriptResource {
private static final org.slf4j.Logger logger =
org.slf4j.LoggerFactory.getLogger(ScriptResource.class);
private final ScriptService scriptService;
- private final SecurityContext securityContext;
private final UserService userService;
@Inject
@@ -74,7 +71,6 @@ public ScriptResource(ScriptService scriptService,
@Context SecurityContext securityContext,
UserService userService) {
this.scriptService = scriptService;
- this.securityContext = securityContext;
this.userService = userService;
}
@@ -91,16 +87,12 @@ public PaginatedResponse getScripts(@QueryParam("offset") Integer of
final String finalOrderBy = (orderBy == null) ? "" : orderBy;
try {
-
- Long totalScripts = runWithUserContext(() -> scriptService.getCountOfMatchingScripts(
- finalSearch, "", createdBy));
+ Long totalScripts = scriptService.getCountOfMatchingScripts(finalSearch, "", createdBy);
List scripts =
- runWithUserContext(() -> scriptService.getScripts(finalOffset,
- finalMaxResults, finalSearch,
- finalOrderBy, "", createdBy)
+ scriptService.getScripts(finalOffset, finalMaxResults, finalSearch, finalOrderBy, "", createdBy)
.parallelStream()
.map(this::fromScript)
- .collect(Collectors.toList()));
+ .collect(Collectors.toList());
return new PaginatedResponse<>(totalScripts, scripts);
} catch (Exception exception) {
logger.error("GET on scripts failed.", exception);
@@ -111,9 +103,8 @@ public PaginatedResponse getScripts(@QueryParam("offset") Integer of
@POST
public ScriptData postScripts(ScriptData scriptData) {
try {
- return fromScript(runWithUserContext(() -> scriptService.createScript(ScriptData.toScriptRequest(
- scriptData))));
- } catch (DuplicateScriptNameException exception) {
+ return fromScript(scriptService.createScript(ScriptData.toScriptRequest(scriptData)));
+ } catch (DuplicateScriptNameException | MaxScriptsLimitReachedException exception) {
logger.error(exception.getMessage(), exception);
throw new BadRequestException(exception.getMessage());
} catch (Exception exception) {
@@ -126,7 +117,7 @@ public ScriptData postScripts(ScriptData scriptData) {
@Path("/{id}")
public ScriptData getScript(@PathParam("id") String scriptId) {
try {
- return fromScript(runWithUserContext(() -> scriptService.getScriptById(scriptId)));
+ return fromScript(scriptService.getScriptById(scriptId));
} catch (ScriptNotFoundException exception) {
logger.error(exception.getMessage(), exception);
throw new NotFoundException(exception.getMessage());
@@ -145,9 +136,7 @@ public ScriptData updateScript(@PathParam("id") String scriptId, ScriptData scri
// check if script exists with given scriptId
try {
// update the script
- return fromScript(runWithUserContext(() -> scriptService.updateScript(scriptId,
- ScriptData.toScriptRequest(
- scriptData))));
+ return fromScript(scriptService.updateScript(scriptId, ScriptData.toScriptRequest(scriptData)));
} catch (ScriptNotFoundException exception) {
logger.error(exception.getMessage(), exception);
throw new NotFoundException(exception.getMessage());
@@ -167,10 +156,7 @@ public ScriptData updateScript(@PathParam("id") String scriptId, ScriptData scri
@Path(("/{id}"))
public Response deleteScript(@PathParam("id") String scriptId) {
try {
- runWithUserContext(() -> {
- scriptService.deleteScriptById(scriptId);
- return null;
- });
+ scriptService.deleteScriptById(scriptId);
return Response.noContent().build();
} catch (ScriptNotFoundException exception) {
logger.error(exception.getMessage(), exception);
@@ -199,25 +185,4 @@ private ScriptData fromScript(ScriptProto.Script script) {
getUserInfoById(script.getCreatedBy()),
getUserInfoById(script.getModifiedBy()));
}
-
- private String getCurrentUserId() {
- try {
- return userService.getUser(securityContext.getUserPrincipal().getName())
- .getUID()
- .getId();
- } catch (UserNotFoundException exception) {
- // ideally this case should never be reached.
- logger.error("Couldn't find current logged in user : {}. Error {}",
- securityContext.getUserPrincipal().getName(),
- exception.getMessage());
- throw new InternalServerErrorException(exception.getMessage());
- }
- }
-
- private V runWithUserContext(Callable callable) throws Exception {
- return RequestContext.current()
- .with(UserContext.CTX_KEY, new UserContext(getCurrentUserId()))
- .call(callable);
- }
-
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/Source.java b/dac/backend/src/main/java/com/dremio/dac/api/Source.java
index 0cc6af8c6a..ccd4374cd3 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/Source.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/Source.java
@@ -130,6 +130,7 @@ void setReader(ConnectionReader reader) {
this.reader = reader;
}
+ @Override
public String getId() {
return this.id;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/Space.java b/dac/backend/src/main/java/com/dremio/dac/api/Space.java
index 59989a3b78..e9c2302990 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/Space.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/Space.java
@@ -54,6 +54,7 @@ public String getName() {
return name;
}
+ @Override
public String getId() {
return id;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/api/UserPreferenceResource.java b/dac/backend/src/main/java/com/dremio/dac/api/UserPreferenceResource.java
index ea4c1260cb..bbb70ae609 100644
--- a/dac/backend/src/main/java/com/dremio/dac/api/UserPreferenceResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/api/UserPreferenceResource.java
@@ -31,6 +31,7 @@
import javax.ws.rs.DefaultValue;
import javax.ws.rs.ForbiddenException;
import javax.ws.rs.GET;
+import javax.ws.rs.NotAuthorizedException;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
@@ -52,6 +53,7 @@
import com.dremio.service.userpreferences.EntityThresholdReachedException;
import com.dremio.service.userpreferences.UserPreferenceService;
import com.dremio.service.userpreferences.proto.UserPreferenceProto;
+import com.dremio.service.users.UserNotFoundException;
import com.google.protobuf.util.Timestamps;
@APIResource
@@ -60,8 +62,6 @@
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON)
public class UserPreferenceResource {
- private static final org.slf4j.Logger logger =
- org.slf4j.LoggerFactory.getLogger(UserPreferenceResource.class);
private final UserPreferenceService userPreferenceService;
private final NamespaceService namespaceService;
@@ -79,38 +79,41 @@ public PreferenceData getPreferenceByType(
@PathParam("preferenceType") String preferenceType,
@QueryParam("showCatalogInfo") @DefaultValue("false") boolean showCatalogInfo)
throws NamespaceNotFoundException {
+ try {
+ UserPreferenceProto.Preference preference = userPreferenceService.getPreferenceByType(validatePreferenceType(preferenceType));
+
+ if (showCatalogInfo) {
+ Map entityIdToEntityMap = preference.getEntitiesList()
+ .stream()
+ .collect(Collectors.toMap(UserPreferenceProto.Entity::getEntityId, entity -> entity));
+
+ List entities =
+ namespaceService.getEntitiesByIds(preference.getEntitiesList()
+ .stream()
+ .map(UserPreferenceProto.Entity::getEntityId)
+ .collect(
+ Collectors.toList()));
+ return new PreferenceData(preference.getType(),
+ entities.parallelStream()
+ .map(container -> getEntityFromNameSpaceContainer(
+ container,
+ entityIdToEntityMap))
+ .collect(
+ Collectors.toList()));
+ }
- UserPreferenceProto.Preference preference =
- userPreferenceService.getPreferenceByType(validatePreferenceType(preferenceType));
- if (showCatalogInfo) {
-
- Map entityIdToEntityMap = preference.getEntitiesList()
- .stream()
- .collect(Collectors.toMap(UserPreferenceProto.Entity::getEntityId, entity -> entity));
-
- List entities =
- namespaceService.getEntitiesByIds(preference.getEntitiesList()
- .stream()
- .map(UserPreferenceProto.Entity::getEntityId)
- .collect(
- Collectors.toList()));
return new PreferenceData(preference.getType(),
- entities.parallelStream()
- .map(container -> getEntityFromNameSpaceContainer(
- container,
- entityIdToEntityMap))
- .collect(
- Collectors.toList()));
- }
- return new PreferenceData(preference.getType(),
- preference.getEntitiesList().stream().map(
- entity -> new Entity(entity.getEntityId(),
- null,
- null,
- null,
- Timestamps.toMillis(entity.getTimestamp()))
+ preference.getEntitiesList().stream().map(
+ entity -> new Entity(entity.getEntityId(),
+ null,
+ null,
+ null,
+ Timestamps.toMillis(entity.getTimestamp()))
).collect(
Collectors.toList()));
+ } catch (UserNotFoundException e) {
+ throw new NotAuthorizedException(e.getMessage());
+ }
}
@PUT
@@ -134,6 +137,8 @@ public PreferenceData addEntityToPreference(@PathParam("preferenceType") String
throw new BadRequestException(exception.getMessage());
} catch (EntityThresholdReachedException | IllegalAccessException exception) {
throw new ForbiddenException(exception.getMessage());
+ } catch (UserNotFoundException e) {
+ throw new NotAuthorizedException(e.getMessage());
}
}
@@ -156,6 +161,8 @@ public PreferenceData removeEntityFromPreference(@PathParam("preferenceType") St
Collectors.toList()));
} catch (EntityNotFoundInPreferenceException exception) {
throw new NotFoundException(exception.getMessage());
+ } catch (UserNotFoundException e) {
+ throw new NotAuthorizedException(e.getMessage());
}
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/daemon/ConfigurationModuleImpl.java b/dac/backend/src/main/java/com/dremio/dac/daemon/ConfigurationModuleImpl.java
index 43eadd56a1..43c777e8a2 100644
--- a/dac/backend/src/main/java/com/dremio/dac/daemon/ConfigurationModuleImpl.java
+++ b/dac/backend/src/main/java/com/dremio/dac/daemon/ConfigurationModuleImpl.java
@@ -19,6 +19,7 @@
* Helper class to manage AWS configuration server
*/
public class ConfigurationModuleImpl implements ConfigurationModule {
+ @Override
public void run() throws Exception {
throw new UnsupportedOperationException("Configuration mode is not supported in this Dremio edition");
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/daemon/DACDaemonModule.java b/dac/backend/src/main/java/com/dremio/dac/daemon/DACDaemonModule.java
index 08251d6a27..3b93dfc864 100644
--- a/dac/backend/src/main/java/com/dremio/dac/daemon/DACDaemonModule.java
+++ b/dac/backend/src/main/java/com/dremio/dac/daemon/DACDaemonModule.java
@@ -56,6 +56,7 @@
import com.dremio.dac.server.DremioServer;
import com.dremio.dac.server.DremioServlet;
import com.dremio.dac.server.LivenessService;
+import com.dremio.dac.server.NessieProxyRestServer;
import com.dremio.dac.server.RestServerV2;
import com.dremio.dac.server.WebServer;
import com.dremio.dac.service.admin.KVStoreReportService;
@@ -227,6 +228,7 @@
import com.dremio.service.flight.DremioFlightAuthProvider;
import com.dremio.service.flight.DremioFlightAuthProviderImpl;
import com.dremio.service.flight.DremioFlightService;
+import com.dremio.service.flight.FlightRequestContextDecorator;
import com.dremio.service.grpc.GrpcChannelBuilderFactory;
import com.dremio.service.grpc.GrpcServerBuilderFactory;
import com.dremio.service.grpc.MultiTenantGrpcServerBuilderFactory;
@@ -478,6 +480,12 @@ public NodeEndpoint get() {
bootstrap.getExecutor()
));
+ // Bind Credentials Service, this will select between Simple and Executor Credentials
+ final com.dremio.services.credentials.CredentialsService credentialsService =
+ com.dremio.services.credentials.CredentialsService.newInstance(config, scanResult);
+ registry.bind(com.dremio.services.credentials.CredentialsService.class,
+ credentialsService);
+
DremioCredentialProviderFactory.configure(
registry.provider(com.dremio.services.credentials.CredentialsService.class));
@@ -491,6 +499,7 @@ public NodeEndpoint get() {
"conduit"
);
+ // TODO DX-66220: Make AzureVaultCredentialsProvider available before resolving secret URIs in SSL config
conduitSslEngineFactory = SSLEngineFactory.create(
conduitSslConfigurator.getSSLConfig(false, fabricAddress));
} catch (Exception e) {
@@ -544,11 +553,7 @@ public NodeEndpoint get() {
registry.bindProvider(NessieApiV1.class, () -> createNessieClientProvider(config, registry));
- // Bind base credentials on both coordinator and executor
- com.dremio.services.credentials.CredentialsService credentialsService =
- com.dremio.services.credentials.CredentialsService.newInstance(config, scanResult);
- registry.bind(com.dremio.services.credentials.CredentialsService.class, credentialsService);
-
+ // Bind gRPC service for remote lookups
if (isCoordinator) {
conduitServiceRegistry.registerService(new CredentialsServiceImpl(
registry.provider(com.dremio.services.credentials.CredentialsService.class)));
@@ -823,7 +828,7 @@ public void close() throws Exception {
registry.bindSelf(new SystemTablePluginConfigProvider());
- registry.bind(SysFlightPluginConfigProvider.class, new SysFlightPluginConfigProvider(registry.provider(NodeEndpoint.class)));
+ registry.bind(SysFlightPluginConfigProvider.class, new SysFlightPluginConfigProvider());
final MetadataRefreshInfoBroadcaster metadataRefreshInfoBroadcaster =
new MetadataRefreshInfoBroadcaster(
@@ -852,8 +857,6 @@ public void close() throws Exception {
});
}
-
-
registry.bind(CatalogService.class, new CatalogServiceImpl(
registry.provider(SabotContext.class),
registry.provider(SchedulerService.class),
@@ -1023,7 +1026,8 @@ public Collection getNodes() {
registry.provider(MaestroForwarder.class),
bootstrapRegistry.lookup(Tracer.class),
registry.provider(RuleBasedEngineSelector.class),
- jobResultsAllocator);
+ jobResultsAllocator,
+ registry.provider(RequestContext.class));
if (config.getBoolean(DremioConfig.JOBS_ENABLED_BOOL)) {
registerJobsServices(conduitServiceRegistry, registry, bootstrap, jobResultsAllocator, optionManagerProvider);
@@ -1115,7 +1119,8 @@ public Collection getNodes() {
bootstrap.getExecutor(),
registry.provider(ForemenWorkManager.class),
isDistributedMaster,
- bootstrap.getAllocator());
+ bootstrap.getAllocator(),
+ registry.provider(RequestContext.class));
registry.bind(ReflectionService.class, reflectionService);
registry.bind(ReflectionAdministrationService.Factory.class, (context) -> reflectionService);
@@ -1123,7 +1128,7 @@ public Collection getNodes() {
registry.replace(AccelerationManager.class, new AccelerationManagerImpl(
registry.provider(ReflectionService.class),
registry.provider(ReflectionAdministrationService.Factory.class),
- namespaceServiceProvider));
+ registry.provider(CatalogService.class)));
final StatisticsServiceImpl statisticsService = new StatisticsServiceImpl(
registry.provider(LegacyKVStoreProvider.class),
@@ -1140,7 +1145,6 @@ public Collection getNodes() {
registry.bind(ReflectionStatusService.class, new ReflectionStatusServiceImpl(
nodeEndpointsProvider,
- namespaceServiceProvider,
registry.provider(CatalogService.class),
registry.provider(LegacyKVStoreProvider.class),
reflectionService.getCacheViewerProvider()
@@ -1277,6 +1281,7 @@ public Collection getNodes() {
));
registry.bind(RestServerV2.class, new RestServerV2(bootstrap.getClasspathScan()));
+ registry.bind(NessieProxyRestServer.class, new NessieProxyRestServer());
registry.bind(APIServer.class, new APIServer(bootstrap.getClasspathScan()));
registry.bind(DremioServlet.class, new DremioServlet(dacConfig.getConfig(),
@@ -1322,6 +1327,7 @@ public Collection getNodes() {
registry.provider(UserService.class),
registry.provider(TokenManager.class)
));
+ registry.bind(FlightRequestContextDecorator.class, FlightRequestContextDecorator.DEFAULT);
registry.bindSelf(new DremioFlightService(
registry.provider(DremioConfig.class),
@@ -1332,6 +1338,7 @@ public Collection getNodes() {
registry.provider(OptionManager.class),
registry.provider(UserSessionService.class),
registry.provider(DremioFlightAuthProvider.class),
+ registry.provider(FlightRequestContextDecorator.class),
registry.provider(com.dremio.services.credentials.CredentialsService.class)
));
} else {
@@ -1395,6 +1402,7 @@ public Collection getNodes() {
registry.provider(com.dremio.services.credentials.CredentialsService.class),
registry.provider(RestServerV2.class),
registry.provider(APIServer.class),
+ registry.provider(NessieProxyRestServer.class),
registry.provider(DremioServer.class),
new DremioBinder(registry),
"ui",
diff --git a/dac/backend/src/main/java/com/dremio/dac/daemon/SampleDataPopulatorService.java b/dac/backend/src/main/java/com/dremio/dac/daemon/SampleDataPopulatorService.java
index dc061db2f0..afd41e43b2 100644
--- a/dac/backend/src/main/java/com/dremio/dac/daemon/SampleDataPopulatorService.java
+++ b/dac/backend/src/main/java/com/dremio/dac/daemon/SampleDataPopulatorService.java
@@ -47,8 +47,8 @@
* Starts the SampleDataPopulator
*/
public class SampleDataPopulatorService implements Service {
- private final Provider contextProvider;
- private final Provider userService;
+ private final Provider sabotContextProvider;
+ private final Provider userServiceProvider;
private final Provider kvStore;
private final Provider init;
private final Provider jobsService;
@@ -63,9 +63,9 @@ public class SampleDataPopulatorService implements Service {
private final boolean addDefaultUser;
public SampleDataPopulatorService(
- Provider contextProvider,
+ Provider sabotContextProvider,
Provider kvStore,
- Provider userService,
+ Provider userServiceProvider,
Provider init,
Provider jobsService,
Provider catalogService,
@@ -74,9 +74,9 @@ public SampleDataPopulatorService(
Provider optionManager,
boolean prepopulate,
boolean addDefaultUser) {
- this.contextProvider = contextProvider;
+ this.sabotContextProvider = sabotContextProvider;
this.kvStore = kvStore;
- this.userService = userService;
+ this.userServiceProvider = userServiceProvider;
this.init = init;
this.jobsService = jobsService;
this.catalogService = catalogService;
@@ -94,24 +94,25 @@ public Provider getOptionManager() {
@Override
public void start() throws Exception {
final LegacyKVStoreProvider kv = kvStore.get();
- final NamespaceService ns = contextProvider.get().getNamespaceService(SystemUser.SYSTEM_USERNAME);
+ final NamespaceService systemUserNamespaceService = sabotContextProvider.get().getNamespaceService(SystemUser.SYSTEM_USERNAME);
addDefaultUser();
if (prepopulate) {
- final ReflectionServiceHelper reflectionServiceHelper = new SampleReflectionServiceHelper(ns, kvStore);
+ final ReflectionServiceHelper reflectionServiceHelper = new SampleReflectionServiceHelper(systemUserNamespaceService, kvStore, optionManager);
- final DatasetVersionMutator data = new DatasetVersionMutator(init.get(), kv, ns, jobsService.get(),
+ final DatasetVersionMutator data = new DatasetVersionMutator(init.get(), kv, systemUserNamespaceService, jobsService.get(),
catalogService.get(), optionManager.get());
- SecurityContext context = new DACSecurityContext(new UserName(SystemUser.SYSTEM_USERNAME), SystemUser.SYSTEM_USER, null);
- final SourceService ss = new SourceService(contextProvider.get(), ns, data, catalogService.get(), reflectionServiceHelper, null, connectionReader.get(), context);
+ SecurityContext securityContext = new DACSecurityContext(new UserName(SystemUser.SYSTEM_USERNAME), SystemUser.SYSTEM_USER, null);
+ final SourceService ss = new SourceService(sabotContextProvider.get(), systemUserNamespaceService, data, catalogService.get(), reflectionServiceHelper, null, connectionReader.get(), securityContext);
+ final UserService userService = sabotContextProvider.get().getUserService();
sample = new SampleDataPopulator(
- contextProvider.get(),
+ sabotContextProvider.get(),
ss,
data,
- userService.get(),
- contextProvider.get().getNamespaceService(SampleDataPopulator.DEFAULT_USER_NAME),
+ userServiceProvider.get(),
+ sabotContextProvider.get().getNamespaceService(SampleDataPopulator.DEFAULT_USER_NAME),
SampleDataPopulator.DEFAULT_USER_NAME,
- new CollaborationHelper(kv, contextProvider.get(), ns, context, searchService.get())
+ new CollaborationHelper(kv, systemUserNamespaceService, securityContext, searchService.get(), userService)
);
sample.populateInitialData();
@@ -119,10 +120,10 @@ public void start() throws Exception {
}
public void addDefaultUser() throws Exception {
- final NamespaceService ns = contextProvider.get().getNamespaceService(SystemUser.SYSTEM_USERNAME);
+ final NamespaceService ns = sabotContextProvider.get().getNamespaceService(SystemUser.SYSTEM_USERNAME);
if (addDefaultUser) {
- addDefaultDremioUser(userService.get(), ns);
+ addDefaultDremioUser(userServiceProvider.get(), ns);
}
}
@@ -138,15 +139,17 @@ class SampleReflectionServiceHelper extends ReflectionServiceHelper {
private final NamespaceService namespace;
private final Provider storeProvider;
- public SampleReflectionServiceHelper(NamespaceService namespace, Provider storeProvider) {
- super(null, null);
+ public SampleReflectionServiceHelper(NamespaceService namespace,
+ Provider storeProvider,
+ Provider optionManagerProvider) {
+ super(null, null, optionManagerProvider.get());
this.namespace = namespace;
this.storeProvider = storeProvider;
}
@Override
public ReflectionSettings getReflectionSettings() {
- return new ReflectionSettingsImpl(() -> namespace, storeProvider);
+ return new ReflectionSettingsImpl(() -> namespace, catalogService, storeProvider);
}
}
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/AsyncMetadataJobStatusListener.java b/dac/backend/src/main/java/com/dremio/dac/explore/AsyncMetadataJobStatusListener.java
new file mode 100644
index 0000000000..0f4ec47473
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/AsyncMetadataJobStatusListener.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.explore;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import com.dremio.service.jobs.JobStatusListener;
+import com.dremio.service.jobs.metadata.proto.QueryMetadata;
+
+class AsyncMetadataJobStatusListener implements JobStatusListener {
+ private List listeners;
+
+ interface MetaDataListener {
+ void metadataCollected(QueryMetadata metadata);
+ }
+
+ AsyncMetadataJobStatusListener(MetaDataListener listener) {
+ listeners = new ArrayList<>();
+ listeners.add(listener);
+ }
+
+ void addMetadataListener(MetaDataListener listener) {
+ listeners.add(listeners.size(), listener);
+ }
+
+ @Override
+ public void metadataCollected(QueryMetadata metadata) {
+ Thread t = new Thread(() -> {
+ for (MetaDataListener l: listeners) {
+ l.metadataCollected(metadata);
+ }
+ });
+ t.start();
+ }
+}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResource.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResource.java
index 3b1f1c9b08..b916541ea9 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResource.java
@@ -47,6 +47,7 @@
import com.dremio.dac.explore.model.InitialDataPreviewResponse;
import com.dremio.dac.explore.model.VersionContextReq;
import com.dremio.dac.explore.model.VersionContextReq.VersionContextType;
+import com.dremio.dac.explore.model.VersionContextUtils;
import com.dremio.dac.model.job.JobData;
import com.dremio.dac.model.job.JobDataWrapper;
import com.dremio.dac.proto.model.dataset.VirtualDatasetUI;
@@ -60,10 +61,14 @@
import com.dremio.dac.service.reflection.ReflectionServiceHelper;
import com.dremio.dac.util.JobRequestUtil;
import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.catalog.CatalogEntityKey;
import com.dremio.exec.catalog.CatalogUtil;
+import com.dremio.exec.catalog.DremioTable;
import com.dremio.exec.catalog.ResolvedVersionContext;
+import com.dremio.exec.catalog.TableVersionContext;
import com.dremio.exec.catalog.VersionContext;
import com.dremio.exec.physical.base.ViewOptions;
+import com.dremio.exec.planner.sql.parser.SqlGrant;
import com.dremio.exec.record.BatchSchema;
import com.dremio.service.job.QueryType;
import com.dremio.service.job.SqlQuery;
@@ -95,10 +100,8 @@
import io.protostuff.ByteString;
-
/**
* Serves the datasets
- *
*/
@RestResource
@Secured
@@ -134,13 +137,6 @@ public DatasetResource(
this.collaborationService = collaborationService;
}
- @GET
- @Path("descendants/count")
- @Produces(APPLICATION_JSON)
- public long getDescendantsCount() {
- return datasetService.getDescendantsCount(datasetPath.toNamespaceKey());
- }
-
@GET
@Path("descendants")
@Produces(APPLICATION_JSON)
@@ -155,8 +151,30 @@ public List> getDescendants() throws NamespaceException {
@GET
@Path("acceleration/settings")
@Produces(APPLICATION_JSON)
- public AccelerationSettingsDescriptor getAccelerationSettings() throws NamespaceException {
- final DatasetConfig config = namespaceService.getDataset(datasetPath.toNamespaceKey());
+ public AccelerationSettingsDescriptor getAccelerationSettings(
+ @QueryParam("versionType") String versionType,
+ @QueryParam("versionValue") String versionValue)
+ throws DatasetNotFoundException, NamespaceException {
+ final CatalogEntityKey.Builder builder =
+ CatalogEntityKey.newBuilder().keyComponents(datasetPath.toPathList());
+
+ if (isDatasetVersioned()) {
+ final VersionContext versionContext = generateVersionContext(versionType, versionValue);
+ final TableVersionContext tableVersionContext = TableVersionContext.of(versionContext);
+
+ builder.tableVersionContext(tableVersionContext);
+ }
+
+ final Catalog catalog = datasetService.getCatalog();
+ final CatalogEntityKey catalogEntityKey = builder.build();
+ final DremioTable table = CatalogUtil.getTable(catalogEntityKey, catalog);
+
+ if (table == null) {
+ throw new DatasetNotFoundException(datasetPath);
+ }
+
+ final DatasetConfig config = table.getDatasetConfig();
+
if (config.getType() == DatasetType.VIRTUAL_DATASET) {
final String msg = String.format("acceleration settings apply only to physical dataset. %s is a virtual dataset",
datasetPath.toPathString());
@@ -164,37 +182,54 @@ public AccelerationSettingsDescriptor getAccelerationSettings() throws Namespace
}
final ReflectionSettings reflectionSettings = reflectionServiceHelper.getReflectionSettings();
- final AccelerationSettings settings = reflectionSettings.getReflectionSettings(datasetPath.toNamespaceKey());
- final AccelerationSettingsDescriptor descriptor = new AccelerationSettingsDescriptor()
- .setAccelerationRefreshPeriod(settings.getRefreshPeriod())
- .setAccelerationGracePeriod(settings.getGracePeriod())
- .setMethod(settings.getMethod())
- .setRefreshField(settings.getRefreshField())
- .setAccelerationNeverExpire(settings.getNeverExpire())
- .setAccelerationNeverRefresh(settings.getNeverRefresh());
-
+ final AccelerationSettings settings =
+ reflectionSettings.getReflectionSettings(catalogEntityKey);
+
+ final AccelerationSettingsDescriptor descriptor =
+ new AccelerationSettingsDescriptor()
+ .setAccelerationRefreshPeriod(settings.getRefreshPeriod())
+ .setAccelerationGracePeriod(settings.getGracePeriod())
+ .setMethod(settings.getMethod())
+ .setRefreshField(settings.getRefreshField())
+ .setAccelerationNeverExpire(settings.getNeverExpire())
+ .setAccelerationNeverRefresh(settings.getNeverRefresh());
final ByteString schemaBytes = DatasetHelper.getSchemaBytes(config);
+
if (schemaBytes != null) {
final BatchSchema schema = BatchSchema.deserialize(schemaBytes.toByteArray());
- descriptor.setFieldList(FluentIterable
- .from(schema)
- .transform(new Function() {
- @Nullable
- @Override
- public String apply(@Nullable final Field field) {
- return field.getName();
- }
- })
- .toList()
- );
+ descriptor.setFieldList(
+ FluentIterable.from(schema)
+ .transform(
+ new Function() {
+ @Nullable
+ @Override
+ public String apply(@Nullable final Field field) {
+ return field.getName();
+ }
+ })
+ .toList());
}
+
return descriptor;
}
+ private VersionContext generateVersionContext(String versionType, String versionValue) {
+ final VersionContext versionContext = VersionContextUtils.parse(versionType, versionValue);
+ if (versionContext.getType() == VersionContext.Type.UNSPECIFIED) {
+ throw new ClientErrorException(
+ "Missing a versionType/versionValue pair for versioned dataset");
+ }
+ return versionContext;
+ }
+
@PUT
@Path("acceleration/settings")
@Produces(APPLICATION_JSON)
- public void updateAccelerationSettings(final AccelerationSettingsDescriptor descriptor) throws NamespaceException {
+ public void updateAccelerationSettings(
+ final AccelerationSettingsDescriptor descriptor,
+ @QueryParam("versionType") String versionType,
+ @QueryParam("versionValue") String versionValue)
+ throws DatasetNotFoundException, NamespaceException {
Preconditions.checkArgument(descriptor != null, "acceleration settings descriptor is required");
Preconditions.checkArgument(descriptor.getAccelerationRefreshPeriod() != null, "refreshPeriod is required");
Preconditions.checkArgument(descriptor.getAccelerationGracePeriod() != null, "gracePeriod is required");
@@ -202,7 +237,26 @@ public void updateAccelerationSettings(final AccelerationSettingsDescriptor desc
Preconditions.checkArgument(descriptor.getAccelerationNeverExpire() //we are good here
|| descriptor.getAccelerationNeverRefresh() //user never want to refresh, assume they just want to let it expire anyway
|| descriptor.getAccelerationRefreshPeriod() <= descriptor.getAccelerationGracePeriod() , "refreshPeriod must be less than gracePeriod");
- final DatasetConfig config = namespaceService.getDataset(datasetPath.toNamespaceKey());
+
+ final CatalogEntityKey.Builder builder =
+ CatalogEntityKey.newBuilder().keyComponents(datasetPath.toPathList());
+
+ if (isDatasetVersioned()) {
+ final VersionContext versionContext = generateVersionContext(versionType, versionValue);
+ final TableVersionContext tableVersionContext = TableVersionContext.of(versionContext);
+
+ builder.tableVersionContext(tableVersionContext);
+ }
+
+ final Catalog catalog = datasetService.getCatalog();
+ final CatalogEntityKey catalogEntityKey = builder.build();
+ final DremioTable table = CatalogUtil.getTable(catalogEntityKey, catalog);
+
+ if (table == null) {
+ throw new DatasetNotFoundException(datasetPath);
+ }
+
+ final DatasetConfig config = table.getDatasetConfig();
if (config.getType() == DatasetType.VIRTUAL_DATASET) {
final String msg = String.format("acceleration settings apply only to physical dataset. %s is a virtual dataset",
@@ -211,7 +265,13 @@ public void updateAccelerationSettings(final AccelerationSettingsDescriptor desc
}
if (descriptor.getMethod() == RefreshMethod.INCREMENTAL) {
- if (config.getType() == DatasetType.PHYSICAL_DATASET) {
+ if (CatalogUtil.requestedPluginSupportsVersionedTables(table.getPath(), catalog)) {
+ // Validate Iceberg tables in Nessie Catalog
+ final String msg = "refresh field is required for incremental updates on Iceberg tables";
+ Preconditions.checkArgument(descriptor.getRefreshField() != null, msg);
+ } else if (config.getType() == DatasetType.PHYSICAL_DATASET) {
+ // Validate Iceberg tables outside of Nessie Catalog
+ // Validate non-directory datasets such as RDBMS tables, MongoDB, elasticsearch, etc.
final String msg = "refresh field is required for incremental updates on non-filesystem datasets";
Preconditions.checkArgument(descriptor.getRefreshField() != null, msg);
} else {
@@ -224,26 +284,32 @@ public void updateAccelerationSettings(final AccelerationSettingsDescriptor desc
}
final ReflectionSettings reflectionSettings = reflectionServiceHelper.getReflectionSettings();
- final AccelerationSettings settings = reflectionSettings.getReflectionSettings(datasetPath.toNamespaceKey());
- final AccelerationSettings descriptorSettings = new AccelerationSettings()
- .setAccelerationTTL(settings.getAccelerationTTL()) // needed to use protobuf equals
- .setTag(settings.getTag()) // needed to use protobuf equals
- .setRefreshPeriod(descriptor.getAccelerationRefreshPeriod())
- .setGracePeriod(descriptor.getAccelerationGracePeriod())
- .setMethod(descriptor.getMethod())
- .setRefreshField(descriptor.getRefreshField())
- .setNeverExpire(descriptor.getAccelerationNeverExpire())
- .setNeverRefresh(descriptor.getAccelerationNeverRefresh());
- final boolean settingsUpdated = !settings.equals(descriptorSettings);
- if (settingsUpdated) {
- settings.setRefreshPeriod(descriptor.getAccelerationRefreshPeriod())
+ final AccelerationSettings settings =
+ reflectionSettings.getReflectionSettings(catalogEntityKey);
+ final AccelerationSettings descriptorSettings =
+ new AccelerationSettings()
+ .setAccelerationTTL(settings.getAccelerationTTL()) // needed to use protobuf equals
+ .setTag(settings.getTag()) // needed to use protobuf equals
+ .setRefreshPeriod(descriptor.getAccelerationRefreshPeriod())
+ .setGracePeriod(descriptor.getAccelerationGracePeriod())
+ .setMethod(descriptor.getMethod())
+ .setRefreshField(descriptor.getRefreshField())
+ .setNeverExpire(descriptor.getAccelerationNeverExpire())
+ .setNeverRefresh(descriptor.getAccelerationNeverRefresh());
+
+ if (settings.equals(descriptorSettings)) {
+ return;
+ }
+
+ settings
+ .setRefreshPeriod(descriptor.getAccelerationRefreshPeriod())
.setGracePeriod(descriptor.getAccelerationGracePeriod())
.setMethod(descriptor.getMethod())
.setRefreshField(descriptor.getRefreshField())
.setNeverExpire(descriptor.getAccelerationNeverExpire())
.setNeverRefresh(descriptor.getAccelerationNeverRefresh());
- reflectionSettings.setReflectionSettings(datasetPath.toNamespaceKey(), settings);
- }
+
+ reflectionSettings.setReflectionSettings(catalogEntityKey, settings);
}
/**
@@ -285,6 +351,8 @@ public DatasetUI deleteDataset(
DatasetUI datasetUI = null;
if (versioned) {
final Catalog catalog = datasetService.getCatalog();
+ //TODO: Once DX-65418 is fixed, injected catalog will validate the right entity accordingly
+ catalog.validatePrivilege(new NamespaceKey(datasetPath.toPathList()), SqlGrant.Privilege.ALTER);
final ResolvedVersionContext resolvedVersionContext =
CatalogUtil.resolveVersionContext(
catalog, datasetPath.getRoot().getName(), VersionContext.ofBranch(refValue));
@@ -293,10 +361,13 @@ public DatasetUI deleteDataset(
catalog.dropView(new NamespaceKey(datasetPath.toPathList()), viewOptions);
} else {
- final VirtualDatasetUI virtualDataset = datasetService.get(datasetPath);
-
- datasetUI = newDataset(virtualDataset);
- datasetService.deleteDataset(datasetPath, savedTag);
+ try {
+ final VirtualDatasetUI virtualDataset = datasetService.get(datasetPath);
+ datasetUI = newDataset(virtualDataset);
+ datasetService.deleteDataset(datasetPath, savedTag);
+ } catch (DatasetVersionNotFoundException e) {
+ datasetService.deleteDataset(datasetPath, null);
+ }
}
final ReflectionSettings reflectionSettings = reflectionServiceHelper.getReflectionSettings();
@@ -306,7 +377,7 @@ public DatasetUI deleteDataset(
}
private boolean isDatasetVersioned() {
- final NamespaceKey namespaceKey = new NamespaceKey(datasetPath.toPathList());
+ final NamespaceKey namespaceKey = datasetPath.toNamespaceKey();
final Catalog catalog = datasetService.getCatalog();
return CatalogUtil.requestedPluginSupportsVersionedTables(namespaceKey, catalog);
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResourceUtils.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResourceUtils.java
index da1ca6ec1f..adaa9ba15b 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResourceUtils.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResourceUtils.java
@@ -20,8 +20,6 @@
import java.util.List;
import java.util.Map;
-import org.apache.commons.collections4.MapUtils;
-
import com.dremio.dac.explore.model.VersionContextReq;
import com.dremio.dac.proto.model.dataset.SourceVersionReference;
import com.dremio.dac.proto.model.dataset.VersionContextType;
@@ -31,12 +29,16 @@
/**
* Utility classes for DatasetResource
*/
-public class DatasetResourceUtils {
+public final class DatasetResourceUtils {
+
+ private DatasetResourceUtils() {
+ // utils class
+ }
- public static Map createSourceVersionMapping(Map references) {
+ public static Map createSourceVersionMapping(final Map references) {
final Map sourceVersionMapping = new HashMap<>();
- if (MapUtils.isNotEmpty(references)) {
+ if (references != null) {
for (Map.Entry entry: references.entrySet()) {
VersionContextReq.VersionContextType versionContextType = entry.getValue().getType();
switch (versionContextType) {
@@ -60,7 +62,7 @@ public static Map createSourceVersionMapping(Map createSourceVersionReferenceList(Map references) {
List sourceVersionReferenceList = new ArrayList<>();
- if (MapUtils.isNotEmpty(references)) {
+ if (references != null) {
for (Map.Entry entry: references.entrySet()) {
VersionContextReq versionContextReq = entry.getValue();
VersionContextReq.VersionContextType versionContextType = versionContextReq.getType();
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetStateMutator.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetStateMutator.java
index 9517d83218..c83d452d69 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetStateMutator.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetStateMutator.java
@@ -131,7 +131,7 @@ public String getDatasetAlias() {
private void shouldNotExist(String colName) {
if (findCol(colName) != null) {
throw new IllegalArgumentException(
- format("Invalid new col name %s. It is already in the current schema", colName));
+ format("Invalid new column name %s. It is already in the current schema", colName));
}
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetTool.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetTool.java
index 089bdf328d..a184dd9aec 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetTool.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetTool.java
@@ -20,11 +20,9 @@
import java.security.AccessControlException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -82,14 +80,12 @@
import com.dremio.dac.util.InvalidQueryErrorConverter;
import com.dremio.exec.catalog.Catalog;
import com.dremio.exec.catalog.CatalogUtil;
+import com.dremio.exec.catalog.VersionContext;
import com.dremio.exec.record.BatchSchema;
import com.dremio.exec.record.RecordBatchHolder;
import com.dremio.exec.util.ViewFieldsHelper;
import com.dremio.service.job.JobDetails;
import com.dremio.service.job.JobDetailsRequest;
-import com.dremio.service.job.JobSummary;
-import com.dremio.service.job.SearchJobsRequest;
-import com.dremio.service.job.VersionedDatasetPath;
import com.dremio.service.job.proto.JobId;
import com.dremio.service.job.proto.JobInfo;
import com.dremio.service.job.proto.JobState;
@@ -98,7 +94,6 @@
import com.dremio.service.job.proto.SessionId;
import com.dremio.service.jobs.JobDataClientUtils;
import com.dremio.service.jobs.JobNotFoundException;
-import com.dremio.service.jobs.JobStatusListener;
import com.dremio.service.jobs.JobsProtoUtil;
import com.dremio.service.jobs.JobsService;
import com.dremio.service.jobs.JobsVersionContext;
@@ -106,7 +101,6 @@
import com.dremio.service.jobs.metadata.proto.QueryMetadata;
import com.dremio.service.namespace.NamespaceException;
import com.dremio.service.namespace.NamespaceKey;
-import com.dremio.service.namespace.NamespaceUtils;
import com.dremio.service.namespace.dataset.DatasetVersion;
import com.dremio.service.namespace.dataset.proto.DatasetType;
import com.dremio.service.namespace.dataset.proto.FieldOrigin;
@@ -116,6 +110,7 @@
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.protostuff.ByteString;
/**
@@ -149,21 +144,28 @@ public DatasetTool(
* @throws DatasetVersionNotFoundException
*/
InitialPreviewResponse createPreviewResponseForExistingDataset (
- BufferAllocator allocator,
VirtualDatasetUI newDataset,
DatasetVersionResourcePath tipVersion,
- Integer limit,
String engineName,
String sessionId,
String triggerJob
- ) throws DatasetVersionNotFoundException, NamespaceException, JobNotFoundException {
+ ) throws DatasetVersionNotFoundException, NamespaceException {
+ JobId jobId = null;
if (shouldTriggerJob(triggerJob)) {
SqlQuery query = new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username(), engineName, sessionId);
- JobData jobData = executor.runQueryWithListener(query, QueryType.UI_PREVIEW, tipVersion.getDataset(), newDataset.getVersion(), JobStatusListener.NO_OP);
- return createPreviewResponse(newDataset, jobData, tipVersion, allocator, limit, true);
- } else {
- return getInitialPreviewResponse(newDataset, null, new SessionId().setId(sessionId), tipVersion, null, null);
+ MetadataJobStatusListener listener = new MetadataJobStatusListener(this, newDataset, null);
+ // The saved dataset is incomplete, so we want to save the dataset again once the metadata is collected.
+ if (newDataset.getSqlFieldsList() == null) {
+ listener.waitToApplyMetadataAndSaveDataset();
+ }
+
+ JobData jobData = executor.runQueryWithListener(query, QueryType.UI_PREVIEW, tipVersion.getDataset(), newDataset.getVersion(), listener);
+ jobId = jobData.getJobId();
+ if (newDataset.getSqlFieldsList() == null) {
+ listener.setJobId(jobId);
+ }
}
+ return getInitialPreviewResponse(newDataset, jobId, new SessionId().setId(sessionId), tipVersion, null, null);
}
// Convert String to boolean, but with default as true.
@@ -187,8 +189,8 @@ private String username(){
* Helper method to create {@link InitialPreviewResponse} from given inputs
* @param datasetUI
* @param job
- * @param tipVersion a combination of dataset verion + path to a dataset. It represent a top history version. Path here
- * could differs from path that {@code datasetUI} has, as {@code datasetUI} could be
+ * @param tipVersion a combination of dataset version + path to a dataset. It represents a top history version. Path here
+ * could differ from path that {@code datasetUI} has, as {@code datasetUI} could be
* a history version, that references on other dataset with different path.
* @param maxRecords
* @param catchExecutionError
@@ -231,6 +233,7 @@ InitialPreviewResponse createPreviewResponse(VirtualDatasetUI datasetUI, JobData
}
if (ex instanceof UserException) {
+ // TODO - Why is this not thrown?
toInvalidQueryException((UserException) ex, datasetUI.getSql(), ImmutableList.of(), job.getJobId(), job.getSessionId());
}
error = new ApiErrorModel(ApiErrorModel.ErrorType.INITIAL_PREVIEW_ERROR, ex.getMessage(), GenericErrorMessage.printStackTrace(ex), null);
@@ -250,8 +253,7 @@ private InitialPreviewResponse getInitialPreviewResponse(VirtualDatasetUI datase
JobDataFragment dataLimited,
ApiErrorModel> error) throws DatasetVersionNotFoundException, NamespaceException {
final History history = getHistory(tipVersion.getDataset(), datasetUI.getVersion(), tipVersion.getVersion());
- // VBesschetnov 2019-01-08
- // this is requires as BE generates apiLinks, that is used by UI to send requests for preview/run. In case, when history
+ // This is required as BE generates apiLinks that are used by the UI to send requests for preview/run. In case, when history
// of a dataset reference on a version for other dataset. And a user navigate to that version and tries to preview it,
// we would not be resolve a tip version and preview will fail. We should always send requests to original dataset
// path (tip version path) to be able to get a preview/run data
@@ -559,6 +561,7 @@ public InitialPreviewResponse newUntitled(
* @throws DatasetVersionNotFoundException
* @throws NamespaceException
*/
+ @WithSpan
public InitialPreviewResponse newUntitled(
BufferAllocator allocator,
FromBase from,
@@ -573,8 +576,7 @@ public InitialPreviewResponse newUntitled(
Map references)
throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
- final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context,
- DatasetResourceUtils.createSourceVersionReferenceList(references));
+ final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context, references);
final Map sourceVersionMapping = createSourceVersionMapping(references);
final SqlQuery query = new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username(), engineName, sessionId, sourceVersionMapping);
@@ -600,6 +602,7 @@ public InitialPreviewResponse newUntitled(
List parentDataset = getParentDataset(from);
if (ex instanceof UserException) {
+ // TODO - Why is this not thrown?
toInvalidQueryException((UserException) ex, query.getSql(), context, parentSummary, jobId, jobDataSessionId);
}
@@ -627,12 +630,10 @@ public InitialUntitledRunResponse newTmpUntitled(
List context,
String engineName,
String sessionId,
- Map references,
- Integer limit)
+ Map references)
throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
- final VirtualDatasetUI vds = createNewUntitledMetadataOnly(from, version, context,
- DatasetResourceUtils.createSourceVersionReferenceList(references));
+ final VirtualDatasetUI vds = createNewUntitledMetadataOnly(from, version, context, references);
final Map sourceVersionMapping = createSourceVersionMapping(references);
final SqlQuery query = new SqlQuery(vds.getSql(), vds.getState().getContextList(), username(), engineName, sessionId, sourceVersionMapping);
@@ -675,12 +676,15 @@ public InitialPreviewResponse newUntitled(
VirtualDatasetUI createNewUntitledMetadataOnly(FromBase from,
DatasetVersion version,
List context,
- List sourceVersionReferences) {
- final DatasetPath datasetPath = TMP_DATASET_PATH;
- final VirtualDatasetUI newDataset = newDatasetBeforeQueryMetadata(datasetPath, version, from.wrap(), context, username());
+ Map references) {
+ final VirtualDatasetUI newDataset = newDatasetBeforeQueryMetadata(TMP_DATASET_PATH, version, from.wrap(), context, username(), datasetService.getCatalog(), references);
newDataset.setLastTransform(new Transform(TransformType.createFromParent).setTransformCreateFromParent(new TransformCreateFromParent(from.wrap())));
+
+ final List sourceVersionReferences =
+ DatasetResourceUtils.createSourceVersionReferenceList(references);
newDataset.setReferencesList(sourceVersionReferences);
newDataset.getState().setReferenceList(sourceVersionReferences);
+
return newDataset;
}
@@ -690,10 +694,9 @@ InitialRunResponse newUntitledAndRun(FromBase from,
String engineName,
String sessionId,
Map references)
- throws DatasetNotFoundException, NamespaceException, DatasetVersionNotFoundException, InterruptedException {
+ throws DatasetNotFoundException, NamespaceException, DatasetVersionNotFoundException {
- final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context,
- DatasetResourceUtils.createSourceVersionReferenceList(references));
+ final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context, references);
final Map sourceVersionMapping = createSourceVersionMapping(references);
final SqlQuery query = new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username(), engineName, sessionId, sourceVersionMapping);
@@ -732,7 +735,7 @@ InitialRunResponse newUntitledAndRun(FromBase from,
* @param version Initial version of the new dataset
* @param context Dataset context or current schema
* @param engineName Engine to runt the query
- * @param sessionId Session Id
+ * @param sessionId SessionId
* @param references References
* @return {@link InitialUntitledRunResponse)}
* @throws DatasetNotFoundException
@@ -746,8 +749,7 @@ InitialUntitledRunResponse newTmpUntitledAndRun(FromBase from,
Map references)
throws DatasetNotFoundException, DatasetVersionNotFoundException {
- final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context,
- DatasetResourceUtils.createSourceVersionReferenceList(references));
+ final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context, references);
final Map sourceVersionMapping = createSourceVersionMapping(references);
final SqlQuery query = new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username(), engineName, sessionId, sourceVersionMapping);
@@ -795,13 +797,6 @@ protected Map createSourceVersionMapping(Map context)
- throws DatasetNotFoundException, NamespaceException, DatasetVersionNotFoundException, InterruptedException {
- return newUntitledAndRun(from, version, context, null, null, null);
- }
-
public void applyQueryMetaToDatasetAndSave(JobId jobId,
QueryMetadata queryMetadata,
VirtualDatasetUI newDataset,
@@ -817,7 +812,7 @@ public void applyQueryMetaToDatasetAndSave(JobId jobId,
QuerySemantics.populateSemanticFields(JobsProtoUtil.toStuff(queryMetadata.getFieldTypeList()), newDataset.getState());
applyQueryMetadata(newDataset, jobInfo, queryMetadata);
- if (from.wrap().getType() == FromType.SQL) {
+ if (from == null || from.wrap().getType() == FromType.SQL) {
newDataset.setState(QuerySemantics.extract(queryMetadata));
}
@@ -825,29 +820,17 @@ public void applyQueryMetaToDatasetAndSave(JobId jobId,
}
public static VirtualDatasetUI newDatasetBeforeQueryMetadata(
- DatasetPath datasetPath,
- DatasetVersion version,
- From from,
- List sqlContext,
- String owner) {
+ final DatasetPath datasetPath,
+ final DatasetVersion version,
+ final From from,
+ final List sqlContext,
+ final String owner,
+ Catalog catalog,
+ final Map references) {
VirtualDatasetState dss = new VirtualDatasetState()
.setFrom(from);
dss.setContextList(sqlContext);
VirtualDatasetUI vds = new VirtualDatasetUI();
- switch(from.getType()){
- case SQL:
- vds.setDerivation(Derivation.SQL);
- break;
- case Table:
- vds.setDerivation(Derivation.DERIVED_UNKNOWN);
- dss.setReferredTablesList(Arrays.asList(from.getTable().getAlias()));
- break;
- case SubQuery:
- default:
- vds.setDerivation(Derivation.UNKNOWN);
- dss.setReferredTablesList(Arrays.asList(from.getSubQuery().getAlias()));
- break;
- }
vds.setOwner(owner);
vds.setIsNamed(false);
@@ -859,6 +842,23 @@ public static VirtualDatasetUI newDatasetBeforeQueryMetadata(
vds.setId(UUID.randomUUID().toString());
vds.setContextList(sqlContext);
+ switch (from.getType()) {
+ case SQL:
+ vds.setDerivation(Derivation.SQL);
+ break;
+ case Table:
+ vds.setDerivation(Derivation.DERIVED_UNKNOWN);
+ dss.setReferredTablesList(Collections.singletonList(from.getTable().getAlias()));
+
+ updateVersionedDatasetId(vds, from, catalog, references);
+ break;
+ case SubQuery:
+ default:
+ vds.setDerivation(Derivation.UNKNOWN);
+ dss.setReferredTablesList(Collections.singletonList(from.getSubQuery().getAlias()));
+ break;
+ }
+
// if we're doing a select * from table, and the context matches the base path of the table, let's avoid qualifying the table name.
if(from.getType() == FromType.Table) {
NamespaceKey path = new DatasetPath(from.getTable().getDatasetPath()).toNamespaceKey();
@@ -870,9 +870,33 @@ public static VirtualDatasetUI newDatasetBeforeQueryMetadata(
return vds;
}
+ /**
+ * Update the datasetId in the given dataset UI. This method only applies to versioned table.
+ */
+ private static void updateVersionedDatasetId(
+ VirtualDatasetUI vds,
+ final From from,
+ Catalog catalog,
+ final Map references) {
+ if (references == null || references.isEmpty() || catalog == null) {
+ return;
+ }
+
+ final NamespaceKey namespaceKey =
+ new DatasetPath(from.getTable().getDatasetPath()).toNamespaceKey();
+ final Map versionContextMapping =
+ DatasetResourceUtils.createSourceVersionMapping(references);
+
+ if (!CatalogUtil.requestedPluginSupportsVersionedTables(namespaceKey, catalog)) {
+ return;
+ }
+
+ vds.setId(catalog.resolveCatalog(versionContextMapping).getDatasetId(namespaceKey));
+ }
+
/**
* Get the history before a given version. This should only be used if this version is known to be
- * the last version in the history. Otherwise the other version of this method that takes a tip
+ * the last version in the history. Otherwise, the other version of this method that takes a tip
* version as well as a current version.
*
* @param datasetPath
@@ -894,7 +918,7 @@ History getHistory(final DatasetPath datasetPath, DatasetVersion currentDataset)
*
* @param datasetPath the dataset path of the version at the tip of the history
* @param versionToMarkCurrent the version currently selected in the client
- * @param tipVersion the latest history item known, which may be passed the selected versionToMarkCurrent,
+ * @param tipVersion the latest history item known which may be passed the selected versionToMarkCurrent,
* this can be null and the tip will be assumed to be the versionToMarkCurrent the
* same behavior as the version of this method that lacks the tipVersion entirely
* @return
@@ -916,27 +940,8 @@ History getHistory(final DatasetPath datasetPath, final DatasetVersion versionTo
DatasetVersionResourcePath versionedResourcePath =
new DatasetVersionResourcePath(currentPath, currentVersion);
- // grab the most recent job for this dataset version (note the use of limit 1 to avoid
- // retrieving all results, the API just returns a list, so this also has to index into the returned list
- // that will always contain a single element)
- final SearchJobsRequest request = SearchJobsRequest.newBuilder()
- .setDataset(VersionedDatasetPath.newBuilder()
- .addAllPath(currentDataset.getFullPathList())
- .setVersion(currentDataset.getVersion().getVersion())
- .build())
- .setLimit(1)
- .build();
- Iterable jobSummaries = jobsService.searchJobs(request);
- final JobState jobState;
- // jobs are not persisted forever so we may not have a job for this version of the dataset
- Iterator iterator = jobSummaries.iterator();
- if (iterator.hasNext()) {
- jobState = JobsProtoUtil.toStuff(iterator.next().getJobState());
- } else {
- jobState = JobState.COMPLETED;
- }
historyItems.add(
- new HistoryItem(versionedResourcePath, jobState,
+ new HistoryItem(versionedResourcePath, JobState.COMPLETED,
TransformBase.unwrap(currentDataset.getLastTransform()).accept(new DescribeTransformation()), username(),
currentDataset.getCreatedAt(), 0L, true, null, null));
@@ -999,7 +1004,7 @@ History getHistory(final DatasetPath datasetPath, final DatasetVersion versionTo
* @throws NamespaceException
*/
void rewriteHistory(final VirtualDatasetUI versionToSave, final DatasetPath newPath)
- throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException {
+ throws DatasetVersionNotFoundException, DatasetNotFoundException {
DatasetVersion previousDatasetVersion;
DatasetPath previousPath;
@@ -1019,6 +1024,16 @@ void rewriteHistory(final VirtualDatasetUI versionToSave, final DatasetPath newP
previousPath = new DatasetPath(previousVersion.getDatasetPath());
previousDatasetVersion = new DatasetVersion(previousVersion.getDatasetVersion());
previousVersionRequiresRename = !previousPath.equals(newPath);
+ VirtualDatasetUI previousDataset = datasetService.getVersion(previousPath, previousDatasetVersion);
+ // If the previous VDS version is incomplete, ignore that version. This could happen when the user clicks on a
+ // PDS, an incomplete VDS version is created to show the PDS in UI. If the user modifies the SQL and saves the
+ // VDS, the previous VDS version is incomplete since it was never run and doesn't have metadata.
+ try {
+ DatasetVersionMutator.validate(previousPath, previousDataset);
+ } catch (Exception e) {
+ previousVersionRequiresRename = false;
+ }
+
if (previousVersionRequiresRename) {
// create a new link to the previous dataset with a changed dataset path
NameDatasetRef prev = new NameDatasetRef()
@@ -1027,7 +1042,7 @@ void rewriteHistory(final VirtualDatasetUI versionToSave, final DatasetPath newP
currentDataset.setPreviousVersion(prev);
currentDataset.setName(newPath.getDataset().getName());
datasetService.putVersion(currentDataset);
- currentDataset = datasetService.getVersion(previousPath, previousDatasetVersion);
+ currentDataset = previousDataset;
} else {
datasetService.putVersion(currentDataset);
}
@@ -1041,7 +1056,7 @@ void rewriteHistory(final VirtualDatasetUI versionToSave, final DatasetPath newP
public static void applyQueryMetadata(VirtualDatasetUI dataset, JobInfo jobInfo, QueryMetadata metadata) {
applyQueryMetadata(dataset,
Optional.ofNullable(jobInfo.getParentsList()),
- Optional.ofNullable(jobInfo.getBatchSchema()).map((b) -> BatchSchema.deserialize(b)),
+ Optional.ofNullable(jobInfo.getBatchSchema()).map(BatchSchema::deserialize),
Optional.ofNullable(jobInfo.getFieldOriginsList()),
Optional.ofNullable(jobInfo.getGrandParentsList()),
metadata);
@@ -1069,12 +1084,8 @@ public static void applyQueryMetadata(VirtualDatasetUI dataset, Optional context,
- DatasetSummary parentSummary, Map references)
+ DatasetType parentType, List parentFullPathList, Map references)
throws NamespaceException {
- final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, newVersion, context,
- DatasetResourceUtils.createSourceVersionReferenceList(references));
+ final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, newVersion, context, references);
List parents = new ArrayList<>();
- final DatasetType parentType = parentSummary.getDatasetType();
- ParentDataset parent = new ParentDataset().setDatasetPathList(parentSummary.getFullPath()).setType(parentType);
+ final ParentDataset parent = new ParentDataset().setDatasetPathList(parentFullPathList).setType(parentType);
parents.add(parent);
newDataset.setParentsList(parents);
- if (NamespaceUtils.isPhysicalDataset(parentType)) {
- newDataset.setDerivation(Derivation.DERIVED_PHYSICAL);
- } else if (parentType == DatasetType.VIRTUAL_DATASET) {
- newDataset.setDerivation(Derivation.DERIVED_VIRTUAL);
- } else {
- newDataset.setDerivation(Derivation.DERIVED_UNKNOWN);
+ switch (parentType) {
+ case PHYSICAL_DATASET:
+ case PHYSICAL_DATASET_SOURCE_FILE:
+ case PHYSICAL_DATASET_SOURCE_FOLDER:
+ case PHYSICAL_DATASET_HOME_FILE:
+ case PHYSICAL_DATASET_HOME_FOLDER:
+ newDataset.setDerivation(Derivation.DERIVED_PHYSICAL);
+ break;
+
+ case VIRTUAL_DATASET:
+ newDataset.setDerivation(Derivation.DERIVED_VIRTUAL);
+ break;
+
+ default:
+ newDataset.setDerivation(Derivation.DERIVED_UNKNOWN);
}
- return InitialPreviewResponse.of(newDataset(newDataset, null), true, null, null);
+ // Save the incomplete dataset (without metadata) to allow the data graph and catalog to work in the UI.
+ // Later run/preview calls will save the complete dataset.
+ datasetService.putTempVersionWithoutValidation(newDataset);
+
+ final DatasetUI datasetUI = newDataset(newDataset, null);
+ final History history = getHistory(new DatasetPath(datasetUI.getFullPath()), newDataset.getVersion(), null);
+
+ return InitialPreviewResponse.of(datasetUI, null, new SessionId(), null, true, history, null);
}
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetVersionResource.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetVersionResource.java
index ca96a8f88b..e18a1c8042 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetVersionResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetVersionResource.java
@@ -33,7 +33,6 @@
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
-import java.util.concurrent.TimeUnit;
import javax.annotation.security.RolesAllowed;
import javax.inject.Inject;
@@ -71,6 +70,7 @@
import com.dremio.dac.explore.model.DatasetUI;
import com.dremio.dac.explore.model.DatasetUIWithHistory;
import com.dremio.dac.explore.model.DatasetVersionResourcePath;
+import com.dremio.dac.explore.model.FromBase;
import com.dremio.dac.explore.model.HistogramValue;
import com.dremio.dac.explore.model.History;
import com.dremio.dac.explore.model.HistoryItem;
@@ -98,8 +98,12 @@
import com.dremio.dac.proto.model.dataset.FromSQL;
import com.dremio.dac.proto.model.dataset.NameDatasetRef;
import com.dremio.dac.proto.model.dataset.ReplacePatternRule;
+import com.dremio.dac.proto.model.dataset.SourceVersionReference;
import com.dremio.dac.proto.model.dataset.SplitRule;
import com.dremio.dac.proto.model.dataset.Transform;
+import com.dremio.dac.proto.model.dataset.TransformCreateFromParent;
+import com.dremio.dac.proto.model.dataset.TransformType;
+import com.dremio.dac.proto.model.dataset.VirtualDatasetState;
import com.dremio.dac.proto.model.dataset.VirtualDatasetUI;
import com.dremio.dac.resource.BaseResourceWithAllocator;
import com.dremio.dac.server.BufferAllocatorFactory;
@@ -108,7 +112,6 @@
import com.dremio.dac.service.errors.ConflictException;
import com.dremio.dac.service.errors.DatasetNotFoundException;
import com.dremio.dac.service.errors.DatasetVersionNotFoundException;
-import com.dremio.dac.service.errors.NewDatasetQueryException;
import com.dremio.dac.util.DatasetsUtil;
import com.dremio.exec.catalog.Catalog;
import com.dremio.exec.catalog.CatalogUtil;
@@ -124,7 +127,6 @@
import com.dremio.service.jobs.CompletionListener;
import com.dremio.service.jobs.JobNotFoundException;
import com.dremio.service.jobs.JobStatusListener;
-import com.dremio.service.jobs.JobSubmittedListener;
import com.dremio.service.jobs.JobsService;
import com.dremio.service.jobs.JobsVersionContext;
import com.dremio.service.jobs.SqlQuery;
@@ -152,23 +154,16 @@
public class DatasetVersionResource extends BaseResourceWithAllocator {
private static final Logger logger = LoggerFactory.getLogger(DatasetVersionResource.class);
- private static final int WAIT_FOR_RUN_HISTORY_S = 15;
-
private final DatasetTool tool;
private final QueryExecutor executor;
private final DatasetVersionMutator datasetService;
- private final JobsService jobsService;
-
private final Transformer transformer;
private final Recommenders recommenders;
private final JoinRecommender joinRecommender;
-
private final SecurityContext securityContext;
private final DatasetPath datasetPath;
private final DatasetVersion version;
private final HistogramGenerator histograms;
- private final CatalogService catalogService;
-
@Inject
public DatasetVersionResource (
SabotContext context,
@@ -217,7 +212,6 @@ public DatasetVersionResource(
super(allocator);
this.executor = executor;
this.datasetService = datasetService;
- this.jobsService = jobsService;
this.recommenders = recommenders;
this.transformer = transformer;
this.joinRecommender = joinRecommender;
@@ -226,7 +220,6 @@ public DatasetVersionResource(
this.securityContext = securityContext;
this.datasetPath = datasetPath;
this.version = version;
- this.catalogService = catalogService;
}
protected DatasetVersionResource(
@@ -247,7 +240,6 @@ protected DatasetVersionResource(
super(allocatorFactory);
this.executor = executor;
this.datasetService = datasetService;
- this.jobsService = jobsService;
this.recommenders = recommenders;
this.transformer = transformer;
this.joinRecommender = joinRecommender;
@@ -256,7 +248,6 @@ protected DatasetVersionResource(
this.securityContext = securityContext;
this.datasetPath = datasetPath;
this.version = version;
- this.catalogService = catalogService;
}
@@ -338,7 +329,7 @@ public InitialPreviewResponse getDatasetForVersion(
@QueryParam("refType") String refType,
@QueryParam("refValue") String refValue,
@QueryParam("triggerJob") String triggerJob) // "true" or "false". Default - "true". On error - "true"
- throws DatasetVersionNotFoundException, NamespaceException, JobNotFoundException, NewDatasetQueryException, IOException {
+ throws DatasetVersionNotFoundException, NamespaceException, JobNotFoundException {
Catalog catalog = datasetService.getCatalog();
final boolean versioned = isVersionedPlugin(datasetPath, catalog);
@@ -348,16 +339,39 @@ public InitialPreviewResponse getDatasetForVersion(
final VirtualDatasetUI dataset = getDatasetConfig(versioned);
return tool.createPreviewResponseForExistingDataset(
- getOrCreateAllocator("getDatasetForVersion"),
dataset,
new DatasetVersionResourcePath(
datasetPath, (tipVersion != null) ? tipVersion : dataset.getVersion()),
- limit,
engineName,
sessionId,
triggerJob);
}
+ // Versioned sources
+ // First check if the version already exists as result of running the query
+ try {
+ final VirtualDatasetUI vds = getDatasetConfig(true);
+ return tool.createPreviewResponseForExistingDataset(
+ vds,
+ new DatasetVersionResourcePath(datasetPath, version),
+ engineName,
+ sessionId,
+ triggerJob);
+ } catch (DatasetVersionNotFoundException e) {
+ // ignore
+ }
+
+ // The version doesn't exist, generate initial preview response from source.
+ return getInitialPreviewResponseForVersionedSource(engineName, sessionId, refType, refValue, triggerJob);
+ }
+
+ private InitialPreviewResponse getInitialPreviewResponseForVersionedSource(
+ String engineName,
+ String sessionId,
+ String refType,
+ String refValue,
+ String triggerJob)
+ throws NamespaceException, JobNotFoundException {
if (refType == null || refValue == null) {
throw UserException
.validationError()
@@ -366,55 +380,53 @@ public InitialPreviewResponse getDatasetForVersion(
}
final Map versionContextReqMapping =
- DatasetResourceUtils.createSourceVersionMapping(
- datasetPath.getRoot().getName(), refType, refValue);
+ DatasetResourceUtils.createSourceVersionMapping(datasetPath.getRoot().getName(), refType, refValue);
final Map versionContextMapping =
- DatasetResourceUtils.createSourceVersionMapping(versionContextReqMapping);
+ DatasetResourceUtils.createSourceVersionMapping(versionContextReqMapping);
- catalog = catalog.resolveCatalog(versionContextMapping);
+ final Catalog catalog = datasetService.getCatalog().resolveCatalog(versionContextMapping);
DremioTable table = catalog.getTable(new NamespaceKey(datasetPath.toPathList()));
if (!(table instanceof ViewTable)) {
throw UserException.validationError()
- .message("Expecting getting a view but returns a entity type of %s", table.getClass())
- .buildSilently();
+ .message("Expecting getting a view but returns a entity type of %s", table.getClass())
+ .buildSilently();
}
- tool.newUntitled(
- getOrCreateAllocator("newUntitled"),
- new FromSQL(table.getDatasetConfig().getVirtualDataset().getSql()),
- version,
- table.getDatasetConfig().getVirtualDataset().getContextList(),
- null,
- false,
- limit,
- engineName,
- sessionId,
- versionContextReqMapping);
-
- String tag = table.getDatasetConfig().getTag();
-
- VirtualDatasetUI virtualDatasetUI =
- datasetService.getVersion(tool.TMP_DATASET_PATH, version, true);
- virtualDatasetUI.setFullPathList(datasetPath.toPathList());
- virtualDatasetUI.setName(datasetPath.getDataset().getName());
- virtualDatasetUI.setIsNamed(true);
- virtualDatasetUI.setSavedTag(tag);
- logger.debug("Creating temp version {} in datasetVersion for view {} at version {} ",
- DatasetsUtil.printVersionViewInfo(virtualDatasetUI),
+ VirtualDatasetUI vds = new VirtualDatasetUI();
+ vds.setOwner(securityContext.getUserPrincipal().getName());
+ vds.setVersion(version);
+ vds.setFullPathList(datasetPath.toPathList());
+ vds.setName(datasetPath.getDataset().getName());
+ vds.setIsNamed(true);
+ vds.setId(table.getDatasetConfig().getId().getId());
+ vds.setContextList(table.getDatasetConfig().getVirtualDataset().getContextList());
+ vds.setSql(table.getDatasetConfig().getVirtualDataset().getSql());
+ vds.setSqlFieldsList(table.getDatasetConfig().getVirtualDataset().getSqlFieldsList());
+ final FromBase from = new FromSQL(table.getDatasetConfig().getVirtualDataset().getSql());
+ vds.setState(new VirtualDatasetState()
+ .setContextList(table.getDatasetConfig().getVirtualDataset().getContextList())
+ .setFrom(from.wrap()));
+ vds.setLastTransform(new Transform(TransformType.createFromParent)
+ .setTransformCreateFromParent(new TransformCreateFromParent(from.wrap())));
+ final List sourceVersionReferences =
+ DatasetResourceUtils.createSourceVersionReferenceList(versionContextReqMapping);
+ vds.setReferencesList(sourceVersionReferences);
+ vds.setSavedTag(table.getDatasetConfig().getTag());
+
+ logger.debug("Creating temp version {} in datasetVersion for view {} at version {}.",
+ DatasetsUtil.printVersionViewInfo(vds),
datasetPath.toUnescapedString(),
versionContextMapping.get(datasetPath.getRoot().getName()));
- datasetService.putVersion(virtualDatasetUI);
- virtualDatasetUI = datasetService.getVersion(datasetPath, version, true);
+ datasetService.putVersion(vds);
+ vds = datasetService.getVersion(datasetPath, version, true);
return tool.createPreviewResponseForExistingDataset(
- getOrCreateAllocator("getDatasetForVersion"),
- virtualDatasetUI,
- new DatasetVersionResourcePath(datasetPath, version),
- limit,
- engineName,
- sessionId,
- triggerJob);
+ vds,
+ new DatasetVersionResourcePath(datasetPath, version),
+ engineName,
+ sessionId,
+ triggerJob);
}
@GET @Path("review")
@@ -450,10 +462,33 @@ public InitialPreviewResponse transformAndPreview(
throw new ClientErrorException("Query parameter 'newVersion' should not be null");
}
- final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, datasetPath, getDatasetConfig(), transform, QueryType.UI_PREVIEW);
+ final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, datasetPath, getDatasetConfig(), transform, false, QueryType.UI_PREVIEW);
return tool.createPreviewResponse(datasetPath, datasetAndData, getOrCreateAllocator("transformAndPreview"), limit, false);
}
+ @POST @Path("transform_and_preview")
+ @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON)
+ public InitialPreviewResponse transformAndPreviewAsync(
+ /* Body */ TransformBase transform,
+ @QueryParam("newVersion") DatasetVersion newVersion,
+ @QueryParam("limit") @DefaultValue("50") int limit)
+ throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException {
+ if (newVersion == null) {
+ throw new ClientErrorException("Query parameter 'newVersion' should not be null");
+ }
+
+ final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, datasetPath, getDatasetConfig(), transform, true, QueryType.UI_PREVIEW);
+
+ return InitialPreviewResponse.of(
+ newDataset(datasetAndData.getDataset(), null),
+ datasetAndData.getJobId(),
+ datasetAndData.getSessionId(),
+ null,
+ true,
+ null,
+ null); // errors will be retrieved from job status
+ }
+
/**
* Apply the given transform on the dataset version and return results. Also save the
* transformed dataset as given new version.
@@ -477,7 +512,7 @@ public InitialTransformAndRunResponse transformAndRun(
}
final DatasetVersionResourcePath resourcePath = resourcePath();
- final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, resourcePath.getDataset(), getDatasetConfig(), transform, QueryType.UI_RUN);
+ final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, resourcePath.getDataset(), getDatasetConfig(), transform, false, QueryType.UI_RUN);
final History history = tool.getHistory(resourcePath.getDataset(), datasetAndData.getDataset().getVersion());
return InitialTransformAndRunResponse.of(
newDataset(datasetAndData.getDataset(), null),
@@ -485,6 +520,38 @@ public InitialTransformAndRunResponse transformAndRun(
datasetAndData.getSessionId(),
history);
}
+
+ /**
+ * Apply the given transform on the dataset version and return initial results after the job is started. Also creates
+ * a thread to save the transformed dataset as the given new version after the metadata is retrieved.
+ *
+ * @param transform
+ * @param newVersion
+ * @return
+ * @throws DatasetVersionNotFoundException
+ * @throws DatasetNotFoundException
+ * @throws NamespaceException
+ */
+ @POST @Path("transform_and_run")
+ @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON)
+ public InitialTransformAndRunResponse transformAndRunAsync(
+ /* Body */ TransformBase transform,
+ @QueryParam("newVersion") DatasetVersion newVersion
+ ) throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException {
+
+ if (newVersion == null) {
+ throw new ClientErrorException("Query parameter 'newVersion' should not be null");
+ }
+
+ final DatasetVersionResourcePath resourcePath = resourcePath();
+ final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, resourcePath.getDataset(), getDatasetConfig(), transform, true, QueryType.UI_RUN);
+ return InitialTransformAndRunResponse.of(
+ newDataset(datasetAndData.getDataset(), null),
+ datasetAndData.getJobId(),
+ datasetAndData.getSessionId(),
+ null);
+ }
+
protected DatasetUI newDataset(VirtualDatasetUI vds, DatasetVersion tipVersion) throws NamespaceException {
return DatasetUI.newInstance(vds, null, datasetService.getNamespaceService());
}
@@ -507,23 +574,23 @@ public InitialRunResponse run(@QueryParam("tipVersion") DatasetVersion tipVersio
final SqlQuery query = new SqlQuery(virtualDatasetUI.getSql(), virtualDatasetUI.getState().getContextList(), securityContext,
Strings.isNullOrEmpty(engineName)? null : engineName, sessionId, sourceVersionMapping);
- JobSubmittedListener listener = new JobSubmittedListener();
+ MetadataJobStatusListener listener = new MetadataJobStatusListener(tool, virtualDatasetUI, null);
+ // The saved dataset is incomplete, we want save the dataset again once the metadata is collected.
+ if (virtualDatasetUI.getSqlFieldsList() == null) {
+ listener.waitToApplyMetadataAndSaveDataset();
+ }
final JobData jobData = executor.runQueryWithListener(query, QueryType.UI_RUN, datasetPath, version, listener);
final JobId jobId = jobData.getJobId();
final SessionId jobDataSessionId = jobData.getSessionId();
- // wait for job to start (or WAIT_FOR_RUN_HISTORY_S seconds).
- boolean success = listener.await(WAIT_FOR_RUN_HISTORY_S, TimeUnit.SECONDS);
- if (!success) {
- throw new InterruptedException("Starting a query timed out after " + WAIT_FOR_RUN_HISTORY_S +
- " seconds, please try again.");
+ if (virtualDatasetUI.getSqlFieldsList() == null) {
+ listener.setJobId(jobData.getJobId());
}
// tip version is optional, as it is only needed when we are navigated back in history
// otherwise assume the current version is at the tip of the history
tipVersion = tipVersion != null ? tipVersion : virtualDatasetUI.getVersion();
final History history = tool.getHistory(datasetPath, virtualDatasetUI.getVersion(), tipVersion);
- // VBesschetnov 2019-01-08
- // this is requires as BE generates apiLinks, that is used by UI to send requests for preview/run. In case, when history
+ // This is requires as BE generates apiLinks, that is used by UI to send requests for preview/run. In case, when history
// of a dataset reference on a version for other dataset. And a user navigate to that version and tries to preview it,
// we would not be resolve a tip version and preview will fail. We should always send requests to original dataset
// path (tip version path) to be able to get a preview/run data
@@ -559,9 +626,9 @@ public InitialPendingTransformResponse transformDataSetPreview(
* Saves this version as the current version of a dataset under the asDatasetPath if provided
*
* @param asDatasetPath
- * @param savedTag the last OCC version known the the client. If no one else has saved
+ * @param savedTag the last OCC version known the client. If no one else has saved
* to this name since the client making request learned of this OCC
- * version then the request will be successful. Otherwise it will fail
+ * version then the request will be successful. Otherwise, it will fail
* because saving would clobber the already saved dataset that the client
* did not know about.
* @return
@@ -587,13 +654,20 @@ public DatasetUIWithHistory saveAsDataSet(
if(versioned && !versionedViewEnabled){
throw UserException.unsupportedError().message("Versioned view is not enabled").buildSilently();
}
+ //Gets the latest version of the view from DatasetVersion store
final VirtualDatasetUI vds = getDatasetConfig(versioned);
- if(savedTag != null && branchName == null && versioned){
+ if(vds != null && branchName == null && versioned) {
branchName = vds.getReferencesList().get(0).getReference().getValue();
}
- if(versioned && branchName != null){
- setReference(vds, branchName);
+
+ if (versioned) {
+ if (branchName != null) {
+ setReference(vds, branchName);
+ } else {
+ throw UserException.unsupportedError().message("Tried to create a versioned view but branch name is null").buildSilently();
+ }
}
+
final DatasetUI savedDataset = save(vds, asDatasetPath, savedTag, branchName, versioned);
return new DatasetUIWithHistory(savedDataset, tool.getHistory(asDatasetPath, savedDataset.getDatasetVersion()));
}
@@ -627,19 +701,28 @@ public DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String sa
return save(vds, asDatasetPath, savedTag, null, false, attributes);
}
- public DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String savedTag, String branchName, boolean isVersionedSource, NamespaceAttribute... attributes)
+ public DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String savedTag, String branchName, final boolean isVersionedSource, NamespaceAttribute... attributes)
throws DatasetNotFoundException, UserNotFoundException, NamespaceException, DatasetVersionNotFoundException {
checkSaveVersionedView(branchName, isVersionedSource);
String queryString = vds.getSql();
- boolean isVersionViewEnabled = datasetService.checkIfVersionedViewEnabled();
+ final boolean isVersionViewEnabled = datasetService.checkIfVersionedViewEnabled();
if (isVersionViewEnabled) {
ParserUtil.validateViewQuery(queryString);
}
- final String nameConflictErrorMsg = String.format("VDS '%s' already exists. Please enter a different name.",
- asDatasetPath.getLeaf());
+
+ if (isVersionedSource) {
+ if (ParserUtil.checkTimeTravelOnView(queryString)){
+ throw UserException.unsupportedError()
+ .message("Versioned views not supported for time travel queries. Please use AT TAG or AT COMMIT instead")
+ .buildSilently();
+ }
+ }
+
final List fullPathList = asDatasetPath.toPathList();
if(!isVersionedSource){
if (isAncestor(vds, fullPathList)) {
+ final String nameConflictErrorMsg = String.format("VDS '%s' already exists. Please enter a different name.",
+ asDatasetPath.getLeaf());
throw new ConflictException(nameConflictErrorMsg);
}
if (!datasetPath.equals(asDatasetPath)) {
@@ -679,7 +762,9 @@ public DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String sa
} catch(NamespaceNotFoundException nfe) {
throw new ClientErrorException("Parent folder doesn't exist", nfe);
} catch(ConcurrentModificationException cme) {
- throw new ConflictException(nameConflictErrorMsg, cme);
+ final String cmeMessage = String.format("View '%s' experienced a concurrent modification exception. Please ensure there are no self-references in your view and no other systems are editing this view.",
+ asDatasetPath.getLeaf());
+ throw new ConflictException(cmeMessage, cme);
} catch (IOException e) {
throw UserException.validationError().message("Error saving to the source: %s", e.getMessage()).buildSilently();
}
@@ -688,7 +773,7 @@ public DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String sa
}
/**
- * @return true if pathList is an ancestor (parent or grand parent) of the virtual dataset
+ * @return true if pathList is an ancestor (parent or grandparent) of the virtual dataset
*/
private static boolean isAncestor(VirtualDatasetUI vds, List pathList) {
List parents = vds.getParentsList();
@@ -791,20 +876,19 @@ public Card getSplitCard(
@POST @Path("/editOriginalSql")
@Produces(APPLICATION_JSON)
public InitialPreviewResponse reapplyDatasetAndPreview() throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException, JobNotFoundException {
- Transformer.DatasetAndData datasetAndData = reapplyDataset(QueryType.UI_PREVIEW, JobStatusListener.NO_OP);
+ Transformer.DatasetAndData datasetAndData = reapplyDataset(JobStatusListener.NO_OP);
//max records = 0 means, that we should not wait for job completion
return tool.createPreviewResponse(new DatasetPath(datasetAndData.getDataset().getFullPathList()), datasetAndData, getOrCreateAllocator("reapplyDatasetAndPreview"), 0, false);
}
- private Transformer.DatasetAndData reapplyDataset(QueryType queryType, JobStatusListener listener) throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException {
+ private Transformer.DatasetAndData reapplyDataset(JobStatusListener listener) throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException {
List items = getPreviousDatasetVersions(getDatasetConfig());
List transforms = new ArrayList<>();
for(VirtualDatasetUI dataset : items){
transforms.add(dataset.getLastTransform());
}
- return transformer.editOriginalSql(version, transforms, queryType, listener);
-
+ return transformer.editOriginalSql(version, transforms, QueryType.UI_PREVIEW, listener);
}
@POST @Path("/reapplyAndSave")
@@ -813,7 +897,7 @@ public DatasetUIWithHistory reapplySave(
@QueryParam("as") DatasetPath asDatasetPath
) throws DatasetVersionNotFoundException, UserNotFoundException, DatasetNotFoundException, NamespaceException {
final CompletionListener completionListener = new CompletionListener();
- Transformer.DatasetAndData datasetAndData = reapplyDataset(QueryType.UI_PREVIEW, completionListener);
+ Transformer.DatasetAndData datasetAndData = reapplyDataset(completionListener);
completionListener.awaitUnchecked();
DatasetUI savedDataset = save(datasetAndData.getDataset(), asDatasetPath, null);
return new DatasetUIWithHistory(savedDataset, tool.getHistory(asDatasetPath, datasetAndData.getDataset().getVersion()));
@@ -849,10 +933,10 @@ private ReplaceValuesCard genReplaceValuesCard(List selected, Selection
Set selectedSet = new HashSet<>(selected);
SqlQuery query = new SqlQuery(virtualDatasetUI.getSql(), virtualDatasetUI.getState().getContextList(), securityContext);
DataType colType = getColType(selection.getColName());
- Histogram histo = histograms.getHistogram(datasetPath, version, selection, colType, query, getOrCreateAllocator("genReplaceValuesCard"));
+ Histogram histogram = histograms.getHistogram(datasetPath, version, selection, colType, query, getOrCreateAllocator("genReplaceValuesCard"));
long selectedCount = histograms.getSelectionCount(datasetPath, version, query, colType, selection.getColName(), selectedSet, getOrCreateAllocator("genReplaceValuesCard"));
- return new ReplaceValuesCard(histo.getValues(), selectedCount, histo.getAvailableValues() - selectedCount, histo.getAvailableValues());
+ return new ReplaceValuesCard(histogram.getValues(), selectedCount, histogram.getAvailableValues() - selectedCount, histogram.getAvailableValues());
}
@POST @Path("replace_preview")
@@ -942,16 +1026,6 @@ private ReplaceValuesCard getValuesCard(ReplaceValuesPreviewReq req) throws Data
return genReplaceValuesCard(req.getReplacedValues(), selection);
}
-
- @GET @Path("history")
- @Produces(APPLICATION_JSON)
- public History getHistory(@QueryParam("tipVersion") DatasetVersion tipVersion) throws DatasetVersionNotFoundException {
- // tip version is optional, as it is only needed when we are navigated back in history
- // otherwise assume the current version is at the tip of the history
- tipVersion = tipVersion != null ? tipVersion : virtualDatasetUI.getVersion();
- return tool.getHistory(datasetPath, version, tipVersion);
- }
-
public static final List AVAILABLE_TYPES_FOR_CLEANING = unmodifiableList(asList(TEXT, INTEGER, FLOAT));
@POST @Path("clean") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON)
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetsResource.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetsResource.java
index 9cd15f0457..747cd8ec92 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetsResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetsResource.java
@@ -15,8 +15,7 @@
*/
package com.dremio.dac.explore;
-import static com.dremio.dac.explore.DatasetTool.TMP_DATASET_PATH;
-
+import java.security.AccessControlException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -39,7 +38,6 @@
import com.dremio.dac.annotations.RestResource;
import com.dremio.dac.annotations.Secured;
import com.dremio.dac.explore.model.CreateFromSQL;
-import com.dremio.dac.explore.model.DatasetDetails;
import com.dremio.dac.explore.model.DatasetPath;
import com.dremio.dac.explore.model.DatasetSearchUI;
import com.dremio.dac.explore.model.DatasetSearchUIs;
@@ -50,18 +48,7 @@
import com.dremio.dac.explore.model.InitialUntitledRunResponse;
import com.dremio.dac.explore.model.NewUntitledFromParentRequest;
import com.dremio.dac.explore.model.VersionContextReq;
-import com.dremio.dac.model.common.DACRuntimeException;
-import com.dremio.dac.model.folder.SourceFolderPath;
-import com.dremio.dac.model.namespace.DatasetContainer;
-import com.dremio.dac.model.sources.PhysicalDatasetPath;
-import com.dremio.dac.model.sources.SourceName;
-import com.dremio.dac.model.sources.SourcePath;
-import com.dremio.dac.model.sources.SourceUI;
-import com.dremio.dac.model.spaces.Home;
import com.dremio.dac.model.spaces.HomeName;
-import com.dremio.dac.model.spaces.HomePath;
-import com.dremio.dac.model.spaces.Space;
-import com.dremio.dac.model.spaces.SpacePath;
import com.dremio.dac.proto.model.dataset.FromSQL;
import com.dremio.dac.proto.model.dataset.FromTable;
import com.dremio.dac.resource.BaseResourceWithAllocator;
@@ -73,30 +60,27 @@
import com.dremio.dac.service.errors.DatasetNotFoundException;
import com.dremio.dac.service.errors.DatasetVersionNotFoundException;
import com.dremio.dac.service.errors.NewDatasetQueryException;
+import com.dremio.dac.service.reflection.ReflectionServiceHelper;
import com.dremio.dac.service.search.SearchContainer;
import com.dremio.datastore.SearchTypes.SortOrder;
import com.dremio.exec.catalog.CatalogUtil;
-import com.dremio.exec.catalog.ConnectionReader;
import com.dremio.exec.catalog.DatasetCatalog;
import com.dremio.exec.catalog.DremioTable;
import com.dremio.file.FilePath;
-import com.dremio.file.SourceFilePath;
import com.dremio.service.jobs.JobsService;
-import com.dremio.service.namespace.BoundedDatasetCount;
import com.dremio.service.namespace.NamespaceException;
import com.dremio.service.namespace.NamespaceKey;
-import com.dremio.service.namespace.NamespaceService;
import com.dremio.service.namespace.dataset.DatasetVersion;
import com.dremio.service.namespace.dataset.proto.DatasetConfig;
import com.dremio.service.namespace.proto.NameSpaceContainer;
-import com.dremio.service.namespace.source.proto.SourceConfig;
-import com.dremio.service.namespace.space.proto.ExtendedConfig;
-import com.dremio.service.namespace.space.proto.HomeConfig;
-import com.dremio.service.namespace.space.proto.SpaceConfig;
+import com.dremio.service.users.User;
+import com.dremio.service.users.UserService;
import com.google.common.base.Preconditions;
/**
- * List datasets from space/folder/home/source
+ * Creates datasets from SQL Runner
+ * Searches datasets from Catalog
+ * Provides dataset summary from Catalog
*
*/
@RestResource
@@ -104,50 +88,75 @@
@RolesAllowed({"admin", "user"})
@Path("/datasets")
public class DatasetsResource extends BaseResourceWithAllocator {
-
private final DatasetVersionMutator datasetService;
- private final NamespaceService namespaceService;
private final DatasetTool tool;
- private final ConnectionReader connectionReader;
private final DatasetCatalog datasetCatalog;
private final CatalogServiceHelper catalogServiceHelper;
- private CollaborationHelper collaborationService;
+ private final CollaborationHelper collaborationService;
+ private final ReflectionServiceHelper reflectionServiceHelper;
+ private final UserService userService;
@Inject
public DatasetsResource(
- NamespaceService namespaceService,
DatasetVersionMutator datasetService,
JobsService jobsService,
QueryExecutor executor,
- ConnectionReader connectionReader,
@Context SecurityContext securityContext,
DatasetCatalog datasetCatalog,
CatalogServiceHelper catalogServiceHelper,
BufferAllocatorFactory allocatorFactory,
- CollaborationHelper collaborationService) {
- this(namespaceService, datasetService,
+ CollaborationHelper collaborationService,
+ ReflectionServiceHelper reflectionServiceHelper,
+ UserService userService) {
+ this(datasetService,
new DatasetTool(datasetService, jobsService, executor, securityContext),
- connectionReader, datasetCatalog, catalogServiceHelper, allocatorFactory, collaborationService);
+ datasetCatalog, catalogServiceHelper, allocatorFactory, collaborationService, reflectionServiceHelper, userService);
}
- protected DatasetsResource(NamespaceService namespaceService,
+ protected DatasetsResource(
DatasetVersionMutator datasetService,
DatasetTool tool,
- ConnectionReader connectionReader,
DatasetCatalog datasetCatalog,
CatalogServiceHelper catalogServiceHelper,
BufferAllocatorFactory allocatorFactory,
- CollaborationHelper collaborationService
+ CollaborationHelper collaborationService,
+ ReflectionServiceHelper reflectionServiceHelper,
+ UserService userService
)
{
super(allocatorFactory);
- this.namespaceService = namespaceService;
this.datasetService = datasetService;
this.tool = tool;
- this.connectionReader = connectionReader;
this.datasetCatalog = datasetCatalog;
this.catalogServiceHelper = catalogServiceHelper;
this.collaborationService = collaborationService;
+ this.reflectionServiceHelper = reflectionServiceHelper;
+ this.userService = userService;
+ }
+
+ private DatasetConfig getDatasetConfig(DatasetPath datasetPath, Map references) {
+ DatasetCatalog datasetNewCatalog = datasetCatalog.resolveCatalog(DatasetResourceUtils.createSourceVersionMapping(references));
+ NamespaceKey namespaceKey = datasetPath.toNamespaceKey();
+ final DremioTable table = datasetNewCatalog.getTable(namespaceKey);
+ if (table == null) {
+ throw new DatasetNotFoundException(datasetPath);
+ }
+ return table.getDatasetConfig();
+ }
+
+ private DatasetSummary getDatasetSummary(DatasetPath datasetPath,
+ Map references) throws NamespaceException, DatasetNotFoundException {
+ NamespaceKey namespaceKey = datasetPath.toNamespaceKey();
+ final DatasetConfig datasetConfig = getDatasetConfig(datasetPath, references);
+
+ return newDatasetSummary(datasetConfig,
+ datasetService.getJobsCount(namespaceKey),
+ datasetService.getDescendantsCount(namespaceKey),
+ references,
+ Collections.emptyList(),
+ null,
+ null,
+ null);
}
private InitialPreviewResponse newUntitled(DatasetPath fromDatasetPath,
@@ -159,11 +168,13 @@ private InitialPreviewResponse newUntitled(DatasetPath fromDatasetPath,
String triggerJob)
throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
FromTable from = new FromTable(fromDatasetPath.toPathString());
- DatasetSummary summary = getDatasetSummary(fromDatasetPath, references);
if (DatasetTool.shouldTriggerJob(triggerJob)) {
+ DatasetSummary summary = getDatasetSummary(fromDatasetPath, references);
return newUntitled(from, newVersion, fromDatasetPath.toParentPathList(), summary, limit, engineName, sessionId, references);
} else {
- return tool.createPreviewResponseForPhysicalDataset(from, newVersion, fromDatasetPath.toParentPathList(), summary, references);
+ DatasetConfig datasetConfig = getDatasetConfig(fromDatasetPath, references);
+ return tool.createPreviewResponseForPhysicalDataset(from, newVersion, fromDatasetPath.toParentPathList(),
+ datasetConfig.getType(), datasetConfig.getFullPathList(), references);
}
}
@@ -176,7 +187,7 @@ private InitialPreviewResponse newUntitled(FromBase from, DatasetVersion newVers
}
/**
- * A user clicked "new query" and then wrote a SQL query. This is the first version of the dataset we will be creating (this is a "initial commit")
+ * A user clicked "new query" and then wrote a SQL query. This is the first version of the dataset we will be creating (this is an "initial commit")
*
* @param newVersion The version id we should use for the new version of dataset (generated by client)
* @param sql The sql information to generate the new dataset
@@ -188,6 +199,7 @@ private InitialPreviewResponse newUntitled(FromBase from, DatasetVersion newVers
@POST @Path("new_untitled_sql")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
+ @Deprecated
public InitialPreviewResponse newUntitledSql(
@QueryParam("newVersion") DatasetVersion newVersion,
@QueryParam("limit") Integer limit,
@@ -208,7 +220,7 @@ public InitialPreviewResponse newUntitledSql(
}
/**
- * A user clicked "SQL Runner", then wrote a SQL query and then clicked "Preview". This is the first version of the dataset we will be creating (this is a "initial commit")
+ * A user clicked "SQL Runner", then wrote a SQL query and then clicked "Preview". This is the first version of the dataset we will be creating (this is an "initial commit")
*
* @param newVersion The version id we should use for the new version of dataset (generated by client)
* @param sql The sql information to generate the new dataset
@@ -233,19 +245,18 @@ public InitialUntitledRunResponse newTmpUntitledSql(
sql.getContext(),
sql.getEngineName(),
sessionId,
- sql.getReferences(),
- limit // ignored
- );
+ sql.getReferences());
}
@POST @Path("new_untitled_sql_and_run")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
+ @Deprecated
public InitialRunResponse newUntitledSqlAndRun(
@QueryParam("newVersion") DatasetVersion newVersion,
@QueryParam("sessionId") String sessionId,
/* body */ CreateFromSQL sql)
- throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, InterruptedException {
+ throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException {
Preconditions.checkNotNull(newVersion, "newVersion should not be null");
return tool.newUntitledAndRun(
@@ -264,7 +275,7 @@ public InitialUntitledRunResponse newTmpUntitledSqlAndRun(
@QueryParam("newVersion") DatasetVersion newVersion,
@QueryParam("sessionId") String sessionId,
/* body */ CreateFromSQL sql)
- throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, InterruptedException {
+ throws DatasetNotFoundException, DatasetVersionNotFoundException {
Preconditions.checkNotNull(newVersion, "newVersion should not be null");
return tool.newTmpUntitledAndRun(
@@ -315,24 +326,6 @@ public InitialPreviewResponse newUntitledFromParent(
}
}
- public InitialPreviewResponse createUntitledFromSourceFile(SourceName sourceName, String path, Integer limit)
- throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
- SourceFilePath filePath = SourceFilePath.fromURLPath(sourceName, path);
- return tool.newUntitled(getOrCreateAllocator("createUntitledFromSourceFile"), new FromTable(filePath.toPathString()), DatasetVersion.newVersion(), filePath.toParentPathList(), limit);
- }
-
- public InitialPreviewResponse createUntitledFromSourceFolder(SourceName sourceName, String path, Integer limit)
- throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
- SourceFolderPath folderPath = SourceFolderPath.fromURLPath(sourceName, path);
- return tool.newUntitled(getOrCreateAllocator("createUntitledFromSourceFolder"), new FromTable(folderPath.toPathString()), DatasetVersion.newVersion(), folderPath.toPathList(), limit);
- }
-
- public InitialPreviewResponse createUntitledFromPhysicalDataset(SourceName sourceName, String path, Integer limit)
- throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
- PhysicalDatasetPath datasetPath = PhysicalDatasetPath.fromURLPath(sourceName, path);
- return tool.newUntitled(getOrCreateAllocator("createUntitledFromPhysicalDataset"), new FromTable(datasetPath.toPathString()), DatasetVersion.newVersion(), datasetPath.toParentPathList(), limit);
- }
-
public InitialPreviewResponse createUntitledFromHomeFile(HomeName homeName, String path, Integer limit)
throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
FilePath filePath = FilePath.fromURLPath(homeName, path);
@@ -368,31 +361,55 @@ public DatasetSummary getDatasetSummary(
@QueryParam("refType") String refType,
@QueryParam("refValue") String refValue) throws NamespaceException, DatasetNotFoundException {
final DatasetPath datasetPath = new DatasetPath(PathUtils.toPathComponents(path));
- return getDatasetSummary(datasetPath, DatasetResourceUtils.createSourceVersionMapping(datasetPath.getRoot().getName(), refType, refValue));
+ return getEnhancedDatasetSummary(datasetPath, DatasetResourceUtils.createSourceVersionMapping(datasetPath.getRoot().getName(), refType, refValue));
}
- private DatasetSummary getDatasetSummary(DatasetPath datasetPath,
- Map references) throws NamespaceException, DatasetNotFoundException {
- DatasetCatalog datasetNewCatalog = datasetCatalog.resolveCatalog(DatasetResourceUtils.createSourceVersionMapping(references));
- final DremioTable table = datasetNewCatalog.getTable(datasetPath.toNamespaceKey());
- if (table == null) {
- throw new DatasetNotFoundException(datasetPath);
+ protected User getUser(String username, String entityId) {
+ User user = null;
+ if (username != null) {
+ try {
+ user = userService.getUser(username);
+ } catch (Exception e) {
+ // ignore
+ }
}
- final DatasetConfig datasetConfig = table.getDatasetConfig();
+ return user;
+ }
+
+ private DatasetSummary getEnhancedDatasetSummary(DatasetPath datasetPath,
+ Map references)
+ throws NamespaceException, DatasetNotFoundException {
+ NamespaceKey namespaceKey = datasetPath.toNamespaceKey();
+ final DatasetConfig datasetConfig = getDatasetConfig(datasetPath, references);
+
+ String entityId = datasetConfig.getId().getId();
Optional tags = Optional.empty();
- String sourceName = datasetPath.toNamespaceKey().getRoot();
- Boolean isVersioned = CatalogUtil.requestedPluginSupportsVersionedTables(sourceName, datasetService.getCatalog());
+ String sourceName = namespaceKey.getRoot();
+ boolean isVersioned = CatalogUtil.requestedPluginSupportsVersionedTables(sourceName, datasetService.getCatalog());
if (!isVersioned) {
// only use CollaborationHelper for non-versioned dataset from non-arctic source
// arctic source doesn't rely on NamespaceService while CollaborationHelper use NamespaceService underneath
- tags = collaborationService.getTags(datasetConfig.getId().getId());
+ tags = collaborationService.getTags(entityId);
+ }
+
+ // TODO: DX-61580 Add last modified user to DatasetConfig
+ // For now, using the owner as the last modified user. The code is messy. Will be improved in the follow-up story.
+ User owner = getUser(datasetConfig.getOwner(), entityId);
+ User lastModifyingUser = getUser(datasetConfig.getOwner(), entityId); // datasetConfig.getLastUser();
+ Boolean hasReflection;
+ try {
+ hasReflection = reflectionServiceHelper.doesDatasetHaveReflection(entityId);
+ } catch (AccessControlException e) {
+ // If the user doesn't have the proper privilege, set it to null specifically so that it's not even sent back
+ hasReflection = null;
}
return newDatasetSummary(datasetConfig,
- datasetService.getJobsCount(datasetPath.toNamespaceKey()),
- datasetService.getDescendantsCount(datasetPath.toNamespaceKey()),
+ datasetService.getJobsCount(namespaceKey),
+ datasetService.getDescendantsCount(namespaceKey),
references,
- tags.isPresent() ? tags.get().getTags() : Collections.emptyList());
+ tags.isPresent() ? tags.get().getTags() : Collections.emptyList(),
+ hasReflection, owner, lastModifyingUser);
}
protected DatasetSummary newDatasetSummary(
@@ -400,63 +417,10 @@ protected DatasetSummary newDatasetSummary(
int jobCount,
int descendants,
Map references,
- List tags) throws NamespaceException {
- return DatasetSummary.newInstance(datasetConfig, jobCount, descendants, references, tags);
- }
-
- @GET
- @Path("/context/{type}/{datasetContainer}/{path: .*}")
- @Produces(MediaType.APPLICATION_JSON)
- public DatasetDetails getDatasetContext(@PathParam("type") String type,
- @PathParam("datasetContainer") String datasetContainer,
- @PathParam("path") String path)
- throws Exception {
- // TODO - DX-4072 - this is a bit hacky, but not sure of a better way to do this right now, handling
- // of dataset paths inside of URL paths could use overall review and standardization
- final DatasetPath datasetPath = new DatasetPath(datasetContainer + "." + path);
- if (datasetPath.equals(TMP_DATASET_PATH)) {
- // TODO - this can be removed if the UI prevents sending tmp.UNTITLED, for now handle it gracefully and hand
- // back a response that will not cause a rendering failure
- return new DatasetDetails(
- TMP_DATASET_PATH.toPathList(),
- "", 0, 0, System.currentTimeMillis(),
- new Space(null, "None", null, null, null, 0, null));
- }
-
- final DatasetConfig datasetConfig = namespaceService.getDataset(datasetPath.toNamespaceKey());
- String containerName = datasetConfig.getFullPathList().get(0);
- DatasetContainer spaceInfo;
- if ("home".equals(type)) {
- HomePath homePath = new HomePath(containerName);
- HomeConfig home = namespaceService.getHome(homePath.toNamespaceKey());
- long dsCount = namespaceService.getAllDatasetsCount(homePath.toNamespaceKey());
- home.setExtendedConfig(new ExtendedConfig().setDatasetCount(dsCount));
- spaceInfo = newHome(homePath, home);
- } else if ("space".equals(type)) {
- final NamespaceKey spaceKey = new SpacePath(containerName).toNamespaceKey();
- SpaceConfig space = namespaceService.getSpace(spaceKey);
- spaceInfo = newSpace(space, namespaceService.getAllDatasetsCount(spaceKey));
- } else if ("source".equals(type)) {
- final NamespaceKey sourceKey = new SourcePath(containerName).toNamespaceKey();
- SourceConfig source = namespaceService.getSource(sourceKey);
- BoundedDatasetCount datasetCount = namespaceService.getDatasetCount(sourceKey, BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH);
- spaceInfo = SourceUI.get(source, connectionReader)
- .setNumberOfDatasets(datasetCount.getCount());
- } else {
- throw new DACRuntimeException("Incorrect dataset container type provided:" + type);
- }
- return new DatasetDetails(datasetConfig,
- datasetService.getJobsCount(datasetPath.toNamespaceKey()),
- datasetService.getDescendantsCount(datasetPath.toNamespaceKey()),
- spaceInfo
- );
- }
-
- protected Home newHome(HomePath homePath, HomeConfig home) {
- return new Home(homePath, home);
- }
-
- protected Space newSpace(SpaceConfig spaceConfig, int datasetCount) throws Exception {
- return Space.newInstance(spaceConfig, null, datasetCount);
+ List tags,
+ Boolean hasReflection,
+ User owner,
+ User lastModifyingUser) throws NamespaceException {
+ return DatasetSummary.newInstance(datasetConfig, jobCount, descendants, references, tags, hasReflection, owner, lastModifyingUser);
}
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/ExtractRecommender.java b/dac/backend/src/main/java/com/dremio/dac/explore/ExtractRecommender.java
index 32601eb702..895f8ba44d 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/ExtractRecommender.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/ExtractRecommender.java
@@ -70,6 +70,7 @@ public List getRules(Selection selection, DataType selColType) {
return rules;
}
+ @Override
public TransformRuleWrapper wrapRule(ExtractRule extractRule) {
switch (extractRule.getType()) {
case pattern:
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/HistogramGenerator.java b/dac/backend/src/main/java/com/dremio/dac/explore/HistogramGenerator.java
index 129e8ab60d..89017da0f8 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/HistogramGenerator.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/HistogramGenerator.java
@@ -383,6 +383,7 @@ static void produceRanges(List ranges, LocalDateTime min, LocalDateTime
break;
case MILLENNIUM:
timeValue = toMillis(tmpValue.plusYears(1000));
+ break;
default:
break;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/JSONElementLocator.java b/dac/backend/src/main/java/com/dremio/dac/explore/JSONElementLocator.java
index 40fec58ae2..8f4d1c16f6 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/JSONElementLocator.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/JSONElementLocator.java
@@ -379,10 +379,12 @@ public JsonPathElement last() {
return path.peek();
}
+ @Override
public Iterator iterator() {
return path.descendingIterator();
}
+ @Override
public String toString() {
return Joiner.on("").join(path.descendingIterator());
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/MetadataJobStatusListener.java b/dac/backend/src/main/java/com/dremio/dac/explore/MetadataJobStatusListener.java
index 433eee5fd5..86ea5fb66a 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/MetadataJobStatusListener.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/MetadataJobStatusListener.java
@@ -45,7 +45,6 @@ class MetadataJobStatusListener implements JobStatusListener {
MetadataJobStatusListener(DatasetTool datasetTool, VirtualDatasetUI newDataset, FromBase from) {
Preconditions.checkArgument(datasetTool != null, "datasetTool can't be null.");
Preconditions.checkArgument(newDataset != null, "newDataset can't be null.");
- Preconditions.checkArgument(from != null, "from can't be null.");
this.datasetTool = datasetTool;
this.newDataset = newDataset;
this.from = from;
@@ -62,7 +61,9 @@ public void waitToApplyMetadataAndSaveDataset() {
private void applyMetadataAndSaveDataset() {
try {
- latch.await();
+ synchronized (latch) {
+ latch.await();
+ }
} catch (final InterruptedException ex) {
Throwables.propagate(ex);
}
@@ -98,17 +99,23 @@ public void metadataCollected(QueryMetadata metadata) {
@Override
public void jobFailed(Exception e) {
error = e;
- latch.notifyAll();
+ synchronized (latch) {
+ latch.notifyAll();
+ }
}
@Override
public void jobCompleted() {
- latch.notifyAll();
+ synchronized (latch) {
+ latch.notifyAll();
+ }
}
@Override
public void jobCancelled(String reason) {
cancelled = true;
- latch.notifyAll();
+ synchronized (latch) {
+ latch.notifyAll();
+ }
}
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/QueryExecutor.java b/dac/backend/src/main/java/com/dremio/dac/explore/QueryExecutor.java
index 72baecdec8..5fb0c3ff53 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/QueryExecutor.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/QueryExecutor.java
@@ -68,6 +68,8 @@
import com.dremio.service.namespace.file.FileFormat;
import com.dremio.service.users.SystemUser;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
+
/**
* A per RequestScoped class used to execute queries.
*/
@@ -136,6 +138,7 @@ JobData runQueryWithListener(SqlQuery query, QueryType queryType, DatasetPath da
* @param runInSameThread runs attemptManager in a single thread
* @param ignoreColumnLimits ignores the max number of columns allowed for a scan
*/
+ @WithSpan
JobData runQueryWithListener(SqlQuery query, QueryType queryType, DatasetPath datasetPath,
DatasetVersion version, JobStatusListener statusListener, boolean runInSameThread, boolean ignoreColumnLimits) {
String messagePath = datasetPath + (version == null ? "" : "/" + version);
@@ -219,7 +222,7 @@ public JobData runQueryAndWaitForCompletion(SqlQuery query, QueryType queryType,
return data;
}
- public List getColumnList(final String username, DatasetPath path, List referenceList) {
+ public List getColumnList(DatasetPath path, List referenceList) {
Map sourceVersionMapping = QueryExecutorUtils.createSourceVersionMapping(referenceList);
EntityExplorer entityExplorer = catalogService.getCatalog(MetadataRequestOptions.of(
SchemaConfig.newBuilder(CatalogUser.from(context.getUserPrincipal().getName())).build(), sourceVersionMapping));
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/QueryParser.java b/dac/backend/src/main/java/com/dremio/dac/explore/QueryParser.java
index 1dabcd91d1..ce597934eb 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/QueryParser.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/QueryParser.java
@@ -19,6 +19,7 @@
import java.security.AccessControlException;
import java.util.List;
+import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.calcite.plan.RelOptCost;
@@ -195,7 +196,8 @@ public void planCompleted(ExecutionPlan plan) {
}
@Override
- public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken) {
+ public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after,
+ long millisTaken, Map timeBreakdownPerRule) {
switch(phase){
case JOIN_PLANNING_MULTI_JOIN:
// Join optimization starts with multijoin analysis phase
@@ -210,8 +212,10 @@ public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode
break;
case REDUCE_EXPRESSIONS:
builder.addExpandedPlan(before);
+ break;
default:
// noop.
+ break;
}
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/ReplaceRecommender.java b/dac/backend/src/main/java/com/dremio/dac/explore/ReplaceRecommender.java
index f8481c0723..5628c67cfc 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/ReplaceRecommender.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/ReplaceRecommender.java
@@ -126,6 +126,7 @@ private List recommendReplacePattern(Selection selection) {
return rules;
}
+ @Override
public TransformRuleWrapper wrapRule(ReplacePatternRule rule) {
return new ReplaceTransformRuleWrapper(rule);
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/SplitRecommender.java b/dac/backend/src/main/java/com/dremio/dac/explore/SplitRecommender.java
index 940392c166..77480b485e 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/SplitRecommender.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/SplitRecommender.java
@@ -54,6 +54,7 @@ public List getRules(Selection selection, DataType selColType) {
return rules;
}
+ @Override
public TransformRuleWrapper wrapRule(SplitRule rule) {
return new SplitTransformRuleWrapper(rule);
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/TransformActor.java b/dac/backend/src/main/java/com/dremio/dac/explore/TransformActor.java
index 46209299f5..0b4edfcc8d 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/TransformActor.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/TransformActor.java
@@ -183,7 +183,7 @@ public TransformResult visit(TransformJoin join) throws Exception {
List columns = new ArrayList<>();
List joinedColumns = new ArrayList<>();
List allJoinedColumns = new ArrayList<>();
- columns.addAll(executor.getColumnList(username, rightPath, join.getReferencesList()));
+ columns.addAll(executor.getColumnList(rightPath, join.getReferencesList()));
final int edge = m.columnCount();
for (JoinCondition jc : join.getJoinConditionsList()) {
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/Transformer.java b/dac/backend/src/main/java/com/dremio/dac/explore/Transformer.java
index 358b8823f0..bec2684d9d 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/Transformer.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/Transformer.java
@@ -33,6 +33,8 @@
import com.dremio.dac.explore.model.TransformBase;
import com.dremio.dac.model.job.JobData;
import com.dremio.dac.proto.model.dataset.FilterType;
+import com.dremio.dac.proto.model.dataset.From;
+import com.dremio.dac.proto.model.dataset.FromSQL;
import com.dremio.dac.proto.model.dataset.FromType;
import com.dremio.dac.proto.model.dataset.NameDatasetRef;
import com.dremio.dac.proto.model.dataset.SourceVersionReference;
@@ -243,8 +245,10 @@ private VirtualDatasetUI asDataset(
SQLGenerator.generateSQL(protectAgainstNull(result, transform), isSupportedTransform(transform), catalogService);
baseDataset.setSql(sql);
baseDataset.setLastTransform(transform.wrap());
- DatasetTool.applyQueryMetadata(baseDataset, actor.getParents(), actor.getBatchSchema(), actor.getFieldOrigins(),
- actor.getGrandParents(), actor.getMetadata());
+ if (actor != null && actor.hasMetadata()) {
+ DatasetTool.applyQueryMetadata(baseDataset, actor.getParents(), actor.getBatchSchema(), actor.getFieldOrigins(),
+ actor.getGrandParents(), actor.getMetadata());
+ }
return baseDataset;
}
@@ -267,9 +271,10 @@ public DatasetAndData transformWithExecute(
DatasetPath path,
VirtualDatasetUI original,
TransformBase transform,
+ boolean isAsync,
QueryType queryType)
- throws DatasetNotFoundException, NamespaceException {
- return this.transformWithExecute(newVersion, path, original, transform, queryType, false);
+ throws DatasetNotFoundException {
+ return this.transformWithExecute(newVersion, path, original, transform, isAsync, queryType, false);
}
/**
@@ -292,7 +297,7 @@ public InitialPendingTransformResponse transformPreviewWithExecute(
BufferAllocator allocator,
int limit)
throws DatasetNotFoundException, NamespaceException {
- final TransformResultDatsetAndData result = this.transformWithExecute(newVersion, path, original, transform, QueryType.UI_PREVIEW, true);
+ final TransformResultDatasetAndData result = this.transformWithExecute(newVersion, path, original, transform, false, QueryType.UI_PREVIEW, true);
final TransformResult transformResult = result.getTransformResult();
final List highlightedColumnNames = Lists.newArrayList(transformResult.getModifiedColumns());
highlightedColumnNames.addAll(transformResult.getAddedColumns());
@@ -338,14 +343,19 @@ public VirtualDatasetUI getDataset() {
}
- private TransformResultDatsetAndData transformWithExecute(
+ private TransformResultDatasetAndData transformWithExecute(
DatasetVersion newVersion,
DatasetPath path,
VirtualDatasetUI original,
TransformBase transform,
+ boolean isAsync,
QueryType queryType,
boolean isPreview)
- throws DatasetNotFoundException, NamespaceException {
+ throws DatasetNotFoundException {
+
+ if (isAsync && transform.wrap().getType() == updateSQL) {
+ return updateSQLTransformWithExecuteAsync(newVersion, path, original, transform, queryType, isPreview);
+ }
final ExecuteTransformActor actor = new ExecuteTransformActor(queryType, newVersion, original.getState(), isPreview, username(), path, executor);
final TransformResult transformResult = transform.accept(actor);
@@ -357,7 +367,7 @@ private TransformResultDatsetAndData transformWithExecute(
final SqlQuery query = new SqlQuery(sql, vss.getContextList(), securityContext, sourceVersionMapping);
actor.getMetadata(query);
}
- final TransformResultDatsetAndData resultToReturn = new TransformResultDatsetAndData(actor.getJobData(),
+ final TransformResultDatasetAndData resultToReturn = new TransformResultDatasetAndData(actor.getJobData(),
asDataset(newVersion, path, original, transform, transformResult, actor, catalogService), transformResult);
// save this dataset version.
datasetService.putVersion(resultToReturn.getDataset());
@@ -365,6 +375,52 @@ private TransformResultDatsetAndData transformWithExecute(
return resultToReturn;
}
+ // If isAsync is true and the transform type is updateSQL, we need to skip the actor visit and submit the query later
+ // asynchronously. The reason is that the actor executes the query in visit() when the transform type is updateSQL.
+ private TransformResultDatasetAndData updateSQLTransformWithExecuteAsync(
+ DatasetVersion newVersion,
+ DatasetPath path,
+ VirtualDatasetUI original,
+ TransformBase transform,
+ QueryType queryType,
+ boolean isPreview)
+ throws DatasetNotFoundException {
+
+ Preconditions.checkArgument(transform.wrap().getType() == updateSQL);
+ final ExecuteTransformActor actor = new ExecuteTransformActor(queryType, newVersion, original.getState(), isPreview, username(), path, executor);
+ final TransformResult transformResult = new TransformResult(
+ new VirtualDatasetState()
+ .setFrom(new From(FromType.SQL).setSql(new FromSQL(transform.wrap().getUpdateSQL().getSql())))
+ .setContextList(transform.wrap().getUpdateSQL().getSqlContextList()));
+ setReferencesInVirtualDatasetUI(original, transform);
+ VirtualDatasetUI dataset = asDataset(newVersion, path, original, transform, transformResult, actor, catalogService);
+
+ VirtualDatasetState vss = protectAgainstNull(transformResult, transform);
+ Map sourceVersionMapping = TransformerUtils.createSourceVersionMapping(transform.getReferencesList());
+ String sql = SQLGenerator.generateSQL(vss, isSupportedTransform(transform), catalogService);
+
+ SqlQuery query = new SqlQuery(sql, vss.getContextList(), securityContext, transform.wrap().getUpdateSQL().getEngineName(),
+ transform.wrap().getUpdateSQL().getSessionId(), sourceVersionMapping);
+ AsyncMetadataJobStatusListener.MetaDataListener listener = new AsyncMetadataJobStatusListener.MetaDataListener() {
+ @Override
+ public void metadataCollected(com.dremio.service.jobs.metadata.proto.QueryMetadata metadata) {
+ // save this dataset version.
+ if (actor.hasMetadata()) {
+ DatasetTool.applyQueryMetadata(dataset, actor.getParents(), actor.getBatchSchema(), actor.getFieldOrigins(),
+ actor.getGrandParents(), actor.getMetadata());
+ dataset.setState(QuerySemantics.extract(actor.getMetadata()));
+ }
+ datasetService.putVersion(dataset);
+ }
+ };
+ actor.getMetadataAsync(query, listener);
+
+ final TransformResultDatasetAndData resultToReturn = new TransformResultDatasetAndData(actor.getJobData(),
+ dataset, transformResult);
+
+ return resultToReturn;
+ }
+
public boolean isSupportedTransform(TransformBase transform) {
return transform.wrap().getType() != updateSQL;
}
@@ -387,10 +443,10 @@ private void setReferencesInVirtualDatasetUI(
}
}
- private static class TransformResultDatsetAndData extends DatasetAndData {
+ private static class TransformResultDatasetAndData extends DatasetAndData {
private final TransformResult transformResult;
- public TransformResultDatsetAndData(JobData jobData, VirtualDatasetUI dataset, TransformResult transformResult) {
+ public TransformResultDatasetAndData(JobData jobData, VirtualDatasetUI dataset, TransformResult transformResult) {
super(jobData, dataset);
this.transformResult = transformResult;
}
@@ -478,19 +534,17 @@ public ExecuteTransformActor(
this.queryType = queryType;
}
- @Override
- protected com.dremio.service.jobs.metadata.proto.QueryMetadata getMetadata(SqlQuery query) {
- this.jobData = executor.runQueryWithListener(query, queryType, path, newVersion, collector);
+ private void applyMetadata(com.dremio.service.jobs.metadata.proto.QueryMetadata metadata, SqlQuery query) {
JobId jobId = null;
SessionId sessionId = null;
try {
jobId = jobData.getJobId();
sessionId = jobData.getSessionId();
- this.metadata = collector.getMetadata();
+ this.metadata = metadata;
final JobDetails jobDetails = jobsService.getJobDetails(
JobDetailsRequest.newBuilder()
.setJobId(JobsProtoUtil.toBuf(jobId))
- .setUserName(query.getUsername())
+ .setUserName(username())
.build());
final JobInfo jobInfo = JobsProtoUtil.getLastAttempt(jobDetails).getInfo();
this.batchSchema = Optional.ofNullable(jobInfo.getBatchSchema()).map((b) -> BatchSchema.deserialize(b));
@@ -513,10 +567,29 @@ protected com.dremio.service.jobs.metadata.proto.QueryMetadata getMetadata(SqlQu
this.batchSchema = queryMetadata.getBatchSchema();
this.parents = queryMetadata.getParents();
}
+ }
+
+ @Override
+ protected com.dremio.service.jobs.metadata.proto.QueryMetadata getMetadata(SqlQuery query) {
+ this.jobData = executor.runQueryWithListener(query, queryType, path, newVersion, collector);
+ applyMetadata(collector.getMetadata(), query);
return metadata;
}
+ protected void getMetadataAsync(SqlQuery query, AsyncMetadataJobStatusListener.MetaDataListener listener) {
+ AsyncMetadataJobStatusListener.MetaDataListener metadataListener = new AsyncMetadataJobStatusListener.MetaDataListener() {
+ @Override
+ public void metadataCollected(com.dremio.service.jobs.metadata.proto.QueryMetadata metadata) {
+ ExecuteTransformActor.this.applyMetadata(metadata, query);
+ }
+ };
+ AsyncMetadataJobStatusListener asyncListener = new AsyncMetadataJobStatusListener(metadataListener);
+ asyncListener.addMetadataListener(listener);
+
+ this.jobData = executor.runQueryWithListener(query, queryType, path, newVersion, asyncListener);
+ }
+
@Override
protected boolean hasMetadata() {
return (metadata != null);
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/Dataset.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/Dataset.java
index 2674029627..9edd3110a6 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/model/Dataset.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/Dataset.java
@@ -20,8 +20,12 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.UUID;
+import java.util.stream.Collectors;
import com.dremio.dac.model.common.AddressableResource;
+import com.dremio.dac.model.common.RootEntity;
+import com.dremio.dac.model.folder.FolderName;
import com.dremio.dac.model.job.JobFilters;
import com.dremio.dac.proto.model.dataset.VirtualDatasetUI;
import com.dremio.dac.util.JSONUtil;
@@ -89,6 +93,33 @@ public static Dataset newInstance(
return new Dataset(datasetConfig.getId(), resourcePath, versionedResourcePath, datasetName, sql, datasetConfig, null, jobCount, tags);
}
+ public static Dataset newInstance(
+ RootEntity rootEntity,
+ List folderNamespace,
+ String folderName,
+ String id) {
+ final List folderPath =
+ folderNamespace.stream()
+ .map(name -> new FolderName(name))
+ .collect(Collectors.toList());
+ final DatasetName datasetName = new DatasetName(folderName);
+ final DatasetPath datasetPath = new DatasetPath(rootEntity, folderPath, datasetName);
+
+ final DatasetVersion datasetVersion = DatasetVersion.newVersion();
+ final VirtualDatasetUI vds = new VirtualDatasetUI();
+ vds.setFullPathList(datasetPath.toPathList());
+ vds.setName(datasetName.getName());
+ vds.setId((id == null) ? UUID.randomUUID().toString() : id);
+ vds.setVersion(datasetVersion);
+
+ // For an Iceberg view in Nessie, we generate a datasetVersion for it.
+ final DatasetResourcePath datasetResourcePath = new DatasetResourcePath(datasetPath);
+ final DatasetVersionResourcePath datasetVersionResourcePath =
+ new DatasetVersionResourcePath(datasetPath, datasetVersion);
+
+ return new Dataset(vds.getId(), datasetResourcePath, datasetVersionResourcePath, datasetName, null, vds, null,0, null);
+ }
+
public int getJobCount() {
return jobCount;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetPath.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetPath.java
index 1fec9faa52..1bd21d416d 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetPath.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetPath.java
@@ -42,6 +42,8 @@ public class DatasetPath extends NamespacePath {
private static final String URL_PATH_TYPE = "dataset";
+ private String version = " ";
+
public static DatasetPath fromURLPath(RootEntity root, String path) {
List components = PathUtils.toPathComponents(path);
@@ -56,6 +58,11 @@ public DatasetPath(RootEntity root, List folderPath, DatasetName dat
super(root, folderPath, dataset);
}
+ public DatasetPath(String path, String version) {
+ super(path);
+ this.version = version;
+ }
+
@JsonCreator
public DatasetPath(String path) {
super(path);
@@ -104,6 +111,10 @@ public DatasetName getDataset() {
return (DatasetName)getLeaf();
}
+ public String getVersion() {
+ return version;
+ }
+
@Override
public LeafEntity getLeaf(String name) throws IllegalArgumentException {
return new DatasetName(name);
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSearchUI.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSearchUI.java
index 36fb0c80ef..3cc550a9f3 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSearchUI.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSearchUI.java
@@ -17,14 +17,11 @@
import static com.dremio.common.utils.PathUtils.encodeURIComponent;
-import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import com.dremio.common.utils.PathUtils;
import com.dremio.dac.proto.model.collaboration.CollaborationTag;
-import com.dremio.service.namespace.NamespaceKey;
import com.dremio.service.namespace.dataset.DatasetVersion;
import com.dremio.service.namespace.dataset.proto.DatasetConfig;
import com.dremio.service.namespace.dataset.proto.DatasetType;
@@ -171,38 +168,4 @@ public Map getLinks() {
return links;
}
- public Map getApiLinks() throws UnsupportedEncodingException {
- final NamespaceKey datasetPath = new NamespaceKey(fullPath);
- final String dottedFullPath = datasetPath.toUrlEncodedString();
- final String fullPathString = PathUtils.toFSPath(fullPath).toString();
-
- Map links = new HashMap();
- switch (datasetType) {
- case VIRTUAL_DATASET:
- links.put("edit", "/dataset/" + dottedFullPath + "/version/" + datasetVersion + "?view=explore"); //edit dataset
- final DatasetVersion datasetVersion = DatasetVersion.newVersion();
- links.put("run", "/datasets/new_untitled?parentDataset=" + dottedFullPath + "&newVersion="
- + (datasetVersion == null ? datasetVersion : encodeURIComponent(datasetVersion.toString()))); //create new dataset
- break;
- case PHYSICAL_DATASET_HOME_FILE:
- links.put("run", "/home/" + fullPath.get(0) + "new_untitled_from_file" + fullPathString);
- break;
- case PHYSICAL_DATASET_HOME_FOLDER:
- // Folder not supported yet
- break;
- case PHYSICAL_DATASET_SOURCE_FILE:
- links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_file" + fullPathString);
- break;
- case PHYSICAL_DATASET_SOURCE_FOLDER:
- links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_folder" + fullPathString);
- break;
- case PHYSICAL_DATASET:
- links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_physical_dataset" + fullPathString);
- break;
- default:
- break;
- }
- return links;
- }
-
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSummary.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSummary.java
index 2bd419b777..2757847f47 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSummary.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSummary.java
@@ -22,7 +22,7 @@
import java.util.Map;
import java.util.Set;
-import com.dremio.common.utils.PathUtils;
+import com.dremio.dac.api.JsonISODateTime;
import com.dremio.dac.model.job.JobFilters;
import com.dremio.dac.util.DatasetsUtil;
import com.dremio.service.jobs.JobIndexKeys;
@@ -31,11 +31,11 @@
import com.dremio.service.namespace.dataset.proto.DatasetConfig;
import com.dremio.service.namespace.dataset.proto.DatasetType;
import com.dremio.service.namespace.dataset.proto.VirtualDataset;
+import com.dremio.service.users.User;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
/**
* Dataset summary for overlay
@@ -50,6 +50,16 @@ public class DatasetSummary {
private final DatasetVersion datasetVersion;
private final Map references;
private final List tags;
+ private final String entityId;
+ private Boolean hasReflection;
+ private final String ownerName;
+ private final String ownerEmail;
+ private final String lastModifyingUserName;
+ private final String lastModifyingUserEmail;
+ @JsonISODateTime
+ private final Long createdAt;
+ @JsonISODateTime
+ private final Long lastModified;
public DatasetSummary(@JsonProperty("fullPath") List fullPath,
@JsonProperty("jobCount") int jobCount,
@@ -58,7 +68,15 @@ public DatasetSummary(@JsonProperty("fullPath") List fullPath,
@JsonProperty("datasetType") DatasetType datasetType,
@JsonProperty("datasetVersion") DatasetVersion datasetVersion,
@JsonProperty("tags") List tags,
- @JsonProperty("references") Map references) {
+ @JsonProperty("references") Map references,
+ @JsonProperty("entityId") String entityId,
+ @JsonProperty("hasReflection") Boolean hasReflection,
+ @JsonProperty("ownerName") String ownerName,
+ @JsonProperty("ownerEmail") String ownerEmail,
+ @JsonProperty("lastModifyingUserName") String lastModifyingUserName,
+ @JsonProperty("lastModifyingUserEmail") String lastModifyingUserEmail,
+ @JsonProperty("createdAt") Long createdAt,
+ @JsonProperty("lastModified") Long lastModified) {
this.fullPath = fullPath;
this.jobCount = jobCount;
this.descendants = descendants;
@@ -67,9 +85,18 @@ public DatasetSummary(@JsonProperty("fullPath") List fullPath,
this.datasetVersion = datasetVersion;
this.tags = tags;
this.references = references;
+ this.entityId = entityId;
+ this.hasReflection = hasReflection;
+ this.ownerName = ownerName;
+ this.ownerEmail = ownerEmail;
+ this.lastModifyingUserName = lastModifyingUserName;
+ this.lastModifyingUserEmail = lastModifyingUserEmail;
+ this.createdAt = createdAt;
+ this.lastModified = lastModified;
}
- public static DatasetSummary newInstance(DatasetConfig datasetConfig, int jobCount, int descendants, Map references, List tags) {
+ public static DatasetSummary newInstance(DatasetConfig datasetConfig, int jobCount, int descendants, Map references, List tags,
+ Boolean hasReflection, User owner, User lastModifyingUser) {
List fullPath = datasetConfig.getFullPathList();
DatasetType datasetType = datasetConfig.getType();
@@ -99,7 +126,16 @@ public Field apply(com.dremio.dac.model.common.Field input) {
datasetVersion = null;
}
- return new DatasetSummary(fullPath, jobCount, descendants, fields, datasetType, datasetVersion, tags, references);
+ final String entityId = datasetConfig.getId() == null ? null : datasetConfig.getId().getId();
+ final String ownerName = owner != null ? owner.getUserName() : null;
+ final String ownerEmail = owner != null ? owner.getEmail() : null;
+ final String lastModifyingUserName = lastModifyingUser != null ? lastModifyingUser.getUserName() : null;
+ final String lastModifyingUserEmail = lastModifyingUser != null ? lastModifyingUser.getEmail() : null;
+ final Long createdAt = datasetConfig.getCreatedAt();
+ final Long lastModified = datasetConfig.getLastModified();
+
+ return new DatasetSummary(fullPath, jobCount, descendants, fields, datasetType, datasetVersion, tags, references,
+ entityId, hasReflection, ownerName, ownerEmail, lastModifyingUserName, lastModifyingUserEmail, createdAt, lastModified);
}
public DatasetVersion getDatasetVersion() {
@@ -134,6 +170,36 @@ public Map getReferences() {
return references;
}
+ public String getEntityId() {
+ return entityId;
+ }
+
+ public Boolean getHasReflection() { return hasReflection; }
+
+ public String getOwnerName() {
+ return ownerName;
+ }
+
+ public String getOwnerEmail() {
+ return ownerEmail;
+ }
+
+ public String getLastModifyingUserName() {
+ return lastModifyingUserName;
+ }
+
+ public String getLastModifyingUserEmail() {
+ return lastModifyingUserEmail;
+ }
+
+ public Long getCreatedAt() {
+ return createdAt;
+ }
+
+ public Long getLastModified() {
+ return lastModified;
+ }
+
// links
// TODO make this consistent with DatasetUI.createLinks. In ideal case, both methods should use the same util method
public Map getLinks() {
@@ -150,42 +216,6 @@ public Map getLinks() {
return links;
}
- // api links
- public Map getApiLinks() {
- final Map links = Maps.newHashMap();
- final NamespaceKey datasetPath = new NamespaceKey(fullPath);
- final String dottedFullPath = datasetPath.toUrlEncodedString();
- final String fullPathString = PathUtils.toFSPath(fullPath).toString();
-
- links.put("jobs", this.getJobsUrl());
- switch (datasetType) {
- case VIRTUAL_DATASET:
- links.put("edit", "/dataset/" + dottedFullPath + "/version/" + datasetVersion + "/preview"); // edit dataset
- final DatasetVersion datasetVersion = DatasetVersion.newVersion();
- links.put("run", "/datasets/new_untitled?parentDataset=" + dottedFullPath + "&newVersion="
- + (datasetVersion == null ? datasetVersion : encodeURIComponent(datasetVersion.toString()))); //create new dataset
- break;
- case PHYSICAL_DATASET_HOME_FILE:
- links.put("run", "/home/" + fullPath.get(0) + "new_untitled_from_file" + fullPathString);
- break;
- case PHYSICAL_DATASET_HOME_FOLDER:
- // Folder not supported yet
- break;
- case PHYSICAL_DATASET_SOURCE_FILE:
- links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_file" + fullPathString);
- break;
- case PHYSICAL_DATASET_SOURCE_FOLDER:
- links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_folder" + fullPathString);
- break;
- case PHYSICAL_DATASET:
- links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_physical_dataset" + fullPathString);
- break;
- default:
- break;
- }
- return links;
- }
-
private String getJobsUrl() {
final NamespaceKey datasetPath = new NamespaceKey(fullPath);
final JobFilters jobFilters = new JobFilters()
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetUI.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetUI.java
index 1aedb88911..0ca17d7151 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetUI.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetUI.java
@@ -27,11 +27,13 @@
import com.dremio.dac.model.folder.SourceFolderPath;
import com.dremio.dac.model.job.JobFilters;
import com.dremio.dac.model.sources.PhysicalDatasetPath;
+import com.dremio.dac.model.sources.VirtualDatasetPath;
import com.dremio.dac.model.spaces.HomeName;
import com.dremio.dac.proto.model.dataset.Derivation;
import com.dremio.dac.proto.model.dataset.VirtualDatasetUI;
import com.dremio.dac.util.DatasetUIUtils;
import com.dremio.dac.util.DatasetsUtil;
+import com.dremio.exec.catalog.VersionedDatasetId;
import com.dremio.file.FilePath;
import com.dremio.file.SourceFilePath;
import com.dremio.service.jobs.JobIndexKeys;
@@ -43,6 +45,7 @@
import com.dremio.service.namespace.dataset.proto.ParentDataset;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
@@ -52,7 +55,7 @@
public class DatasetUI {
private final String id;
- private String entityId;
+ private final String entityId;
private final String sql;
private final List context;
// full path to use when making transforms on this dataset like transforms
@@ -97,7 +100,7 @@ public static DatasetUI newInstance(
datasetType = DatasetType.VIRTUAL_DATASET;
} else {
// if its tmp.UNTITLED we want to get the parent dataset path to display. The UI uses displayFullPath for history
- // requests and therefore we need to be precise here. We manually check the path as this code would previously get
+ // requests, and therefore we need to be precise here. We manually check the path as this code would previously get
// triggered for history dataset entries that derive from another dataset.
if (isUnsaved && vds.getDerivation() == Derivation.DERIVED_VIRTUAL && parentsList.size() > 0
&& Arrays.asList("tmp", "UNTITLED").equals(fullPath)) {
@@ -118,17 +121,37 @@ public static DatasetUI newInstance(
entityId = namespaceService.getEntityIdByPath(new NamespaceKey(displayFullPath));
}
+ final String datasetId = vds.getId();
Map versionContextReqMap = DatasetUIUtils.createVersionContextMap(vds.getReferencesList());
- // if it's versioned, vds'id will be the same as entityId
- if(entityId == null && context!=null && context.size() >= 1 && versionContextReqMap.containsKey(context.get(0))) {
- entityId = vds.getId();
+ if (VersionedDatasetId.tryParse(datasetId) != null) {
+ Preconditions.checkArgument(entityId == null);
+ entityId = datasetId;
}
- return new DatasetUI(vds.getId(), sql, context, fullPath, displayFullPath, vds.getSavedTag(), vds.getVersion(),
- null, null, canReapply, datasetType,
- createLinks(fullPath, displayFullPath, vds.getVersion(), isUnsavedDirectPhysicalDataset),
- createApiLinks(fullPath, displayFullPath, datasetType, vds.getVersion(), isUnsaved, isDerivedDirectly),
- /* entityId */ entityId, versionContextReqMap);
+
+ return new DatasetUI(
+ datasetId,
+ sql,
+ context,
+ fullPath,
+ displayFullPath,
+ vds.getSavedTag(),
+ vds.getVersion(),
+ null,
+ null,
+ canReapply,
+ datasetType,
+ createLinks(
+ fullPath,
+ displayFullPath,
+ vds.getVersion(),
+ isUnsavedDirectPhysicalDataset,
+ entityId,
+ datasetType),
+ createApiLinks(
+ fullPath, displayFullPath, datasetType, vds.getVersion(), isUnsaved, isDerivedDirectly),
+ entityId,
+ versionContextReqMap);
}
@JsonCreator
@@ -258,15 +281,29 @@ public Map getApiLinks() {
public String getEntityId() { return entityId; }
// TODO make this consistent with DatasetSummary.getLinks. In ideal case, both methods should use the same util method
- public static Map createLinks(List fullPath, List displayFullPath, DatasetVersion datasetVersion, boolean isUnsavedDirectPhysicalDataset) {
+ public static Map createLinks(
+ List fullPath,
+ List displayFullPath,
+ DatasetVersion datasetVersion,
+ boolean isUnsavedDirectPhysicalDataset,
+ String entityId,
+ DatasetType datasetType) {
String dottedFullPath = PathUtils.constructFullPath(fullPath);
String queryUrlPath;
+
+ final boolean isVersionedDataset = VersionedDatasetId.tryParse(entityId) != null;
if (isUnsavedDirectPhysicalDataset) {
if (displayFullPath.get(0).startsWith(HomeName.HOME_PREFIX)) {
queryUrlPath = new DatasetPath(displayFullPath).getQueryUrlPath();
} else {
queryUrlPath = new PhysicalDatasetPath(displayFullPath).getQueryUrlPath();
}
+ } else if (isVersionedDataset) {
+ queryUrlPath =
+ ((datasetType == DatasetType.VIRTUAL_DATASET)
+ ? new VirtualDatasetPath(displayFullPath)
+ : new PhysicalDatasetPath(displayFullPath))
+ .getQueryUrlPath();
} else {
queryUrlPath = new DatasetPath(displayFullPath).getQueryUrlPath();
}
@@ -279,7 +316,6 @@ public static Map createLinks(List fullPath, List suggestions;
-
- @JsonCreator
- public SuggestionResponse(@JsonProperty("suggestions") List suggestions) {
- this.suggestions = ImmutableList.copyOf(suggestions);
- }
-
- /**
- * Get list of suggestions.
- */
- public ImmutableList getSuggestions() {
- return suggestions;
- }
-
- @Override
- public String toString() {
- return "SuggestionResponse{" +
- "suggestions=" + suggestions +
- '}';
- }
-
- /**
- * Query Suggestion object to return in SQL Analyze response.
- */
- public static class Suggestion {
-
- private final String name;
- private final String type;
-
- @JsonCreator
- public Suggestion(
- @JsonProperty("name") String name,
- @JsonProperty("type") String type) {
- this.name = name;
- this.type = type;
- }
-
- /**
- * Get Name
- *
- * @return the suggestion value.
- */
- public String getName() {
- return name;
- }
-
- /**
- * Get Type
- *
- * @return the suggestion type.
- */
- public String getType() {
- return type;
- }
-
- @Override
- public String toString() {
- return "Suggestion{" +
- "name='" + name + '\'' +
- ", type='" + type + '\'' +
- '}';
- }
- }
-}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/ValidationResponse.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/ValidationResponse.java
deleted file mode 100644
index e40a072d3a..0000000000
--- a/dac/backend/src/main/java/com/dremio/dac/explore/model/ValidationResponse.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (C) 2017-2019 Dremio Corporation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.dremio.dac.explore.model;
-
-import java.util.List;
-
-import com.dremio.dac.model.job.QueryError;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.ImmutableList;
-
-
-/**
- * SQL Analyze API Response object to return for SQL query validation.
- * Wrapper for a list of QueryError objects.
- */
-public class ValidationResponse {
-
- private ImmutableList sqlErrors;
-
- @JsonCreator
- public ValidationResponse(@JsonProperty("errors") List sqlErrors) {
- this.sqlErrors = ImmutableList.copyOf(sqlErrors);
- }
-
- /**
- * Get list of errors.
- */
- public ImmutableList getErrors() {
- return sqlErrors;
- }
-
- @Override
- public String toString() {
- return "ValidationResponse{" +
- "sqlErrors=" + sqlErrors +
- '}';
- }
-}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextReq.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextReq.java
index ed92a5800f..54926ae977 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextReq.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextReq.java
@@ -17,6 +17,8 @@
import java.util.Locale;
+import javax.annotation.Nullable;
+
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Strings;
@@ -48,11 +50,10 @@ public VersionContextReq(
* Case-insensitive enum conversion.
* Returns null on most failures.
*/
- public static VersionContextReq tryParse(String type, String value) {
+ public static @Nullable VersionContextReq tryParse(String type, String value) {
if (Strings.isNullOrEmpty(type) || Strings.isNullOrEmpty(value)) {
return null;
}
-
return new VersionContextReq(
VersionContextType.valueOf(type.toUpperCase(Locale.ROOT)),
value);
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextUtils.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextUtils.java
index a05395462c..23ddbc97cb 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextUtils.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextUtils.java
@@ -21,7 +21,8 @@ public final class VersionContextUtils {
private VersionContextUtils() {
}
- public static VersionContext map(VersionContextReq from) {
+ public static VersionContext parse(String refType, String refValue) {
+ VersionContextReq from = VersionContextReq.tryParse(refType, refValue);
if (from == null) {
return VersionContext.NOT_SPECIFIED;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/udfs/ConvertCase.java b/dac/backend/src/main/java/com/dremio/dac/explore/udfs/ConvertCase.java
index 7cdecd8c51..41a6b66ca0 100644
--- a/dac/backend/src/main/java/com/dremio/dac/explore/udfs/ConvertCase.java
+++ b/dac/backend/src/main/java/com/dremio/dac/explore/udfs/ConvertCase.java
@@ -48,8 +48,8 @@ public void setup() {
@Override
public void eval() {
- out.buffer = buffer.reallocIfNeeded(input.end - input.start);
- buffer = out.buffer;
+ buffer = buffer.reallocIfNeeded(input.end - input.start);
+ out.buffer = buffer;
out.start = 0;
out.end = input.end - input.start;
diff --git a/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileConf.java b/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileConf.java
index 242943d191..c283e76f82 100644
--- a/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileConf.java
+++ b/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileConf.java
@@ -197,38 +197,47 @@ public SchemaMutability getSchemaMutability() {
return SchemaMutability.USER_VIEW;
}
+ @Override
public String getAccessKey() {
return accessKey;
}
+ @Override
public String getSecretKey() {
return secretKey;
}
+ @Override
public String getIamRole() {
return iamRole;
}
+ @Override
public String getExternalId() {
return externalId;
}
+ @Override
public String getTokenEndpoint() {
return tokenEndpoint;
}
+ @Override
public String getClientId() {
return clientId;
}
+ @Override
public String getClientSecret() {
return clientSecret;
}
+ @Override
public String getAccountName() {
return accountName;
}
+ @Override
public String getAccountKind() {
return accountKind;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileTool.java b/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileTool.java
index 283a860207..12520b9329 100644
--- a/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileTool.java
+++ b/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileTool.java
@@ -34,7 +34,6 @@
import com.dremio.dac.model.spaces.HomeName;
import com.dremio.dac.service.errors.SourceBadStateException;
import com.dremio.exec.proto.UserBitShared;
-import com.dremio.exec.server.SabotContext;
import com.dremio.exec.store.CatalogService;
import com.dremio.exec.store.StoragePlugin;
import com.dremio.file.FilePath;
@@ -46,6 +45,8 @@
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
+
/**
* Injectable tool for doing home file manipulation.
*/
@@ -57,10 +58,10 @@ public class HomeFileTool {
private final HostNameProvider hostNameProvider;
private final SecurityContext securityContext;
- public interface HostNameProvider extends Supplier {};
+ public interface HostNameProvider extends Supplier {}
@Inject
- public HomeFileTool(SabotContext context, CatalogService catalog, HostNameProvider hostnameProvider, @Context SecurityContext securityContext) throws ExecutionSetupException {
+ public HomeFileTool(CatalogService catalog, HostNameProvider hostnameProvider, @Context SecurityContext securityContext) throws ExecutionSetupException {
StoragePlugin plugin;
try {
plugin = catalog.getSource(HomeFileSystemStoragePlugin.HOME_PLUGIN_NAME);
@@ -101,11 +102,11 @@ public HomeFileTool(HomeFileConf config, FileSystem fs, String hostname, Securit
*/
@VisibleForTesting
public Path getStagingLocation(FilePath filePath, String extension) {
- FilePath uniquePath = filePath.rename(format("%s_%s-%s", filePath.getFileName().toString(), extension, UUID.randomUUID().toString()));
+ FilePath uniquePath = filePath.rename(format("%s_%s-%s", filePath.getFileName().toString(), extension, UUID.randomUUID()));
return Path.mergePaths(config.getStagingPath(hostNameProvider.get()), PathUtils.toFSPath(uniquePath.toPathList()));
}
- public HomeFileConf getConf() {
+ public HomeFileConf getConfForBackup() {
return config;
}
@@ -114,7 +115,7 @@ public HomeFileConf getConf() {
*
* @param parent parent directory
* @param fileName file name
- * @return
+ * @return Returns the file path.
*/
private Path filePath(Path parent, String fileName) throws IOException {
return fs.canonicalizePath(parent.resolve(fileName));
@@ -130,8 +131,9 @@ private Path getUploadLocation(FilePath filePath, String extension) {
* @param filePath file path in under home space
* @param input input stream containing file's data
* @return location where file is staged
- * @throws IOException
+ * @throws IOException - An exception that might occur if the file system cannot be written to.
*/
+ @WithSpan
public Path stageFile(FilePath filePath, String extension, InputStream input) throws IOException {
final Path stagingLocation = getStagingLocation(filePath, extension);
fs.mkdirs(stagingLocation, HomeFileSystemStoragePlugin.DEFAULT_PERMISSIONS);
@@ -142,6 +144,7 @@ public Path stageFile(FilePath filePath, String extension, InputStream input) th
return fs.makeQualified(stagingLocation);
}
+ @WithSpan
public Path saveFile(String stagingLocation, FilePath filePath, FileType fileType) throws IOException {
return saveFile(Path.of(stagingLocation), filePath, FileFormat.getExtension(fileType));
}
@@ -151,8 +154,9 @@ public Path saveFile(String stagingLocation, FilePath filePath, FileType fileTyp
* @param stagingLocation staging directory where file is uploaded
* @param filePath file path in under home space
* @return final location of file
- * @throws IOException
+ * @throws IOException - An exception if the file system cannot be written to.
*/
+ @VisibleForTesting
public Path saveFile(Path stagingLocation, FilePath filePath, String extension) throws IOException {
if (!validStagingLocation(stagingLocation)) {
throw new IllegalArgumentException("Invalid staging location provided");
@@ -160,7 +164,6 @@ public Path saveFile(Path stagingLocation, FilePath filePath, String extension)
final Path uploadLocation = getUploadLocation(filePath, extension);
fs.mkdirs(uploadLocation.getParent());
- // rename staging dir to uploadPath
fs.rename(stagingLocation, uploadLocation);
return uploadLocation;
}
@@ -171,6 +174,7 @@ public Path saveFile(Path stagingLocation, FilePath filePath, String extension)
* @param stagingLocation staging directory where file is uploaded
* @return if the location is valid or not
*/
+ @WithSpan
public boolean validStagingLocation(Path stagingLocation) {
final Path stagingPath = fs.makeQualified(stagingLocation);
@@ -184,8 +188,9 @@ public boolean validStagingLocation(Path stagingLocation) {
/**
* Delete file uploaded by user
- * @throws IOException
+ * @throws IOException - An exception if the file system cannot be written to.
*/
+ @WithSpan
public void deleteFile(String fileLocation) throws IOException {
if (fileLocation != null) {
fs.delete(Path.of(fileLocation), true);
@@ -198,10 +203,11 @@ public boolean fileExists(String fileLocation) throws IOException {
/**
* Delete the contents in given user home.
- * @param userHome
+ * @param userHome - The location of a user's home space.
* @return Whether successful or not.
- * @throws IOException
+ * @throws IOException - An exception if the file system cannot be written to.
*/
+ @WithSpan
public boolean deleteHomeAndContents(String userHome) throws IOException {
final Path homePath = config.getInnerUploads().resolve(userHome);
if (fs.exists(homePath)) {
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/folder/Folder.java b/dac/backend/src/main/java/com/dremio/dac/model/folder/Folder.java
index 62b9980ad1..2f6ff8e065 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/folder/Folder.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/folder/Folder.java
@@ -22,8 +22,10 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.dremio.dac.model.common.NamespacePath;
@@ -167,8 +169,6 @@ public Map getLinks() {
links.put("file_format", folderPath.toUrlPathWithAction("folder_format"));
links.put("file_prefix", folderPath.toUrlPathWithAction("file"));
}
- // renames not allowed on source folders
- links.put("rename", folderPath.toUrlPathWithAction("rename_folder"));
}
// add jobs if not already added.
if (!links.containsKey("jobs")) {
@@ -214,6 +214,28 @@ static NamespacePath parseUrlPath(String urlPath) {
throw new IllegalArgumentException("Not a valid filePath: " + urlPath);
}
+ public static Folder newInstance(RootEntity rootEntity, FolderConfig folderConfig, String id) {
+ return new Folder(
+ (id == null) ? UUID.randomUUID().toString() : id,
+ folderConfig.getName(),
+ new FolderPath(
+ rootEntity,
+ folderConfig.getFullPathList().subList(1, folderConfig.getFullPathList().size() - 1)
+ .stream()
+ .map(FolderName::new)
+ .collect(Collectors.toList()),
+ new FolderName(folderConfig.getName())).toUrlPath(),
+ false,
+ false,
+ false,
+ null,
+ null,
+ null,
+ null,
+ null,
+ 0);
+ }
+
public static Folder newInstance(FolderPath folderPath, FolderConfig folderConfig, NamespaceTree contents, boolean isQueryable, boolean isFileSystemFolder) {
return newInstance(folderPath, folderConfig, null, contents, isQueryable, isFileSystemFolder, null, 0);
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/JobDetailsUI.java b/dac/backend/src/main/java/com/dremio/dac/model/job/JobDetailsUI.java
index 98d0bc2c5c..010e11686b 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/job/JobDetailsUI.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/job/JobDetailsUI.java
@@ -47,6 +47,7 @@
import com.dremio.service.job.proto.TableDatasetProfile;
import com.dremio.service.job.proto.TopOperation;
import com.dremio.service.jobs.JobsProtoUtil;
+import com.dremio.service.jobs.JobsServiceUtil;
import com.dremio.service.namespace.dataset.proto.DatasetType;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -318,7 +319,7 @@ public JobDetailsUI(
failureInfo,
cancellationInfo,
attempts.get(0).getInfo().getSql(),
- attempts.get(0).getInfo().getDescription(),
+ JobsServiceUtil.getJobDescription(attempts.get(0).getInfo().getRequestType(), attempts.get(0).getInfo().getSql(), attempts.get(0).getInfo().getDescription()),
Util.last(attempts).getStats(),
DatasetType.VIRTUAL_DATASET, // TODO: return correct result. This is closest since only the ui submits queries and they are using virtual datasets...
datasetVersion,
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/JobInfoDetailsUI.java b/dac/backend/src/main/java/com/dremio/dac/model/job/JobInfoDetailsUI.java
index 68ae229f4e..7a28c8e872 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/job/JobInfoDetailsUI.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/job/JobInfoDetailsUI.java
@@ -43,7 +43,7 @@
import java.util.stream.Collectors;
import org.apache.calcite.util.Util;
-import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import com.dremio.dac.api.CatalogEntity;
@@ -72,6 +72,7 @@
import com.dremio.service.job.proto.ReflectionType;
import com.dremio.service.job.proto.ScannedDataset;
import com.dremio.service.jobs.JobsProtoUtil;
+import com.dremio.service.jobs.JobsServiceUtil;
import com.dremio.service.namespace.NamespaceException;
import com.dremio.service.namespace.NamespaceKey;
import com.dremio.service.namespace.NamespaceService;
@@ -110,6 +111,8 @@ public class JobInfoDetailsUI {
private Long inputRecords;
private Long outputBytes;
private Long outputRecords;
+ private Long addedFiles;
+ private Long removedFiles;
private Long duration;
private List durationDetails;
private int nrReflectionsConsidered;
@@ -169,6 +172,8 @@ public JobInfoDetailsUI(
@JsonProperty("inputRecords") Long inputRecords,
@JsonProperty("outputBytes") Long outputBytes,
@JsonProperty("outputRecords") Long outputRecords,
+ @JsonProperty("addedFiles") Long addedFiles,
+ @JsonProperty("removedFiles") Long removedFiles,
@JsonProperty("duration") Long duration,
@JsonProperty("durationDetails") List durationDetails,
@JsonProperty("nrReflectionsConsidered") int nrReflectionsConsidered,
@@ -220,6 +225,8 @@ public JobInfoDetailsUI(
this.inputRecords = inputRecords;
this.outputBytes = outputBytes;
this.outputRecords = outputRecords;
+ this.addedFiles = addedFiles;
+ this.removedFiles = removedFiles;
this.duration = duration;
this.durationDetails = durationDetails;
this.nrReflectionsConsidered = nrReflectionsConsidered;
@@ -280,10 +287,12 @@ public JobInfoDetailsUI of(JobDetails jobDetails, UserBitShared.QueryProfile pro
inputRecords = jobAttempt.getStats().getInputRecords();
outputBytes = jobAttempt.getStats().getOutputBytes();
outputRecords = jobAttempt.getStats().getOutputRecords();
+ addedFiles = jobAttempt.getStats().getAddedFiles();
+ removedFiles = jobAttempt.getStats().getRemovedFiles();
duration = JobUtil.getTotalDuration(jobDetails, attemptIndex);
durationDetails = JobUtil.buildDurationDetails(jobAttempt.getStateListList());
requestType = RequestType.valueOf(jobInfo.getRequestType().toString());
- description = jobInfo.getDescription();
+ description = JobsServiceUtil.getJobDescription(RequestType.valueOf(jobInfo.getRequestType().toString()), jobInfo.getSql(), jobInfo.getDescription());
attemptDetails = AttemptsUIHelper.fromAttempts(jobId, attempts);
attemptsSummary = AttemptsUIHelper.constructSummary(attempts);
datasetPaths = jobInfo.getDatasetPathList();
@@ -341,6 +350,8 @@ public JobInfoDetailsUI of(JobDetails jobDetails, UserBitShared.QueryProfile pro
inputRecords,
outputBytes,
outputRecords,
+ addedFiles,
+ removedFiles,
duration,
durationDetails,
nrReflectionsConsidered,
@@ -530,6 +541,14 @@ public Long getOutputRecords() {
return outputRecords;
}
+ public Long getAddedFiles() {
+ return addedFiles;
+ }
+
+ public Long getRemovedFiles() {
+ return removedFiles;
+ }
+
public boolean isStarFlakeAccelerated() {
return isStarFlakeAccelerated;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/JobProfileVisualizerUI.java b/dac/backend/src/main/java/com/dremio/dac/model/job/JobProfileVisualizerUI.java
index 02398100f3..62e4540df1 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/job/JobProfileVisualizerUI.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/job/JobProfileVisualizerUI.java
@@ -320,12 +320,14 @@ private void buildPhaseData(UserBitShared.MajorFragmentProfile major) {
}
Comparator minorIdComparator = new Comparator() {
+ @Override
public int compare(final UserBitShared.MinorFragmentProfile o1, final UserBitShared.MinorFragmentProfile o2) {
return Long.compare(o1.getMinorFragmentId(), o2.getMinorFragmentId());
}
};
Comparator operatorIdComparator = new Comparator() {
+ @Override
public int compare(final UserBitShared.OperatorProfile o1, final UserBitShared.OperatorProfile o2) {
return Long.compare(o1.getOperatorId(), o2.getOperatorId());
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/JobSummaryUI.java b/dac/backend/src/main/java/com/dremio/dac/model/job/JobSummaryUI.java
index de716f5a27..5fc234ced9 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/job/JobSummaryUI.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/job/JobSummaryUI.java
@@ -25,6 +25,7 @@
import com.dremio.service.job.proto.JobState;
import com.dremio.service.job.proto.ParentDatasetInfo;
import com.dremio.service.jobs.JobsProtoUtil;
+import com.dremio.service.jobs.JobsServiceUtil;
import com.dremio.service.namespace.NamespaceService;
import com.dremio.service.namespace.dataset.proto.DatasetType;
import com.fasterxml.jackson.annotation.JsonCreator;
@@ -101,6 +102,7 @@ public JobSummaryUI(
}
public static JobSummaryUI of(com.dremio.service.job.JobSummary input, NamespaceService service) {
+ String desc = JobsServiceUtil.getJobDescription(input.getRequestType(), input.getSql(), input.getDescription());
final ParentDatasetInfo datasetInfo = JobsUI.getDatasetToDisplay(input, service);
return new JobSummaryUI(
input.getJobId().getId(),
@@ -113,7 +115,7 @@ public static JobSummaryUI of(com.dremio.service.job.JobSummary input, Namespace
input.getUser(),
input.getStartTime() == 0 ? null : input.getStartTime(),
input.getEndTime() == 0 ? null : input.getEndTime(),
- Strings.isNullOrEmpty(input.getDescription()) ? null : obfuscateSql(input.getDescription()),
+ Strings.isNullOrEmpty(desc) ? null : obfuscateSql(desc),
JobsProtoUtil.toStuff(input.getRequestType()),
input.getAccelerated(),
input.getDatasetVersion(),
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/JobUI.java b/dac/backend/src/main/java/com/dremio/dac/model/job/JobUI.java
index 256518cb70..9cf949e866 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/job/JobUI.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/job/JobUI.java
@@ -35,6 +35,7 @@
import com.dremio.service.jobs.JobNotFoundException;
import com.dremio.service.jobs.JobsProtoUtil;
import com.dremio.service.jobs.JobsService;
+import com.dremio.service.jobs.JobsServiceUtil;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -149,7 +150,7 @@ private static JobInfoUI convertJobInfo(JobInfo info) {
.setAcceleration(info.getAcceleration())
.setGrandParentsList(info.getGrandParentsList())
.setDownloadInfo(info.getDownloadInfo())
- .setDescription(info.getDescription())
+ .setDescription(JobsServiceUtil.getJobDescription(info.getRequestType(), info.getSql(), info.getDescription()))
.setMaterializationFor(info.getMaterializationFor())
.setOriginalCost(info.getOriginalCost())
.setPartitionsList(info.getPartitionsList())
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListItem.java b/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListItem.java
index f4e4dfd83a..866dc78f6d 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListItem.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListItem.java
@@ -29,6 +29,7 @@
import com.dremio.service.job.proto.JobStats;
import com.dremio.service.jobs.Job;
import com.dremio.service.jobs.JobsProtoUtil;
+import com.dremio.service.jobs.JobsServiceUtil;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -105,7 +106,7 @@ public PartialJobListItem(Job input) {
this.user = firstAttempt.getInfo().getUser();
this.startTime = firstAttempt.getInfo().getStartTime();
this.endTime = lastAttempt.getInfo().getFinishTime();
- this.description = firstAttempt.getInfo().getDescription();
+ this.description = JobsServiceUtil.getJobDescription(lastAttempt.getInfo().getRequestType(), lastAttempt.getInfo().getSql(), lastAttempt.getInfo().getDescription());
this.accelerated = lastAttempt.getInfo().getAcceleration() != null;
this.requestType = firstAttempt.getInfo().getRequestType();
this.datasetVersion = firstAttempt.getInfo().getDatasetVersion();
@@ -129,7 +130,7 @@ public PartialJobListItem(JobSummary input) {
this.user = input.getUser();
this.startTime = input.getStartTime() == 0 ? null : input.getStartTime();
this.endTime = input.getEndTime() == 0 ? null : input.getEndTime();
- this.description = Strings.isNullOrEmpty(input.getDescription()) ? null : input.getDescription();
+ this.description = JobsServiceUtil.getJobDescription(input.getRequestType(), input.getSql(), input.getDescription());
this.accelerated = input.getAccelerated();
this.requestType = JobsProtoUtil.toStuff(input.getRequestType());
this.datasetVersion = input.getDatasetVersion();
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListingItem.java b/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListingItem.java
index 5a36472054..04bc6c95ab 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListingItem.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListingItem.java
@@ -26,6 +26,7 @@
import com.dremio.service.job.proto.JobState;
import com.dremio.service.job.proto.QueryType;
import com.dremio.service.jobs.JobsProtoUtil;
+import com.dremio.service.jobs.JobsServiceUtil;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -158,7 +159,7 @@ public PartialJobListingItem(JobSummary input) {
input.getOutputRecords() + " Records";
this.spilled = input.getSpilled();
this.isStarFlakeAccelerated = input.getSnowflakeAccelerated();
- this.description = input.getDescription();
+ this.description = JobsServiceUtil.getJobDescription(input.getRequestType(), input.getSql(), input.getDescription());
this.requestType = input.getRequestType();
this.datasetVersion = input.getDatasetVersion();
this.outputLimited = input.getOutputLimited();
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/namespace/ExternalNamespaceTreeUtils.java b/dac/backend/src/main/java/com/dremio/dac/model/namespace/ExternalNamespaceTreeUtils.java
new file mode 100644
index 0000000000..0bbcef32b1
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/dac/model/namespace/ExternalNamespaceTreeUtils.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.model.namespace;
+
+import java.util.List;
+import java.util.Objects;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import com.dremio.dac.explore.model.Dataset;
+import com.dremio.dac.model.folder.Folder;
+import com.dremio.dac.model.sources.PhysicalDataset;
+import com.dremio.dac.model.sources.SourceName;
+import com.dremio.exec.catalog.TableVersionContext;
+import com.dremio.exec.catalog.VersionedDatasetId;
+import com.dremio.plugins.ExternalNamespaceEntry;
+import com.dremio.service.namespace.space.proto.FolderConfig;
+
+/**
+ * Helpers for making NamespaceTrees from external catalogs (e.g. Nessie)
+ */
+public final class ExternalNamespaceTreeUtils {
+ private ExternalNamespaceTreeUtils() {}
+
+ public static NamespaceTree namespaceTreeOf(
+ SourceName sourceName, List entries) {
+ Objects.requireNonNull(sourceName);
+
+ final NamespaceTree namespaceTree = new NamespaceTree();
+ entries.forEach(
+ entry -> {
+ final String id = entry.getId();
+ final String name = entry.getName();
+ final List namespace = entry.getNamespace();
+ final List fullPathList =
+ Stream.of(Stream.of(sourceName.getName()), entry.getNameElements().stream())
+ .flatMap(Function.identity())
+ .collect(Collectors.toList());
+ final TableVersionContext tableVersionContext = entry.getTableVersionContext();
+ final String versionedDatasetId =
+ (id == null || tableVersionContext == null)
+ ? null
+ : VersionedDatasetId.newBuilder()
+ .setTableKey(fullPathList)
+ .setContentId(id)
+ .setTableVersionContext(tableVersionContext)
+ .build()
+ .asString();
+
+ switch (entry.getType()) {
+ case UNKNOWN:
+ break; // Unknown types are ignored
+ case FOLDER:
+ namespaceTree.addFolder(
+ Folder.newInstance(
+ sourceName,
+ new FolderConfig().setFullPathList(fullPathList).setName(name),
+ versionedDatasetId));
+ break;
+ case ICEBERG_TABLE:
+ namespaceTree.addPhysicalDataset(
+ PhysicalDataset.newInstance(sourceName, namespace, name, versionedDatasetId));
+ break;
+ case ICEBERG_VIEW:
+ namespaceTree.addDataset(
+ Dataset.newInstance(sourceName, namespace, name, versionedDatasetId));
+ break;
+ default:
+ throw new IllegalStateException("Unexpected value: " + entry.getType());
+ }
+ });
+
+ return namespaceTree;
+ }
+}
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/namespace/NamespaceTree.java b/dac/backend/src/main/java/com/dremio/dac/model/namespace/NamespaceTree.java
index 7743972034..ad3861c4b4 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/namespace/NamespaceTree.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/namespace/NamespaceTree.java
@@ -15,7 +15,6 @@
*/
package com.dremio.dac.model.namespace;
-import static com.dremio.service.namespace.proto.NameSpaceContainer.Type.FUNCTION;
import static com.dremio.service.namespace.proto.NameSpaceContainer.Type.SOURCE;
import java.util.ArrayList;
@@ -49,6 +48,7 @@
import com.dremio.file.File;
import com.dremio.file.FilePath;
import com.dremio.file.SourceFilePath;
+import com.dremio.options.OptionManager;
import com.dremio.service.namespace.NamespaceException;
import com.dremio.service.namespace.NamespaceNotFoundException;
import com.dremio.service.namespace.NamespaceUtils;
@@ -96,21 +96,22 @@ public static NamespaceTree newInstance(
Type rootEntityType,
CollaborationHelper collaborationService) throws NamespaceException, DatasetNotFoundException {
- return newInstance(datasetService, children, rootEntityType, collaborationService, null, null);
+ return newInstance(datasetService, children, rootEntityType, collaborationService, null, null, null);
}
public static NamespaceTree newInstance(
- final DatasetVersionMutator datasetService,
- List<NameSpaceContainer> children,
- Type rootEntityType,
- CollaborationHelper collaborationService,
- Boolean fileSystemSource,
- Boolean isImpersonationEnabled) throws NamespaceException, DatasetNotFoundException {
+ final DatasetVersionMutator datasetService,
+ List children,
+ Type rootEntityType,
+ CollaborationHelper collaborationService,
+ Boolean fileSystemSource,
+ Boolean isImpersonationEnabled,
+ OptionManager optionManager) throws NamespaceException, DatasetNotFoundException {
NamespaceTree result = new NamespaceTree();
result.setIsFileSystemSource(fileSystemSource);
result.setIsImpersonationEnabled(isImpersonationEnabled);
- populateInstance(result, datasetService, children, rootEntityType, collaborationService);
+ populateInstance(result, datasetService, children, rootEntityType, collaborationService, optionManager);
return result;
}
@@ -120,14 +121,14 @@ protected static void populateInstance(
DatasetVersionMutator datasetService,
List<NameSpaceContainer> children,
Type rootEntityType,
- CollaborationHelper collaborationService)
+ CollaborationHelper collaborationService, OptionManager optionManager)
throws NamespaceException, DatasetNotFoundException {
// get a list of all ids so we can fetch all collaboration tags in one search
final Map tags = new HashMap<>();
if (collaborationService != null) {
TagsSearchResult tagsInfo = collaborationService.getTagsForIds(children.stream().
- map(NamespaceUtils::getId).collect(Collectors.toSet()));
+ map(NamespaceUtils::getIdOrNull).collect(Collectors.toSet()));
tags.putAll(tagsInfo.getTags());
tree.setCanTagsBeSkipped(tagsInfo.getCanTagsBeSkipped());
@@ -157,7 +158,7 @@ protected static void populateInstance(
datasetPath.getDataset(),
vds.getSql(),
vds,
- datasetService.getJobsCount(datasetPath.toNamespaceKey()),
+ datasetService.getJobsCount(datasetPath.toNamespaceKey(), optionManager),
rootEntityType,
tags.get(datasetConfig.getId().getId())
);
@@ -170,7 +171,7 @@ protected static void populateInstance(
fileDSId,
new FilePath(container.getFullPathList()),
fileFormat,
- datasetService.getJobsCount(datasetPath.toNamespaceKey()), false, true,
+ datasetService.getJobsCount(datasetPath.toNamespaceKey(), optionManager), false, true,
fileFormat.getFileType() != FileType.UNKNOWN, datasetConfig.getType(),
tags.get(fileDSId)
);
@@ -183,7 +184,7 @@ protected static void populateInstance(
sourceFileDSId,
new SourceFilePath(container.getFullPathList()),
sourceFileFormat,
- datasetService.getJobsCount(datasetPath.toNamespaceKey()), false, false,
+ datasetService.getJobsCount(datasetPath.toNamespaceKey(), optionManager), false, false,
sourceFileFormat.getFileType() != FileType.UNKNOWN, datasetConfig.getType(),
tags.get(sourceFileDSId)
);
@@ -207,7 +208,7 @@ protected static void populateInstance(
new PhysicalDatasetResourcePath(new SourceName(container.getFullPathList().get(0)), path),
new PhysicalDatasetName(path.getFileName().getName()),
DatasetsUtil.toPhysicalDatasetConfig(container.getDataset()),
- datasetService.getJobsCount(datasetPath.toNamespaceKey()),
+ datasetService.getJobsCount(datasetPath.toNamespaceKey(), optionManager),
tags.get(container.getDataset().getId().getId())
);
break;
@@ -333,4 +334,7 @@ public void setIsImpersonationEnabled(final Boolean isImpersonationEnabled) {
this.isImpersonationEnabled = isImpersonationEnabled;
}
+ public long totalCount() {
+ return getFolders().size() + getDatasets().size() + getFiles().size() + getPhysicalDatasets().size();
+ }
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/sources/FormatTools.java b/dac/backend/src/main/java/com/dremio/dac/model/sources/FormatTools.java
index 51f99da858..abec3895a9 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/sources/FormatTools.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/sources/FormatTools.java
@@ -79,6 +79,9 @@
import com.google.common.base.Throwables;
import com.google.common.collect.Iterators;
+import io.opentelemetry.api.trace.Span;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
+
/**
* A resource focused on guessing, previewing and applying formats to files and folders.
*/
@@ -137,7 +140,10 @@ public FileFormat getOrDetectFormat(NamespacePath folderPath, DatasetType expect
// determine whether folder or file.
final boolean isFolder;
- switch(physicalDatasetConfig.getType()) {
+ final DatasetType datasetType = physicalDatasetConfig.getType();
+ Span.current().setAttribute("formattools.getOrDetectFormat.datasetType", datasetType.name());
+
+ switch(datasetType) {
case PHYSICAL_DATASET_HOME_FILE:
case PHYSICAL_DATASET_SOURCE_FILE:
isFolder = false;
@@ -157,7 +163,7 @@ public FileFormat getOrDetectFormat(NamespacePath folderPath, DatasetType expect
fileFormat.setVersion(physicalDatasetConfig.getTag());
return fileFormat;
} catch (PhysicalDatasetNotFoundException nfe) {
- // ignore and fall through to detect the format so we don't have extra nested blocks.
+ // ignore and fall through to detect the format, so we don't have extra nested blocks.
}
final NamespaceKey key = folderPath.toNamespaceKey();
@@ -187,7 +193,7 @@ private FileFormat detectFileFormat(NamespaceKey key) {
}
}
} catch(IOException ex) {
- // we could return unknown but if there no files, what's the point.
+ // we could return unknown but if there are no files, what's the point.
throw UserException.ioExceptionError(ex)
.message("No files detected or unable to read file format with selected option.")
.build(logger);
@@ -205,7 +211,7 @@ private FileFormat detectFileFormat(NamespaceKey key) {
return asFormat(key, path,false, nullableFileFormat.get());
}
} catch(IOException ex) {
- // we could return unknown but if there no files, what's the point.
+ // we could return unknown but if there are no files, what's the point.
throw UserException.ioExceptionError(ex)
.message("No files detected or unable to read file format with selected option.")
.build(logger);
@@ -278,6 +284,7 @@ private static FileFormat asLayerFormat(NamespaceKey key, FileFormat fileFormat)
return FileFormat.getForFolder(config);
}
+ @WithSpan
public JobDataFragment previewData(FileFormat format, NamespacePath namespacePath, boolean useFormatLocation) {
final NamespaceKey key = namespacePath.toNamespaceKey();
final FileSystemPlugin<?> plugin = getPlugin(key);
@@ -294,7 +301,7 @@ public JobDataFragment previewData(FileFormat format, NamespacePath namespacePat
try {
attributes = fs.getFileAttributes(path);
} catch(IOException ex) {
- // we could return unknown but if there no files, what's the point.
+ // we could return unknown but if there are no files, what's the point.
throw new IllegalStateException("No files detected or unable to read data.", ex);
}
@@ -426,7 +433,7 @@ private JobDataFragment getData(FormatPlugin formatPlugin, FileSystem filesystem
}
- private final FileSystemPlugin<?> getPlugin(NamespaceKey key) {
+ private FileSystemPlugin<?> getPlugin(NamespaceKey key) {
StoragePlugin plugin = catalogService.getSource(key.getRoot());
if(plugin instanceof FileSystemPlugin) {
return (FileSystemPlugin<?>) plugin;
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/sources/PhysicalDataset.java b/dac/backend/src/main/java/com/dremio/dac/model/sources/PhysicalDataset.java
index 297853f3c3..e637ef00d8 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/sources/PhysicalDataset.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/sources/PhysicalDataset.java
@@ -18,9 +18,13 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.UUID;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
import com.dremio.dac.model.common.AddressableResource;
import com.dremio.dac.model.common.ResourcePath;
+import com.dremio.dac.model.common.RootEntity;
import com.dremio.dac.model.job.JobFilters;
import com.dremio.service.jobs.JobIndexKeys;
import com.dremio.service.namespace.physicaldataset.proto.PhysicalDatasetConfig;
@@ -85,6 +89,30 @@ public Map getLinks() {
return links;
}
+ public static PhysicalDataset newInstance(RootEntity rootEntity,
+ List<String> folderNamespace,
+ String folderName,
+ String id) {
+ List<String> fullPathList = Stream.of(
+ Stream.of(rootEntity.getName()),
+ folderNamespace.stream(),
+ Stream.of(folderName))
+ .reduce(Stream::concat)
+ .orElseThrow(IllegalStateException::new)
+ .collect(Collectors.toList());
+
+ final PhysicalDatasetPath path = new PhysicalDatasetPath(fullPathList);
+
+ return new PhysicalDataset(
+ new PhysicalDatasetResourcePath(new SourceName(rootEntity.getName()), path),
+ new PhysicalDatasetName(path.getFileName().getName()),
+ new PhysicalDatasetConfig()
+ .setId((id == null) ? UUID.randomUUID().toString() : id)
+ .setFullPathList(fullPathList),
+ null,
+ null);
+ }
+
public List getTags() {
return tags;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/sources/VirtualDatasetPath.java b/dac/backend/src/main/java/com/dremio/dac/model/sources/VirtualDatasetPath.java
new file mode 100644
index 0000000000..8942e86186
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/dac/model/sources/VirtualDatasetPath.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.model.sources;
+
+import java.util.List;
+
+import com.dremio.dac.model.common.LeafEntity;
+import com.dremio.dac.model.common.NamespacePath;
+import com.dremio.dac.model.common.RootEntity;
+import com.dremio.dac.model.folder.FolderName;
+import com.dremio.file.FileName;
+
+public class VirtualDatasetPath extends NamespacePath {
+ public VirtualDatasetPath(SourceName source, List<FolderName> folderPath, FileName fileName) {
+ super(source, folderPath, fileName);
+ }
+
+ public VirtualDatasetPath(List<String> path) {
+ super(path);
+ }
+
+ @Override
+ public RootEntity getRoot(String name) throws IllegalArgumentException {
+ return new SourceName(name);
+ }
+
+ @Override
+ public LeafEntity getLeaf(String name) throws IllegalArgumentException {
+ return new FileName(name);
+ }
+
+ @Override
+ public int getMinimumComponents() {
+ return 2;
+ }
+
+ @Override
+ public SourceName getRoot() {
+ return (SourceName) super.getRoot();
+ }
+
+ public FileName getFileName() {
+ return new FileName(getLeaf().getName());
+ }
+
+ public SourceName getSourceName() {
+ return new SourceName(getRoot().getName());
+ }
+
+ @Override
+ protected String getDefaultUrlPathType() {
+ return "dataset";
+ }
+}
diff --git a/dac/backend/src/main/java/com/dremio/dac/model/userpreferences/PreferenceData.java b/dac/backend/src/main/java/com/dremio/dac/model/userpreferences/PreferenceData.java
index 0a86b3bd12..1e91ab413b 100644
--- a/dac/backend/src/main/java/com/dremio/dac/model/userpreferences/PreferenceData.java
+++ b/dac/backend/src/main/java/com/dremio/dac/model/userpreferences/PreferenceData.java
@@ -23,11 +23,11 @@
/**
* Class PreferenceData
*/
-public class PreferenceData {
+public final class PreferenceData {
private final UserPreferenceProto.PreferenceType preferenceType;
private final List entities;
- public PreferenceData(UserPreferenceProto.PreferenceType preferenceType, List entities) {
+ public PreferenceData(final UserPreferenceProto.PreferenceType preferenceType, final List entities) {
this.preferenceType = preferenceType;
this.entities = entities;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/BackupResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/BackupResource.java
index 3798318045..5c5a3ff414 100644
--- a/dac/backend/src/main/java/com/dremio/dac/resource/BackupResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/BackupResource.java
@@ -73,7 +73,7 @@ public BackupStats createBackup(BackupOptions options) throws IOException, Names
final FileSystem fs = HadoopFileSystem.get(backupDirPath, new Configuration());
// Checking if directory already exists and that the daemon can access it
BackupRestoreUtil.checkOrCreateDirectory(fs, backupDirPath);
- return BackupRestoreUtil.createBackup(fs, options, kvStoreProvider, fileStore.get().getConf(), null);
+ return BackupRestoreUtil.createBackup(fs, options, kvStoreProvider, fileStore.get().getConfForBackup(), null);
}
@@ -99,7 +99,7 @@ public BackupStats backupUploads(
final com.dremio.io.file.Path backupRootDirPath = backupDestinationDir.getParent();
final FileSystem fs = HadoopFileSystem.get(backupRootDirPath, new Configuration());
final BackupStats backupStats = new BackupStats(options.getBackupDestinationDirectory(), 0, 0);
- BackupRestoreUtil.backupUploadedFiles(fs, backupDestinationDir, fileStore.get().getConf(), backupStats);
+ BackupRestoreUtil.backupUploadedFiles(fs, backupDestinationDir, fileStore.get().getConfForBackup(), backupStats);
return backupStats;
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/FolderResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/FolderResource.java
index 1ac8d5a3ee..8524dd06eb 100644
--- a/dac/backend/src/main/java/com/dremio/dac/resource/FolderResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/FolderResource.java
@@ -16,7 +16,6 @@
package com.dremio.dac.resource;
import static com.dremio.service.namespace.proto.NameSpaceContainer.Type.SPACE;
-import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import java.util.Arrays;
import java.util.ConcurrentModificationException;
@@ -35,10 +34,10 @@
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
-import com.dremio.common.exceptions.UserException;
import com.dremio.common.utils.PathUtils;
import com.dremio.dac.annotations.RestResource;
import com.dremio.dac.annotations.Secured;
+import com.dremio.dac.model.common.ResourcePath;
import com.dremio.dac.model.folder.Folder;
import com.dremio.dac.model.folder.FolderName;
import com.dremio.dac.model.folder.FolderPath;
@@ -50,7 +49,10 @@
import com.dremio.dac.service.errors.ClientErrorException;
import com.dremio.dac.service.errors.DatasetNotFoundException;
import com.dremio.dac.service.errors.FolderNotFoundException;
+import com.dremio.dac.service.errors.NotSupportedException;
import com.dremio.dac.util.ResourceUtil;
+import com.dremio.exec.catalog.CatalogFeatures;
+import com.dremio.options.OptionManager;
import com.dremio.service.namespace.NamespaceException;
import com.dremio.service.namespace.NamespaceNotFoundException;
import com.dremio.service.namespace.NamespaceService;
@@ -65,29 +67,32 @@
@RolesAllowed({"admin", "user"})
@Path("/space/{space}")
public class FolderResource {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FolderResource.class);
-
private final DatasetVersionMutator datasetService;
private final NamespaceService namespaceService;
private final CollaborationHelper collaborationHelper;
private final SpaceName spaceName;
+ private final OptionManager optionManager;
@Inject
public FolderResource(
DatasetVersionMutator datasetService,
NamespaceService namespaceService,
CollaborationHelper collaborationHelper,
- @PathParam("space") SpaceName spaceName) {
+ @PathParam("space") SpaceName spaceName,
+ OptionManager optionManager) {
this.datasetService = datasetService;
this.namespaceService = namespaceService;
this.collaborationHelper = collaborationHelper;
this.spaceName = spaceName;
+ this.optionManager = optionManager;
}
@GET
@Path("/folder/{path: .*}")
@Produces(MediaType.APPLICATION_JSON)
public Folder getFolder(@PathParam("path") String path, @QueryParam("includeContents") @DefaultValue("true") boolean includeContents) throws NamespaceException, FolderNotFoundException, DatasetNotFoundException {
+ throwIfNotSupported();
+
FolderPath folderPath = FolderPath.fromURLPath(spaceName, path);
try {
final FolderConfig folderConfig = namespaceService.getFolder(folderPath.toNamespaceKey());
@@ -104,6 +109,8 @@ public Folder getFolder(@PathParam("path") String path, @QueryParam("includeCont
@Path("/folder/{path: .*}")
@Produces(MediaType.APPLICATION_JSON)
public void deleteFolder(@PathParam("path") String path, @QueryParam("version") String version) throws NamespaceException, FolderNotFoundException {
+ throwIfNotSupported();
+
FolderPath folderPath = FolderPath.fromURLPath(spaceName, path);
if (version == null) {
throw new ClientErrorException(GenericErrorMessage.MISSING_VERSION_PARAM_MSG);
@@ -123,6 +130,8 @@ public void deleteFolder(@PathParam("path") String path, @QueryParam("version")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Folder createFolder(FolderName name, @PathParam("path") String path) throws NamespaceException {
+ throwIfNotSupported();
+
String fullPath = PathUtils.toFSPathString(Arrays.asList(path, name.toString()));
FolderPath folderPath = FolderPath.fromURLPath(spaceName, fullPath);
@@ -146,14 +155,14 @@ protected NamespaceTree newNamespaceTree(List children) thro
return NamespaceTree.newInstance(datasetService, children, SPACE, collaborationHelper);
}
- @POST
- @Produces(APPLICATION_JSON)
- @Path("/rename_folder/{path: .*}")
- public Folder renameFolder(@PathParam("path") String path, @QueryParam("renameTo") String renameTo)
- throws NamespaceException, FolderNotFoundException {
- throw UserException.unsupportedError()
- .message("Renaming a folder is not supported")
- .build(logger);
+ protected OptionManager getOptionManager() {
+ return optionManager;
}
+ private void throwIfNotSupported() throws NotSupportedException {
+ CatalogFeatures features = CatalogFeatures.get(optionManager);
+ if (!features.isFeatureEnabled(CatalogFeatures.Feature.SPACE)) {
+ throw new NotSupportedException(ResourcePath.defaultImpl("/space"));
+ }
+ }
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/HomeResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/HomeResource.java
index b4acca126b..5a390a6eb4 100644
--- a/dac/backend/src/main/java/com/dremio/dac/resource/HomeResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/HomeResource.java
@@ -53,14 +53,12 @@
import com.dremio.common.utils.SqlUtils;
import com.dremio.dac.annotations.RestResource;
import com.dremio.dac.annotations.Secured;
-import com.dremio.dac.explore.DatasetsResource;
import com.dremio.dac.explore.model.Dataset;
import com.dremio.dac.explore.model.DatasetName;
import com.dremio.dac.explore.model.DatasetPath;
import com.dremio.dac.explore.model.DatasetResourcePath;
import com.dremio.dac.explore.model.DatasetVersionResourcePath;
import com.dremio.dac.explore.model.FileFormatUI;
-import com.dremio.dac.explore.model.InitialPreviewResponse;
import com.dremio.dac.homefiles.HomeFileSystemStoragePlugin;
import com.dremio.dac.homefiles.HomeFileTool;
import com.dremio.dac.model.common.DACException;
@@ -71,10 +69,10 @@
import com.dremio.dac.model.job.JobDataFragment;
import com.dremio.dac.model.job.JobDataWrapper;
import com.dremio.dac.model.namespace.NamespaceTree;
-import com.dremio.dac.model.sources.FormatTools;
import com.dremio.dac.model.spaces.Home;
import com.dremio.dac.model.spaces.HomeName;
import com.dremio.dac.model.spaces.HomePath;
+import com.dremio.dac.model.spaces.HomeResourcePath;
import com.dremio.dac.proto.model.dataset.VirtualDatasetUI;
import com.dremio.dac.server.BufferAllocatorFactory;
import com.dremio.dac.server.GenericErrorMessage;
@@ -85,14 +83,14 @@
import com.dremio.dac.service.datasets.DatasetVersionMutator;
import com.dremio.dac.service.errors.ClientErrorException;
import com.dremio.dac.service.errors.DatasetNotFoundException;
-import com.dremio.dac.service.errors.DatasetVersionNotFoundException;
import com.dremio.dac.service.errors.FileNotFoundException;
import com.dremio.dac.service.errors.FolderNotFoundException;
import com.dremio.dac.service.errors.HomeNotFoundException;
-import com.dremio.dac.service.errors.NewDatasetQueryException;
+import com.dremio.dac.service.errors.NotSupportedException;
import com.dremio.dac.service.errors.SourceNotFoundException;
import com.dremio.dac.util.JobRequestUtil;
import com.dremio.dac.util.ResourceUtil;
+import com.dremio.exec.catalog.CatalogFeatures;
import com.dremio.exec.catalog.DatasetCatalog;
import com.dremio.exec.server.options.ProjectOptionManager;
import com.dremio.file.File;
@@ -121,7 +119,7 @@
import com.dremio.service.namespace.space.proto.HomeConfig;
/**
- * Resource for user home.
+ * Resource for user home's space.
*/
@RestResource
@Secured
@@ -137,12 +135,10 @@ public class HomeResource extends BaseResourceWithAllocator {
private final CollaborationHelper collaborationService;
private final HomeName homeName;
private final HomePath homePath;
- private final DatasetsResource datasetsResource;
- private final HomeFileTool fileStore;
+ private final HomeFileTool homeFileTool;
private final CatalogServiceHelper catalogServiceHelper;
private final DatasetCatalog datasetCatalog;
private final ProjectOptionManager projectOptionManager;
- private final FormatTools formatTools;
@Inject
public HomeResource(
@@ -150,13 +146,11 @@ public HomeResource(
DatasetVersionMutator datasetService,
@Context SecurityContext securityContext,
JobsService jobsService,
- DatasetsResource datasetsResource,
- HomeFileTool fileStore,
+ HomeFileTool homeFileTool,
CatalogServiceHelper catalogServiceHelper,
DatasetCatalog datasetCatalog,
ProjectOptionManager projectOptionManager,
CollaborationHelper collaborationService,
- FormatTools formatTools,
@PathParam("homeName") HomeName homeName,
BufferAllocatorFactory allocatorFactory)
{
@@ -165,29 +159,18 @@ public HomeResource(
this.datasetService = datasetService;
this.securityContext = securityContext;
this.jobsService = jobsService;
- this.datasetsResource = datasetsResource;
this.collaborationService = collaborationService;
this.homeName = homeName;
this.homePath = new HomePath(homeName);
- this.fileStore = fileStore;
+ this.homeFileTool = homeFileTool;
this.catalogServiceHelper = catalogServiceHelper;
this.datasetCatalog = datasetCatalog;
this.projectOptionManager = projectOptionManager;
- this.formatTools = formatTools;
- }
-
- protected Dataset newDataset(DatasetResourcePath resourcePath,
- DatasetVersionResourcePath versionedResourcePath,
- DatasetName datasetName,
- String sql,
- VirtualDatasetUI datasetConfig,
- int jobCount) {
- return Dataset.newInstance(resourcePath, versionedResourcePath, datasetName, sql, datasetConfig, jobCount, null);
}
protected File newFile(String id, NamespacePath filePath, FileFormat fileFormat, Integer jobCount,
- boolean isStaged, boolean isHomeFile, boolean isQueryable, DatasetType datasetType) throws Exception {
- return File.newInstance(id, filePath, fileFormat, jobCount, isStaged, isHomeFile, isQueryable, null);
+ boolean isStaged, boolean isQueryable, DatasetType datasetType) throws Exception {
+ return File.newInstance(id, filePath, fileFormat, jobCount, isStaged, true, isQueryable, null);
}
protected Folder newFolder(FolderPath folderPath, FolderConfig folderConfig, NamespaceTree contents) throws NamespaceNotFoundException {
@@ -205,6 +188,8 @@ protected NamespaceTree newNamespaceTree(List children) thro
@GET
@Produces(MediaType.APPLICATION_JSON)
public Home getHome(@QueryParam("includeContents") @DefaultValue("true") boolean includeContents) throws NamespaceException, HomeNotFoundException, DatasetNotFoundException {
+ throwIfNotSupported();
+
try {
checkHomeSpaceExists(homePath);
long dsCount = namespaceService.getDatasetCount(homePath.toNamespaceKey(), BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH).getCount();
@@ -219,25 +204,6 @@ public Home getHome(@QueryParam("includeContents") @DefaultValue("true") boolean
}
}
- @GET
- @Path("dataset/{path: .*}")
- @Produces(MediaType.APPLICATION_JSON)
- public Dataset getDataset(@PathParam("path") String path)
- throws NamespaceException, FileNotFoundException, DatasetNotFoundException {
- DatasetPath datasetPath = DatasetPath.fromURLPath(homeName, path);
- final DatasetConfig datasetConfig = namespaceService.getDataset(datasetPath.toNamespaceKey());
- final VirtualDatasetUI vds = datasetService.get(datasetPath, datasetConfig.getVirtualDataset().getVersion());
- return newDataset(
- new DatasetResourcePath(datasetPath),
- new DatasetVersionResourcePath(datasetPath, vds.getVersion()),
- datasetPath.getDataset(),
- vds.getSql(),
- vds,
- datasetService.getJobsCount(datasetPath.toNamespaceKey())
- );
- }
-
-
@POST
@Path("upload_start/{path: .*}")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@@ -247,6 +213,8 @@ public File uploadFile(@PathParam("path") String path,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@FormDataParam("fileName") FileName fileName,
@QueryParam("extension") String extension) throws Exception {
+ throwIfNotSupported();
+
checkFileUploadPermissions();
// add some validation
@@ -264,7 +232,7 @@ public File uploadFile(@PathParam("path") String path,
final FileConfig config = new FileConfig();
try {
// upload file to staging area
- final com.dremio.io.file.Path stagingLocation = fileStore.stageFile(filePath, extension, fileInputStream);
+ final com.dremio.io.file.Path stagingLocation = homeFileTool.stageFile(filePath, extension, fileInputStream);
config.setLocation(stagingLocation.toString());
config.setName(filePath.getLeaf().getName());
config.setCtime(System.currentTimeMillis());
@@ -274,19 +242,20 @@ public File uploadFile(@PathParam("path") String path,
} catch (IOException ioe) {
throw new DACException("Error writing to file at " + filePath, ioe);
}
- final File file = newFile(filePath.toUrlPath(),
- filePath, FileFormat.getForFile(config), 0, true, true, true,
+ return newFile(filePath.toUrlPath(),
+ filePath, FileFormat.getForFile(config), 0, true, true,
DatasetType.PHYSICAL_DATASET_HOME_FILE
);
- return file;
}
@POST
@Path("upload_cancel/{path: .*}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
- public void cancelUploadFile(FileFormat fileFormat, @PathParam("path") String path) throws IOException, DACException {
- fileStore.deleteFile(fileFormat.getLocation());
+ public void cancelUploadFile(FileFormat fileFormat, @PathParam("path") String path) throws IOException {
+ throwIfNotSupported();
+
+ homeFileTool.deleteFile(fileFormat.getLocation());
}
@POST
@@ -294,6 +263,8 @@ public void cancelUploadFile(FileFormat fileFormat, @PathParam("path") String pa
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public File finishUploadFile(FileFormat fileFormat, @PathParam("path") String path) throws Exception {
+ throwIfNotSupported();
+
checkFileUploadPermissions();
final FilePath filePath = FilePath.fromURLPath(homeName, path);
@@ -303,8 +274,7 @@ public File finishUploadFile(FileFormat fileFormat, @PathParam("path") String pa
.build(logger);
}
final String fileName = filePath.getFileName().getName();
- final com.dremio.io.file.Path finalLocation = fileStore.saveFile(fileFormat.getLocation(), filePath, fileFormat.getFileType());
- // save new name and location, full path
+ final com.dremio.io.file.Path finalLocation = homeFileTool.saveFile(fileFormat.getLocation(), filePath, fileFormat.getFileType());
fileFormat.setLocation(finalLocation.toString());
fileFormat.setName(fileName);
fileFormat.setFullPath(filePath.toPathList());
@@ -318,7 +288,7 @@ public File finishUploadFile(FileFormat fileFormat, @PathParam("path") String pa
filePath,
fileFormat,
datasetService.getJobsCount(filePath.toNamespaceKey()),
- false, true, false,
+ false, false,
DatasetType.PHYSICAL_DATASET_HOME_FILE
);
}
@@ -338,8 +308,9 @@ protected void checkFileUploadPermissions() {
@Consumes(MediaType.APPLICATION_JSON)
public JobDataFragment previewFormatSettingsStaging(FileFormat fileFormat, @PathParam("path") String path)
throws FileNotFoundException, SourceNotFoundException {
+ throwIfNotSupported();
- if (!fileStore.validStagingLocation(com.dremio.io.file.Path.of(fileFormat.getLocation()))) {
+ if (!homeFileTool.validStagingLocation(com.dremio.io.file.Path.of(fileFormat.getLocation()))) {
throw new IllegalArgumentException("Invalid staging location provided");
}
@@ -366,6 +337,7 @@ public JobDataFragment previewFormatSettingsStaging(FileFormat fileFormat, @Path
@Consumes(MediaType.APPLICATION_JSON)
public JobDataFragment previewFormatSettings(FileFormat fileFormat, @PathParam("path") String path)
throws FileNotFoundException, SourceNotFoundException {
+ throwIfNotSupported();
FilePath filePath = FilePath.fromURLPath(homeName, path);
logger.debug("filePath: " + filePath.toPathString());
@@ -387,20 +359,21 @@ public JobDataFragment previewFormatSettings(FileFormat fileFormat, @PathParam("
@Produces(MediaType.APPLICATION_JSON)
public File getFile(@PathParam("path") String path)
throws Exception {
+ throwIfNotSupported();
+
FilePath filePath = FilePath.fromURLPath(homeName, path);
try {
final DatasetConfig datasetConfig = namespaceService.getDataset(filePath.toNamespaceKey());
final FileConfig fileConfig = toFileConfig(datasetConfig);
- final File file = newFile(
+ return newFile(
datasetConfig.getId().getId(),
filePath,
FileFormat.getForFile(fileConfig),
datasetService.getJobsCount(filePath.toNamespaceKey()),
- false, true,
+ false,
fileConfig.getType() != FileType.UNKNOWN,
DatasetType.PHYSICAL_DATASET_HOME_FILE
);
- return file;
} catch (NamespaceNotFoundException nfe) {
throw new FileNotFoundException(filePath, nfe);
}
@@ -410,11 +383,13 @@ public File getFile(@PathParam("path") String path)
@Path("file/{path: .*}")
@Produces(MediaType.APPLICATION_JSON)
public void deleteFile(@PathParam("path") String path, @QueryParam("version") String version) throws NamespaceException, DACException {
- FilePath filePath = FilePath.fromURLPath(homeName, path);
+ throwIfNotSupported();
+
if (version == null) {
throw new ClientErrorException(GenericErrorMessage.MISSING_VERSION_PARAM_MSG);
}
+ FilePath filePath = FilePath.fromURLPath(homeName, path);
try {
catalogServiceHelper.deleteHomeDataset(namespaceService.getDataset(filePath.toNamespaceKey()), version, filePath.toNamespaceKey().getPathComponents());
} catch (IOException ioe) {
@@ -424,29 +399,13 @@ public void deleteFile(@PathParam("path") String path, @QueryParam("version") St
}
}
- @POST
- @Path("file_rename/{path: .*}")
- @Produces(MediaType.APPLICATION_JSON)
- public File renameFile(@PathParam("path") String path, @QueryParam("renameTo") FileName renameTo) throws Exception {
- FilePath filePath = FilePath.fromURLPath(homeName, path);
- final FilePath newFilePath = filePath.rename(renameTo.getName());
- final DatasetConfig datasetConfig = namespaceService.renameDataset(filePath.toNamespaceKey(), newFilePath.toNamespaceKey());
- final FileConfig fileConfig = toFileConfig(datasetConfig);
- return newFile(
- datasetConfig.getId().getId(),
- newFilePath,
- FileFormat.getForFile(fileConfig),
- datasetService.getJobsCount(filePath.toNamespaceKey()),
- false, true, false,
- DatasetType.PHYSICAL_DATASET_HOME_FILE
- );
- }
-
@GET
@Path("file_format/{path: .*}")
@Produces(MediaType.APPLICATION_JSON)
public FileFormatUI getFormatSettings(@PathParam("path") String path)
throws FileNotFoundException, HomeNotFoundException, NamespaceException {
+ throwIfNotSupported();
+
FilePath filePath = FilePath.fromURLPath(homeName, path);
final FileConfig fileConfig = toFileConfig(namespaceService.getDataset(filePath.toNamespaceKey()));
return new FileFormatUI(FileFormat.getForFile(fileConfig), filePath);
@@ -458,6 +417,8 @@ public FileFormatUI getFormatSettings(@PathParam("path") String path)
@Consumes(MediaType.APPLICATION_JSON)
public FileFormatUI saveFormatSettings(FileFormat fileFormat, @PathParam("path") String path)
throws FileNotFoundException, HomeNotFoundException, NamespaceException {
+ throwIfNotSupported();
+
FilePath filePath = FilePath.fromURLPath(homeName, path);
// merge file configs
final DatasetConfig existingDSConfig = namespaceService.getDataset(filePath.toNamespaceKey());
@@ -478,6 +439,8 @@ public FileFormatUI saveFormatSettings(FileFormat fileFormat, @PathParam("path")
@Path("/folder/{path: .*}")
@Produces(MediaType.APPLICATION_JSON)
public Folder getFolder(@PathParam("path") String path, @QueryParam("includeContents") @DefaultValue("true") boolean includeContents) throws Exception {
+ throwIfNotSupported();
+
FolderPath folderPath = FolderPath.fromURLPath(homeName, path);
try {
final FolderConfig folderConfig = namespaceService.getFolder(folderPath.toNamespaceKey());
@@ -494,6 +457,8 @@ public Folder getFolder(@PathParam("path") String path, @QueryParam("includeCont
@Path("/folder/{path: .*}")
@Produces(MediaType.APPLICATION_JSON)
public void deleteFolder(@PathParam("path") String path, @QueryParam("version") String version) throws NamespaceException, FolderNotFoundException {
+ throwIfNotSupported();
+
FolderPath folderPath = FolderPath.fromURLPath(homeName, path);
if (version == null) {
throw new ClientErrorException(GenericErrorMessage.MISSING_VERSION_PARAM_MSG);
@@ -513,6 +478,8 @@ public void deleteFolder(@PathParam("path") String path, @QueryParam("version")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Folder createFolder(FolderName name, @PathParam("path") String path) throws Exception {
+ throwIfNotSupported();
+
String fullPath = PathUtils.toFSPathString(Arrays.asList(path, name.toString()));
FolderPath folderPath = FolderPath.fromURLPath(homeName, fullPath);
@@ -528,17 +495,41 @@ public Folder createFolder(FolderName name, @PathParam("path") String path) thro
return newFolder(folderPath, folderConfig, null);
}
- @POST
- @Path("/new_untitled_from_file/{path: .*}")
+ @GET
+ @Path("dataset/{path: .*}")
@Produces(MediaType.APPLICATION_JSON)
- @Consumes(MediaType.APPLICATION_JSON)
- public InitialPreviewResponse createUntitledFromHomeFile(
- @PathParam("path") String path,
- @QueryParam("limit") Integer limit)
- throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
- return datasetsResource.createUntitledFromHomeFile(homeName, path, limit);
+ public Dataset getDataset(@PathParam("path") String path)
+ throws NamespaceException, FileNotFoundException, DatasetNotFoundException {
+ DatasetPath datasetPath = DatasetPath.fromURLPath(homeName, path);
+ final DatasetConfig datasetConfig = namespaceService.getDataset(datasetPath.toNamespaceKey());
+ final VirtualDatasetUI vds = datasetService.get(datasetPath, datasetConfig.getVirtualDataset().getVersion());
+ return newDataset(
+ new DatasetResourcePath(datasetPath),
+ new DatasetVersionResourcePath(datasetPath, vds.getVersion()),
+ datasetPath.getDataset(),
+ vds.getSql(),
+ vds,
+ datasetService.getJobsCount(datasetPath.toNamespaceKey())
+ );
}
+ protected Dataset newDataset(DatasetResourcePath resourcePath,
+ DatasetVersionResourcePath versionedResourcePath,
+ DatasetName datasetName,
+ String sql,
+ VirtualDatasetUI datasetConfig,
+ int jobCount) {
+ return Dataset.newInstance(resourcePath, versionedResourcePath, datasetName, sql, datasetConfig, jobCount, null);
+ }
+
+
protected void checkHomeSpaceExists(HomePath homePath) {
}
+
+ private void throwIfNotSupported() throws NotSupportedException {
+ CatalogFeatures features = CatalogFeatures.get(projectOptionManager);
+ if (!features.isFeatureEnabled(CatalogFeatures.Feature.HOME)) {
+ throw new NotSupportedException(new HomeResourcePath(homePath.getHomeName()));
+ }
+ }
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/JobResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/JobResource.java
index 17ea891446..2ed48255d5 100644
--- a/dac/backend/src/main/java/com/dremio/dac/resource/JobResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/JobResource.java
@@ -138,7 +138,7 @@ public NotificationResponse cancel() throws JobResourceNotFoundException {
.build());
return new NotificationResponse(ResponseType.OK, "Job cancellation requested");
} catch (JobNotFoundException e) {
- if (e.getErrorType() == JobNotFoundException.causeOfFailure.CANCEL_FAILED) {
+ if (e.getErrorType() == JobNotFoundException.CauseOfFailure.CANCEL_FAILED) {
throw new ConflictException(String.format("Job %s may have completed and cannot be canceled.", jobId.getId()));
} else {
throw JobResourceNotFoundException.fromJobNotFoundException(e);
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/NessieSourceResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/NessieSourceResource.java
new file mode 100644
index 0000000000..e81e31c713
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/NessieSourceResource.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.resource;
+
+import static com.dremio.exec.ExecConstants.NESSIE_SOURCE_API;
+import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+
+import javax.annotation.security.RolesAllowed;
+import javax.inject.Inject;
+import javax.ws.rs.NotFoundException;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+
+import org.projectnessie.client.api.NessieApi;
+import org.projectnessie.client.api.NessieApiV2;
+
+import com.dremio.common.exceptions.UserException;
+import com.dremio.dac.annotations.Secured;
+import com.dremio.dac.service.errors.NessieSourceNotValidException;
+import com.dremio.dac.service.errors.NessieSourceResourceException;
+import com.dremio.dac.service.errors.SourceNotFoundException;
+import com.dremio.exec.store.CatalogService;
+import com.dremio.exec.store.NessieApiProvider;
+import com.dremio.options.OptionManager;
+import com.dremio.options.Options;
+import com.dremio.services.nessie.proxy.ProxyV2TreeResource;
+import com.google.common.base.Preconditions;
+
+/**
+ * Resource providing the Nessie-as-a-source REST APIs.
+ */
+@Secured
+@RolesAllowed({"admin", "user"})
+@Path("/v2/source/{sourceName}/trees")
+@Options
+public class NessieSourceResource {
+
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(NessieSourceResource.class);
+ private CatalogService catalogService;
+ private OptionManager optionManager;
+
+ @Inject
+ public NessieSourceResource(
+ CatalogService catalogService,
+ OptionManager optionManager
+ ) {
+ this.catalogService = catalogService;
+ this.optionManager = optionManager;
+ };
+
+ @Path("/")
+ public ProxyV2TreeResource handle(@PathParam("sourceName") String sourceName) {
+ if (optionManager.getOption(NESSIE_SOURCE_API)) {
+ NessieApiProvider provider;
+ try {
+ provider = catalogService.getSource(sourceName);
+ } catch (UserException namespaceNotFoundException) {
+ logger.error(String.format("Cannot find source: %s", sourceName));
+ throw new SourceNotFoundException(sourceName, namespaceNotFoundException);
+ } catch (ClassCastException classCastException) {
+ logger.error(String.format("%s is not versioned source", sourceName));
+ throw new NessieSourceNotValidException(classCastException, String.format("%s is not versioned source", sourceName));
+ } catch (Exception exception) {
+ logger.error("Unexpected Error");
+ throw new NessieSourceResourceException(exception, "Unexpected Error", BAD_REQUEST);
+ }
+ NessieApi nessieApi = provider.getNessieApi();
+ Preconditions.checkArgument(nessieApi instanceof NessieApiV2, "nessieApi provided by NessieApiProvider is not V2. V2 is required.");
+ return getTreeResource(nessieApi);
+ } else {
+ logger.error(String.format("Using nessie-as-a-source is disabled. The support key '%s' must be enabled.", NESSIE_SOURCE_API.getOptionName()));
+ throw new NotFoundException(String.format("Using nessie-as-a-source is disabled. The support key '%s' must be enabled.", NESSIE_SOURCE_API.getOptionName()));
+ }
+ }
+
+ protected ProxyV2TreeResource getTreeResource(NessieApi nessieApi) {
+ return new V2TreeResource((NessieApiV2) nessieApi);
+ }
+}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/NessieTestSourceResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/NessieTestSourceResource.java
new file mode 100644
index 0000000000..bab5fd3f2f
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/NessieTestSourceResource.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.resource;
+
+import javax.inject.Inject;
+
+import org.projectnessie.client.api.NessieApi;
+import org.projectnessie.client.api.NessieApiV2;
+
+import com.dremio.exec.store.CatalogService;
+import com.dremio.options.OptionManager;
+import com.dremio.services.nessie.proxy.ProxyV2TreeResource;
+
+/**
+ * Test-only variant of NessieSourceResource that serves the plain proxy tree resource.
+ */
+
+public class NessieTestSourceResource extends NessieSourceResource {
+
+ @Inject
+ public NessieTestSourceResource(CatalogService catalogService, OptionManager optionManager) {
+ super(catalogService, optionManager);
+ }
+
+ @Override
+ protected ProxyV2TreeResource getTreeResource(NessieApi nessieApi) {
+ return new ProxyV2TreeResource((NessieApiV2) nessieApi);
+ }
+}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/PutSpaceResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/PutSpaceResource.java
deleted file mode 100644
index 5dc8580682..0000000000
--- a/dac/backend/src/main/java/com/dremio/dac/resource/PutSpaceResource.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (C) 2017-2019 Dremio Corporation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.dremio.dac.resource;
-
-import javax.annotation.security.RolesAllowed;
-import javax.inject.Inject;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-
-import com.dremio.dac.annotations.RestResource;
-import com.dremio.dac.annotations.Secured;
-import com.dremio.dac.model.spaces.Space;
-import com.dremio.dac.model.spaces.SpaceName;
-import com.dremio.dac.model.spaces.SpacePath;
-import com.dremio.service.namespace.BoundedDatasetCount;
-import com.dremio.service.namespace.NamespaceException;
-import com.dremio.service.namespace.NamespaceService;
-import com.dremio.service.namespace.proto.EntityId;
-import com.dremio.service.namespace.space.proto.SpaceConfig;
-import com.dremio.service.users.UserNotFoundException;
-
-/**
- * Rest resource for spaces.
- */
-@RestResource
-@Secured
-@RolesAllowed({"admin", "user"})
-@Path("/space/{spaceName}")
-public class PutSpaceResource {
- private final NamespaceService namespaceService;
- private final SpacePath spacePath;
-
- @Inject
- public PutSpaceResource(
- NamespaceService namespaceService,
- @PathParam("spaceName") SpaceName spaceName) {
- this.namespaceService = namespaceService;
- this.spacePath = new SpacePath(spaceName);
- }
-
- public static SpaceConfig addOrUpdateSpace(NamespaceService service,
- SpacePath spacePath, Space space)
- throws NamespaceException, UserNotFoundException {
-
- SpaceConfig spaceConfig = new SpaceConfig()
- .setId(space.getId() != null ? new EntityId(space.getId()) : null)
- .setName(space.getName())
- .setDescription(space.getDescription())
- .setTag(space.getVersion());
-
- service.addOrUpdateSpace(spacePath.toNamespaceKey(), spaceConfig);
- return service.getSpace(spacePath.toNamespaceKey());
- }
-
- @PUT
- @Produces(MediaType.APPLICATION_JSON)
- public Space putSpace(Space space) throws NamespaceException, UserNotFoundException {
- SpaceConfig spaceConfig = PutSpaceResource.addOrUpdateSpace(namespaceService, spacePath, space);
- return Space.newInstance(spaceConfig, null, namespaceService.getDatasetCount(spacePath.toNamespaceKey(), BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH).getCount());
- }
-}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/ResourceTreeResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/ResourceTreeResource.java
index 786eac8d89..49cfd85c03 100644
--- a/dac/backend/src/main/java/com/dremio/dac/resource/ResourceTreeResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/ResourceTreeResource.java
@@ -272,5 +272,4 @@ public List getSources() throws NamespaceException, Unsuppor
}
return resources;
}
-
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/SQLResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/SQLResource.java
index 6751fe9d5a..8e3cdd093d 100644
--- a/dac/backend/src/main/java/com/dremio/dac/resource/SQLResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/SQLResource.java
@@ -15,12 +15,6 @@
*/
package com.dremio.dac.resource;
-import static com.dremio.common.utils.SqlUtils.quoteIdentifier;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-
import javax.annotation.security.RolesAllowed;
import javax.inject.Inject;
import javax.ws.rs.Consumes;
@@ -31,27 +25,21 @@
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.SecurityContext;
-import org.apache.calcite.sql.advise.SqlAdvisor;
-import org.apache.calcite.sql.validate.SqlMoniker;
-import org.apache.calcite.sql.validate.SqlMonikerType;
-
import com.dremio.dac.annotations.RestResource;
import com.dremio.dac.annotations.Secured;
-import com.dremio.dac.explore.model.AnalyzeRequest;
import com.dremio.dac.explore.model.CreateFromSQL;
-import com.dremio.dac.explore.model.SuggestionResponse;
-import com.dremio.dac.explore.model.ValidationResponse;
import com.dremio.dac.model.job.JobDataFragment;
import com.dremio.dac.model.job.JobDataWrapper;
-import com.dremio.dac.model.job.QueryError;
import com.dremio.dac.server.BufferAllocatorFactory;
import com.dremio.dac.service.autocomplete.AutocompleteEngineProxy;
+import com.dremio.dac.service.autocomplete.AutocompleteV2Proxy;
+import com.dremio.dac.service.catalog.CatalogServiceHelper;
import com.dremio.dac.util.JobRequestUtil;
-import com.dremio.exec.planner.sql.SQLAnalyzer;
-import com.dremio.exec.planner.sql.SQLAnalyzerFactory;
import com.dremio.exec.server.SabotContext;
import com.dremio.exec.server.options.ProjectOptionManager;
import com.dremio.service.autocomplete.AutocompleteRequestImplementation;
+import com.dremio.service.autocomplete.AutocompleteV2Request;
+import com.dremio.service.autocomplete.AutocompleteV2Response;
import com.dremio.service.autocomplete.completions.Completions;
import com.dremio.service.job.QueryType;
import com.dremio.service.job.SqlQuery;
@@ -60,11 +48,10 @@
import com.dremio.service.jobs.CompletionListener;
import com.dremio.service.jobs.JobsService;
import com.dremio.service.namespace.NamespaceException;
-import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
/**
- * run external sql
+ * The REST resource that serves a query API, SQL autocomplete, and a functions list.
*/
@RestResource
@Secured
@@ -76,6 +63,7 @@ public class SQLResource extends BaseResourceWithAllocator {
private final SabotContext sabotContext;
private final ProjectOptionManager projectOptionManager;
private final FunctionsListService functionsListService;
+ private final CatalogServiceHelper catalogServiceHelper;
@Inject
public SQLResource(
@@ -83,7 +71,8 @@ public SQLResource(
JobsService jobs,
SecurityContext securityContext,
BufferAllocatorFactory allocatorFactory,
- ProjectOptionManager projectOptionManager) {
+ ProjectOptionManager projectOptionManager,
+ CatalogServiceHelper catalogServiceHelper) {
super(allocatorFactory);
this.jobs = jobs;
this.securityContext = securityContext;
@@ -93,11 +82,13 @@ public SQLResource(
sabotContext,
securityContext,
projectOptionManager);
+ this.catalogServiceHelper = catalogServiceHelper;
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
+ @Deprecated
public JobDataFragment query(CreateFromSQL sql) {
final SqlQuery query = JobRequestUtil.createSqlQuery(sql.getSql(), sql.getContext(), securityContext.getUserPrincipal().getName());
// Pagination is not supported in this API, so we need to truncate the results to 500 records
@@ -108,46 +99,6 @@ public JobDataFragment query(CreateFromSQL sql) {
.truncate(getOrCreateAllocator("query"), 500);
}
- @POST
- @Path("/analyze/suggest")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_JSON)
- public SuggestionResponse suggestSQL(AnalyzeRequest analyzeRequest) {
- final String sql = analyzeRequest.getSql();
- final List context = analyzeRequest.getContext();
- final int cursorPosition = analyzeRequest.getCursorPosition();
-
- // Setup dependencies and execute suggestion acquisition
- SQLAnalyzer SQLAnalyzer =
- SQLAnalyzerFactory.createSQLAnalyzer(
- securityContext.getUserPrincipal().getName(), sabotContext, context, true, projectOptionManager);
-
- List sqlEditorHints = SQLAnalyzer.suggest(sql, cursorPosition);
-
- // Build response object and return
- return buildSuggestionResponse(sqlEditorHints);
- }
-
- @POST
- @Path("/analyze/validate")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_JSON)
- public ValidationResponse validateSQL(AnalyzeRequest analyzeRequest) {
-
- final String sql = analyzeRequest.getSql();
- final List context = analyzeRequest.getContext();
-
- // Setup dependencies and execute validation
- SQLAnalyzer SQLAnalyzer =
- SQLAnalyzerFactory.createSQLAnalyzer(
- securityContext.getUserPrincipal().getName(), sabotContext, context, false, projectOptionManager);
-
- List validationErrors = SQLAnalyzer.validate(sql);
-
- // Build response object and return
- return buildValidationResponse(validationErrors);
- }
-
@POST
@Path("/autocomplete")
@Consumes(MediaType.APPLICATION_JSON)
@@ -164,64 +115,17 @@ public Completions getCompletions(AutocompleteRequestImplementation request) thr
request.getCursor());
}
- /**
- * Builds the response object for query suggestions.
- *
- * @param suggestionList The suggestion list returned from the SqlAdvisor.
- * @return The built SuggestionResponse object or null if there are no suggestions.
- */
- public SuggestionResponse buildSuggestionResponse(List suggestionList) {
-
- // Return empty response in REST request
- if (suggestionList == null || suggestionList.isEmpty()) {
- return null;
- }
-
- // Create and populate suggestion response list
- List suggestions = new ArrayList<>();
- for (SqlMoniker hint : suggestionList) {
-
- // Quote the identifiers if they are not keywords or functions,
- // and are required to be quoted.
- List qualifiedNames = hint.getFullyQualifiedNames();
- if ((hint.getType() != SqlMonikerType.KEYWORD) && (hint.getType() != SqlMonikerType.FUNCTION)) {
- qualifiedNames = qualifiedNames.stream().map(name -> quoteIdentifier(name)).collect(Collectors.toList());
- }
-
- suggestions.add(
- new SuggestionResponse.Suggestion(Joiner.on(".").join(qualifiedNames), hint.getType().name()));
- }
-
- SuggestionResponse response = new SuggestionResponse(suggestions);
- return response;
- }
-
- /**
- * Builds the response object for query validation.
- *
- * @param errorList The list of query errors returned from the SqlAdvisor.
- * @return The built ValidationResponse object or null if there are no available validation errors.
- */
- protected ValidationResponse buildValidationResponse(List errorList) {
-
- // Return empty response in REST request
- if (errorList == null || errorList.isEmpty()) {
- return null;
- }
-
- // Create and populate error response list
- List sqlErrors = new ArrayList<>();
- for (SqlAdvisor.ValidateErrorInfo error : errorList) {
- sqlErrors.add(
- new QueryError(error.getMessage(),
- new QueryError.Range(error.getStartLineNum(),
- error.getStartColumnNum(),
- error.getEndLineNum() + 1,
- error.getEndColumnNum() + 1)));
- }
+ @POST
+ @Path("/autocomplete/v2")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public AutocompleteV2Response getSuggestions(AutocompleteV2Request request) {
+ Preconditions.checkNotNull(request);
- ValidationResponse response = new ValidationResponse(sqlErrors);
- return response;
+ return AutocompleteV2Proxy.getSuggestions(
+ catalogServiceHelper,
+ request
+ );
}
@GET
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/SourceResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/SourceResource.java
index 1b8a118ced..30cf1e4776 100644
--- a/dac/backend/src/main/java/com/dremio/dac/resource/SourceResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/SourceResource.java
@@ -14,6 +14,7 @@
* limitations under the License.
*/
package com.dremio.dac.resource;
+
import java.io.IOException;
import java.security.AccessControlException;
import java.util.Arrays;
@@ -39,17 +40,14 @@
import com.dremio.common.utils.PathUtils;
import com.dremio.dac.annotations.RestResource;
import com.dremio.dac.annotations.Secured;
-import com.dremio.dac.explore.DatasetsResource;
import com.dremio.dac.explore.QueryExecutor;
import com.dremio.dac.explore.model.FileFormatUI;
-import com.dremio.dac.explore.model.InitialPreviewResponse;
import com.dremio.dac.model.common.NamespacePath;
import com.dremio.dac.model.folder.Folder;
import com.dremio.dac.model.folder.FolderName;
import com.dremio.dac.model.folder.SourceFolderPath;
import com.dremio.dac.model.job.JobDataFragment;
import com.dremio.dac.model.sources.FormatTools;
-import com.dremio.dac.model.sources.PhysicalDataset;
import com.dremio.dac.model.sources.PhysicalDatasetPath;
import com.dremio.dac.model.sources.SourceName;
import com.dremio.dac.model.sources.SourcePath;
@@ -57,9 +55,6 @@
import com.dremio.dac.server.BufferAllocatorFactory;
import com.dremio.dac.server.GenericErrorMessage;
import com.dremio.dac.service.errors.ClientErrorException;
-import com.dremio.dac.service.errors.DatasetNotFoundException;
-import com.dremio.dac.service.errors.DatasetVersionNotFoundException;
-import com.dremio.dac.service.errors.NewDatasetQueryException;
import com.dremio.dac.service.errors.PhysicalDatasetNotFoundException;
import com.dremio.dac.service.errors.SourceFileNotFoundException;
import com.dremio.dac.service.errors.SourceFolderNotFoundException;
@@ -82,6 +77,7 @@
import com.dremio.service.namespace.dataset.proto.AccelerationSettings;
import com.dremio.service.namespace.dataset.proto.DatasetType;
import com.dremio.service.namespace.file.FileFormat;
+import com.dremio.service.namespace.file.proto.UnknownFileConfig;
import com.dremio.service.namespace.physicaldataset.proto.PhysicalDatasetConfig;
import com.dremio.service.namespace.source.proto.SourceConfig;
import com.dremio.service.reflection.ReflectionAdministrationService;
@@ -102,8 +98,7 @@ public class SourceResource extends BaseResourceWithAllocator {
private final SourceName sourceName;
private final SecurityContext securityContext;
private final SourcePath sourcePath;
- private final DatasetsResource datasetsResource;
- private final ConnectionReader cReader;
+ private final ConnectionReader connectionReader;
private final SourceCatalog sourceCatalog;
private final FormatTools formatTools;
private final ContextService context;
@@ -116,8 +111,7 @@ public SourceResource(
@PathParam("sourceName") SourceName sourceName,
QueryExecutor executor,
SecurityContext securityContext,
- DatasetsResource datasetsResource,
- ConnectionReader cReader,
+ ConnectionReader connectionReader,
SourceCatalog sourceCatalog,
FormatTools formatTools,
ContextService context,
@@ -129,17 +123,16 @@ public SourceResource(
this.sourceService = sourceService;
this.sourceName = sourceName;
this.securityContext = securityContext;
- this.datasetsResource = datasetsResource;
this.sourcePath = new SourcePath(sourceName);
this.executor = executor;
- this.cReader = cReader;
+ this.connectionReader = connectionReader;
this.sourceCatalog = sourceCatalog;
this.formatTools = formatTools;
this.context = context;
}
protected SourceUI newSource(SourceConfig config) throws Exception {
- return SourceUI.get(config, cReader);
+ return SourceUI.get(config, connectionReader);
}
@GET
@@ -150,15 +143,15 @@ public SourceUI getSource(
@QueryParam("refValue") String refValue)
throws Exception {
try {
- final SourceConfig config = namespaceService.getSource(sourcePath.toNamespaceKey());
+ final SourceConfig sourceConfig = namespaceService.getSource(sourcePath.toNamespaceKey());
final SourceState sourceState = sourceService.getSourceState(sourcePath.getSourceName().getName());
if (sourceState == null) {
throw new SourceNotFoundException(sourcePath.getSourceName().getName());
}
- final BoundedDatasetCount datasetCount = namespaceService.getDatasetCount(new NamespaceKey(config.getName()),
+ final BoundedDatasetCount datasetCount = namespaceService.getDatasetCount(new NamespaceKey(sourceConfig.getName()),
BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH);
- final SourceUI source = newSource(config)
+ final SourceUI source = newSource(sourceConfig)
.setNumberOfDatasets(datasetCount.getCount());
source.setDatasetCountBounded(datasetCount.isCountBound() || datasetCount.isTimeBound());
@@ -173,7 +166,7 @@ public SourceUI getSource(
source.setContents(
sourceService.listSource(
sourcePath.getSourceName(),
- namespaceService.getSource(sourcePath.toNamespaceKey()),
+ sourceConfig,
securityContext.getUserPrincipal().getName(),
refType,
refValue));
@@ -256,16 +249,6 @@ public Folder createFolder(
refValue);
}
- @GET
- @Path("/dataset/{path: .*}")
- @Produces(MediaType.APPLICATION_JSON)
- public PhysicalDataset getPhysicalDataset(@PathParam("path") String path)
- throws SourceNotFoundException, NamespaceException {
- sourceService.checkSourceExists(sourceName);
- PhysicalDatasetPath datasetPath = PhysicalDatasetPath.fromURLPath(sourceName, path);
- return sourceService.getPhysicalDataset(sourceName, datasetPath);
- }
-
private boolean useFastPreview() {
return context.get().getOptionManager().getOption(FormatTools.FAST_PREVIEW);
}
@@ -276,13 +259,13 @@ private boolean useFastPreview() {
public File getFile(@PathParam("path") String path)
throws SourceNotFoundException, NamespaceException, PhysicalDatasetNotFoundException {
if (useFastPreview()) {
- return sourceService.getFileDataset(sourceName, asFilePath(path), null);
+ return sourceService.getFileDataset(asFilePath(path), null);
}
sourceService.checkSourceExists(sourceName);
final SourceFilePath filePath = SourceFilePath.fromURLPath(sourceName, path);
- return sourceService.getFileDataset(sourceName, filePath, null);
+ return sourceService.getFileDataset(filePath, null);
}
/**
@@ -318,7 +301,7 @@ public FileFormatUI getFileFormatSettings(@PathParam("path") String path)
SourceFilePath filePath = SourceFilePath.fromURLPath(sourceName, path);
FileFormat fileFormat;
try {
- final PhysicalDatasetConfig physicalDatasetConfig = sourceService.getFilesystemPhysicalDataset(sourceName, filePath);
+ final PhysicalDatasetConfig physicalDatasetConfig = sourceService.getFilesystemPhysicalDataset(filePath);
fileFormat = FileFormat.getForFile(physicalDatasetConfig.getFormatSettings());
fileFormat.setVersion(physicalDatasetConfig.getTag());
} catch (PhysicalDatasetNotFoundException nfe) {
@@ -334,6 +317,7 @@ public FileFormatUI getFileFormatSettings(@PathParam("path") String path)
@Consumes(MediaType.APPLICATION_JSON)
public FileFormatUI saveFormatSettings(FileFormat fileFormat, @PathParam("path") String path)
throws NamespaceException, SourceNotFoundException {
+ checkUnknownFileConfig(fileFormat);
SourceFilePath filePath = SourceFilePath.fromURLPath(sourceName, path);
sourceService.checkSourceExists(filePath.getSourceName());
fileFormat.setFullPath(filePath.toPathList());
@@ -391,7 +375,7 @@ public void deleteFileFormat(@PathParam("path") String path,
}
try {
- sourceService.deletePhysicalDataset(sourceName, new PhysicalDatasetPath(filePath), version, CatalogUtil.getDeleteCallback(context.get().getOrphanageFactory().get()));
+ sourceService.deletePhysicalDataset(sourceName, new PhysicalDatasetPath(filePath), version, CatalogUtil.getDeleteCallback(context.get().getOrphanageFactory().get()));
} catch (ConcurrentModificationException e) {
throw ResourceUtil.correctBadVersionErrorMessage(e, "file format", path);
}
@@ -413,7 +397,7 @@ public FileFormatUI getFolderFormat(@PathParam("path") String path)
FileFormat fileFormat;
try {
- final PhysicalDatasetConfig physicalDatasetConfig = sourceService.getFilesystemPhysicalDataset(sourceName, folderPath);
+ final PhysicalDatasetConfig physicalDatasetConfig = sourceService.getFilesystemPhysicalDataset(folderPath);
fileFormat = FileFormat.getForFolder(physicalDatasetConfig.getFormatSettings());
fileFormat.setVersion(physicalDatasetConfig.getTag());
} catch (PhysicalDatasetNotFoundException nfe) {
@@ -428,6 +412,7 @@ public FileFormatUI getFolderFormat(@PathParam("path") String path)
@Consumes(MediaType.APPLICATION_JSON)
public FileFormatUI saveFolderFormat(FileFormat fileFormat, @PathParam("path") String path)
throws NamespaceException, SourceNotFoundException {
+ checkUnknownFileConfig(fileFormat);
SourceFolderPath folderPath = SourceFolderPath.fromURLPath(sourceName, path);
sourceService.checkSourceExists(folderPath.getSourceName());
fileFormat.setFullPath(folderPath.toPathList());
@@ -460,37 +445,15 @@ public void deleteFolderFormat(@PathParam("path") String path,
}
}
- @POST
- @Path("new_untitled_from_file/{path: .*}")
- @Produces(MediaType.APPLICATION_JSON)
- @Consumes(MediaType.APPLICATION_JSON)
- public InitialPreviewResponse createUntitledFromSourceFile(
- @PathParam("path") String path,
- @QueryParam("limit") Integer limit)
- throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
- return datasetsResource.createUntitledFromSourceFile(sourceName, path, limit);
- }
-
- @POST
- @Path("new_untitled_from_folder/{path: .*}")
- @Produces(MediaType.APPLICATION_JSON)
- @Consumes(MediaType.APPLICATION_JSON)
- public InitialPreviewResponse createUntitledFromSourceFolder(
- @PathParam("path") String path,
- @QueryParam("limit") Integer limit)
- throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
- return datasetsResource.createUntitledFromSourceFolder(sourceName, path, limit);
- }
-
- @POST
- @Path("new_untitled_from_physical_dataset/{path: .*}")
- @Produces(MediaType.APPLICATION_JSON)
- @Consumes(MediaType.APPLICATION_JSON)
- public InitialPreviewResponse createUntitledFromPhysicalDataset(
- @PathParam("path") String path,
- @QueryParam("limit") Integer limit)
- throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException {
- return datasetsResource.createUntitledFromPhysicalDataset(sourceName, path, limit);
+ /**
+ * checks if format was set to UNKNOWN. If so, an error message is sent to the user
+ * @param fileFormat format configuration selected in the format dropdown when "save" was pressed
+ * @throws ClientErrorException
+ */
+ private void checkUnknownFileConfig(FileFormat fileFormat) throws ClientErrorException {
+ if (fileFormat instanceof UnknownFileConfig) {
+ throw new ClientErrorException(GenericErrorMessage.UNKNOWN_FORMAT_MSG);
+ }
}
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/SourcesResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/SourcesResource.java
index 5239af1987..d8952c67a2 100644
--- a/dac/backend/src/main/java/com/dremio/dac/resource/SourcesResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/SourcesResource.java
@@ -28,9 +28,6 @@
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import com.dremio.dac.annotations.RestResource;
import com.dremio.dac.annotations.Secured;
import com.dremio.dac.model.sources.SourceUI;
@@ -51,8 +48,6 @@
@Path("/sources")
@Consumes(APPLICATION_JSON) @Produces(APPLICATION_JSON)
public class SourcesResource {
- private static final Logger logger = LoggerFactory.getLogger(SourcesResource.class);
-
private final NamespaceService namespaceService;
private final SourceService sourceService;
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/SpaceResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/SpaceResource.java
index 35eeda4090..2dc68a4378 100644
--- a/dac/backend/src/main/java/com/dremio/dac/resource/SpaceResource.java
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/SpaceResource.java
@@ -18,15 +18,12 @@
import static com.dremio.service.namespace.proto.NameSpaceContainer.Type.SPACE;
import java.security.AccessControlException;
-import java.util.ConcurrentModificationException;
import java.util.List;
import javax.annotation.security.RolesAllowed;
import javax.inject.Inject;
-import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
-import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
@@ -43,19 +40,20 @@
import com.dremio.dac.explore.model.DatasetPath;
import com.dremio.dac.explore.model.DatasetResourcePath;
import com.dremio.dac.explore.model.DatasetVersionResourcePath;
+import com.dremio.dac.model.common.ResourcePath;
import com.dremio.dac.model.namespace.NamespaceTree;
import com.dremio.dac.model.spaces.Space;
import com.dremio.dac.model.spaces.SpaceName;
import com.dremio.dac.model.spaces.SpacePath;
import com.dremio.dac.proto.model.dataset.VirtualDatasetUI;
-import com.dremio.dac.server.GenericErrorMessage;
import com.dremio.dac.service.collaboration.CollaborationHelper;
import com.dremio.dac.service.datasets.DatasetVersionMutator;
-import com.dremio.dac.service.errors.ClientErrorException;
import com.dremio.dac.service.errors.DatasetNotFoundException;
import com.dremio.dac.service.errors.FileNotFoundException;
+import com.dremio.dac.service.errors.NotSupportedException;
import com.dremio.dac.service.errors.SpaceNotFoundException;
-import com.dremio.dac.util.ResourceUtil;
+import com.dremio.exec.catalog.CatalogFeatures;
+import com.dremio.options.OptionManager;
import com.dremio.service.namespace.BoundedDatasetCount;
import com.dremio.service.namespace.NamespaceException;
import com.dremio.service.namespace.NamespaceNotFoundException;
@@ -79,18 +77,21 @@ public class SpaceResource {
private final CollaborationHelper collaborationService;
private final SpaceName spaceName;
private final SpacePath spacePath;
+ private final OptionManager optionManager;
@Inject
public SpaceResource(
NamespaceService namespaceService,
DatasetVersionMutator datasetService,
CollaborationHelper collaborationService,
- @PathParam("spaceName") SpaceName spaceName) {
+ @PathParam("spaceName") SpaceName spaceName,
+ OptionManager optionManager) {
this.namespaceService = namespaceService;
this.datasetService = datasetService;
this.collaborationService = collaborationService;
this.spaceName = spaceName;
this.spacePath = new SpacePath(spaceName);
+ this.optionManager = optionManager;
}
protected Space newSpace(SpaceConfig spaceConfig, NamespaceTree contents, int datasetCount) throws Exception {
@@ -101,6 +102,8 @@ protected Space newSpace(SpaceConfig spaceConfig, NamespaceTree contents, int da
@Produces(MediaType.APPLICATION_JSON)
public Space getSpace(@QueryParam("includeContents") @DefaultValue("true") boolean includeContents)
throws Exception {
+ throwIfNotSupported();
+
try {
final SpaceConfig config = namespaceService.getSpace(spacePath.toNamespaceKey());
final int datasetCount = namespaceService.getDatasetCount(spacePath.toNamespaceKey(), BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH).getCount();
@@ -117,39 +120,13 @@ public Space getSpace(@QueryParam("includeContents") @DefaultValue("true") boole
}
}
- @DELETE
- @Produces(MediaType.APPLICATION_JSON)
- @Deprecated
- public void deleteSpace(@QueryParam("version") String version) throws NamespaceException, SpaceNotFoundException, UserException {
- if (version == null) {
- throw new ClientErrorException(GenericErrorMessage.MISSING_VERSION_PARAM_MSG);
- }
-
- try {
- namespaceService.deleteSpace(spacePath.toNamespaceKey(), version);
- } catch (NamespaceNotFoundException nfe) {
- throw new SpaceNotFoundException(spacePath.getSpaceName().getName(), nfe);
- } catch (ConcurrentModificationException e) {
- throw ResourceUtil.correctBadVersionErrorMessage(e, "space", spaceName.getName());
- }
- }
-
- @POST
- @Path("/rename")
- @Produces(MediaType.APPLICATION_JSON)
- @Deprecated // UI does not allow to rename a space
- public Space renameSpace(@QueryParam("renameTo") String renameTo)
- throws NamespaceException, SpaceNotFoundException {
- throw UserException.unsupportedError()
- .message("Renaming a space is not supported")
- .build(logger);
- }
-
@GET
@Path("dataset/{path: .*}")
@Produces(MediaType.APPLICATION_JSON)
public Dataset getDataset(@PathParam("path") String path)
- throws NamespaceException, FileNotFoundException, DatasetNotFoundException {
+ throws NamespaceException, FileNotFoundException, DatasetNotFoundException, NotSupportedException {
+ throwIfNotSupported();
+
DatasetPath datasetPath = DatasetPath.fromURLPath(spaceName, path);
final DatasetConfig datasetConfig = namespaceService.getDataset(datasetPath.toNamespaceKey());
final VirtualDatasetUI vds = datasetService.get(datasetPath, datasetConfig.getVirtualDataset().getVersion());
@@ -167,4 +144,15 @@ public Dataset getDataset(@PathParam("path") String path)
protected NamespaceTree newNamespaceTree(List<NameSpaceContainer> children) throws DatasetNotFoundException, NamespaceException {
return NamespaceTree.newInstance(datasetService, children, SPACE, collaborationService);
}
+
+ protected OptionManager getOptionManager() {
+ return optionManager;
+ }
+
+ protected void throwIfNotSupported() {
+ CatalogFeatures features = CatalogFeatures.get(optionManager);
+ if (!features.isFeatureEnabled(CatalogFeatures.Feature.SPACE)) {
+ throw new NotSupportedException(ResourcePath.defaultImpl("/space"));
+ }
+ }
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/SpacesResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/SpacesResource.java
deleted file mode 100644
index fd9132b5cd..0000000000
--- a/dac/backend/src/main/java/com/dremio/dac/resource/SpacesResource.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (C) 2017-2019 Dremio Corporation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.dremio.dac.resource;
-
-
-import javax.annotation.security.RolesAllowed;
-import javax.inject.Inject;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-
-import com.dremio.dac.annotations.RestResource;
-import com.dremio.dac.annotations.Secured;
-import com.dremio.dac.model.spaces.Space;
-import com.dremio.dac.model.spaces.SpacePath;
-import com.dremio.dac.model.spaces.Spaces;
-import com.dremio.service.namespace.BoundedDatasetCount;
-import com.dremio.service.namespace.NamespaceException;
-import com.dremio.service.namespace.NamespaceNotFoundException;
-import com.dremio.service.namespace.NamespaceService;
-import com.dremio.service.namespace.space.proto.SpaceConfig;
-
-/**
- * Resource for information about spaces.
- */
-@RestResource
-@Secured
-@RolesAllowed({"admin", "user"})
-@Path("/spaces")
-@Deprecated
-public class SpacesResource {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SpacesResource.class);
-
- private final NamespaceService namespaceService;
-
- @Inject
- public SpacesResource(NamespaceService namespaceService) {
- this.namespaceService = namespaceService;
- }
-
- @GET
- @Produces(MediaType.APPLICATION_JSON)
- public Spaces getSpaces() throws Exception {
- final Spaces spaces = new Spaces();
- for (SpaceConfig spaceConfig : namespaceService.getSpaces()) {
- int datasetCount = 0;
-
- try {
- datasetCount = namespaceService.getDatasetCount(new SpacePath(spaceConfig.getName()).toNamespaceKey(), BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH).getCount();
- } catch (IllegalArgumentException e) {
- logger.warn("Could not load dataset count for {} because it has a invalid name: {}", spaceConfig.getName(), e.getMessage());
- } catch (NamespaceException e) {
- logger.warn("Could not load dataset count for {}: {}", spaceConfig.getName(), e.getMessage());
- }
-
- try {
- // we catch exceptions here so the user can still see their other Spaces
- spaces.add(newSpace(spaceConfig, datasetCount));
- } catch (NamespaceNotFoundException e) {
- logger.warn("Skipping Space {} because namespace not found: {}", spaceConfig.getName(), e.getMessage());
- }
- }
- return spaces;
- }
-
- protected Space newSpace(SpaceConfig spaceConfig, int datasetCount) throws Exception {
- return Space.newInstance(spaceConfig, null, datasetCount);
- }
-}
diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/V2TreeResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/V2TreeResource.java
new file mode 100644
index 0000000000..f24d7de1d5
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/dac/resource/V2TreeResource.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.resource;
+
+import javax.inject.Inject;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response;
+
+import org.projectnessie.api.v2.params.Transplant;
+import org.projectnessie.client.api.NessieApiV2;
+import org.projectnessie.error.NessieConflictException;
+import org.projectnessie.error.NessieNotFoundException;
+import org.projectnessie.model.CommitResponse;
+import org.projectnessie.model.MergeResponse;
+import org.projectnessie.model.Operations;
+import org.projectnessie.model.Reference;
+import org.projectnessie.model.SingleReferenceResponse;
+import org.projectnessie.model.ser.Views;
+
+import com.dremio.services.nessie.proxy.ProxyV2TreeResource;
+import com.fasterxml.jackson.annotation.JsonView;
+
+/**
+ * Nessie-specific extension of {@link ProxyV2TreeResource}.
+ * Disables certain API calls that are not needed in the NaaS proxy.
+ */
+
+public class V2TreeResource extends ProxyV2TreeResource {
+
+ @Inject
+ public V2TreeResource(NessieApiV2 api) {
+ super(api);
+ }
+
+ @Override
+ @JsonView(Views.V2.class)
+ public SingleReferenceResponse assignReference(Reference.ReferenceType type, String ref, Reference assignTo) throws NessieConflictException, NessieNotFoundException {
+ throw new WebApplicationException("assignReference is not supported", Response.Status.FORBIDDEN);
+ }
+
+ @Override
+ @JsonView(Views.V2.class)
+ public MergeResponse transplantCommitsIntoBranch(String branch, Transplant transplant) throws NessieConflictException, NessieNotFoundException {
+ throw new WebApplicationException("transplantCommitsIntoBranch is not supported", Response.Status.FORBIDDEN);
+ }
+
+ @Override
+ @JsonView(Views.V2.class)
+ public CommitResponse commitMultipleOperations(String branch, Operations operations) throws NessieConflictException, NessieNotFoundException {
+ throw new WebApplicationException("commitMultipleOperations is not supported", Response.Status.FORBIDDEN);
+ }
+}
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/APIServer.java b/dac/backend/src/main/java/com/dremio/dac/server/APIServer.java
index 4acd3de67b..8a1758e958 100644
--- a/dac/backend/src/main/java/com/dremio/dac/server/APIServer.java
+++ b/dac/backend/src/main/java/com/dremio/dac/server/APIServer.java
@@ -23,7 +23,6 @@
import com.dremio.common.perf.Timer;
import com.dremio.common.scanner.persistence.ScanResult;
import com.dremio.dac.annotations.APIResource;
-import com.fasterxml.jackson.jaxrs.base.JsonMappingExceptionMapper;
import com.fasterxml.jackson.jaxrs.base.JsonParseExceptionMapper;
/**
@@ -46,6 +45,9 @@ protected void init(ScanResult result) {
register(resource);
}
+ // Enable request contextualization.
+ register(new AuthenticationBinder());
+
// FEATURES
register(DACAuthFilterFeature.class);
register(DACJacksonJaxbJsonFeature.class);
@@ -53,7 +55,7 @@ protected void init(ScanResult result) {
// EXCEPTION MAPPERS
register(JsonParseExceptionMapper.class);
- register(JsonMappingExceptionMapper.class);
+ register(RestApiJsonMappingExceptionMapper.class);
// PROPERTIES
property(ServerProperties.RESPONSE_SET_STATUS_OVER_SEND_ERROR, "true");
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/AuthenticationBinder.java b/dac/backend/src/main/java/com/dremio/dac/server/AuthenticationBinder.java
new file mode 100644
index 0000000000..013301ba3b
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/dac/server/AuthenticationBinder.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.server;
+
+import org.glassfish.hk2.utilities.binding.AbstractBinder;
+import org.glassfish.jersey.server.spi.internal.ResourceMethodInvocationHandlerProvider;
+
+/**
+ * Auth Binder
+ */
+public class AuthenticationBinder extends AbstractBinder {
+ @Override
+ protected void configure() {
+ bind(ContextualizedResourceMethodInvocationHandlerProvider.class).to(ResourceMethodInvocationHandlerProvider.class);
+ }
+}
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/ContextualizedResourceMethodInvocationHandlerProvider.java b/dac/backend/src/main/java/com/dremio/dac/server/ContextualizedResourceMethodInvocationHandlerProvider.java
new file mode 100644
index 0000000000..c46e844b79
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/dac/server/ContextualizedResourceMethodInvocationHandlerProvider.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.server;
+
+import java.lang.reflect.InvocationHandler;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.Context;
+
+import org.glassfish.jersey.server.model.Invocable;
+import org.glassfish.jersey.server.spi.internal.ResourceMethodInvocationHandlerProvider;
+
+import com.dremio.context.RequestContext;
+import com.dremio.context.UserContext;
+
+/**
+ * The ContextualizedResourceMethodInvocationHandlerProvider extracts the UserContext from attributes
+ * within the HttpServletRequest which is set by the DACAuthFilter.
+ */
+public class ContextualizedResourceMethodInvocationHandlerProvider implements ResourceMethodInvocationHandlerProvider {
+ public static final String USER_CONTEXT_ATTRIBUTE =
+ ContextualizedResourceMethodInvocationHandlerProvider.class.getCanonicalName() + ".UserContext";
+
+ @Context
+ private HttpServletRequest httpServletRequest;
+
+ @Override
+ public InvocationHandler create(Invocable invocable) {
+ return (proxy, method, args) -> RequestContext.current()
+ .with(getRequestContext())
+ .call(() -> method.invoke(proxy, args));
+ }
+
+ private Map<RequestContext.Key<?>, Object> getRequestContext() {
+ final Map<RequestContext.Key<?>, Object> contextMap = new HashMap<>();
+
+ contextMap.put(UserContext.CTX_KEY, httpServletRequest.getAttribute(USER_CONTEXT_ATTRIBUTE));
+
+ return contextMap;
+ }
+}
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/DACAuthFilter.java b/dac/backend/src/main/java/com/dremio/dac/server/DACAuthFilter.java
index 572746c5eb..65abec61e1 100644
--- a/dac/backend/src/main/java/com/dremio/dac/server/DACAuthFilter.java
+++ b/dac/backend/src/main/java/com/dremio/dac/server/DACAuthFilter.java
@@ -15,6 +15,8 @@
*/
package com.dremio.dac.server;
+import static com.dremio.dac.server.ContextualizedResourceMethodInvocationHandlerProvider.USER_CONTEXT_ATTRIBUTE;
+
import java.util.List;
import java.util.Map;
@@ -29,6 +31,7 @@
import javax.ws.rs.ext.Provider;
import com.dremio.common.collections.Tuple;
+import com.dremio.context.UserContext;
import com.dremio.dac.annotations.Secured;
import com.dremio.dac.annotations.TemporaryAccess;
import com.dremio.dac.model.usergroup.UserName;
@@ -62,6 +65,7 @@ public void filter(ContainerRequestContext requestContext) {
final UserName userName = getUserNameFromToken(requestContext);
final User userConfig = userService.get().getUser(userName.getName());
requestContext.setSecurityContext(new DACSecurityContext(userName, userConfig, requestContext));
+ requestContext.setProperty(USER_CONTEXT_ATTRIBUTE, new UserContext(userConfig.getUID().getId()));
} catch (UserNotFoundException | NotAuthorizedException e) {
requestContext.abortWith(Response.status(Response.Status.UNAUTHORIZED).build());
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/DremioServlet.java b/dac/backend/src/main/java/com/dremio/dac/server/DremioServlet.java
index 6eec2c61ba..965303f830 100644
--- a/dac/backend/src/main/java/com/dremio/dac/server/DremioServlet.java
+++ b/dac/backend/src/main/java/com/dremio/dac/server/DremioServlet.java
@@ -146,7 +146,9 @@ protected ServerData.Builder getDataBuilder() {
.setShowNewJobsPage(options.getOption(UIOptions.JOBS_UI_CHECK))
.setShowOldReflectionsListing(options.getOption(UIOptions.REFLECTIONSLISTING_UI_CHECK))
.setAllowAutoComplete(options.getOption(UIOptions.ALLOW_AUTOCOMPLETE))
- .setAllowFormatting(options.getOption(UIOptions.ALLOW_FORMATTING));
+ .setAllowDownload(options.getOption(UIOptions.ALLOW_DOWNLOAD))
+ .setAllowFormatting(options.getOption(UIOptions.ALLOW_FORMATTING))
+ .setUseNewDatasetNavigation(options.getOption(UIOptions.DATASET_NAVIGATION_CHECK));
}
protected Provider getSupportService() {
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/GenericErrorMessage.java b/dac/backend/src/main/java/com/dremio/dac/server/GenericErrorMessage.java
index a896ab3418..09be47fec6 100644
--- a/dac/backend/src/main/java/com/dremio/dac/server/GenericErrorMessage.java
+++ b/dac/backend/src/main/java/com/dremio/dac/server/GenericErrorMessage.java
@@ -31,6 +31,7 @@ public class GenericErrorMessage {
public static final String NO_USER_MSG = "No User Available";
public static final String MISSING_VERSION_PARAM_MSG = "Missing Version Parameter";
+ public static final String UNKNOWN_FORMAT_MSG = "The table cannot be saved when the format is set to UNKNOWN. Please select the correct format for the table";
public static final String GENERIC_ERROR_MSG = "Something went wrong.";
private final String errorMessage;
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/NessieProxyRestServer.java b/dac/backend/src/main/java/com/dremio/dac/server/NessieProxyRestServer.java
new file mode 100644
index 0000000000..dc858b562d
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/dac/server/NessieProxyRestServer.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.server;
+
+import org.glassfish.jersey.message.GZipEncoder;
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.server.filter.EncodingFilter;
+import org.projectnessie.services.restjavax.ContentKeyParamConverterProvider;
+import org.projectnessie.services.restjavax.NamespaceParamConverterProvider;
+import org.projectnessie.services.restjavax.NessieExceptionMapper;
+import org.projectnessie.services.restjavax.ReferenceTypeParamConverterProvider;
+
+import com.dremio.common.perf.Timer;
+import com.dremio.dac.resource.NessieSourceResource;
+import com.dremio.dac.resource.NessieTestSourceResource;
+import com.dremio.dac.service.errors.NotFoundExceptionMapper;
+import com.dremio.services.nessie.proxy.ProxyExceptionMapper;
+import com.dremio.services.nessie.proxy.ProxyNessieConfig;
+import com.dremio.services.nessie.proxy.ProxyRuntimeExceptionMapper;
+
+public class NessieProxyRestServer extends ResourceConfig {
+
+ public NessieProxyRestServer() {
+ try (Timer.TimedBlock b = Timer.time("new ProxyRestServer")) {
+ init();
+ }
+ }
+
+ protected void init() {
+ // FILTERS //
+ register(JSONPrettyPrintFilter.class);
+
+ // Enable request contextualization.
+ register(new AuthenticationBinder());
+
+ // FEATURES
+ register(DACAuthFilterFeature.class);
+ register(DACJacksonJaxbJsonFeature.class);
+
+ // LISTENERS //
+ register(TimingApplicationEventListener.class);
+
+ //Nessie
+ if (Boolean.getBoolean("nessie.source.resource.testing.enabled")) {
+ register(NessieTestSourceResource.class);
+ } else {
+ register(NessieSourceResource.class);
+ }
+ register(ContentKeyParamConverterProvider.class);
+ register(NamespaceParamConverterProvider.class);
+ register(ReferenceTypeParamConverterProvider.class);
+ register(new NessieExceptionMapper(new ProxyNessieConfig()), 10);
+ register(NotFoundExceptionMapper.class);
+ register(ProxyExceptionMapper.class, 10);
+ register(ProxyRuntimeExceptionMapper.class, 10);
+ register(EncodingFilter.class);
+ register(GZipEncoder.class);
+ }
+}
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/RestApiJsonMappingExceptionMapper.java b/dac/backend/src/main/java/com/dremio/dac/server/RestApiJsonMappingExceptionMapper.java
new file mode 100644
index 0000000000..54075302fe
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/dac/server/RestApiJsonMappingExceptionMapper.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.server;
+
+import java.util.Iterator;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+
+/**
+ * The default {@code com.fasterxml.jackson.jaxrs.base.JsonMappingExceptionMapper}
+ * exposes internal class and package names when an incoming request contains input
+ * that does not correctly map to the internal request object for the requested path
+ * Example: the user supplied a value with the wrong type for a property, so a
+ * JsonMappingException is thrown when we attempt to deserialize the request body.
+ * This custom exception mapper generates responses that tell the user the path to
+ * the invalid input in their JSON request without exposing internal details about
+ * the system.
+ */
+public class RestApiJsonMappingExceptionMapper implements ExceptionMapper<JsonMappingException> {
+ @Override
+ public Response toResponse(JsonMappingException exception) {
+ if (exception.getPath().isEmpty()) {
+ return errorResponse("Invalid value");
+ }
+
+ StringBuilder errorMessage = new StringBuilder("Invalid value found at: ");
+ Iterator<JsonMappingException.Reference> iter = exception.getPath().iterator();
+
+ if (iter.hasNext()) {
+ errorMessage.append(referenceToString(iter.next()));
+ }
+
+ while (iter.hasNext()) {
+ JsonMappingException.Reference ref = iter.next();
+ errorMessage.append(referenceToString(ref, "."));
+ }
+
+ return errorResponse(errorMessage.toString());
+ }
+
+ private Response errorResponse(String errorMessage) {
+ return Response.status(Response.Status.BAD_REQUEST)
+ .entity(new GenericErrorMessage(errorMessage))
+ .type(MediaType.APPLICATION_JSON_TYPE)
+ .build();
+ }
+
+ private String referenceToString(JsonMappingException.Reference ref) {
+ return referenceToString(ref, "");
+ }
+
+ private String referenceToString(JsonMappingException.Reference ref, String fieldNamePrefix) {
+ if (ref.getFieldName() != null) {
+ return fieldNamePrefix + ref.getFieldName();
+ } else if (ref.getIndex() != -1) {
+ return String.format("[%s]", ref.getIndex());
+ }
+
+ return fieldNamePrefix + "UNKNOWN";
+ }
+}
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/RestServerV2.java b/dac/backend/src/main/java/com/dremio/dac/server/RestServerV2.java
index a2ecc217a1..00f84a2fb9 100644
--- a/dac/backend/src/main/java/com/dremio/dac/server/RestServerV2.java
+++ b/dac/backend/src/main/java/com/dremio/dac/server/RestServerV2.java
@@ -21,6 +21,9 @@
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.server.ServerProperties;
import org.glassfish.jersey.server.mvc.freemarker.FreemarkerMvcFeature;
+import org.projectnessie.services.restjavax.ContentKeyParamConverterProvider;
+import org.projectnessie.services.restjavax.NamespaceParamConverterProvider;
+import org.projectnessie.services.restjavax.ReferenceTypeParamConverterProvider;
import com.dremio.common.perf.Timer;
import com.dremio.common.perf.Timer.TimedBlock;
@@ -53,6 +56,12 @@ public RestServerV2(ScanResult result) {
}
protected void init(ScanResult result) {
+ // PROVIDERS //
+ // We manually registered provider needed for nessie-as-a-source
+ register(ContentKeyParamConverterProvider.class);
+ register(NamespaceParamConverterProvider.class);
+ register(ReferenceTypeParamConverterProvider.class);
+
// FILTERS //
register(JSONPrettyPrintFilter.class);
register(MediaTypeFilter.class);
@@ -62,6 +71,9 @@ protected void init(ScanResult result) {
register(resource);
}
+ // Enable request contextualization.
+ register(new AuthenticationBinder());
+
// FEATURES
property(FreemarkerMvcFeature.TEMPLATE_OBJECT_FACTORY, getFreemarkerConfiguration());
register(FreemarkerMvcFeature.class);
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/SentinelSecureFilter.java b/dac/backend/src/main/java/com/dremio/dac/server/SentinelSecureFilter.java
index 97717a32e1..d75da8b809 100644
--- a/dac/backend/src/main/java/com/dremio/dac/server/SentinelSecureFilter.java
+++ b/dac/backend/src/main/java/com/dremio/dac/server/SentinelSecureFilter.java
@@ -108,6 +108,7 @@ public void serializeAsField(Object bean,
}
}
+ @Override
public void serializeAsElement(Object bean,
JsonGenerator gen,
SerializerProvider prov) throws Exception {
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/UIOptions.java b/dac/backend/src/main/java/com/dremio/dac/server/UIOptions.java
index 63454afc59..3084a644aa 100644
--- a/dac/backend/src/main/java/com/dremio/dac/server/UIOptions.java
+++ b/dac/backend/src/main/java/com/dremio/dac/server/UIOptions.java
@@ -32,6 +32,10 @@ public final class UIOptions {
public static final BooleanValidator ALLOW_AUTOCOMPLETE = new BooleanValidator("ui.autocomplete.allow", true);
+ public static final BooleanValidator AUTOCOMPLETE_V2 = new BooleanValidator("ui.autocomplete.v2.enabled", false);
+
+ public static final BooleanValidator ALLOW_DOWNLOAD = new BooleanValidator("ui.download.allow", true);
+
public static final StringValidator WHITE_LABEL_URL = new StringValidator("ui.whitelabel.url", "dremio");
public static final BooleanValidator ALLOW_FORMATTING = new BooleanValidator("ui.formatter.allow", true);
@@ -69,4 +73,9 @@ public final class UIOptions {
* Specifies whether new jobs profile UI should be shown
*/
public static final BooleanValidator JOBS_PROFILE_UI_CHECK = new BooleanValidator("dremio.query.visualiser.enabled", false);
+
+ /*
+ * Specifies whether the UI will use the new dataset navigation behavior
+ */
+ public static final BooleanValidator DATASET_NAVIGATION_CHECK = new BooleanValidator("ui.dataset.navigation.new", true);
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/WebServer.java b/dac/backend/src/main/java/com/dremio/dac/server/WebServer.java
index d2ff8e3e58..edc2870a02 100644
--- a/dac/backend/src/main/java/com/dremio/dac/server/WebServer.java
+++ b/dac/backend/src/main/java/com/dremio/dac/server/WebServer.java
@@ -110,6 +110,7 @@ public MediaType(String type, String subType) {
private final SingletonRegistry registry;
private final Provider restServerProvider;
private final Provider apiServerProvider;
+ private final Provider nessieProxyResetServerV2;
private final DremioServer server;
private final DACConfig config;
private final Provider credentialsServiceProvider;
@@ -123,6 +124,7 @@ public WebServer(
Provider credentialsServiceProvider,
Provider restServer,
Provider apiServer,
+ Provider nessieProxyResetServerV2,
Provider server,
DremioBinder dremioBinder,
String uiType,
@@ -132,6 +134,7 @@ public WebServer(
this.credentialsServiceProvider = credentialsServiceProvider;
this.restServerProvider = restServer;
this.apiServerProvider = apiServer;
+ this.nessieProxyResetServerV2 = nessieProxyResetServerV2;
this.dremioBinder = dremioBinder;
this.uiType = uiType;
this.isInternalUS = isInternalUS;
@@ -196,6 +199,16 @@ protected void registerEndpoints(ServletContextHandler servletContextHandler) {
final ServletHolder apiHolder = new ServletHolder(new ServletContainer(apiServer));
apiHolder.setInitOrder(3);
servletContextHandler.addServlet(apiHolder, "/api/v3/*");
+
+ // Nessie Source REST API
+ ResourceConfig nessieProxyRestServerV2 = nessieProxyResetServerV2.get();
+
+ nessieProxyRestServerV2.register(dremioBinder);
+ nessieProxyRestServerV2.register((DynamicFeature) (resourceInfo, context) -> context.register(DremioServer.TracingFilter.class));
+
+ final ServletHolder proxyNessieRestHolder = new ServletHolder(new ServletContainer(nessieProxyRestServerV2));
+ proxyNessieRestHolder.setInitOrder(4);
+ servletContextHandler.addServlet(proxyNessieRestHolder, "/nessie-proxy/*");
}
public int getPort() {
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/AccelerationWrapper.java b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/AccelerationWrapper.java
index 996ce8dc02..05ada2eb3f 100644
--- a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/AccelerationWrapper.java
+++ b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/AccelerationWrapper.java
@@ -26,6 +26,8 @@
import com.dremio.dac.model.job.acceleration.UiMapper;
import com.dremio.service.accelerator.proto.AccelerationDetails;
import com.dremio.service.accelerator.proto.ReflectionRelationship;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableMap;
@@ -52,6 +54,18 @@ public String getReflectionDatasetPath(String layoutId) {
}
}
+ public String getReflectionDatasetVersion(String layoutId) {
+ try {
+ String unparsedJson = relationshipMap.get(layoutId).getDataset().getId();
+ ObjectMapper mapper = new ObjectMapper();
+ JsonNode parsedJson = mapper.readTree(unparsedJson);
+ JsonNode versionContext = parsedJson.get("versionContext");
+ return String.format(" [%s %s] ", versionContext.get("type").textValue(), versionContext.get("value").textValue());
+ } catch (Exception e) {
+ return " ";
+ }
+ }
+
public Long getRefreshChainStartTime(String layoutId) {
return relationshipMap.get(layoutId).getMaterialization().getRefreshChainStartTime();
}
diff --git a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Comparators.java b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Comparators.java
index 13047e1571..9f607d44bd 100644
--- a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Comparators.java
+++ b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Comparators.java
@@ -33,48 +33,56 @@
interface Comparators {
Comparator majorId = new Comparator() {
+ @Override
public int compare(final MajorFragmentProfile o1, final MajorFragmentProfile o2) {
return Long.compare(o1.getMajorFragmentId(), o2.getMajorFragmentId());
}
};
Comparator minorId = new Comparator() {
+ @Override
public int compare(final MinorFragmentProfile o1, final MinorFragmentProfile o2) {
return Long.compare(o1.getMinorFragmentId(), o2.getMinorFragmentId());
}
};
Comparator startTime = new Comparator() {
+ @Override
public int compare(final MinorFragmentProfile o1, final MinorFragmentProfile o2) {
return Long.compare(o1.getStartTime(), o2.getStartTime());
}
};
Comparator