diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java b/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
index 958f21bb032c..7f2b08c13a9c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
@@ -23,7 +23,6 @@
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.utils.StringUtils;
 
 import java.util.List;
 
@@ -47,7 +46,6 @@ public List<FieldSchema> readSchema(Table tbl, EnvironmentContext envContext, Co
       Deserializer s = HiveMetaStoreUtils.getDeserializer(conf, tbl, null, false);
       return HiveMetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(), s, conf);
     } catch (Exception e) {
-      StringUtils.stringifyException(e);
       throw new MetaException(e.getMessage());
     } finally {
       if (orgHiveLoader != null) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
index 474859f99bfc..231f57455b62 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import org.apache.hadoop.hive.metastore.ReplChangeManager;
-import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
 import org.apache.hadoop.hive.ql.parse.EximUtil;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
@@ -189,10 +188,10 @@ private List<ReplChangeManager.FileInfo> filesInFileListing(FileSystem fs, Path
         ReplChangeManager.FileInfo f = ReplChangeManager
             .getFileInfo(new Path(fragments[0]), fragments[1], fragments[2], fragments[3], conf);
         filePaths.add(f);
-      } catch (MetaException e) {
+      } catch (IOException ioe) {
         // issue warning for missing file and throw exception
         LOG.warn("Cannot find {} in source repo or cmroot", fragments[0]);
-        throw new IOException(e.getMessage());
+        throw ioe;
       }
       // Note - we need srcFs rather than fs, because it is possible that the _files lists files
       // which are from a different filesystem than the fs where the _files file itself was loaded
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java
index f488b8577f6f..7f0830589d31 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java
@@ -106,7 +106,7 @@ protected void writeEncodedDumpFiles(Context withinContext, Iterable<String> fil
   }
 
   protected void writeFileEntry(Table table, Partition ptn, String file, Context withinContext)
-      throws IOException, LoginException, MetaException, HiveFatalException {
+      throws IOException, LoginException, HiveFatalException {
     HiveConf hiveConf = withinContext.hiveConf;
     String distCpDoAsUser = hiveConf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
     if (!Utils.shouldDumpMetaDataOnly(withinContext.hiveConf)) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java
index 59eb0f552e00..638a22961dc0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java
@@ -75,7 +75,7 @@ public void handle(Context withinContext) throws Exception {
   }
 
   private void copyFunctionBinaries(List functionBinaryCopyPaths, HiveConf hiveConf)
-      throws MetaException, IOException, LoginException, HiveFatalException {
+      throws IOException, LoginException, HiveFatalException {
     if (!functionBinaryCopyPaths.isEmpty()) {
       String distCpDoAsUser = hiveConf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
       List filePaths = new ArrayList<>();
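The caller-side effect of the changes above: ReplChangeManager.getFileInfo now surfaces IOException directly, so callers propagate the original exception (keeping its stack trace) instead of unwrapping a MetaException, as ReplCopyTask does. A minimal sketch of the new calling pattern; the caller, variable names, and use() call are illustrative only and not part of this patch:

    try {
      ReplChangeManager.FileInfo info =
          ReplChangeManager.getFileInfo(srcPath, checksum, cmRootUri, subDir, conf);
      use(info);
    } catch (IOException ioe) {
      // Warn about the missing file, then rethrow the original exception unchanged.
      LOG.warn("Cannot find {} in source repo or cmroot", srcPath);
      throw ioe;
    }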
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java
index 3436fbc50601..43894ed4f6bf 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java
@@ -205,7 +205,8 @@ private ReplChangeManager(Configuration conf) throws MetaException {
         inited = true;
       }
     } catch (IOException e) {
-      throw new MetaException(StringUtils.stringifyException(e));
+      LOG.error("Failed to create ReplChangeManager", e);
+      throw new MetaException(e.getMessage());
     }
   }
 
@@ -378,32 +379,28 @@ static Path getCMPath(Configuration conf, String name, String checkSum, String c
    * @return Corresponding FileInfo object
    */
   public static FileInfo getFileInfo(Path src, String checksumString, String srcCMRootURI, String subDir,
-                                     Configuration conf) throws MetaException {
-    try {
-      FileSystem srcFs = src.getFileSystem(conf);
-      if (checksumString == null) {
-        return new FileInfo(srcFs, src, subDir);
-      }
+                                     Configuration conf) throws IOException {
+    FileSystem srcFs = src.getFileSystem(conf);
+    if (checksumString == null) {
+      return new FileInfo(srcFs, src, subDir);
+    }
 
-      Path cmPath = getCMPath(conf, src.getName(), checksumString, srcCMRootURI);
-      if (!srcFs.exists(src)) {
-        return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
-      }
+    Path cmPath = getCMPath(conf, src.getName(), checksumString, srcCMRootURI);
+    if (!srcFs.exists(src)) {
+      return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
+    }
 
-      String currentChecksumString;
-      try {
-        currentChecksumString = checksumFor(src, srcFs);
-      } catch (IOException ex) {
-        // If the file is missing or getting modified, then refer CM path
-        return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
-      }
-      if ((currentChecksumString == null) || checksumString.equals(currentChecksumString)) {
-        return new FileInfo(srcFs, src, cmPath, checksumString, true, subDir);
-      } else {
-        return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
-      }
-    } catch (IOException e) {
-      throw new MetaException(StringUtils.stringifyException(e));
+    String currentChecksumString;
+    try {
+      currentChecksumString = checksumFor(src, srcFs);
+    } catch (IOException ex) {
+      // If the file is missing or getting modified, then refer CM path
+      return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
+    }
+    if ((currentChecksumString == null) || checksumString.equals(currentChecksumString)) {
+      return new FileInfo(srcFs, src, cmPath, checksumString, true, subDir);
+    } else {
+      return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
     }
   }
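The recurring convention in this patch replaces stringifyException-in-the-message with log-then-throw: the Throwable is handed to SLF4J, which records the full stack trace once, and the MetaException carries only the message. A minimal self-contained sketch of that convention; the helper class, method, and directory argument are hypothetical and not part of this patch:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical helper; illustrates the error-handling convention adopted throughout this patch.
    class CmRootHelper {
      private static final Logger LOG = LoggerFactory.getLogger(CmRootHelper.class);

      static void ensureDir(Path dir, Configuration conf) throws MetaException {
        try {
          FileSystem fs = dir.getFileSystem(conf);
          fs.mkdirs(dir);
        } catch (IOException e) {
          LOG.error("Failed to create directory {}", dir, e); // SLF4J logs the full stack trace
          throw new MetaException(e.getMessage());            // message only, no embedded trace
        }
      }
    }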
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/StringUtils.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/StringUtils.java
index e49a4233ce26..61258acbef70 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/StringUtils.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/StringUtils.java
@@ -98,7 +98,9 @@ public static String normalizeIdentifier(String identifier) {
    * Make a string representation of the exception.
    * @param e The exception to stringify
    * @return A string with exception name and call stack.
+   * @deprecated
    */
+  @Deprecated
   public static String stringifyException(Throwable e) {
     StringWriter stm = new StringWriter();
     PrintWriter wrt = new PrintWriter(stm);
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 435ac4fc6bd1..99ea4b73e108 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -987,7 +987,8 @@ public boolean dropDatabase(String catName, String dbname)
       pm.deletePersistent(db);
       success = commitTransaction();
     } catch (Exception e) {
-      throw new MetaException(e.getMessage() + " " + org.apache.hadoop.hive.metastore.utils.StringUtils.stringifyException(e));
+      LOG.error("Failed to drop database", e);
+      throw new MetaException(e.getMessage());
     } finally {
       rollbackAndCleanup(success, null);
     }
@@ -1195,7 +1196,8 @@ public boolean dropDataConnector(String dcname)
       pm.deletePersistent(mdb);
       success = commitTransaction();
     } catch (Exception e) {
-      throw new MetaException(e.getMessage() + " " + org.apache.hadoop.hive.metastore.utils.StringUtils.stringifyException(e));
+      LOG.error("Failed to drop data connector", e);
+      throw new MetaException(e.getMessage());
     } finally {
       rollbackAndCleanup(success, null);
     }
@@ -3247,7 +3249,8 @@ protected List<Partition> getJdoResult(GetHelper<List<Partition>> ctx) throws Me
       try {
         return convertToParts(listMPartitions(catName, dbName, tblName, maxParts));
       } catch (Exception e) {
-        throw new MetaException(e.getMessage() + " " + org.apache.hadoop.hive.metastore.utils.StringUtils.stringifyException(e));
+        LOG.error("Failed to convert to parts", e);
+        throw new MetaException(e.getMessage());
       }
     }
   }.run(false);
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreServerUtils.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreServerUtils.java
index 8793c8c7c674..5e3a3311294b 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreServerUtils.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreServerUtils.java
@@ -1217,8 +1217,8 @@ public static void setNestedProperty(Object bean, String propertyName, Object va
       }
       PropertyUtils.setNestedProperty(bean, propertyName, value);
     } catch (Exception e) {
-      throw new MetaException(
-          org.apache.hadoop.hive.metastore.utils.StringUtils.stringifyException(e));
+      LOG.error("Failed to set nested property", e);
+      throw new MetaException(e.getMessage());
     }
   }
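With stringifyException now marked @Deprecated, remaining callers can either hand the Throwable to the logger, as the changes above do, or build a stack-trace String without the deprecated helper. A short sketch of both alternatives; the use of commons-lang3 ExceptionUtils is an assumption about what is on the classpath, not something this patch introduces:

    import org.apache.commons.lang3.exception.ExceptionUtils;

    // Preferred: pass the Throwable to SLF4J and let it render the stack trace.
    LOG.error("Failed to set nested property", e);

    // If a String is genuinely required, commons-lang3 provides an equivalent.
    String trace = ExceptionUtils.getStackTrace(e);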