Skip to content

Commit

Permalink
HIVE-22417: Remove stringifyException from MetaStore (#3478) (David Mollitor reviewed by Stamatis Zampetakis)
Browse files Browse the repository at this point in the history
  • Loading branch information
belugabehr committed Aug 5, 2022
1 parent 470d6bc commit 778c838
Show file tree
Hide file tree
Showing 8 changed files with 36 additions and 37 deletions.
Expand Up @@ -23,7 +23,6 @@
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.utils.StringUtils;

import java.util.List;

Expand All @@ -47,7 +46,6 @@ public List<FieldSchema> readSchema(Table tbl, EnvironmentContext envContext, Co
Deserializer s = HiveMetaStoreUtils.getDeserializer(conf, tbl, null, false);
return HiveMetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(), s, conf);
} catch (Exception e) {
StringUtils.stringifyException(e);
throw new MetaException(e.getMessage());
} finally {
if (orgHiveLoader != null) {
Expand Down
5 changes: 2 additions & 3 deletions ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
Expand Up @@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.exec;

import org.apache.hadoop.hive.metastore.ReplChangeManager;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
import org.apache.hadoop.hive.ql.parse.EximUtil;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
Expand Down Expand Up @@ -189,10 +188,10 @@ private List<ReplChangeManager.FileInfo> filesInFileListing(FileSystem fs, Path
ReplChangeManager.FileInfo f = ReplChangeManager
.getFileInfo(new Path(fragments[0]), fragments[1], fragments[2], fragments[3], conf);
filePaths.add(f);
} catch (MetaException e) {
} catch (IOException ioe) {
// issue warning for missing file and throw exception
LOG.warn("Cannot find {} in source repo or cmroot", fragments[0]);
throw new IOException(e.getMessage());
throw ioe;
}
// Note - we need srcFs rather than fs, because it is possible that the _files lists files
// which are from a different filesystem than the fs where the _files file itself was loaded
Expand Down
Expand Up @@ -106,7 +106,7 @@ protected void writeEncodedDumpFiles(Context withinContext, Iterable<String> fil
}

protected void writeFileEntry(Table table, Partition ptn, String file, Context withinContext)
throws IOException, LoginException, MetaException, HiveFatalException {
throws IOException, LoginException, HiveFatalException {
HiveConf hiveConf = withinContext.hiveConf;
String distCpDoAsUser = hiveConf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
if (!Utils.shouldDumpMetaDataOnly(withinContext.hiveConf)) {
Expand Down
Expand Up @@ -75,7 +75,7 @@ public void handle(Context withinContext) throws Exception {
}

private void copyFunctionBinaries(List<DataCopyPath> functionBinaryCopyPaths, HiveConf hiveConf)
throws MetaException, IOException, LoginException, HiveFatalException {
throws IOException, LoginException, HiveFatalException {
if (!functionBinaryCopyPaths.isEmpty()) {
String distCpDoAsUser = hiveConf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
List<ReplChangeManager.FileInfo> filePaths = new ArrayList<>();
Expand Down
Expand Up @@ -205,7 +205,8 @@ private ReplChangeManager(Configuration conf) throws MetaException {
inited = true;
}
} catch (IOException e) {
throw new MetaException(StringUtils.stringifyException(e));
LOG.error("Failed to created ReplChangeManager", e);
throw new MetaException(e.getMessage());
}
}

Expand Down Expand Up @@ -378,32 +379,28 @@ static Path getCMPath(Configuration conf, String name, String checkSum, String c
* @return Corresponding FileInfo object
*/
public static FileInfo getFileInfo(Path src, String checksumString, String srcCMRootURI, String subDir,
Configuration conf) throws MetaException {
try {
FileSystem srcFs = src.getFileSystem(conf);
if (checksumString == null) {
return new FileInfo(srcFs, src, subDir);
}
Configuration conf) throws IOException {
FileSystem srcFs = src.getFileSystem(conf);
if (checksumString == null) {
return new FileInfo(srcFs, src, subDir);
}

Path cmPath = getCMPath(conf, src.getName(), checksumString, srcCMRootURI);
if (!srcFs.exists(src)) {
return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
}
Path cmPath = getCMPath(conf, src.getName(), checksumString, srcCMRootURI);
if (!srcFs.exists(src)) {
return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
}

String currentChecksumString;
try {
currentChecksumString = checksumFor(src, srcFs);
} catch (IOException ex) {
// If the file is missing or getting modified, then refer CM path
return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
}
if ((currentChecksumString == null) || checksumString.equals(currentChecksumString)) {
return new FileInfo(srcFs, src, cmPath, checksumString, true, subDir);
} else {
return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
}
} catch (IOException e) {
throw new MetaException(StringUtils.stringifyException(e));
String currentChecksumString;
try {
currentChecksumString = checksumFor(src, srcFs);
} catch (IOException ex) {
// If the file is missing or getting modified, then refer CM path
return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
}
if ((currentChecksumString == null) || checksumString.equals(currentChecksumString)) {
return new FileInfo(srcFs, src, cmPath, checksumString, true, subDir);
} else {
return new FileInfo(srcFs, src, cmPath, checksumString, false, subDir);
}
}

Expand Down
Expand Up @@ -98,7 +98,9 @@ public static String normalizeIdentifier(String identifier) {
* Make a string representation of the exception.
* @param e The exception to stringify
* @return A string with exception name and call stack.
* @deprecated
*/
@Deprecated
public static String stringifyException(Throwable e) {
StringWriter stm = new StringWriter();
PrintWriter wrt = new PrintWriter(stm);
Expand Down
Expand Up @@ -987,7 +987,8 @@ public boolean dropDatabase(String catName, String dbname)
pm.deletePersistent(db);
success = commitTransaction();
} catch (Exception e) {
throw new MetaException(e.getMessage() + " " + org.apache.hadoop.hive.metastore.utils.StringUtils.stringifyException(e));
LOG.error("Failed to drop database", e);
throw new MetaException(e.getMessage());
} finally {
rollbackAndCleanup(success, null);
}
Expand Down Expand Up @@ -1195,7 +1196,8 @@ public boolean dropDataConnector(String dcname)
pm.deletePersistent(mdb);
success = commitTransaction();
} catch (Exception e) {
throw new MetaException(e.getMessage() + " " + org.apache.hadoop.hive.metastore.utils.StringUtils.stringifyException(e));
LOG.error("Failed to drop data connector", e);
throw new MetaException(e.getMessage());
} finally {
rollbackAndCleanup(success, null);
}
Expand Down Expand Up @@ -3247,7 +3249,8 @@ protected List<Partition> getJdoResult(GetHelper<List<Partition>> ctx) throws Me
try {
return convertToParts(listMPartitions(catName, dbName, tblName, maxParts));
} catch (Exception e) {
throw new MetaException(e.getMessage() + " " + org.apache.hadoop.hive.metastore.utils.StringUtils.stringifyException(e));
LOG.error("Failed to convert to parts", e);
throw new MetaException(e.getMessage());
}
}
}.run(false);
Expand Down
Expand Up @@ -1217,8 +1217,8 @@ public static void setNestedProperty(Object bean, String propertyName, Object va
}
PropertyUtils.setNestedProperty(bean, propertyName, value);
} catch (Exception e) {
throw new MetaException(
org.apache.hadoop.hive.metastore.utils.StringUtils.stringifyException(e));
LOG.error("Failed to set nested property", e);
throw new MetaException(e.getMessage());
}
}

Expand Down

0 comments on commit 778c838

Please sign in to comment.