Skip to content

Commit

Permalink
[CARBONDATA-3411] [CARBONDATA-3414] Fix clear datamaps logging an exception in SDK
Browse files Browse the repository at this point in the history

problem: In sdk when datamaps are cleared, below exception is logged
java.io.IOException: File does not exist: ../carbondata/store/sdk/testWriteFiles/771604793030370/Metadata/schema

cause: The CarbonTable is required only for launching the job; in the SDK there is no need to launch a job, so there is no need to build a CarbonTable.

solution: build the CarbonTable only when a job needs to be launched.

problem [CARBONDATA-3411]: when an insert into a partition table fails, the exception does not report the reason for the failure.

cause: The exception was caught, but the error message that was reported was not taken from that exception.

solution: throw the exception directly

This closes #3254
  • Loading branch information
ajantha-bhat authored and xubo245 committed Jun 11, 2019
1 parent 1baa485 commit e0743fd
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 29 deletions.
Expand Up @@ -511,44 +511,41 @@ public void clearDataMaps(AbsoluteTableIdentifier identifier) {
*/
public void clearDataMaps(AbsoluteTableIdentifier identifier, boolean launchJob) {
String tableUniqueName = identifier.getCarbonTableIdentifier().getTableUniqueName();
CarbonTable carbonTable = getCarbonTable(identifier);
if (launchJob && CarbonProperties.getInstance()
.isDistributedPruningEnabled(identifier.getDatabaseName(), identifier.getTableName())) {
if (launchJob) {
// carbon table need to lookup only if launch job is set.
CarbonTable carbonTable = getCarbonTable(identifier);
if (null != carbonTable) {
try {
DataMapUtil.executeClearDataMapJob(carbonTable, DataMapUtil.DISTRIBUTED_JOB_NAME);
} catch (IOException e) {
LOGGER.error("clear dataMap job failed", e);
// ignoring the exception
}
}
} else {
List<TableDataMap> tableIndices = allDataMaps.get(tableUniqueName);
if (tableIndices == null) {
String keyUsingTablePath = getKeyUsingTablePath(identifier.getTablePath());
if (keyUsingTablePath != null) {
tableUniqueName = keyUsingTablePath;
String jobClassName;
if (CarbonProperties.getInstance()
.isDistributedPruningEnabled(identifier.getDatabaseName(), identifier.getTableName())) {
jobClassName = DataMapUtil.DISTRIBUTED_JOB_NAME;
} else {
jobClassName = DataMapUtil.EMBEDDED_JOB_NAME;
}
}
if (launchJob && null != carbonTable) {
try {
DataMapUtil.executeClearDataMapJob(carbonTable, DataMapUtil.EMBEDDED_JOB_NAME);
DataMapUtil.executeClearDataMapJob(carbonTable, jobClassName);
} catch (IOException e) {
LOGGER.error("clear dataMap job failed", e);
// ignoring the exception
}
}
} else {
// remove carbon table from meta cache if launchJob is false as this would be called in
// executor side.
if (!launchJob) {
CarbonMetadata.getInstance()
.removeTable(identifier.getDatabaseName(), identifier.getTableName());
CarbonMetadata.getInstance()
.removeTable(identifier.getDatabaseName(), identifier.getTableName());
}
List<TableDataMap> tableIndices = allDataMaps.get(tableUniqueName);
if (tableIndices == null) {
String keyUsingTablePath = getKeyUsingTablePath(identifier.getTablePath());
if (keyUsingTablePath != null) {
tableUniqueName = keyUsingTablePath;
}
segmentRefreshMap.remove(identifier.uniqueName());
clearDataMaps(tableUniqueName);
allDataMaps.remove(tableUniqueName);
tablePathMap.remove(tableUniqueName);
}
segmentRefreshMap.remove(identifier.uniqueName());
clearDataMaps(tableUniqueName);
allDataMaps.remove(tableUniqueName);
tablePathMap.remove(tableUniqueName);
}

/**
Expand Down
Expand Up @@ -847,7 +847,7 @@ case class CarbonLoadDataCommand(
}
LOGGER.info(errorMessage)
LOGGER.error(ex)
throw new Exception(errorMessage)
throw ex
} finally {
CarbonSession.threadUnset("partition.operationcontext")
if (isOverwriteTable) {
Expand Down
Expand Up @@ -359,7 +359,8 @@ public <T> CarbonReader<T> build()
} catch (Exception ex) {
// Clear the datamap cache as it can get added in getSplits() method
DataMapStoreManager.getInstance().clearDataMaps(
format.getOrCreateCarbonTable((job.getConfiguration())).getAbsoluteTableIdentifier());
format.getOrCreateCarbonTable((job.getConfiguration())).getAbsoluteTableIdentifier(),
false);
throw ex;
}
}
Expand Down Expand Up @@ -417,7 +418,8 @@ public InputSplit[] getSplits(boolean enableBlockletDistribution) throws IOExcep
if (format != null) {
// Clear the datamap cache as it is added in getSplits() method
DataMapStoreManager.getInstance().clearDataMaps(
format.getOrCreateCarbonTable((job.getConfiguration())).getAbsoluteTableIdentifier());
format.getOrCreateCarbonTable((job.getConfiguration())).getAbsoluteTableIdentifier(),
false);
}
}
return splits.toArray(new InputSplit[splits.size()]);
Expand Down

0 comments on commit e0743fd

Please sign in to comment.