Skip to content
Permalink
Browse files
[NO ISSUE][MD] Ignore index-not-exists failures on compensating ops
- user model changes: no
- storage format changes: no
- interface changes: no

Details:

- When executing dataset drop compensating operations, ignore
  failures if the dataset files were already deleted.

Change-Id: Ia4b3ff6b3c3cd1a3327f193c7398f983bd891064
Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/9144
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Murtadha Hubail <mhubail@apache.org>
Reviewed-by: Till Westmann <tillw@apache.org>
  • Loading branch information
mhubail committed Dec 4, 2020
1 parent 70a7d58 commit 6903fa35e46e395cecac18a39fc62e0588d2294d
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 9 deletions.
@@ -227,6 +227,7 @@
import org.apache.hyracks.api.result.ResultSetId;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.common.controllers.CCConfig;
import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor.DropOption;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
@@ -871,7 +872,8 @@ protected void doCreateDatasetStatement(MetadataProvider metadataProvider, Datas
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
JobSpecification jobSpec = DatasetUtil.dropDatasetJobSpec(dataset, metadataProvider);
JobSpecification jobSpec =
DatasetUtil.dropDatasetJobSpec(dataset, metadataProvider, EnumSet.of(DropOption.IF_EXISTS));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
runJob(hcc, jobSpec);
@@ -1654,6 +1656,7 @@ protected boolean doDropDataset(DataverseName dataverseName, String datasetName,
MutableBoolean bActiveTxn = new MutableBoolean(true);
metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
List<JobSpecification> jobsToExecute = new ArrayList<>();
Dataset ds = null;
try {
// Check if the dataverse exists
Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx.getValue(), dataverseName);
@@ -1669,7 +1672,7 @@ protected boolean doDropDataset(DataverseName dataverseName, String datasetName,
throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dataverseName);
}
}
Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
ds = metadataProvider.findDataset(dataverseName, datasetName);
if (ds == null) {
if (ifExists) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
@@ -1682,11 +1685,12 @@ protected boolean doDropDataset(DataverseName dataverseName, String datasetName,
validateDatasetState(metadataProvider, ds, sourceLoc);

ds.drop(metadataProvider, mdTxnCtx, jobsToExecute, bActiveTxn, progress, hcc, dropCorrespondingNodeGroup,
sourceLoc);
sourceLoc, Collections.emptySet());

MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
return true;
} catch (Exception e) {
LOGGER.error("failed to drop dataset; executing compensating operations", e);
if (bActiveTxn.booleanValue()) {
abort(e, e, mdTxnCtx.getValue());
}
@@ -1695,6 +1699,11 @@ protected boolean doDropDataset(DataverseName dataverseName, String datasetName,
// #. execute compensation operations
// remove all the indexes in NC
try {
if (ds != null) {
jobsToExecute.clear();
ds.drop(metadataProvider, mdTxnCtx, jobsToExecute, bActiveTxn, progress, hcc,
dropCorrespondingNodeGroup, sourceLoc, EnumSet.of(DropOption.IF_EXISTS));
}
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
@@ -23,6 +23,7 @@
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.IntStream;

import org.apache.asterix.common.api.IDatasetInfoProvider;
@@ -104,6 +105,7 @@
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor.DropOption;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.lsm.common.api.IFrameOperationCallbackFactory;
@@ -344,20 +346,20 @@ public boolean allow(ILogicalOperator topOp, byte operation) {
*/
public void drop(MetadataProvider metadataProvider, MutableObject<MetadataTransactionContext> mdTxnCtx,
List<JobSpecification> jobsToExecute, MutableBoolean bActiveTxn, MutableObject<ProgressState> progress,
IHyracksClientConnection hcc, boolean dropCorrespondingNodeGroup, SourceLocation sourceLoc)
throws Exception {
IHyracksClientConnection hcc, boolean dropCorrespondingNodeGroup, SourceLocation sourceLoc,
Set<DropOption> options) throws Exception {
Map<FeedConnectionId, Pair<JobSpecification, Boolean>> disconnectJobList = new HashMap<>();
if (getDatasetType() == DatasetType.INTERNAL) {
// #. prepare jobs to drop the dataset and the indexes in NC
List<Index> indexes =
MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx.getValue(), dataverseName, datasetName);
for (int j = 0; j < indexes.size(); j++) {
if (indexes.get(j).isSecondaryIndex()) {
jobsToExecute
.add(IndexUtil.buildDropIndexJobSpec(indexes.get(j), metadataProvider, this, sourceLoc));
jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(j), metadataProvider, this, options,
sourceLoc));
}
}
jobsToExecute.add(DatasetUtil.dropDatasetJobSpec(this, metadataProvider));
jobsToExecute.add(DatasetUtil.dropDatasetJobSpec(this, metadataProvider, options));
// #. mark the existing dataset as PendingDropOp
MetadataManager.INSTANCE.dropDataset(mdTxnCtx.getValue(), dataverseName, datasetName);
MetadataManager.INSTANCE.addDataset(mdTxnCtx.getValue(),
@@ -20,6 +20,7 @@

import java.io.DataOutput;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -289,6 +290,11 @@ public static ARecordType getMetaType(MetadataProvider metadataProvider, Dataset

public static JobSpecification dropDatasetJobSpec(Dataset dataset, MetadataProvider metadataProvider)
throws AlgebricksException, ACIDException {
return dropDatasetJobSpec(dataset, metadataProvider, Collections.emptySet());
}

public static JobSpecification dropDatasetJobSpec(Dataset dataset, MetadataProvider metadataProvider,
Set<IndexDropOperatorDescriptor.DropOption> options) throws AlgebricksException, ACIDException {
LOGGER.info("DROP DATASET: " + dataset);
if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
return RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
@@ -298,7 +304,8 @@ public static JobSpecification dropDatasetJobSpec(Dataset dataset, MetadataProvi
metadataProvider.getSplitProviderAndConstraints(dataset);
IIndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first);
IndexDropOperatorDescriptor primaryBtreeDrop = new IndexDropOperatorDescriptor(specPrimary, indexHelperFactory);
IndexDropOperatorDescriptor primaryBtreeDrop =
new IndexDropOperatorDescriptor(specPrimary, indexHelperFactory, options);
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(specPrimary, primaryBtreeDrop,
splitsAndConstraint.second);
specPrimary.addRoot(primaryBtreeDrop);

0 comments on commit 6903fa3

Please sign in to comment.