Skip to content
Merged
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
import org.labkey.api.query.LookupForeignKey;
import org.labkey.api.query.QueryUpdateService;
import org.labkey.api.query.QueryUpdateServiceException;
import org.labkey.api.query.ValidationException;
import org.labkey.api.security.User;
import org.labkey.api.util.Pair;

Expand Down Expand Up @@ -179,7 +180,7 @@ private Map<String, Object> createDatabaseMap(Pair<User, AnnouncementModel> targ
}

@Override
protected Map<String, Object> updateRow(User user, Container container, Map<String, Object> row, @NotNull Map<String, Object> oldRow, @Nullable Map<Enum, Object> configParameters) throws InvalidKeyException
protected Map<String, Object> updateRow(User user, Container container, Map<String, Object> row, @NotNull Map<String, Object> oldRow, @Nullable Map<Enum, Object> configParameters) throws InvalidKeyException, ValidationException
{
Map<String, Object> existingRow = getRow(user, container, oldRow);
if (existingRow != null)
Expand All @@ -189,6 +190,7 @@ protected Map<String, Object> updateRow(User user, Container container, Map<Stri
Map<String, Object> pks = new CaseInsensitiveHashMap<>();
pks.put("UserId", oldTargets.getKey().getUserId());
pks.put("MessageId", oldTargets.getValue().getRowId());
checkDuplicateUpdate(pks);
Table.update(user, getRealTable(), createDatabaseMap(newTargets), pks);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import org.labkey.api.query.QueryUpdateService;
import org.labkey.api.query.QueryUpdateServiceException;
import org.labkey.api.query.UserIdQueryForeignKey;
import org.labkey.api.query.ValidationException;
import org.labkey.api.security.User;
import org.labkey.api.security.UserManager;
import org.labkey.api.security.UserPrincipal;
Expand Down Expand Up @@ -263,7 +264,7 @@ private Map<String, Object> createDatabaseMap(User user, Map<String, Object> row
}

@Override
protected Map<String, Object> updateRow(User user, Container container, Map<String, Object> row, @NotNull Map<String, Object> oldRow, @Nullable Map<Enum, Object> configParameters) throws InvalidKeyException
protected Map<String, Object> updateRow(User user, Container container, Map<String, Object> row, @NotNull Map<String, Object> oldRow, @Nullable Map<Enum, Object> configParameters) throws InvalidKeyException, ValidationException
{
Map<String, Object> existingRow = getRow(user, container, oldRow);
if (existingRow != null)
Expand All @@ -275,6 +276,7 @@ protected Map<String, Object> updateRow(User user, Container container, Map<Stri
pks.put("Container", oldTargets.getContainer().getEntityId());
pks.put("Type", "messages");
pks.put("SrcIdentifier", oldTargets.getSrcIdentifier());
checkDuplicateUpdate(pks);
Table.update(user, CoreSchema.getInstance().getTableInfoEmailPrefs(), createDatabaseMap(user, row, newTargets), pks);
}

Expand Down
11 changes: 8 additions & 3 deletions api/src/org/labkey/api/dataiterator/DataIteratorContext.java
Original file line number Diff line number Diff line change
Expand Up @@ -238,12 +238,17 @@ public void addAlternateKeys(Set<String> alternateKeys)
_alternateKeys.addAll(alternateKeys);
}

/** Returns true when accumulated row errors mean this import/ETL should stop: either fail-fast is set or the error count exceeded the configured maximum. */
public boolean shouldCancel()
{
    boolean anyErrors = _errors.hasErrors();
    if (!anyErrors)
        return false;
    boolean overLimit = _errors.getRowErrors().size() > _maxRowErrors;
    return _failFast || overLimit;
}

/**
 * If this ETL should be killed, will execute {@code throw _errors;}.
 * Delegates the decision to {@link #shouldCancel()} so callers that only need the
 * boolean (without throwing) can share the same logic.
 *
 * @throws BatchValidationException the accumulated errors, when cancellation is required
 */
public void checkShouldCancel() throws BatchValidationException
{
    // Fast exit with no errors; shouldCancel() re-checks this, kept for clarity/cheapness.
    if (!_errors.hasErrors())
        return;
    if (shouldCancel())
        throw _errors;
}

Expand Down
54 changes: 41 additions & 13 deletions api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
Expand Down Expand Up @@ -64,9 +65,12 @@ public abstract class ExistingRecordDataIterator extends WrapperDataIterator
final boolean _checkCrossFolderData;
final boolean _verifyExisting;

final Set<String> _sharedKeys = new CaseInsensitiveHashSet(); // common keys, such as data.classId, material.MaterialSourceId
final Set<Object> _pkKeysSeen = new HashSet<>();

final DataIteratorContext _context;

ExistingRecordDataIterator(DataIterator in, TableInfo target, @Nullable Set<String> keys, boolean useMark, DataIteratorContext context, boolean detailed)
ExistingRecordDataIterator(DataIterator in, TableInfo target, @Nullable Set<String> keys, @Nullable Set<String> sharedKeys, boolean useMark, DataIteratorContext context, boolean detailed)
{
super(in);
_context = context;
Expand All @@ -91,6 +95,9 @@ public abstract class ExistingRecordDataIterator extends WrapperDataIterator

Collection<String> keyNames = null==keys ? target.getPkColumnNames() : keys;

if (sharedKeys != null)
_sharedKeys.addAll(sharedKeys);

_dataColumnNames.addAll(detailed ? map.keySet() : keyNames);

for (String name : keyNames)
Expand Down Expand Up @@ -170,12 +177,19 @@ public boolean next() throws BatchValidationException
if (!_context.getErrors().hasErrors() && ret && !pkColumns.isEmpty())
{
prefetchExisting();
if (_context.getErrors().hasErrors())
if (_context.shouldCancel())
return false;
}
return ret;
}

protected void checkDuplicateKeys(List<String> pkKeys)
{
Object pkKeysObj = pkKeys.size() == 1 ? pkKeys.get(0) : pkKeys;
Comment thread
labkey-susanh marked this conversation as resolved.
if (_pkKeysSeen.contains(pkKeysObj))
_context.getErrors().addRowError(new ValidationException("Duplicate key provided: " + StringUtils.join(pkKeys, ", ")));
_pkKeysSeen.add(pkKeysObj);
}

@Override
public boolean supportsGetExistingRecord()
Expand All @@ -194,10 +208,10 @@ public Map<String, Object> getExistingRecord()

public static DataIteratorBuilder createBuilder(DataIteratorBuilder dib, TableInfo target, @Nullable Set<String> keys)
{
return createBuilder(dib, target, keys, false);
return createBuilder(dib, target, keys, null, false);
}

public static DataIteratorBuilder createBuilder(DataIteratorBuilder dib, TableInfo target, @Nullable Set<String> keys, boolean useGetRows)
public static DataIteratorBuilder createBuilder(DataIteratorBuilder dib, TableInfo target, @Nullable Set<String> keys, @Nullable Set<String> sharedKeys, boolean useGetRows)
{
return context ->
{
Expand All @@ -215,9 +229,9 @@ public static DataIteratorBuilder createBuilder(DataIteratorBuilder dib, TableIn
auditType = target.getAuditBehavior((AuditBehaviorType) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior));
boolean detailed = auditType == DETAILED;
if (useGetRows)
return new ExistingDataIteratorsGetRows(new CachingDataIterator(di), target, keys, context, detailed);
return new ExistingDataIteratorsGetRows(new CachingDataIterator(di), target, keys, sharedKeys, context, detailed);
else
return new ExistingDataIteratorsTableInfo(new CachingDataIterator(di), target, keys, context, detailed);
return new ExistingDataIteratorsTableInfo(new CachingDataIterator(di), target, keys, sharedKeys, context, detailed);
}
return di;
};
Expand All @@ -229,9 +243,9 @@ static class ExistingDataIteratorsTableInfo extends ExistingRecordDataIterator
{
final Set<String> allowedContainers = new HashSet<>();

ExistingDataIteratorsTableInfo(CachingDataIterator in, TableInfo target, @Nullable Set<String> keys, DataIteratorContext context, boolean detailed)
ExistingDataIteratorsTableInfo(CachingDataIterator in, TableInfo target, @Nullable Set<String> keys, @Nullable Set<String> sharedKeys, DataIteratorContext context, boolean detailed)
{
super(in, target, keys, true, context, detailed);
super(in, target, keys, sharedKeys, true, context, detailed);
if (c != null)
allowedContainers.add(c.getId());
}
Expand All @@ -255,12 +269,17 @@ private Pair<SQLFragment, Map<Integer, String>> getSelectExistingSql(int rows) t

sqlf.append(comma).append("(").appendValue(lastPrefetchRowNumber);
comma = "\n,";
List<String> pkKeys = new ArrayList<>();
for (int p = 0; p < pkColumns.size(); p++)
{
sqlf.append(",?");
sqlf.add(pkSuppliers.get(p).get());
Object pkVal = pkSuppliers.get(p).get();
sqlf.add(pkVal);
if (!_sharedKeys.contains(pkColumns.get(p).getColumnName()))
pkKeys.add(pkVal.toString());
}
sqlf.append(")");
checkDuplicateKeys(pkKeys);
rowNumContainers.put(lastPrefetchRowNumber, container);
}
while (--rows > 0 && _delegate.next());
Expand Down Expand Up @@ -347,9 +366,9 @@ static class ExistingDataIteratorsGetRows extends ExistingRecordDataIterator
{
final QueryUpdateService qus;

ExistingDataIteratorsGetRows(CachingDataIterator in, TableInfo target, @Nullable Set<String> keys, DataIteratorContext context, boolean detailed)
ExistingDataIteratorsGetRows(CachingDataIterator in, TableInfo target, @Nullable Set<String> keys, @Nullable Set<String> sharedKeys, DataIteratorContext context, boolean detailed)
{
super(in, target, keys, true, context, detailed);
super(in, target, keys, sharedKeys, true, context, detailed);
qus = target.getUpdateService();
}

Expand All @@ -370,8 +389,17 @@ protected void prefetchExisting() throws BatchValidationException
{
lastPrefetchRowNumber = (Integer) _delegate.get(0);
Map<String,Object> keyMap = CaseInsensitiveHashMap.of();
List<String> pkKeys = new ArrayList<>();
for (int p=0 ; p<pkColumns.size() ; p++)
keyMap.put(pkColumns.get(p).getColumnName(), pkSuppliers.get(p).get());
{
Object pkVal = pkSuppliers.get(p).get();
keyMap.put(pkColumns.get(p).getColumnName(), pkVal);
if (!_sharedKeys.contains(pkColumns.get(p).getColumnName()))
pkKeys.add(pkVal.toString());
}

checkDuplicateKeys(pkKeys);

keysMap.put(lastPrefetchRowNumber, keyMap);
existingRecords.put(lastPrefetchRowNumber, Map.of());
}
Expand Down Expand Up @@ -419,7 +447,7 @@ private DataIterator makeModulesDI()
assertFalse(di.supportsGetExistingRecord());
var context = new DataIteratorContext();
context.setInsertOption(QueryUpdateService.InsertOption.INSERT);
DataIterator existing = new ExistingDataIteratorsTableInfo(new CachingDataIterator(di), CoreSchema.getInstance().getTableInfoModules(), null, context, true);
DataIterator existing = new ExistingDataIteratorsTableInfo(new CachingDataIterator(di), CoreSchema.getInstance().getTableInfoModules(), null, null, context, true);
assertTrue(existing.supportsGetExistingRecord());
return existing;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,7 @@ public DataIterator getDataIterator(DataIteratorContext context)
else if (context.getInsertOption().mergeRows && !_target.supportsInsertOption(QueryUpdateService.InsertOption.MERGE))
return LoggingDataIterator.wrap(new BeforeIterator(coerce, context));

coerce = ExistingRecordDataIterator.createBuilder(coerce, _target, mergeKeys, true).getDataIterator(context);
coerce = ExistingRecordDataIterator.createBuilder(coerce, _target, mergeKeys, null, true).getDataIterator(context);
return LoggingDataIterator.wrap(new BeforeIterator(new CachingDataIterator(coerce), context));
}
}
Expand Down
84 changes: 84 additions & 0 deletions api/src/org/labkey/api/query/AbstractQueryUpdateService.java
Original file line number Diff line number Diff line change
Expand Up @@ -107,10 +107,13 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.function.Function;

import static java.util.Objects.requireNonNull;
Expand All @@ -137,6 +140,7 @@ public abstract class AbstractQueryUpdateService implements QueryUpdateService
* If a subclass wants to disable some of these features (w/o subclassing), put flags here...
*/
protected boolean _enableExistingRecordsDataIterator = true;
protected Set<Object> _previouslyUpdatedRows = new HashSet<>();

protected AbstractQueryUpdateService(TableInfo queryTable)
{
Expand All @@ -150,6 +154,11 @@ protected TableInfo getQueryTable()
return _queryTable;
}

/** Returns the set of row keys already updated in this batch; never null (falls back to a fresh empty set if the field is null). */
public @NotNull Set<Object> getPreviouslyUpdatedRows()
{
    if (null != _previouslyUpdatedRows)
        return _previouslyUpdatedRows;
    return new HashSet<>();
}

@Override
public boolean hasPermission(@NotNull UserPrincipal user, Class<? extends Permission> acl)
{
Expand Down Expand Up @@ -834,6 +843,43 @@ public List<Map<String, Object>> updateRows(User user, Container container, List
return result;
}

protected void checkDuplicateUpdate(Object pkVals) throws ValidationException
{
if (pkVals == null)
return;

Set<Object> updatedRows = getPreviouslyUpdatedRows();

Object[] keysObj;
if (pkVals.getClass().isArray())
keysObj = (Object[]) pkVals;
else if (pkVals instanceof Map map)
{
List<Object> orderedKeyVals = new ArrayList<>();
SortedSet<String> sortedKeys = new TreeSet<>(map.keySet());
for (String key : sortedKeys)
orderedKeyVals.add(map.get(key));
keysObj = orderedKeyVals.toArray();
}
else
keysObj = new Object[]{pkVals};

if (keysObj.length == 1)
Comment thread
labkey-susanh marked this conversation as resolved.
{
if (updatedRows.contains(keysObj[0]))
throw new ValidationException("Duplicate key provided: " + keysObj[0]);
updatedRows.add(keysObj[0]);
return;
}

List<String> keys = new ArrayList<>();
for (Object key : keysObj)
keys.add(String.valueOf(key));
if (updatedRows.contains(keys))
throw new ValidationException("Duplicate key provided: " + StringUtils.join(keys, ", "));
updatedRows.add(keys);
}

@Override
public Map<String, Object> moveRows(User user, Container container, Container targetContainer, List<Map<String, Object>> rows, BatchValidationException errors, @Nullable Map<Enum, Object> configParameters, @Nullable Map<String, Object> extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException
{
Expand Down Expand Up @@ -1288,6 +1334,15 @@ public void addRowError(ValidationException vex)
// test new row
assertEquals("THREE", rows.get(3).get("s"));
assertNull(rows.get(3).get("i"));

// merge should fail if duplicate keys are provided
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Issue reference here?

errors = new BatchValidationException();
mergeRows = new ArrayList<Map<String,Object>>();
mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2"));
mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2"));
qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null);
assertTrue(errors.hasErrors());
assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2"));
}

@Test
Expand Down Expand Up @@ -1325,6 +1380,35 @@ public void UPDATE() throws Exception
updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2"));
count = qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null);
assertTrue(context.getErrors().hasErrors());

// Issue 52728: update should fail if a duplicate key is provided
updateRows = new ArrayList<Map<String,Object>>();
updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2"));
updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2"));

// use DIB
context = new DataIteratorContext();
context.setInsertOption(InsertOption.UPDATE);
qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null);
assertTrue(context.getErrors().hasErrors());
assertTrue("Duplicate key error: " + context.getErrors().getMessage(), context.getErrors().getMessage().contains("Duplicate key provided: 2"));

// use updateRows
if (!_useAlias) // _update using alias is not supported
{
BatchValidationException errors = new BatchValidationException();
try
{
qus.updateRows(user, c, updateRows, null, errors, null, null);
}
catch (Exception e)
{

}
assertTrue(errors.hasErrors());
assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2"));

}
}

@Test
Expand Down
3 changes: 3 additions & 0 deletions api/src/org/labkey/api/query/DefaultQueryUpdateService.java
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

import org.apache.commons.beanutils.ConversionException;
import org.apache.commons.beanutils.ConvertUtils;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.labkey.api.attachments.AttachmentFile;
Expand Down Expand Up @@ -587,6 +588,8 @@ protected Map<String, Object> _update(User user, Container c, Map<String, Object
throw e.getLastRowError();
}

checkDuplicateUpdate(keys);

return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen())
}

Expand Down
Loading