Skip to content

Commit

Permalink
DDIB should not output two subject columns. (#2263)
Browse files Browse the repository at this point in the history
* DDIB should not output two subject columns.
Currently it outputs both the original column and the translated column, which depends on subtle behavior in matchColumns() to not break things. Subtle is bad, and we don't need both columns.

* Update api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java

Co-authored-by: Adam Rauch <adam@labkey.com>
  • Loading branch information
labkey-matthewb and labkey-adam committed May 19, 2021
1 parent 43fe753 commit 229b7bd
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 17 deletions.
Expand Up @@ -39,6 +39,7 @@ public abstract class ExistingRecordDataIterator extends WrapperDataIterator
{
public static final String EXISTING_RECORD_COLUMN_NAME = "_" + ExistingRecordDataIterator.class.getName() + "#EXISTING_RECORD_COLUMN_NAME";

final CachingDataIterator _unwrapped;
final TableInfo target;
final ArrayList<ColumnInfo> pkColumns = new ArrayList<>();
final ArrayList<Supplier<Object>> pkSuppliers = new ArrayList<>();
Expand All @@ -52,6 +53,10 @@ public abstract class ExistingRecordDataIterator extends WrapperDataIterator
ExistingRecordDataIterator(DataIterator in, TableInfo target, @Nullable Set<String> keys, boolean useMark)
{
super(in);

// NOTE it might get wrapped with a LoggingDataIterator, so remember the original DataIterator
this._unwrapped = useMark ? (CachingDataIterator)in : null;

this.target = target;
this.existingColIndex = in.getColumnCount()+1;
this.useMark = useMark;
Expand Down Expand Up @@ -130,7 +135,7 @@ public boolean next() throws BatchValidationException
{
// NOTE: we have to call mark() before we call next() if we want the 'next' row to be cached
if (useMark)
((CachingDataIterator)_delegate).mark();
_unwrapped.mark(); // unwrapped _delegate
boolean ret = super.next();
if (ret && !pkColumns.isEmpty())
prefetchExisting();
Expand Down Expand Up @@ -251,7 +256,7 @@ protected void prefetchExisting() throws BatchValidationException
existingRecords.put(r,(Map<String,Object>)map);
});
// backup to where we started so caller can iterate through them one at a time
((CachingDataIterator)_delegate).reset();
_unwrapped.reset(); // unwrapped _delegate
_delegate.next();
}
}
Expand Down
35 changes: 20 additions & 15 deletions study/src/org/labkey/study/model/DatasetDataIteratorBuilder.java
Expand Up @@ -198,6 +198,22 @@ public DataIterator getDataIterator(DataIteratorContext context)
continue;
}

if (match == subjectCol)
{
try
{
// translate the incoming participant column
// do a conversion for PTID aliasing
it.translatePtid(in, user);
continue;
}
catch (ValidationException e)
{
setupError(e.getMessage());
return it;
}
}

int out;
if (DefaultStudyDesignWriter.isColumnNumericForeignKeyToDataspaceTable(match.getFk(), true))
{
Expand Down Expand Up @@ -249,19 +265,8 @@ else if (match.getPropertyType() == PropertyType.FILE_LINK)
Integer indexContainer = outputMap.get(containerColumn);
Integer indexReplace = outputMap.get("replace");

// do a conversion for PTID aliasing
Integer translatedIndexPTID = indexPTID;
try
{
translatedIndexPTID = it.translatePtid(indexPTIDInput, user);
}
catch (ValidationException e)
{
context.getErrors().addRowError(e);
}

// For now, just specify null for sequence num index... we'll add it below
it.setSpecialOutputColumns(translatedIndexPTID, null, indexVisitDate, indexKeyProperty, indexContainer);
it.setSpecialOutputColumns(indexPTID, null, indexVisitDate, indexKeyProperty, indexContainer);
it.setTimepointType(timetype);

/* NOTE: these columns must be added in dependency order
Expand Down Expand Up @@ -411,11 +416,11 @@ else if (_datasetDefinition.getUseTimeKeyField())
{
Integer indexVisit = timetype.isVisitBased() ? it.indexSequenceNumOutput : indexVisitDate;
// no point if required columns are missing
if (null != translatedIndexPTID && null != indexVisit)
if (null != indexPTID && null != indexVisit)
{
ScrollableDataIterator scrollable = DataIteratorUtil.wrapScrollable(ret);
_datasetDefinition.checkForDuplicates(scrollable, indexLSID,
translatedIndexPTID, null == indexVisit ? -1 : indexVisit, null == indexKeyProperty ? -1 : indexKeyProperty, null == indexReplace ? -1 : indexReplace,
indexPTID, null == indexVisit ? -1 : indexVisit, null == indexKeyProperty ? -1 : indexKeyProperty, null == indexReplace ? -1 : indexReplace,
context, null,
checkDuplicates);
scrollable.beforeFirst();
Expand Down Expand Up @@ -612,7 +617,7 @@ int translateSequenceNum(Integer indexSequenceNumInput, Integer indexVisitDateIn

int translatePtid(Integer indexPtidInput, User user) throws ValidationException
{
ColumnInfo col = new BaseColumnInfo("ParticipantId", JdbcType.VARCHAR);
ColumnInfo col = new BaseColumnInfo(_datasetDefinition.getStudy().getSubjectColumnName(), JdbcType.VARCHAR);
ParticipantIdImportHelper piih = new ParticipantIdImportHelper(_datasetDefinition.getStudy(), user, _datasetDefinition);
Callable call = piih.getCallable(getInput(), indexPtidInput);
return addColumn(col, call);
Expand Down

0 comments on commit 229b7bd

Please sign in to comment.