Skip to content

Commit

Permalink
HUEditorRow.streamRecursive()
Browse files Browse the repository at this point in the history
  • Loading branch information
teosarca committed May 3, 2017
1 parent 41e9ea7 commit ed132de
Show file tree
Hide file tree
Showing 2 changed files with 26 additions and 23 deletions.
30 changes: 22 additions & 8 deletions src/main/java/de/metas/ui/web/handlingunits/HUEditorRow.java
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import java.util.Objects;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Stream;

import javax.annotation.Nullable;

Expand All @@ -17,6 +18,7 @@
import org.compiere.model.I_C_UOM;
import org.compiere.util.Env;

import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

Expand Down Expand Up @@ -74,18 +76,17 @@ public static final HUEditorRow cast(final IViewRow viewRow)
{
return (HUEditorRow)viewRow;
}

/**
 * Builds the editor row ID which corresponds to a given {@code M_HU_ID}.
 *
 * @param huId the M_HU_ID to convert
 * @return row ID wrapping the given HU ID
 */
public static DocumentId rowIdFromM_HU_ID(final int huId)
{
	final DocumentId rowId = DocumentId.of(huId);
	return rowId;
}

/**
 * Converts a collection of {@code M_HU_ID}s to their corresponding editor row IDs.
 *
 * @param huIds the M_HU_ID values to convert
 * @return set of row IDs, one per given HU ID
 */
public static Set<DocumentId> rowIdsFromM_HU_IDs(final Collection<Integer> huIds)
{
	final Set<DocumentId> rowIds = DocumentId.ofIntSet(huIds);
	return rowIds;
}


public static DocumentId rowIdFromM_HU_Storage(final int huId, final int productId)
{
return DocumentId.ofString(I_M_HU_Storage.Table_Name + "_HU" + huId + "_P" + productId);
Expand All @@ -95,13 +96,12 @@ public static int rowIdToM_HU_ID(final DocumentId rowId)
{
return rowId == null ? -1 : rowId.toInt();
}

/**
 * Extracts the {@code M_HU_ID}s from a collection of editor row IDs.
 *
 * @param rowIds the row IDs to convert
 * @return set of M_HU_ID values
 */
public static Set<Integer> rowIdsToM_HU_IDs(final Collection<DocumentId> rowIds)
{
	final Set<Integer> huIds = DocumentId.toIntSet(rowIds);
	return huIds;
}


private final DocumentPath documentPath;
private final DocumentId rowId;
private final HUEditorRowType type;
Expand All @@ -127,7 +127,7 @@ private HUEditorRow(final Builder builder)
{
documentPath = builder.getDocumentPath();
rowId = documentPath.getDocumentId();

type = builder.getType();
processed = builder.isProcessed();

Expand Down Expand Up @@ -241,6 +241,21 @@ public List<HUEditorRow> getIncludedRows()
return includedRows;
}

/** @return a stream of this row and all its included rows, recursively (depth-first, this row first) */
public Stream<HUEditorRow> streamRecursive()
{
	return streamRecursive(this);
}

/** @return a stream of the given row and all its included rows, recursively (depth-first, given row first) */
private static Stream<HUEditorRow> streamRecursive(final HUEditorRow row)
{
	// Flattening via Stream.concat + flatMap keeps the traversal lazy and avoids the
	// repeated stream-wrapping that reduce(Stream.of(row), Stream::concat) builds up
	// for each included row. Order is unchanged: the row itself first, then its
	// included rows in iteration order, each expanded recursively.
	return Stream.concat(
			Stream.of(row),
			row.getIncludedRows()
					.stream()
					.flatMap(HUEditorRow::streamRecursive));
}

/**
*
* @return the ID of the wrapped HU or a value {@code <= 0} if there is none.
Expand Down Expand Up @@ -289,13 +304,12 @@ public boolean isHUStatusActive()
{
return X_M_HU.HUSTATUS_Active.equals(getHUStatusKey());
}

/**
 * @return {@code true} if this row's HU status key equals {@link X_M_HU#HUSTATUS_Destroyed}
 */
public boolean isHUStatusDestroyed()
{
	final Object huStatusKey = getHUStatusKey();
	return X_M_HU.HUSTATUS_Destroyed.equals(huStatusKey);
}


public boolean isPureHU()
{
return getType().isPureHU();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,9 +44,9 @@

/**
* {@link HUEditorViewBuffer} implementation which fully caches the {@link HUEditorRow}s.
*
*
This implementation shall be used when dealing with a small number of HUs.
*
*
* @author metas-dev <dev@metasfresh.com>
*
*/
Expand Down Expand Up @@ -150,7 +150,7 @@ public Set<DocumentId> getRowIdsMatchingBarcode(@NonNull final String barcode)
{
throw new IllegalArgumentException("Invalid barcode");
}

return streamAllRecursive()
.filter(row -> row.matchesBarcode(barcode))
.map(HUEditorRow::getId)
Expand Down Expand Up @@ -280,18 +280,7 @@ public Stream<HUEditorRow> stream()

public Stream<HUEditorRow> streamRecursive()
{
return rows.stream()
.map(row -> streamRecursive(row))
.reduce(Stream::concat)
.orElse(Stream.of());
}

private Stream<HUEditorRow> streamRecursive(final HUEditorRow row)
{
return row.getIncludedRows()
.stream()
.map(includedRow -> streamRecursive(includedRow))
.reduce(Stream.of(row), Stream::concat);
return rows.stream().flatMap(row -> row.streamRecursive());
}

public long size()
Expand Down

0 comments on commit ed132de

Please sign in to comment.