From 1532f9e36551eb695fe832d97f31b4dce3eda15b Mon Sep 17 00:00:00 2001
From: Chao Sun
Date: Tue, 25 Oct 2016 14:12:02 -0700
Subject: [PATCH 1/3] HIVE-15056: Support index shifting for struct fields

---
 .../serde/ArrayWritableObjectInspector.java   |  68 ++++++++----
 .../ql/io/parquet/serde/ParquetHiveSerDe.java | 100 +++++++++++++++++-
 .../hive/ql/io/parquet/TestParquetSerDe.java  |  34 ++++++
 3 files changed, 182 insertions(+), 20 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
index 5f852d036804..c9ee4acf6351 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
@@ -21,7 +21,6 @@
 import org.apache.hadoop.hive.ql.io.parquet.serde.primitive.ParquetPrimitiveInspectorFactory;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -49,25 +48,45 @@ public class ArrayWritableObjectInspector extends SettableStructObjectInspector
   private final List<StructField> fields;
   private final HashMap<String, StructFieldImpl> fieldsByName;
 
+  // Whether this OI is for the row-level schema (as opposed to nested struct fields).
+  private final boolean isRoot;
+
   public ArrayWritableObjectInspector(final StructTypeInfo rowTypeInfo) {
+    this(true, rowTypeInfo, null);
+  }
+
+  public ArrayWritableObjectInspector(StructTypeInfo completeTypeInfo, StructTypeInfo prunedTypeInfo) {
+    this(true, completeTypeInfo, prunedTypeInfo);
+  }
 
-    typeInfo = rowTypeInfo;
-    fieldNames = rowTypeInfo.getAllStructFieldNames();
-    fieldInfos = rowTypeInfo.getAllStructFieldTypeInfos();
-    fields = new ArrayList<StructField>(fieldNames.size());
-    fieldsByName = new HashMap<String, StructFieldImpl>();
+  public ArrayWritableObjectInspector(boolean isRoot,
+      StructTypeInfo completeTypeInfo, StructTypeInfo prunedTypeInfo) {
+    this.isRoot = isRoot;
+    typeInfo = completeTypeInfo;
+    fieldNames = completeTypeInfo.getAllStructFieldNames();
+    fieldInfos = completeTypeInfo.getAllStructFieldTypeInfos();
+    fields = new ArrayList<>(fieldNames.size());
+    fieldsByName = new HashMap<>();
 
     for (int i = 0; i < fieldNames.size(); ++i) {
       final String name = fieldNames.get(i);
-      final TypeInfo fieldInfo = fieldInfos.get(i);
+      TypeInfo fieldInfo = fieldInfos.get(i);
 
-      final StructFieldImpl field = new StructFieldImpl(name, getObjectInspector(fieldInfo), i);
+      StructFieldImpl field;
+      if (prunedTypeInfo != null && prunedTypeInfo.getAllStructFieldNames().indexOf(name) >= 0) {
+        int adjustedIndex = prunedTypeInfo.getAllStructFieldNames().indexOf(name);
+        TypeInfo prunedFieldInfo = prunedTypeInfo.getAllStructFieldTypeInfos().get(adjustedIndex);
+        field = new StructFieldImpl(name, getObjectInspector(fieldInfo, prunedFieldInfo), i, adjustedIndex);
+      } else {
+        field = new StructFieldImpl(name, getObjectInspector(fieldInfo, null), i, i);
+      }
       fields.add(field);
       fieldsByName.put(name.toLowerCase(), field);
     }
   }
 
-  private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
+  private ObjectInspector getObjectInspector(
+      TypeInfo typeInfo, TypeInfo prunedTypeInfo) {
     if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
       return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
     } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
@@ -83,18 +102,20 @@ private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
     } else if (typeInfo instanceof DecimalTypeInfo) {
       return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector((DecimalTypeInfo) typeInfo);
     } else if (typeInfo.getCategory().equals(Category.STRUCT)) {
-      return new ArrayWritableObjectInspector((StructTypeInfo) typeInfo);
+      return new ArrayWritableObjectInspector(false, (StructTypeInfo) typeInfo, (StructTypeInfo) prunedTypeInfo);
     } else if (typeInfo.getCategory().equals(Category.LIST)) {
       final TypeInfo subTypeInfo = ((ListTypeInfo) typeInfo).getListElementTypeInfo();
-      return new ParquetHiveArrayInspector(getObjectInspector(subTypeInfo));
+      return new ParquetHiveArrayInspector(getObjectInspector(subTypeInfo, null));
     } else if (typeInfo.getCategory().equals(Category.MAP)) {
       final TypeInfo keyTypeInfo = ((MapTypeInfo) typeInfo).getMapKeyTypeInfo();
       final TypeInfo valueTypeInfo = ((MapTypeInfo) typeInfo).getMapValueTypeInfo();
       if (keyTypeInfo.equals(TypeInfoFactory.stringTypeInfo) || keyTypeInfo.equals(TypeInfoFactory.byteTypeInfo)
          || keyTypeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
-        return new DeepParquetHiveMapInspector(getObjectInspector(keyTypeInfo), getObjectInspector(valueTypeInfo));
+        return new DeepParquetHiveMapInspector(getObjectInspector(keyTypeInfo, null),
+            getObjectInspector(valueTypeInfo, null));
       } else {
-        return new StandardParquetHiveMapInspector(getObjectInspector(keyTypeInfo), getObjectInspector(valueTypeInfo));
+        return new StandardParquetHiveMapInspector(getObjectInspector(keyTypeInfo, null),
+            getObjectInspector(valueTypeInfo, null));
       }
     } else if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
       return ParquetPrimitiveInspectorFactory.parquetByteInspector;
@@ -139,8 +160,9 @@ public Object getStructFieldData(final Object data, final StructField fieldRef)
     if (data instanceof ArrayWritable) {
       final ArrayWritable arr = (ArrayWritable) data;
       final StructFieldImpl structField = (StructFieldImpl) fieldRef;
-      if (structField.getIndex() < arr.get().length) {
-        return arr.get()[structField.getIndex()];
+      int index = isRoot ? structField.getIndex() : structField.adjustedIndex;
+      if (index < arr.get().length) {
+        return arr.get()[index];
       } else {
         return null;
       }
@@ -170,7 +192,7 @@ public List<Object> getStructFieldsDataAsList(final Object data) {
     if (data instanceof ArrayWritable) {
       final ArrayWritable arr = (ArrayWritable) data;
       final Object[] arrWritable = arr.get();
-      return new ArrayList<Object>(Arrays.asList(arrWritable));
+      return new ArrayList<>(Arrays.asList(arrWritable));
     }
 
     //since setStructFieldData and create return a list, getStructFieldData should be able to
@@ -221,16 +243,26 @@ public int hashCode() {
     return hash;
   }
 
-  class StructFieldImpl implements StructField {
+  private class StructFieldImpl implements StructField {
 
     private final String name;
     private final ObjectInspector inspector;
     private final int index;
 
-    public StructFieldImpl(final String name, final ObjectInspector inspector, final int index) {
+    // This is the adjusted index after nested column pruning.
+    // For instance, given the struct type: s:<struct<a:int, b:int>>
+    // If only 's.b' is used, the pruned type is: s:<struct<b:int>>.
+    // Here, the index of field 'b' is changed from 1 to 0.
+    // When we look up the data from Parquet, the index needs to be adjusted accordingly.
+    // Note: currently this is only used in the read path.
+    final int adjustedIndex;
+
+    public StructFieldImpl(final String name, final ObjectInspector inspector,
+        final int index, int adjustedIndex) {
       this.name = name;
       this.inspector = inspector;
       this.index = index;
+      this.adjustedIndex = adjustedIndex;
     }
 
     @Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
index 995b965cc636..23a7fdd74af4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
@@ -15,12 +15,16 @@
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
+import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -108,8 +112,15 @@ public final void initialize(final Configuration conf, final Properties tbl) thr
           columnTypes);
     }
     // Create row related objects
-    rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
-    this.objInspector = new ArrayWritableObjectInspector((StructTypeInfo) rowTypeInfo);
+    StructTypeInfo completeTypeInfo =
+        (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
+    String prunedColumnPaths = conf.get(ColumnProjectionUtils.READ_NESTED_COLUMN_PATH_CONF_STR);
+    if (prunedColumnPaths != null) {
+      StructTypeInfo prunedTypeInfo = pruneFromPaths(completeTypeInfo, prunedColumnPaths);
+      this.objInspector = new ArrayWritableObjectInspector(completeTypeInfo, prunedTypeInfo);
+    } else {
+      this.objInspector = new ArrayWritableObjectInspector(completeTypeInfo);
+    }
 
     // Stats part
     serializedSize = 0;
@@ -163,4 +174,89 @@ public SerDeStats getSerDeStats() {
     }
     return stats;
   }
+
+  /**
+   * Given a complete struct type info and pruned paths containing selected fields
+   * from the type info, return a pruned struct type info only with the selected fields.
+   *
+   * For instance, if 'completeTypeInfo' is: s:struct<a:struct<b:string, c:string>, d:string>
+   * and 'prunedPaths' is "s.a.b,s.d", then the result will be:
+   * s:struct<a:struct<b:string>, d:string>
+   *
+   * @param completeTypeInfo the complete struct type info
+   * @param prunedPaths a string representing the pruned paths, separated by ','
+   * @return the pruned struct type info
+   */
+  private StructTypeInfo pruneFromPaths(
+      StructTypeInfo completeTypeInfo, String prunedPaths) {
+    PrunedTypeInfo prunedTypeInfo = new PrunedTypeInfo(completeTypeInfo);
+
+    String[] prunedPathList = prunedPaths.split(",");
+    for (String path : prunedPathList) {
+      pruneFromSinglePath(prunedTypeInfo, path);
+    }
+
+    return prunedTypeInfo.prune();
+  }
+
+  private void pruneFromSinglePath(PrunedTypeInfo prunedInfo, String path) {
+    Preconditions.checkArgument(prunedInfo != null,
+        "PrunedTypeInfo for path " + path + " should not be null");
+
+    int index = path.indexOf('.');
+    if (index < 0) {
+      index = path.length();
+    }
+
+    String fieldName = path.substring(0, index);
+    prunedInfo.markSelected(fieldName);
+    if (index < path.length()) {
+      pruneFromSinglePath(prunedInfo.children.get(fieldName), path.substring(index + 1));
+    }
+  }
+
+  private static class PrunedTypeInfo {
+    final StructTypeInfo typeInfo;
+    final Map<String, PrunedTypeInfo> children;
+    final boolean[] selected;
+
+    PrunedTypeInfo(StructTypeInfo typeInfo) {
+      this.typeInfo = typeInfo;
+      this.children = new HashMap<>();
+      this.selected = new boolean[typeInfo.getAllStructFieldTypeInfos().size()];
+      for (int i = 0; i < typeInfo.getAllStructFieldTypeInfos().size(); ++i) {
+        TypeInfo ti = typeInfo.getAllStructFieldTypeInfos().get(i);
+        if (ti.getCategory() == Category.STRUCT) {
+          this.children.put(typeInfo.getAllStructFieldNames().get(i),
+              new PrunedTypeInfo((StructTypeInfo) ti));
+        }
+      }
+    }
+
+    void markSelected(String fieldName) {
+      int index = typeInfo.getAllStructFieldNames().indexOf(fieldName);
+      if (index >= 0) {
+        selected[index] = true;
+      }
+    }
+
+    StructTypeInfo prune() {
+      List<String> newNames = new ArrayList<>();
+      List<TypeInfo> newTypes = new ArrayList<>();
+      List<String> oldNames = typeInfo.getAllStructFieldNames();
+      List<TypeInfo> oldTypes = typeInfo.getAllStructFieldTypeInfos();
+      for (int i = 0; i < oldNames.size(); ++i) {
+        String fn = oldNames.get(i);
+        if (selected[i]) {
+          newNames.add(fn);
+          if (children.containsKey(fn)) {
+            newTypes.add(children.get(fn).prune());
+          } else {
+            newTypes.add(oldTypes.get(i));
+          }
+        }
+      }
+      return (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(newNames, newTypes);
+    }
+  }
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
index dbb2795732fe..7aa293f2cea1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
@@ -19,12 +19,16 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.BytesWritable;
@@ -81,6 +85,36 @@ public void testParquetHiveSerDe() throws Throwable {
     }
   }
 
+  public void testParquetHiveSerDeComplexTypes() throws Throwable {
+    // Initialize
+    ParquetHiveSerDe serDe = new ParquetHiveSerDe();
+    Configuration conf = new Configuration();
+    Properties tblProperties = new Properties();
+
+    tblProperties.setProperty(serdeConstants.LIST_COLUMNS, "a,s");
+    tblProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,struct<a:int,b:string>");
+    conf.set(ColumnProjectionUtils.READ_NESTED_COLUMN_PATH_CONF_STR, "s.b");
+
+    serDe.initialize(conf, tblProperties);
+
+    // Generate test data
+    Writable[] wb = new Writable[1];
+    wb[0] = new BytesWritable("foo".getBytes("UTF-8"));
+    Writable[] ws = new Writable[2];
+    ws[0] = null;
+    ArrayWritable awb = new ArrayWritable(Writable.class, wb);
+    ws[1] = awb;
+    ArrayWritable aws = new ArrayWritable(Writable.class, ws);
+
+    // Inspect the test data
+    StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
+    StructField s = soi.getStructFieldRef("s");
+    assertEquals(awb, soi.getStructFieldData(aws, s));
+    StructObjectInspector boi = (StructObjectInspector) s.getFieldObjectInspector();
+    StructField b = boi.getStructFieldRef("b");
+    assertEquals(wb[0], boi.getStructFieldData(awb, b));
+  }
+
   private void deserializeAndSerializeLazySimple(final ParquetHiveSerDe serDe, final ArrayWritable t)
       throws SerDeException {
 
     // Get the row structure
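Note (illustration only, not part of the patch): the point of 'adjustedIndex' above is that after nested-column pruning a struct field keeps its declared position in the table schema while its data shifts to a new position in the pruned ArrayWritable. A minimal standalone sketch of that shift, reusing the example from the StructFieldImpl comment and nothing beyond Hive's stock serde type utilities:

import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class AdjustedIndexSketch {
  public static void main(String[] args) {
    // Complete schema of struct column 's', and the schema the Parquet reader
    // actually materializes once nested pruning keeps only 's.b'.
    StructTypeInfo complete = (StructTypeInfo)
        TypeInfoUtils.getTypeInfoFromTypeString("struct<a:int,b:int>");
    StructTypeInfo pruned = (StructTypeInfo)
        TypeInfoUtils.getTypeInfoFromTypeString("struct<b:int>");

    // 'b' is declared second in the table schema but stored first in the
    // pruned data; this is exactly the gap adjustedIndex bridges.
    int index = complete.getAllStructFieldNames().indexOf("b");        // 1
    int adjustedIndex = pruned.getAllStructFieldNames().indexOf("b");  // 0
    System.out.println("index=" + index + ", adjustedIndex=" + adjustedIndex);
  }
}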
From 4e8a7b6cfab1b22f6d1fdc1fb4b5b868bddcbe91 Mon Sep 17 00:00:00 2001
From: Chao Sun
Date: Tue, 25 Oct 2016 23:04:55 -0700
Subject: [PATCH 2/3] Fix test failure

---
 .../hive/ql/io/parquet/serde/ParquetHiveSerDe.java | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
index 23a7fdd74af4..43d557044dfb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
@@ -114,13 +114,14 @@ public final void initialize(final Configuration conf, final Properties tbl) thr
     // Create row related objects
     StructTypeInfo completeTypeInfo =
         (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
-    String prunedColumnPaths = conf.get(ColumnProjectionUtils.READ_NESTED_COLUMN_PATH_CONF_STR);
-    if (prunedColumnPaths != null) {
-      StructTypeInfo prunedTypeInfo = pruneFromPaths(completeTypeInfo, prunedColumnPaths);
-      this.objInspector = new ArrayWritableObjectInspector(completeTypeInfo, prunedTypeInfo);
-    } else {
-      this.objInspector = new ArrayWritableObjectInspector(completeTypeInfo);
+    StructTypeInfo prunedTypeInfo = null;
+    if (conf != null) {
+      String prunedColumnPaths = conf.get(ColumnProjectionUtils.READ_NESTED_COLUMN_PATH_CONF_STR);
+      if (prunedColumnPaths != null) {
+        prunedTypeInfo = pruneFromPaths(completeTypeInfo, prunedColumnPaths);
+      }
     }
+    this.objInspector = new ArrayWritableObjectInspector(completeTypeInfo, prunedTypeInfo);
 
     // Stats part
     serializedSize = 0;
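Note (illustration only, not part of the patch): the new 'conf != null' guard matters because a SerDe may be initialized without a Configuration; patch 1 dereferenced 'conf' unconditionally, which throws a NullPointerException for such callers, presumably the test failure named in the subject. A minimal sketch of the call that has to keep working:

import java.util.Properties;
import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
import org.apache.hadoop.hive.serde.serdeConstants;

public class NullConfSketch {
  public static void main(String[] args) throws Exception {
    ParquetHiveSerDe serDe = new ParquetHiveSerDe();
    Properties tbl = new Properties();
    tbl.setProperty(serdeConstants.LIST_COLUMNS, "a");
    tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int");
    // With a null conf there is no pruning information available; the SerDe
    // should fall back to the complete schema instead of throwing an NPE.
    serDe.initialize(null, tbl);
  }
}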
From 4935401f16d35695e194dd2d7cb8b016d24c3940 Mon Sep 17 00:00:00 2001
From: Chao Sun
Date: Wed, 26 Oct 2016 11:12:14 -0700
Subject: [PATCH 3/3] Address review comments

---
 .../serde/ArrayWritableObjectInspector.java   | 16 ++++++++--------
 .../ql/io/parquet/serde/ParquetHiveSerDe.java | 20 ++++++++++----------
 2 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
index c9ee4acf6351..8df0cc14ca6f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
@@ -48,29 +48,29 @@ public class ArrayWritableObjectInspector extends SettableStructObjectInspector
   private final List<StructField> fields;
   private final HashMap<String, StructFieldImpl> fieldsByName;
 
-  // Whether this OI is for the row-level schema (as opposed to nested struct fields).
+  // Whether this OI is for the column-level schema (as opposed to nested column fields).
   private final boolean isRoot;
 
   public ArrayWritableObjectInspector(final StructTypeInfo rowTypeInfo) {
     this(true, rowTypeInfo, null);
   }
 
-  public ArrayWritableObjectInspector(StructTypeInfo completeTypeInfo, StructTypeInfo prunedTypeInfo) {
-    this(true, completeTypeInfo, prunedTypeInfo);
+  public ArrayWritableObjectInspector(StructTypeInfo originalTypeInfo, StructTypeInfo prunedTypeInfo) {
+    this(true, originalTypeInfo, prunedTypeInfo);
   }
 
   public ArrayWritableObjectInspector(boolean isRoot,
-      StructTypeInfo completeTypeInfo, StructTypeInfo prunedTypeInfo) {
+      StructTypeInfo originalTypeInfo, StructTypeInfo prunedTypeInfo) {
     this.isRoot = isRoot;
-    typeInfo = completeTypeInfo;
-    fieldNames = completeTypeInfo.getAllStructFieldNames();
-    fieldInfos = completeTypeInfo.getAllStructFieldTypeInfos();
+    typeInfo = originalTypeInfo;
+    fieldNames = originalTypeInfo.getAllStructFieldNames();
+    fieldInfos = originalTypeInfo.getAllStructFieldTypeInfos();
     fields = new ArrayList<>(fieldNames.size());
     fieldsByName = new HashMap<>();
 
     for (int i = 0; i < fieldNames.size(); ++i) {
       final String name = fieldNames.get(i);
-      TypeInfo fieldInfo = fieldInfos.get(i);
+      final TypeInfo fieldInfo = fieldInfos.get(i);
 
       StructFieldImpl field;
       if (prunedTypeInfo != null && prunedTypeInfo.getAllStructFieldNames().indexOf(name) >= 0) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
index 43d557044dfb..ef79760c5af2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
@@ -180,17 +180,17 @@ public SerDeStats getSerDeStats() {
    * Given a complete struct type info and pruned paths containing selected fields
    * from the type info, return a pruned struct type info only with the selected fields.
    *
-   * For instance, if 'completeTypeInfo' is: s:struct<a:struct<b:string, c:string>, d:string>
+   * For instance, if 'originalTypeInfo' is: s:struct<a:struct<b:string, c:string>, d:string>
    * and 'prunedPaths' is "s.a.b,s.d", then the result will be:
    * s:struct<a:struct<b:string>, d:string>
    *
-   * @param completeTypeInfo the complete struct type info
+   * @param originalTypeInfo the complete struct type info
    * @param prunedPaths a string representing the pruned paths, separated by ','
    * @return the pruned struct type info
    */
   private StructTypeInfo pruneFromPaths(
-      StructTypeInfo completeTypeInfo, String prunedPaths) {
-    PrunedTypeInfo prunedTypeInfo = new PrunedTypeInfo(completeTypeInfo);
+      StructTypeInfo originalTypeInfo, String prunedPaths) {
+    PrunedStructTypeInfo prunedTypeInfo = new PrunedStructTypeInfo(originalTypeInfo);
 
     String[] prunedPathList = prunedPaths.split(",");
     for (String path : prunedPathList) {
@@ -200,9 +200,9 @@ private StructTypeInfo pruneFromPaths(
     return prunedTypeInfo.prune();
   }
 
-  private void pruneFromSinglePath(PrunedTypeInfo prunedInfo, String path) {
+  private void pruneFromSinglePath(PrunedStructTypeInfo prunedInfo, String path) {
     Preconditions.checkArgument(prunedInfo != null,
-        "PrunedTypeInfo for path " + path + " should not be null");
+        "PrunedStructTypeInfo for path " + path + " should not be null");
 
     int index = path.indexOf('.');
     if (index < 0) {
@@ -216,12 +216,12 @@ private void pruneFromSinglePath(PrunedTypeInfo prunedInfo, String path) {
     }
   }
 
-  private static class PrunedTypeInfo {
+  private static class PrunedStructTypeInfo {
     final StructTypeInfo typeInfo;
-    final Map<String, PrunedTypeInfo> children;
+    final Map<String, PrunedStructTypeInfo> children;
     final boolean[] selected;
 
-    PrunedTypeInfo(StructTypeInfo typeInfo) {
+    PrunedStructTypeInfo(StructTypeInfo typeInfo) {
       this.typeInfo = typeInfo;
       this.children = new HashMap<>();
       this.selected = new boolean[typeInfo.getAllStructFieldTypeInfos().size()];
@@ -229,7 +229,7 @@ private static class PrunedTypeInfo {
         TypeInfo ti = typeInfo.getAllStructFieldTypeInfos().get(i);
         if (ti.getCategory() == Category.STRUCT) {
           this.children.put(typeInfo.getAllStructFieldNames().get(i),
-              new PrunedTypeInfo((StructTypeInfo) ti));
+              new PrunedStructTypeInfo((StructTypeInfo) ti));
         }
       }
     }
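Note (illustration only, not part of the patch): PrunedStructTypeInfo is private to ParquetHiveSerDe, so the pruning walk cannot be exercised directly. The standalone sketch below (all names hypothetical) re-implements the same idea: split each path at its first '.', select the head field, and recurse into the matching child struct with the remainder. Run on the javadoc example, it prints struct<s:struct<a:struct<b:string>,d:string>>:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class PruneSketch {
  // Keep only the fields of 'type' that are referenced by at least one path.
  static StructTypeInfo prune(StructTypeInfo type, List<String> paths) {
    List<String> names = new ArrayList<>();
    List<TypeInfo> types = new ArrayList<>();
    for (int i = 0; i < type.getAllStructFieldNames().size(); i++) {
      String field = type.getAllStructFieldNames().get(i);
      List<String> childPaths = new ArrayList<>();
      boolean selected = false;
      for (String p : paths) {
        if (p.equals(field)) {
          selected = true;                              // leaf selection, e.g. "d"
        } else if (p.startsWith(field + ".")) {
          selected = true;                              // nested selection, e.g. "a.b"
          childPaths.add(p.substring(field.length() + 1));
        }
      }
      if (!selected) {
        continue;                                       // drop unreferenced fields
      }
      TypeInfo fieldType = type.getAllStructFieldTypeInfos().get(i);
      names.add(field);
      if (fieldType instanceof StructTypeInfo && !childPaths.isEmpty()) {
        types.add(prune((StructTypeInfo) fieldType, childPaths));
      } else {
        types.add(fieldType);
      }
    }
    return (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(names, types);
  }

  public static void main(String[] args) {
    StructTypeInfo original = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(
        "struct<s:struct<a:struct<b:string,c:string>,d:string>>");
    // Paths from the javadoc example: "s.a.b,s.d"
    System.out.println(prune(original, Arrays.asList("s.a.b", "s.d")).getTypeName());
  }
}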