Skip to content

Commit

Permalink
DRILL-7092: Rename map to struct in schema definition
Browse files Browse the repository at this point in the history
1. Renamed map to struct in schema parser.
2. Updated sqlTypeOf function to return STRUCT instead of MAP; drillTypeOf function will continue to return MAP until the internal renaming is done.
3. Added an is_struct alias to the already existing is_map function. The function should be revisited once Drill supports true maps.
4. Updated unit tests.

closes #1688
  • Loading branch information
arina-ielchiieva authored and sohami committed Mar 14, 2019
1 parent b20a2e6 commit fbbfbef
Show file tree
Hide file tree
Showing 7 changed files with 29 additions and 26 deletions.
4 changes: 2 additions & 2 deletions common/src/main/java/org/apache/drill/common/types/Types.java
Expand Up @@ -185,7 +185,7 @@ public static String getBaseSqlTypeName(final MajorType type) {
// Composite types and other types that are not atomic types (SQL standard
// or not) except ARRAY types (handled above):

case MAP: return "MAP";
case MAP: return "STRUCT"; // Drill map represents struct and in future will be renamed
case LATE: return "ANY";
case NULL: return "NULL";
case UNION: return "UNION";
Expand Down Expand Up @@ -267,7 +267,7 @@ public static int getJdbcTypeCode(final String sqlTypeName) {
case "INTERVAL": return java.sql.Types.OTHER; // JDBC (4.1) has nothing for INTERVAL
case "INTERVAL YEAR TO MONTH": return java.sql.Types.OTHER;
case "INTERVAL DAY TO SECOND": return java.sql.Types.OTHER;
case "MAP": return java.sql.Types.OTHER; // Drill doesn't support java.sql.Struct
case "STRUCT": return java.sql.Types.OTHER; // Drill doesn't support java.sql.Struct
case "NATIONAL CHARACTER VARYING": return java.sql.Types.NVARCHAR;
case "NATIONAL CHARACTER": return java.sql.Types.NCHAR;
case "NULL": return java.sql.Types.NULL;
Expand Down
Expand Up @@ -55,6 +55,7 @@ MINUTE: 'MINUTE';
SECOND: 'SECOND';

MAP: 'MAP';
STRUCT: 'STRUCT';
ARRAY: 'ARRAY';

// symbols
Expand Down
Expand Up @@ -31,13 +31,13 @@ columns: column_def (COMMA column_def)*;

column_def: column property_values?;

column: (primitive_column | map_column | simple_array_column | complex_array_column);
column: (primitive_column | struct_column | simple_array_column | complex_array_column);

primitive_column: column_id simple_type nullability? format_value? default_value?;

simple_array_column: column_id simple_array_type nullability?;

map_column: column_id map_type nullability?;
struct_column: column_id struct_type nullability?;

complex_array_column: column_id complex_array_type nullability?;

Expand Down Expand Up @@ -65,11 +65,11 @@ simple_type

complex_type: (simple_array_type | complex_array_type);

simple_array_type: ARRAY LEFT_ANGLE_BRACKET (simple_type | map_type) RIGHT_ANGLE_BRACKET;
simple_array_type: ARRAY LEFT_ANGLE_BRACKET (simple_type | struct_type) RIGHT_ANGLE_BRACKET;

complex_array_type: ARRAY LEFT_ANGLE_BRACKET complex_type RIGHT_ANGLE_BRACKET;

map_type: MAP LEFT_ANGLE_BRACKET columns RIGHT_ANGLE_BRACKET;
struct_type: STRUCT LEFT_ANGLE_BRACKET columns RIGHT_ANGLE_BRACKET;

nullability: NOT NULL;

Expand Down
Expand Up @@ -330,7 +330,7 @@ public void eval() {
}

@SuppressWarnings("unused")
@FunctionTemplate(name = "IS_MAP", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.INTERNAL)
@FunctionTemplate(names = {"IS_MAP", "IS_STRUCT"}, scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.INTERNAL)
public static class UnionIsMap implements DrillSimpleFunc {

@Param UnionHolder in;
Expand Down
Expand Up @@ -110,10 +110,10 @@ public ColumnMetadata visitSimple_array_column(SchemaParser.Simple_array_columnC
}

@Override
public ColumnMetadata visitMap_column(SchemaParser.Map_columnContext ctx) {
public ColumnMetadata visitStruct_column(SchemaParser.Struct_columnContext ctx) {
String name = ctx.column_id().accept(new IdVisitor());
// Drill does not distinguish between nullable and not null map, by default they are not null
return ctx.map_type().accept(new TypeVisitor(name, TypeProtos.DataMode.REQUIRED));
// Drill does not distinguish between nullable and not null structs, by default they are not null
return ctx.struct_type().accept(new TypeVisitor(name, TypeProtos.DataMode.REQUIRED));
}

@Override
Expand Down Expand Up @@ -277,7 +277,9 @@ public ColumnMetadata visitInterval(SchemaParser.IntervalContext ctx) {
}

@Override
public ColumnMetadata visitMap_type(SchemaParser.Map_typeContext ctx) {
public ColumnMetadata visitStruct_type(SchemaParser.Struct_typeContext ctx) {
// internally Drill refers to structs as maps and currently does not have true map notion
// Drill maps will be renamed to structs in future
MapBuilder builder = new MapBuilder(null, name, mode);
ColumnDefVisitor visitor = new ColumnDefVisitor();
ctx.columns().column_def().forEach(
Expand Down Expand Up @@ -308,7 +310,7 @@ private static class ArrayTypeVisitor extends SchemaParserBaseVisitor<ColumnMeta
@Override
public ColumnMetadata visitSimple_array_type(SchemaParser.Simple_array_typeContext ctx) {
TypeVisitor visitor = new TypeVisitor(name, TypeProtos.DataMode.REPEATED);
return ctx.map_type() == null ? ctx.simple_type().accept(visitor) : ctx.map_type().accept(visitor);
return ctx.struct_type() == null ? ctx.simple_type().accept(visitor) : ctx.struct_type().accept(visitor);
}

@Override
Expand Down
Expand Up @@ -104,15 +104,15 @@ public void testMissingAngleBracket() {

@Test
public void testUnclosedAngleBracket() {
String schema = "col map<m array<int> not null";
String schema = "col struct<m array<int> not null";
thrown.expect(SchemaParsingException.class);
thrown.expectMessage("missing '>' at '<EOF>'");
SchemaExprParser.parseSchema(schema);
}

@Test
public void testMissingColumnNameForMap() {
String schema = "col map<int> not null";
public void testMissingColumnNameForStruct() {
String schema = "col struct<int> not null";
thrown.expect(SchemaParsingException.class);
thrown.expectMessage("mismatched input 'int' expecting {ID, QUOTED_ID}");
SchemaExprParser.parseSchema(schema);
Expand Down
Expand Up @@ -178,11 +178,11 @@ public void testArray() {
.addRepeatedList("nested_array")
.addArray(TypeProtos.MinorType.INT)
.resumeSchema()
.addMapArray("map_array")
.addMapArray("struct_array")
.addNullable("m1", TypeProtos.MinorType.INT)
.addNullable("m2", TypeProtos.MinorType.VARCHAR)
.resumeSchema()
.addRepeatedList("nested_array_map")
.addRepeatedList("nested_array_struct")
.addMapArray()
.addNullable("nm1", TypeProtos.MinorType.INT)
.addNullable("nm2", TypeProtos.MinorType.VARCHAR)
Expand All @@ -192,26 +192,26 @@ public void testArray() {

checkSchema("simple_array array<int>"
+ ", nested_array array<array<int>>"
+ ", map_array array<map<m1 int, m2 varchar>>"
+ ", nested_array_map array<array<map<nm1 int, nm2 varchar>>>",
+ ", struct_array array<struct<m1 int, m2 varchar>>"
+ ", nested_array_struct array<array<struct<nm1 int, nm2 varchar>>>",
schema);

}

@Test
public void testMap() {
public void testStruct() {
TupleMetadata schema = new SchemaBuilder()
.addMap("map_col")
.addMap("struct_col")
.addNullable("int_col", TypeProtos.MinorType.INT)
.addArray("array_col", TypeProtos.MinorType.INT)
.addMap("nested_map")
.addMap("nested_struct")
.addNullable("m1", TypeProtos.MinorType.INT)
.addNullable("m2", TypeProtos.MinorType.VARCHAR)
.resumeMap()
.resumeSchema()
.buildSchema();

checkSchema("map_col map<int_col int, array_col array<int>, nested_map map<m1 int, m2 varchar>>", schema);
checkSchema("struct_col struct<int_col int, array_col array<int>, nested_struct struct<m1 int, m2 varchar>>", schema);
}

@Test
Expand All @@ -222,8 +222,8 @@ public void testModeForSimpleType() {
}

@Test
public void testModeForMapType() {
TupleMetadata schema = SchemaExprParser.parseSchema("m map<m1 int not null, m2 varchar>");
public void testModeForStructType() {
TupleMetadata schema = SchemaExprParser.parseSchema("m struct<m1 int not null, m2 varchar>");
ColumnMetadata map = schema.metadata("m");
assertTrue(map.isMap());
assertEquals(TypeProtos.DataMode.REQUIRED, map.mode());
Expand All @@ -236,7 +236,7 @@ public void testModeForMapType() {
@Test
public void testModeForRepeatedType() {
TupleMetadata schema = SchemaExprParser.parseSchema(
"a array<int>, aa array<array<int>>, ma array<map<m1 int not null, m2 varchar>>");
"a array<int>, aa array<array<int>>, ma array<struct<m1 int not null, m2 varchar>>");

assertTrue(schema.metadata("a").isArray());

Expand Down

0 comments on commit fbbfbef

Please sign in to comment.