diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnWildcardIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnWildcardIT.java new file mode 100644 index 00000000000..39a703b8c65 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnWildcardIT.java @@ -0,0 +1,521 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.phoenix.end2end; + +import static java.sql.Types.BIGINT; +import static java.sql.Types.INTEGER; +import static java.sql.Types.VARCHAR; +import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_ENCODED_BYTES; +import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IMMUTABLE_STORAGE_SCHEME; +import static org.apache.phoenix.schema.PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN; +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.Maps; +import org.apache.phoenix.query.BaseTest; +import org.apache.phoenix.schema.PTable.ImmutableStorageScheme; +import org.apache.phoenix.util.ReadOnlyProps; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Map; + +import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; + +/** + * Tests to check whether we correctly expose dynamic columns for wildcard queries when + * {@link org.apache.phoenix.query.QueryServices#WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB} config is + * turned on + */ +@RunWith(Parameterized.class) +public class DynamicColumnWildcardIT extends BaseTest { + private final boolean mutableTable; + private final ImmutableStorageScheme storageScheme; + + // name is used by failsafe as file name in reports + @Parameterized.Parameters(name="DynamicColumnWildcardIT_mutable={0}, storageScheme={1}") + public static Collection data() { + // TODO: Once PHOENIX-5107 is fixed, add a case for SINGLE_CELL_ARRAY_WITH_OFFSETS + return Arrays.asList(new Object[][] { + {true, null}, {false, ONE_CELL_PER_COLUMN}}); + } + + public DynamicColumnWildcardIT(boolean mutableTable, ImmutableStorageScheme storageScheme) { + this.mutableTable = mutableTable; + this.storageScheme = storageScheme; + } + + @BeforeClass + public static void doSetup() throws Exception { + Map clientProps = Maps.newHashMapWithExpectedSize(1); + clientProps.put(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, "true"); + setUpTestDriver(ReadOnlyProps.EMPTY_PROPS, new ReadOnlyProps(clientProps)); + } + + // Create either a mutable table or an immutable table with the 
specified storage scheme + private String generateTableCreateDDL(String tableName, String schema) { + StringBuilder sb = new StringBuilder("CREATE "); + if (!this.mutableTable) { + sb.append("IMMUTABLE "); + } + sb.append("TABLE ").append(tableName).append(schema); + if (!this.mutableTable && this.storageScheme != null) { + sb.append(" ").append(IMMUTABLE_STORAGE_SCHEME).append("=").append(this.storageScheme); + sb.append(", ").append(COLUMN_ENCODED_BYTES).append("=1"); + } + return sb.toString(); + } + + @Test + // Test the case where the table DDL only contains 1 column which is the primary key + public void testOnlySinglePkWithDynamicColumns() throws SQLException { + Connection conn = DriverManager.getConnection(getUrl()); + conn.setAutoCommit(true); + String tableName = generateUniqueName(); + conn.createStatement().execute(generateTableCreateDDL(tableName, + " (A INTEGER PRIMARY KEY)")); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A) VALUES(10)"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, DYN1 INTEGER) VALUES(90, 3)"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, DYN1 VARCHAR) VALUES(100, 'test')"); + + ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM " + tableName); + int rsCounter = 0; + while(rs.next()) { + ResultSetMetaData rmd = rs.getMetaData(); + int count = rmd.getColumnCount(); + assertEquals(rsCounter == 0 ? 1 : 2, count); + for (int i = 1; i <= count; i++) { + if (rsCounter == 0) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(10, rs.getObject(i)); + } else if (rsCounter == 1) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(90, rs.getObject(i)); + } else if (i == 2) { + assertEquals("DYN1", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(3, rs.getObject(i)); + } + } else if (rsCounter == 2) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(100, rs.getObject(i)); + } else if (i ==2) { + assertEquals("DYN1", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test", rs.getObject(i)); + } + } + } + rsCounter++; + } + } + + @Test + // Test the case where the table DDL contains 1 primary key column and other columns as well + public void testSinglePkAndOtherColsWithDynamicColumns() throws SQLException { + Connection conn = DriverManager.getConnection(getUrl()); + conn.setAutoCommit(true); + String tableName = generateUniqueName(); + + conn.createStatement().execute(generateTableCreateDDL(tableName, + " (A INTEGER PRIMARY KEY, B VARCHAR)")); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B) VALUES(10, 'test1')"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B, DYN1 INTEGER) VALUES(90, 'test2', 3)"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, DYN1 INTEGER, DYN2 VARCHAR) VALUES(100, 5, 'test3')"); + + ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM " + tableName); + int rsCounter = 0; + while(rs.next()) { + ResultSetMetaData rmd = rs.getMetaData(); + int count = rmd.getColumnCount(); + assertEquals(rsCounter == 0 ? 2 : rsCounter == 1 ? 
3 : 4, + count); + for (int i = 1; i <= count; i++) { + if (rsCounter == 0) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(10, rs.getObject(i)); + } else if (i == 2) { + assertEquals("B", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test1", rs.getObject(i)); + } + } else if (rsCounter == 1) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(90, rs.getObject(i)); + } else if (i == 2) { + assertEquals("B", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test2", rs.getObject(i)); + } else if (i == 3) { + assertEquals("DYN1", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(3, rs.getObject(i)); + } + } else if (rsCounter == 2) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(100, rs.getObject(i)); + } else if (i == 2) { + assertEquals("B", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + // Note that we didn't upsert any value for column 'B' so we should get null + assertEquals(null, rs.getObject(i)); + } else if (i == 3) { + assertEquals("DYN1", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(5, rs.getObject(i)); + } else if (i == 4) { + assertEquals("DYN2", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test3", rs.getObject(i)); + } + } + } + rsCounter++; + } + } + + @Test + // Test the case where the table DDL contains just the composite key and no other columns + public void testCompositeKeyWithDynamicColumns() throws SQLException { + Connection conn = DriverManager.getConnection(getUrl()); + conn.setAutoCommit(true); + String tableName = generateUniqueName(); + + conn.createStatement().execute(generateTableCreateDDL(tableName, + " (A INTEGER NOT NULL, B INTEGER NOT NULL CONSTRAINT PK PRIMARY KEY (A, B))")); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B) VALUES(10, 500)"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B, DYN1 INTEGER) VALUES(90, 100, 3)"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B, DYN2 VARCHAR) VALUES(999, 50, 'test1')"); + + ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM " + tableName); + int rsCounter = 0; + while(rs.next()) { + ResultSetMetaData rmd = rs.getMetaData(); + int count = rmd.getColumnCount(); + assertEquals(rsCounter == 0 ? 
2 : 3, count); + for (int i = 1; i <= count; i++) { + if (rsCounter == 0) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(10, rs.getObject(i)); + } else if (i == 2) { + assertEquals("B", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(500, rs.getObject(i)); + } + } else if (rsCounter == 1) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(90, rs.getObject(i)); + } else if (i == 2) { + assertEquals("B", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(100, rs.getObject(i)); + } else if (i == 3) { + assertEquals("DYN1", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(3, rs.getObject(i)); + } + } else if (rsCounter == 2) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(999, rs.getObject(i)); + } else if (i == 2) { + assertEquals("B", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(50, rs.getObject(i)); + } else if (i == 3) { + assertEquals("DYN2", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test1", rs.getObject(i)); + } + } + } + rsCounter++; + } + } + + @Test + // Test the case where the table DDL contains the composite key and other columns + public void testCompositeKeyAndOtherColsWithDynamicColumns() throws SQLException { + Connection conn = DriverManager.getConnection(getUrl()); + conn.setAutoCommit(true); + String tableName = generateUniqueName(); + + conn.createStatement().execute(generateTableCreateDDL(tableName, + " (A INTEGER NOT NULL, B INTEGER NOT NULL, C VARCHAR" + + " CONSTRAINT PK PRIMARY KEY (A, B))")); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B) VALUES(10, 500)"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B, C) VALUES(20, 7, 'test1')"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B, DYN1 INTEGER) VALUES(30, 100, 3)"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B, C, DYN2 VARCHAR, DYN3 BIGINT) VALUES(40, 60, 'test1', 'test2', 8)"); + + ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM " + tableName); + int rsCounter = 0; + while(rs.next()) { + ResultSetMetaData rmd = rs.getMetaData(); + int count = rmd.getColumnCount(); + assertEquals(rsCounter <= 1 ? + 3 : rsCounter == 2 ? 
4 : 5, count); + for (int i = 1; i <= count; i++) { + if (rsCounter == 0) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(10, rs.getObject(i)); + } else if (i == 2) { + assertEquals("B", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(500, rs.getObject(i)); + } else if (i == 3) { + assertEquals("C", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals(null, rs.getObject(i)); + } + } else if (rsCounter == 1) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(20, rs.getObject(i)); + } else if (i == 2) { + assertEquals("B", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(7, rs.getObject(i)); + } else if (i == 3) { + assertEquals("C", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test1", rs.getObject(i)); + } + } else if (rsCounter == 2) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(30, rs.getObject(i)); + } else if (i == 2) { + assertEquals("B", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(100, rs.getObject(i)); + } else if (i == 3) { + assertEquals("C", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals(null, rs.getObject(i)); + } else if (i == 4) { + assertEquals("DYN1", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(3, rs.getObject(i)); + } + } else if (rsCounter == 3) { + if (i == 1) { + assertEquals("A", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(40, rs.getObject(i)); + } else if (i == 2) { + assertEquals("B", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(60, rs.getObject(i)); + } else if (i == 3) { + assertEquals("C", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test1", rs.getObject(i)); + } else if (i == 4) { + assertEquals("DYN2", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test2", rs.getObject(i)); + } else if (i == 5) { + assertEquals("DYN3", rmd.getColumnName(i)); + assertEquals(BIGINT, rmd.getColumnType(i)); + assertEquals(8L, rs.getObject(i)); + } + } + } + rsCounter++; + } + } + + @Test + // Test if dynamic columns are properly exposed in column family wildcard queries + public void testColumnFamilyWildcards() throws SQLException { + Connection conn = DriverManager.getConnection(getUrl()); + conn.setAutoCommit(true); + String tableName = generateUniqueName(); + + conn.createStatement().execute(generateTableCreateDDL(tableName, + " (A INTEGER PRIMARY KEY, B VARCHAR, CF1.C INTEGER, CF2.D VARCHAR)")); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B, C, D) VALUES(10, 'test1', 2, 'test2')"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B, C, D, DYN0 INTEGER) VALUES(20, 'test3', 4, 'test4', 100)"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B, C, D, CF1.DYN1 VARCHAR, CF1.DYN2 INTEGER)" + + " VALUES(30, 'test5', 5, 'test6', 'test7', 70)"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, B, C, D, CF2.DYN1 VARCHAR, CF2.DYN2 INTEGER)" + + " VALUES(40, 'test8', 6, 'test9', 'test10', 80)"); + conn.createStatement().execute("UPSERT INTO " + tableName + + " (A, 
B, C, D, CF1.DYN3 VARCHAR, CF2.DYN4 INTEGER)" + + " VALUES(50, 'test11', 7, 'test12', 'test13', 90)"); + + ResultSet rs = conn.createStatement().executeQuery("SELECT CF1.* FROM " + tableName); + int rsCounter = 0; + while(rs.next()) { + ResultSetMetaData rmd = rs.getMetaData(); + int count = rmd.getColumnCount(); + assertEquals(rsCounter <= 1 || rsCounter == 3 ? + 1 : rsCounter == 2 ? 3 : 2, count); + for (int i = 1; i <= count; i++) { + if (rsCounter == 0) { + if (i == 1) { + assertEquals("C", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(2, rs.getObject(i)); + } + } else if (rsCounter == 1) { + if (i == 1) { + assertEquals("C", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(4, rs.getObject(i)); + } + } else if (rsCounter == 2) { + if (i == 1) { + assertEquals("C", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(5, rs.getObject(i)); + } else if (i == 2) { + assertEquals("DYN1", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test7", rs.getObject(i)); + } else if (i == 3) { + assertEquals("DYN2", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(70, rs.getObject(i)); + } + } else if (rsCounter == 3) { + if (i == 1) { + assertEquals("C", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(6, rs.getObject(i)); + } + } else if (rsCounter == 4) { + if (i == 1) { + assertEquals("C", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(7, rs.getObject(i)); + } else if (i == 2) { + assertEquals("DYN3", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test13", rs.getObject(i)); + } + } + } + rsCounter++; + } + + rs = conn.createStatement().executeQuery("SELECT CF2.* FROM " + tableName); + rsCounter = 0; + while(rs.next()) { + ResultSetMetaData rmd = rs.getMetaData(); + int count = rmd.getColumnCount(); + assertEquals(rsCounter <= 2 ? + 1 : rsCounter == 3 ? 
3 : 2, count); + for (int i = 1; i <= count; i++) { + if (rsCounter == 0) { + if (i == 1) { + assertEquals("D", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test2", rs.getObject(i)); + } + } else if (rsCounter == 1) { + if (i == 1) { + assertEquals("D", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test4", rs.getObject(i)); + } + } else if (rsCounter == 2) { + if (i == 1) { + assertEquals("D", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test6", rs.getObject(i)); + } + } else if (rsCounter == 3) { + if (i == 1) { + assertEquals("D", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test9", rs.getObject(i)); + } else if (i == 2) { + assertEquals("DYN1", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test10", rs.getObject(i)); + } else if (i == 3) { + assertEquals("DYN2", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(80, rs.getObject(i)); + } + } else if (rsCounter == 4) { + if (i == 1) { + assertEquals("D", rmd.getColumnName(i)); + assertEquals(VARCHAR, rmd.getColumnType(i)); + assertEquals("test12", rs.getObject(i)); + } else if (i == 2) { + assertEquals("DYN4", rmd.getColumnName(i)); + assertEquals(INTEGER, rmd.getColumnType(i)); + assertEquals(90, rs.getObject(i)); + } + } + } + rsCounter++; + } + + } + +} diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java index 1b97b508fd6..bfbbb3ae06b 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java @@ -17,6 +17,9 @@ */ package org.apache.phoenix.compile; +import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; +import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; + import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.IOException; @@ -110,9 +113,13 @@ private static void projectColumnFamily(PTable table, Scan scan, byte[] family) } public static RowProjector compile(StatementContext context, SelectStatement statement, GroupBy groupBy) throws SQLException { - return compile(context, statement, groupBy, Collections.emptyList(), - NULL_EXPRESSION// Pass null expression because we don't want empty key value to be projected - ); + boolean wildcardIncludesDynamicCols = context.getConnection().getQueryServices() + .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, + DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB); + return compile(context, statement, groupBy, Collections.emptyList(), + // Pass null expression because we don't want empty key value to be projected + NULL_EXPRESSION, + wildcardIncludesDynamicCols); } private static int getMinPKOffset(PTable table, PName tenantId) { @@ -337,23 +344,29 @@ private static Expression coerceIfNecessary(int index, List ta /** * Builds the projection for the scan * @param context query context kept between compilation of different query clauses - * @param statement TODO + * @param statement the statement being compiled * @param groupBy compiled GROUP BY clause * @param targetColumns list of columns, parallel to aliasedNodes, that are being set for an * UPSERT SELECT statement. 
Used to coerce expression types to the expected target type. + * @param where the where clause expression + * @param wildcardIncludesDynamicCols true if wildcard queries should include dynamic columns * @return projector used to access row values during scan * @throws SQLException */ - public static RowProjector compile(StatementContext context, SelectStatement statement, GroupBy groupBy, List targetColumns, Expression where) throws SQLException { - List arrayKVRefs = new ArrayList(); - List arrayProjectedColumnRefs = new ArrayList(); - List arrayKVFuncs = new ArrayList(); - List arrayOldFuncs = new ArrayList(); + public static RowProjector compile(StatementContext context, SelectStatement statement, + GroupBy groupBy, List targetColumns, Expression where, + boolean wildcardIncludesDynamicCols) throws SQLException { + List arrayKVRefs = new ArrayList<>(); + List arrayProjectedColumnRefs = new ArrayList<>(); + List arrayKVFuncs = new ArrayList<>(); + List arrayOldFuncs = new ArrayList<>(); Map arrayExpressionCounts = new HashMap<>(); List aliasedNodes = statement.getSelect(); // Setup projected columns in Scan - SelectClauseVisitor selectVisitor = new SelectClauseVisitor(context, groupBy, arrayKVRefs, arrayKVFuncs, arrayExpressionCounts, arrayProjectedColumnRefs, arrayOldFuncs, statement); - List projectedColumns = new ArrayList(); + SelectClauseVisitor selectVisitor = new SelectClauseVisitor(context, groupBy, arrayKVRefs, + arrayKVFuncs, arrayExpressionCounts, arrayProjectedColumnRefs, arrayOldFuncs, + statement); + List projectedColumns = new ArrayList<>(); ColumnResolver resolver = context.getResolver(); TableRef tableRef = context.getCurrentTable(); PTable table = tableRef.getTable(); @@ -468,7 +481,9 @@ public static RowProjector compile(StatementContext context, SelectStatement sta } boolean isProjectEmptyKeyValue = false; - if (isWildcard) { + // Don't project known/declared column families into the scan if we want to support + // surfacing dynamic columns in wildcard queries + if (isWildcard && !wildcardIncludesDynamicCols) { projectAllColumnFamilies(table, scan); } else { isProjectEmptyKeyValue = where == null || LiteralExpression.isTrue(where) || where.requiresFinalEvaluation(); @@ -501,7 +516,9 @@ public static RowProjector compile(StatementContext context, SelectStatement sta // Ignore as this can happen for local indexes when the data table has a column family, but there are no covered columns in the family } } - return new RowProjector(projectedColumns, Math.max(estimatedKeySize, estimatedByteSize), isProjectEmptyKeyValue, resolver.hasUDFs(), isWildcard); + return new RowProjector(projectedColumns, Math.max(estimatedKeySize, estimatedByteSize), + isProjectEmptyKeyValue, resolver.hasUDFs(), isWildcard, + isWildcard || !projectedFamilies.isEmpty()); } private static void projectAllColumnFamilies(PTable table, Scan scan) { diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java index 6e3615803a4..edac7349011 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java @@ -17,6 +17,9 @@ */ package org.apache.phoenix.compile; +import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; +import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; + import java.sql.SQLException; import 
java.sql.SQLFeatureNotSupportedException; import java.util.ArrayList; @@ -50,7 +53,6 @@ import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr; import org.apache.phoenix.iterate.ParallelIteratorFactory; import org.apache.phoenix.jdbc.PhoenixConnection; -import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData; import org.apache.phoenix.jdbc.PhoenixStatement; import org.apache.phoenix.join.HashJoinInfo; import org.apache.phoenix.optimize.Cost; @@ -213,7 +215,11 @@ protected QueryPlan compileJoinQuery(StatementContext context, List bind context.setCurrentTable(table.getTableRef()); PTable projectedTable = table.createProjectedTable(!projectPKColumns, context); TupleProjector projector = new TupleProjector(projectedTable); - TupleProjector.serializeProjectorIntoScan(context.getScan(), projector); + boolean wildcardIncludesDynamicCols = context.getConnection().getQueryServices() + .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, + DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB); + TupleProjector.serializeProjectorIntoScan(context.getScan(), projector, + wildcardIncludesDynamicCols); context.setResolver(FromCompiler.getResolverForProjectedTable(projectedTable, context.getConnection(), subquery.getUdfParseNodes())); table.projectColumns(context.getScan()); return compileSingleFlatQuery(context, subquery, binds, asSubquery, !asSubquery, null, projectPKColumns ? projector : null, true); @@ -252,6 +258,9 @@ protected QueryPlan compileJoinQuery(StatementContext context, List bind protected QueryPlan compileJoinQuery(JoinCompiler.Strategy strategy, StatementContext context, List binds, JoinTable joinTable, boolean asSubquery, boolean projectPKColumns, List orderBy) throws SQLException { byte[] emptyByteArray = new byte[0]; List joinSpecs = joinTable.getJoinSpecs(); + boolean wildcardIncludesDynamicCols = context.getConnection().getQueryServices() + .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, + DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB); switch (strategy) { case HASH_BUILD_RIGHT: { boolean[] starJoinVector = joinTable.getStarJoinVector(); @@ -318,7 +327,8 @@ protected QueryPlan compileJoinQuery(JoinCompiler.Strategy strategy, StatementCo } hashPlans[i] = new HashSubPlan(i, subPlans[i], optimized ? null : hashExpressions, joinSpec.isSingleValueOnly(), usePersistentCache, keyRangeLhsExpression, keyRangeRhsExpression); } - TupleProjector.serializeProjectorIntoScan(context.getScan(), tupleProjector); + TupleProjector.serializeProjectorIntoScan(context.getScan(), tupleProjector, + wildcardIncludesDynamicCols); QueryPlan plan = compileSingleFlatQuery(context, query, binds, asSubquery, !asSubquery && joinTable.isAllLeftJoin(), null, !table.isSubselect() && projectPKColumns ? tupleProjector : null, true); Expression postJoinFilterExpression = joinTable.compilePostFilterExpression(context, table); Integer limit = null; @@ -370,7 +380,8 @@ protected QueryPlan compileJoinQuery(JoinCompiler.Strategy strategy, StatementCo PTable lhsTable = needsMerge ? lhsCtx.getResolver().getTables().get(0).getTable() : null; int fieldPosition = needsMerge ? rhsProjTable.getColumns().size() - rhsProjTable.getPKColumns().size() : 0; PTable projectedTable = needsMerge ? JoinCompiler.joinProjectedTables(rhsProjTable, lhsTable, type == JoinType.Right ? 
JoinType.Left : type) : rhsProjTable; - TupleProjector.serializeProjectorIntoScan(context.getScan(), tupleProjector); + TupleProjector.serializeProjectorIntoScan(context.getScan(), tupleProjector, + wildcardIncludesDynamicCols); context.setResolver(FromCompiler.getResolverForProjectedTable(projectedTable, context.getConnection(), rhs.getUdfParseNodes())); QueryPlan rhsPlan = compileSingleFlatQuery(context, rhs, binds, asSubquery, !asSubquery && type == JoinType.Right, null, !rhsTable.isSubselect() && projectPKColumns ? tupleProjector : null, true); Expression postJoinFilterExpression = joinTable.compilePostFilterExpression(context, rhsTable); @@ -561,7 +572,12 @@ protected QueryPlan compileSingleFlatQuery(StatementContext context, SelectState // definitively whether or not we'll traverse in row key order. groupBy = groupBy.compile(context, innerPlanTupleProjector); context.setResolver(resolver); // recover resolver - RowProjector projector = ProjectionCompiler.compile(context, select, groupBy, asSubquery ? Collections.emptyList() : targetColumns, where); + boolean wildcardIncludesDynamicCols = context.getConnection().getQueryServices() + .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, + DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB); + RowProjector projector = ProjectionCompiler.compile(context, select, groupBy, + asSubquery ? Collections.emptyList() : targetColumns, where, + wildcardIncludesDynamicCols); OrderBy orderBy = OrderByCompiler.compile( context, select, @@ -586,7 +602,9 @@ protected QueryPlan compileSingleFlatQuery(StatementContext context, SelectState } if (projectedTable != null) { - TupleProjector.serializeProjectorIntoScan(context.getScan(), new TupleProjector(projectedTable)); + TupleProjector.serializeProjectorIntoScan(context.getScan(), + new TupleProjector(projectedTable), wildcardIncludesDynamicCols && + projector.projectDynColsInWildcardQueries()); } QueryPlan plan = innerPlan; diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/RowProjector.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/RowProjector.java index 8532e0c30e8..356e7a39596 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/RowProjector.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/RowProjector.java @@ -52,9 +52,12 @@ public class RowProjector { private final boolean isProjectEmptyKeyValue; private final boolean cloneRequired; private final boolean hasUDFs; - + private final boolean isProjectDynColsInWildcardQueries; + public RowProjector(RowProjector projector, boolean isProjectEmptyKeyValue) { - this(projector.getColumnProjectors(), projector.getEstimatedRowByteSize(), isProjectEmptyKeyValue, projector.hasUDFs, projector.isProjectAll); + this(projector.getColumnProjectors(), projector.getEstimatedRowByteSize(), + isProjectEmptyKeyValue, projector.hasUDFs, projector.isProjectAll, + projector.isProjectDynColsInWildcardQueries); } /** * Construct RowProjector based on a list of ColumnProjectors. @@ -62,20 +65,25 @@ public RowProjector(RowProjector projector, boolean isProjectEmptyKeyValue) { * aggregating coprocessor. Only required in the case of an aggregate query with a limit clause and otherwise may * be null. 
* @param estimatedRowSize + * @param isProjectEmptyKeyValue */ public RowProjector(List columnProjectors, int estimatedRowSize, boolean isProjectEmptyKeyValue) { - this(columnProjectors, estimatedRowSize, isProjectEmptyKeyValue, false, false); + this(columnProjectors, estimatedRowSize, isProjectEmptyKeyValue, false, false, false); } /** * Construct RowProjector based on a list of ColumnProjectors. * @param columnProjectors ordered list of ColumnProjectors corresponding to projected columns in SELECT clause * aggregating coprocessor. Only required in the case of an aggregate query with a limit clause and otherwise may * be null. - * @param estimatedRowSize + * @param estimatedRowSize * @param isProjectEmptyKeyValue * @param hasUDFs + * @param isProjectAll + * @param isProjectDynColsInWildcardQueries */ - public RowProjector(List columnProjectors, int estimatedRowSize, boolean isProjectEmptyKeyValue, boolean hasUDFs, boolean isProjectAll) { + public RowProjector(List columnProjectors, int estimatedRowSize, + boolean isProjectEmptyKeyValue, boolean hasUDFs, boolean isProjectAll, + boolean isProjectDynColsInWildcardQueries) { this.columnProjectors = Collections.unmodifiableList(columnProjectors); int position = columnProjectors.size(); reverseIndex = ArrayListMultimap.create(); @@ -107,6 +115,7 @@ public RowProjector(List columnProjectors, int estima } } this.cloneRequired = cloneRequired || hasUDFs; + this.isProjectDynColsInWildcardQueries = isProjectDynColsInWildcardQueries; } public RowProjector cloneIfNecessary() { @@ -129,7 +138,8 @@ public RowProjector cloneIfNecessary() { } } return new RowProjector(clonedColProjectors, - this.estimatedSize, this.isProjectEmptyKeyValue, this.hasUDFs, this.isProjectAll); + this.estimatedSize, this.isProjectEmptyKeyValue, this.hasUDFs, this.isProjectAll, + this.isProjectDynColsInWildcardQueries); } public boolean projectEveryRow() { @@ -139,6 +149,14 @@ public boolean projectEveryRow() { public boolean projectEverything() { return isProjectAll; } + + public boolean hasUDFs() { + return hasUDFs; + } + + public boolean projectDynColsInWildcardQueries() { + return isProjectDynColsInWildcardQueries; + } public List getColumnProjectors() { return columnProjectors; diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/TupleProjectionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/TupleProjectionCompiler.java index dab2457aa2a..065e0cb6534 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/TupleProjectionCompiler.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/TupleProjectionCompiler.java @@ -167,7 +167,6 @@ public static PTable createProjectedTable(SelectStatement select, StatementConte .setBaseColumnCount(BASE_TABLE_BASE_COLUMN_COUNT) .setExcludedColumns(ImmutableList.of()) .setPhysicalNames(ImmutableList.of()) - .setColumns(projectedColumns) .build(); } diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java index 7ec96a3e8c6..0d65ae29786 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java @@ -728,7 +728,6 @@ public MutationPlan compile(UpsertStatement upsert) throws SQLException { PTable projectedTable = PTableImpl.builderWithColumns(table, projectedColumns) .setExcludedColumns(ImmutableList.of()) 
.setDefaultFamilyName(PNameFactory.newName(SchemaUtil.getEmptyColumnFamily(table))) - .setColumns(projectedColumns) .build(); SelectStatement select = SelectStatement.create(SelectStatement.COUNT_ONE, upsert.getHint()); diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/DynamicColumnWildcard.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/DynamicColumnWildcard.java new file mode 100644 index 00000000000..4c5d82d2068 --- /dev/null +++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/DynamicColumnWildcard.java @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.coprocessor; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.client.Mutation; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.coprocessor.ObserverContext; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.RegionObserver; +import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos; +import org.apache.phoenix.coprocessor.generated.PTableProtos; +import org.apache.phoenix.schema.PColumn; +import org.apache.phoenix.schema.PColumnImpl; +import org.apache.phoenix.util.ServerUtil; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.NavigableMap; +import java.util.Optional; + +/** + * Coprocessor for exposing dynamic columns in wildcard queries. This coprocessor will be registered + * to physical HBase tables only i.e. 
PTableType.TABLE and is driven by the client-side config + * QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB + * See PHOENIX-374 + */ +public class DynamicColumnWildcard implements RegionObserver, RegionCoprocessor { + + private static final Log LOG = LogFactory.getLog(DynamicColumnWildcard.class); + // Scan attribute that is set in case we want to project dynamic columns + public static final String WILDCARD_SCAN_INCLUDES_DYNAMIC_COLUMNS = + "_WildcardScanIncludesDynCols"; + public static final byte[] DYN_COLS_METADATA_CELL_QUALIFIER = Bytes.toBytes("#Dyn_"); + + @Override + public Optional getRegionObserver() { + return Optional.of(this); + } + + @Override + public void preBatchMutate(ObserverContext c, + MiniBatchOperationInProgress miniBatchOp) throws IOException { + try { + preBatchMutateWithExceptions(c, miniBatchOp); + } catch(Throwable t) { + // Wrap all exceptions in an IOException to prevent region server crashes + throw ServerUtil.createIOException("Unable to Put cells corresponding to dynamic" + + "column metadata for " + + c.getEnvironment().getRegion().getRegionInfo().getTable().getNameAsString(), t); + } + } + + /** + * Iterate over all Put mutations and add metadata for the list of dynamic columns for each + * column family in its own cell under reserved qualifiers. This is used for resolving dynamic + * columns for wildcard queries. See PHOENIX-374 + * @param c the region server environment + * @param miniBatchOp batch of mutations getting applied to region + * @throws IOException If an I/O error occurs when parsing protobuf + */ + private void preBatchMutateWithExceptions(ObserverContext c, + MiniBatchOperationInProgress miniBatchOp) throws IOException { + for (int i = 0; i < miniBatchOp.size(); i++) { + Mutation m = miniBatchOp.getOperation(i); + List putDynColMetaData = new ArrayList<>(1); + boolean isThereAnExtraPut = false; + if (m instanceof Put) { + // There is at max 1 extra Put (for dynamic column shadow cells) per original Put + putDynColMetaData.add(new Put(m.getRow())); + NavigableMap> famCellMap = m.getFamilyCellMap(); + for (byte[] fam : famCellMap.keySet()) { + byte[] serializedDynColsList = m.getAttribute(Bytes.toString(fam)); + if (serializedDynColsList == null) { + // There are no dynamic columns for this column family + continue; + } + List dynColsInThisFam = DynamicColumnMetaDataProtos. + DynamicColumnMetaData.parseFrom(serializedDynColsList) + .getDynamicColumnsList(); + for (PTableProtos.PColumn dynColProto : dynColsInThisFam) { + // Add a column for this dynamic column to the metadata Put operation + ((Put)putDynColMetaData.get(0)).addColumn(fam, + getQualifierForDynamicColumnMetaDataCell(dynColProto), + dynColProto.toByteArray()); + isThereAnExtraPut = true; + } + } + } + if (isThereAnExtraPut) { + miniBatchOp.addOperationsFromCP(i, + putDynColMetaData.toArray(new Mutation[putDynColMetaData.size()])); + } + } + } + + /** + * We store the metadata for each dynamic cell in a separate cell in the same column family. 
+ * The column qualifier for this cell is: + * {@link DynamicColumnWildcard#DYN_COLS_METADATA_CELL_QUALIFIER} concatenated with the + * qualifier of the actual dynamic column + * @param dynColProto Protobuf representation of the dynamic column PColumn + * @return Final qualifier for the metadata cell + * @throws IOException If an I/O error occurs when parsing the byte array output stream + */ + private static byte[] getQualifierForDynamicColumnMetaDataCell(PTableProtos.PColumn dynColProto) + throws IOException { + PColumn dynCol = PColumnImpl.createFromProto(dynColProto); + ByteArrayOutputStream qual = new ByteArrayOutputStream(); + qual.write(DYN_COLS_METADATA_CELL_QUALIFIER); + qual.write(dynCol.getColumnQualifierBytes()); + if (LOG.isDebugEnabled()) { + LOG.debug("Storing shadow cell for dynamic column metadata for dynamic column : " + + dynCol.getFamilyName().getString() + "." + dynCol.getName().getString()); + } + return qual.toByteArray(); + } + +} diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java index 3489b7907b4..404a7adc257 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java @@ -364,14 +364,14 @@ private Tuple mergeProjectedValue( if (dest instanceof ProjectedValueTuple) { return TupleProjector.mergeProjectedValue( - (ProjectedValueTuple) dest, destSchema, destBitSet, src, - srcSchema, srcBitSet, offset, useNewValueColumnQualifier); + (ProjectedValueTuple) dest, destBitSet, src, + srcBitSet, offset, useNewValueColumnQualifier); } ProjectedValueTuple first = projector.projectResults( new SingleKeyValueTuple(dest.getValue(0))); ProjectedValueTuple merged = TupleProjector.mergeProjectedValue( - first, destSchema, destBitSet, src, srcSchema, + first, destBitSet, src, srcBitSet, offset, useNewValueColumnQualifier); int size = dest.size(); diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/DynamicColumnMetaDataProtos.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/DynamicColumnMetaDataProtos.java new file mode 100644 index 00000000000..760f8f9adf4 --- /dev/null +++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/DynamicColumnMetaDataProtos.java @@ -0,0 +1,774 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: DynamicColumnMetaData.proto + +package org.apache.phoenix.coprocessor.generated; + +public final class DynamicColumnMetaDataProtos { + private DynamicColumnMetaDataProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface DynamicColumnMetaDataOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .PColumn dynamicColumns = 1; + /** + * repeated .PColumn dynamicColumns = 1; + */ + java.util.List + getDynamicColumnsList(); + /** + * repeated .PColumn dynamicColumns = 1; + */ + org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn getDynamicColumns(int index); + /** + * repeated .PColumn dynamicColumns = 1; + */ + int getDynamicColumnsCount(); + /** + * repeated .PColumn dynamicColumns = 1; + */ + java.util.List + getDynamicColumnsOrBuilderList(); + /** + * repeated .PColumn dynamicColumns = 1; + */ + org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder getDynamicColumnsOrBuilder( + int index); + } + /** + * Protobuf type {@code DynamicColumnMetaData} + */ + public static final class DynamicColumnMetaData extends + com.google.protobuf.GeneratedMessage + implements DynamicColumnMetaDataOrBuilder { + // Use DynamicColumnMetaData.newBuilder() to construct. + private DynamicColumnMetaData(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private DynamicColumnMetaData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final DynamicColumnMetaData defaultInstance; + public static DynamicColumnMetaData getDefaultInstance() { + return defaultInstance; + } + + public DynamicColumnMetaData getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DynamicColumnMetaData( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + dynamicColumns_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + dynamicColumns_.add(input.readMessage(org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + dynamicColumns_ = java.util.Collections.unmodifiableList(dynamicColumns_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.internal_static_DynamicColumnMetaData_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.internal_static_DynamicColumnMetaData_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.class, org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DynamicColumnMetaData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DynamicColumnMetaData(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .PColumn dynamicColumns = 1; + public static final int DYNAMICCOLUMNS_FIELD_NUMBER = 1; + private java.util.List dynamicColumns_; + /** + * repeated .PColumn dynamicColumns = 1; + */ + public java.util.List getDynamicColumnsList() { + return dynamicColumns_; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public java.util.List + getDynamicColumnsOrBuilderList() { + return dynamicColumns_; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public int getDynamicColumnsCount() { + return dynamicColumns_.size(); + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn getDynamicColumns(int index) { + return dynamicColumns_.get(index); + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder getDynamicColumnsOrBuilder( + int index) { + return dynamicColumns_.get(index); + } + + private void initFields() { + dynamicColumns_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getDynamicColumnsCount(); i++) { + if (!getDynamicColumns(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < dynamicColumns_.size(); i++) { + output.writeMessage(1, dynamicColumns_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < dynamicColumns_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, dynamicColumns_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } 
+ if (!(obj instanceof org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData)) { + return super.equals(obj); + } + org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData other = (org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData) obj; + + boolean result = true; + result = result && getDynamicColumnsList() + .equals(other.getDynamicColumnsList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getDynamicColumnsCount() > 0) { + hash = (37 * hash) + DYNAMICCOLUMNS_FIELD_NUMBER; + hash = (53 * hash) + getDynamicColumnsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static 
org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code DynamicColumnMetaData} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaDataOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.internal_static_DynamicColumnMetaData_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.internal_static_DynamicColumnMetaData_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.class, org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.Builder.class); + } + + // Construct using org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getDynamicColumnsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (dynamicColumnsBuilder_ == null) { + dynamicColumns_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + dynamicColumnsBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.internal_static_DynamicColumnMetaData_descriptor; + } + + public org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData getDefaultInstanceForType() { + return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.getDefaultInstance(); + } + + public org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData build() { + org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public 
org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData buildPartial() { + org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData result = new org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData(this); + int from_bitField0_ = bitField0_; + if (dynamicColumnsBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + dynamicColumns_ = java.util.Collections.unmodifiableList(dynamicColumns_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.dynamicColumns_ = dynamicColumns_; + } else { + result.dynamicColumns_ = dynamicColumnsBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData) { + return mergeFrom((org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData other) { + if (other == org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.getDefaultInstance()) return this; + if (dynamicColumnsBuilder_ == null) { + if (!other.dynamicColumns_.isEmpty()) { + if (dynamicColumns_.isEmpty()) { + dynamicColumns_ = other.dynamicColumns_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureDynamicColumnsIsMutable(); + dynamicColumns_.addAll(other.dynamicColumns_); + } + onChanged(); + } + } else { + if (!other.dynamicColumns_.isEmpty()) { + if (dynamicColumnsBuilder_.isEmpty()) { + dynamicColumnsBuilder_.dispose(); + dynamicColumnsBuilder_ = null; + dynamicColumns_ = other.dynamicColumns_; + bitField0_ = (bitField0_ & ~0x00000001); + dynamicColumnsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getDynamicColumnsFieldBuilder() : null; + } else { + dynamicColumnsBuilder_.addAllMessages(other.dynamicColumns_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getDynamicColumnsCount(); i++) { + if (!getDynamicColumns(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .PColumn dynamicColumns = 1; + private java.util.List dynamicColumns_ = + java.util.Collections.emptyList(); + private void ensureDynamicColumnsIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + dynamicColumns_ = new java.util.ArrayList(dynamicColumns_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder> dynamicColumnsBuilder_; + + /** + * repeated .PColumn dynamicColumns = 1; + */ + public java.util.List getDynamicColumnsList() { + if (dynamicColumnsBuilder_ == null) { + return java.util.Collections.unmodifiableList(dynamicColumns_); + } else { + return dynamicColumnsBuilder_.getMessageList(); + } + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public int getDynamicColumnsCount() { + if (dynamicColumnsBuilder_ == null) { + return dynamicColumns_.size(); + } else { + return dynamicColumnsBuilder_.getCount(); + } + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn getDynamicColumns(int index) { + if (dynamicColumnsBuilder_ == null) { + return dynamicColumns_.get(index); + } else { + return dynamicColumnsBuilder_.getMessage(index); + } + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public Builder setDynamicColumns( + int index, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn value) { + if (dynamicColumnsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDynamicColumnsIsMutable(); + dynamicColumns_.set(index, value); + onChanged(); + } else { + dynamicColumnsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public Builder setDynamicColumns( + int index, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder builderForValue) { + if (dynamicColumnsBuilder_ == null) { + ensureDynamicColumnsIsMutable(); + dynamicColumns_.set(index, builderForValue.build()); + onChanged(); + } else { + dynamicColumnsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public Builder addDynamicColumns(org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn 
value) { + if (dynamicColumnsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDynamicColumnsIsMutable(); + dynamicColumns_.add(value); + onChanged(); + } else { + dynamicColumnsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public Builder addDynamicColumns( + int index, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn value) { + if (dynamicColumnsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDynamicColumnsIsMutable(); + dynamicColumns_.add(index, value); + onChanged(); + } else { + dynamicColumnsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public Builder addDynamicColumns( + org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder builderForValue) { + if (dynamicColumnsBuilder_ == null) { + ensureDynamicColumnsIsMutable(); + dynamicColumns_.add(builderForValue.build()); + onChanged(); + } else { + dynamicColumnsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public Builder addDynamicColumns( + int index, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder builderForValue) { + if (dynamicColumnsBuilder_ == null) { + ensureDynamicColumnsIsMutable(); + dynamicColumns_.add(index, builderForValue.build()); + onChanged(); + } else { + dynamicColumnsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public Builder addAllDynamicColumns( + java.lang.Iterable values) { + if (dynamicColumnsBuilder_ == null) { + ensureDynamicColumnsIsMutable(); + super.addAll(values, dynamicColumns_); + onChanged(); + } else { + dynamicColumnsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public Builder clearDynamicColumns() { + if (dynamicColumnsBuilder_ == null) { + dynamicColumns_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + dynamicColumnsBuilder_.clear(); + } + return this; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public Builder removeDynamicColumns(int index) { + if (dynamicColumnsBuilder_ == null) { + ensureDynamicColumnsIsMutable(); + dynamicColumns_.remove(index); + onChanged(); + } else { + dynamicColumnsBuilder_.remove(index); + } + return this; + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder getDynamicColumnsBuilder( + int index) { + return getDynamicColumnsFieldBuilder().getBuilder(index); + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder getDynamicColumnsOrBuilder( + int index) { + if (dynamicColumnsBuilder_ == null) { + return dynamicColumns_.get(index); } else { + return dynamicColumnsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public java.util.List + getDynamicColumnsOrBuilderList() { + if (dynamicColumnsBuilder_ != null) { + return dynamicColumnsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(dynamicColumns_); + } + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder addDynamicColumnsBuilder() { + 
return getDynamicColumnsFieldBuilder().addBuilder( + org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.getDefaultInstance()); + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder addDynamicColumnsBuilder( + int index) { + return getDynamicColumnsFieldBuilder().addBuilder( + index, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.getDefaultInstance()); + } + /** + * repeated .PColumn dynamicColumns = 1; + */ + public java.util.List + getDynamicColumnsBuilderList() { + return getDynamicColumnsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder> + getDynamicColumnsFieldBuilder() { + if (dynamicColumnsBuilder_ == null) { + dynamicColumnsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder>( + dynamicColumns_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + dynamicColumns_ = null; + } + return dynamicColumnsBuilder_; + } + + // @@protoc_insertion_point(builder_scope:DynamicColumnMetaData) + } + + static { + defaultInstance = new DynamicColumnMetaData(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DynamicColumnMetaData) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DynamicColumnMetaData_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DynamicColumnMetaData_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\033DynamicColumnMetaData.proto\032\014PTable.pr" + + "oto\"9\n\025DynamicColumnMetaData\022 \n\016dynamicC" + + "olumns\030\001 \003(\0132\010.PColumnBO\n(org.apache.pho" + + "enix.coprocessor.generatedB\033DynamicColum" + + "nMetaDataProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_DynamicColumnMetaData_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_DynamicColumnMetaData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DynamicColumnMetaData_descriptor, + new java.lang.String[] { "DynamicColumns", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.phoenix.coprocessor.generated.PTableProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/BaseQueryPlan.java 
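The generated DynamicColumnMetaData message above is just a container for a repeated list of PColumn protos: the server packs the PColumn of every dynamic column it discovers for a row into one message, and the client parses that message back before it can project the extra values. A minimal illustrative sketch of that round trip (the wrapper class DynColMetaDataRoundTripSketch is hypothetical and not part of the patch; the Phoenix and protobuf calls are the ones introduced above):

    import java.util.ArrayList;
    import java.util.List;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData;
    import org.apache.phoenix.coprocessor.generated.PTableProtos;
    import org.apache.phoenix.schema.PColumn;
    import org.apache.phoenix.schema.PColumnImpl;

    final class DynColMetaDataRoundTripSketch {
        // Server side: serialize the dynamic columns discovered for a row.
        static byte[] serialize(List<PColumn> dynCols) {
            DynamicColumnMetaData.Builder builder = DynamicColumnMetaData.newBuilder();
            for (PColumn dynCol : dynCols) {
                builder.addDynamicColumns(PColumnImpl.toProto(dynCol));
            }
            return builder.build().toByteArray();
        }

        // Client side: parse the serialized list back into PColumns.
        static List<PColumn> deserialize(byte[] bytes) throws InvalidProtocolBufferException {
            List<PColumn> dynCols = new ArrayList<>();
            for (PTableProtos.PColumn proto :
                    DynamicColumnMetaData.parseFrom(bytes).getDynamicColumnsList()) {
                dynCols.add(PColumnImpl.createFromProto(proto));
            }
            return dynCols;
        }
    }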
b/phoenix-core/src/main/java/org/apache/phoenix/execute/BaseQueryPlan.java index 25980b9c183..656bcde426c 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/execute/BaseQueryPlan.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/BaseQueryPlan.java @@ -214,7 +214,7 @@ public final ResultIterator iterator(ParallelScanGrouper scanGrouper) throws SQL @Override public final ResultIterator iterator(ParallelScanGrouper scanGrouper, Scan scan) throws SQLException { - return iterator(Collections.emptyMap(), scanGrouper, scan); + return iterator(Collections.emptyMap(), scanGrouper, scan); } private ResultIterator getWrappedIterator(final Map dependencies, diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/CorrelatePlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/CorrelatePlan.java index e3e02646faf..b5491aad9a9 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/execute/CorrelatePlan.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/CorrelatePlan.java @@ -162,8 +162,8 @@ public Tuple next() throws SQLException { try { joined = rhsBitSet == ValueBitSet.EMPTY_VALUE_BITSET ? current : TupleProjector.mergeProjectedValue( - convertLhs(current), joinedSchema, destBitSet, - rhsCurrent, rhsSchema, rhsBitSet, rhsFieldPosition, true); + convertLhs(current), destBitSet, + rhsCurrent, rhsBitSet, rhsFieldPosition, true); } catch (IOException e) { throw new SQLException(e); } diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java index ccefa6e63bf..194b5a9e209 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java @@ -22,6 +22,8 @@ import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_BATCH_SIZE; import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_BYTES; import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_COMMIT_TIME; +import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; +import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; import java.io.IOException; import java.sql.SQLException; @@ -40,7 +42,6 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -507,7 +508,7 @@ private Iterator>> addRowMutations(final TableRef tab final Iterator indexes = indexList.iterator(); final List mutationList = Lists.newArrayListWithExpectedSize(values.size()); final List mutationsPertainingToIndex = indexes.hasNext() ? Lists - . 
newArrayListWithExpectedSize(values.size()) : null; + .newArrayListWithExpectedSize(values.size()) : null; generateMutations(tableRef, mutationTimestamp, serverTimestamp, values, mutationList, mutationsPertainingToIndex); return new Iterator>>() { @@ -523,7 +524,7 @@ public boolean hasNext() { public Pair> next() { if (isFirst) { isFirst = false; - return new Pair>(table.getPhysicalName(), mutationList); + return new Pair<>(table.getPhysicalName(), mutationList); } PTable index = indexes.next(); @@ -595,6 +596,8 @@ private void generateMutations(final TableRef tableRef, final long mutationTimes Iterator> iterator = values.entrySet().iterator(); long timestampToUse = mutationTimestamp; MultiRowMutationState modifiedValues = new MultiRowMutationState(16); + boolean wildcardIncludesDynamicCols = connection.getQueryServices().getProps().getBoolean( + WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB); while (iterator.hasNext()) { Map.Entry rowEntry = iterator.next(); byte[] onDupKeyBytes = rowEntry.getValue().getOnDupKeyBytes(); @@ -628,6 +631,9 @@ private void generateMutations(final TableRef tableRef, final long mutationTimes for (Map.Entry valueEntry : rowEntry.getValue().getColumnValues().entrySet()) { row.setValue(valueEntry.getKey(), valueEntry.getValue()); } + if (wildcardIncludesDynamicCols) { + row.setAttributesForDynamicColumnsIfReqd(); + } rowMutations = row.toRowMutations(); // Pass through ON DUPLICATE KEY info through mutations // In the case of the same clause being used on many statements, this will be diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/SortMergeJoinPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/SortMergeJoinPlan.java index b23db35253e..1d79384b9e4 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/execute/SortMergeJoinPlan.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/SortMergeJoinPlan.java @@ -460,9 +460,8 @@ private Tuple join(Tuple lhs, Tuple rhs) throws SQLException { } return rhsBitSet == ValueBitSet.EMPTY_VALUE_BITSET ? 
- t : TupleProjector.mergeProjectedValue( - t, joinedSchema, destBitSet, - rhs, rhsSchema, rhsBitSet, rhsFieldPosition, true); + t : TupleProjector.mergeProjectedValue(t, destBitSet, + rhs, rhsBitSet, rhsFieldPosition, true); } catch (IOException e) { throw new SQLException(e); } diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java index 753c11d6cc9..993470a1df0 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java @@ -17,37 +17,51 @@ */ package org.apache.phoenix.execute; +import static org.apache.phoenix.coprocessor.DynamicColumnWildcard.DYN_COLS_METADATA_CELL_QUALIFIER; +import static org.apache.phoenix.coprocessor.DynamicColumnWildcard.WILDCARD_SCAN_INCLUDES_DYNAMIC_COLUMNS; import static org.apache.phoenix.query.QueryConstants.VALUE_COLUMN_FAMILY; import static org.apache.phoenix.query.QueryConstants.VALUE_COLUMN_QUALIFIER; +import static org.apache.phoenix.schema.types.PDataType.TRUE_BYTES; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; +import java.nio.ByteBuffer; import java.sql.SQLException; import java.util.Arrays; +import java.util.HashSet; import java.util.List; +import java.util.Set; +import com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.io.WritableUtils; import org.apache.phoenix.compile.ColumnProjector; import org.apache.phoenix.compile.RowProjector; +import org.apache.phoenix.coprocessor.generated.PTableProtos; import org.apache.phoenix.expression.Expression; import org.apache.phoenix.expression.ExpressionType; +import org.apache.phoenix.expression.KeyValueColumnExpression; import org.apache.phoenix.schema.KeyValueSchema; import org.apache.phoenix.schema.KeyValueSchema.KeyValueSchemaBuilder; import org.apache.phoenix.schema.PColumn; +import org.apache.phoenix.schema.PColumnImpl; import org.apache.phoenix.schema.PTable; import org.apache.phoenix.schema.PTableType; import org.apache.phoenix.schema.ProjectedColumn; import org.apache.phoenix.schema.ValueBitSet; import org.apache.phoenix.schema.tuple.BaseTuple; import org.apache.phoenix.schema.tuple.Tuple; +import org.apache.phoenix.util.ByteUtil; import org.apache.phoenix.util.PhoenixKeyValueUtil; +import org.apache.phoenix.util.PhoenixRuntime; import org.apache.phoenix.util.SchemaUtil; import com.google.common.base.Preconditions; @@ -112,7 +126,20 @@ public void setValueBitSet(ValueBitSet bitSet) { this.valueSet = bitSet; } - public static void serializeProjectorIntoScan(Scan scan, TupleProjector projector) { + public static void serializeProjectorIntoScan(Scan scan, TupleProjector projector, + boolean projectDynColsInWildcardQueries) { + scan.setAttribute(SCAN_PROJECTOR, serializeProjectorIntoBytes(projector)); + if (projectDynColsInWildcardQueries) { + scan.setAttribute(WILDCARD_SCAN_INCLUDES_DYNAMIC_COLUMNS, TRUE_BYTES); + } + } + + /** + * Serialize the projector into a byte array + * @param projector projector to serialize + * @return byte array + */ + static byte[] 
serializeProjectorIntoBytes(TupleProjector projector) { ByteArrayOutputStream stream = new ByteArrayOutputStream(); try { DataOutputStream output = new DataOutputStream(stream); @@ -120,10 +147,11 @@ public static void serializeProjectorIntoScan(Scan scan, TupleProjector projecto int count = projector.expressions.length; WritableUtils.writeVInt(output, count); for (int i = 0; i < count; i++) { - WritableUtils.writeVInt(output, ExpressionType.valueOf(projector.expressions[i]).ordinal()); - projector.expressions[i].write(output); + WritableUtils.writeVInt(output, + ExpressionType.valueOf(projector.expressions[i]).ordinal()); + projector.expressions[i].write(output); } - scan.setAttribute(SCAN_PROJECTOR, stream.toByteArray()); + return stream.toByteArray(); } catch (IOException e) { throw new RuntimeException(e); } finally { @@ -133,11 +161,18 @@ public static void serializeProjectorIntoScan(Scan scan, TupleProjector projecto throw new RuntimeException(e); } } - } public static TupleProjector deserializeProjectorFromScan(Scan scan) { - byte[] proj = scan.getAttribute(SCAN_PROJECTOR); + return deserializeProjectorFromBytes(scan.getAttribute(SCAN_PROJECTOR)); + } + + /** + * Deserialize the byte array to form a projector + * @param proj byte array to deserialize + * @return projector + */ + static TupleProjector deserializeProjectorFromBytes(byte[] proj) { if (proj == null) { return null; } @@ -149,9 +184,9 @@ public static TupleProjector deserializeProjectorFromScan(Scan scan) { int count = WritableUtils.readVInt(input); Expression[] expressions = new Expression[count]; for (int i = 0; i < count; i++) { - int ordinal = WritableUtils.readVInt(input); - expressions[i] = ExpressionType.values()[ordinal].newInstance(); - expressions[i].readFields(input); + int ordinal = WritableUtils.readVInt(input); + expressions[i] = ExpressionType.values()[ordinal].newInstance(); + expressions[i].readFields(input); } return new TupleProjector(schema, expressions); } catch (IOException e) { @@ -164,6 +199,86 @@ public static TupleProjector deserializeProjectorFromScan(Scan scan) { } } } + + /** + * Iterate over the list of cells returned from the scan and return a tuple projector for the + * dynamic columns by parsing the metadata stored for the list of dynamic columns + * @param result list of cells + * @param dynCols list of dynamic columns to be populated + * @param dynColCells list of cells corresponding to dynamic columns to be populated + * @return The tuple projector corresponding to dynamic columns or null if there are no dynamic + * columns to process + * @throws InvalidProtocolBufferException Thrown if there is an error parsing byte[] to protobuf + */ + public static TupleProjector getDynamicColumnsTupleProjector(List result, + List dynCols, List dynColCells) throws InvalidProtocolBufferException { + Set> dynColCellQualifiers = new HashSet<>(); + populateDynColsFromResult(result, dynCols, dynColCellQualifiers); + if (dynCols.isEmpty()) { + return null; + } + populateDynamicColumnCells(result, dynColCellQualifiers, dynColCells); + if (dynColCells.isEmpty()) { + return null; + } + KeyValueSchema dynColsSchema = PhoenixRuntime.buildKeyValueSchema(dynCols); + Expression[] expressions = new Expression[dynCols.size()]; + for (int i = 0; i < dynCols.size(); i++) { + expressions[i] = new KeyValueColumnExpression(dynCols.get(i)); + } + return new TupleProjector(dynColsSchema, expressions); + } + + /** + * Populate cells corresponding to dynamic columns + * @param result list of cells + * @param 
dynColCellQualifiers Set of pairs corresponding to + * cells of dynamic columns + * @param dynColCells Populated list of cells corresponding to dynamic columns + */ + private static void populateDynamicColumnCells(List result, + Set> dynColCellQualifiers, List dynColCells) { + for (Cell c : result) { + Pair famQualPair = new Pair<>(ByteBuffer.wrap(CellUtil.cloneFamily(c)), + ByteBuffer.wrap(CellUtil.cloneQualifier(c))); + if (dynColCellQualifiers.contains(famQualPair)) { + dynColCells.add(c); + } + } + } + + /** + * Iterate over the list of cells and populate dynamic columns + * @param result list of cells + * @param dynCols Populated list of PColumns corresponding to dynamic columns + * @param dynColCellQualifiers Populated set of pairs + * for the cells in the list, which correspond to dynamic columns + * @throws InvalidProtocolBufferException Thrown if there is an error parsing byte[] to protobuf + */ + private static void populateDynColsFromResult(List result, List dynCols, + Set> dynColCellQualifiers) + throws InvalidProtocolBufferException { + for (Cell c : result) { + byte[] qual = CellUtil.cloneQualifier(c); + byte[] fam = CellUtil.cloneFamily(c); + int index = Bytes.indexOf(qual, DYN_COLS_METADATA_CELL_QUALIFIER); + + // Contains dynamic column metadata, so add it to the list of dynamic columns + if (index != -1) { + byte[] dynColMetaDataProto = CellUtil.cloneValue(c); + dynCols.add(PColumnImpl.createFromProto( + PTableProtos.PColumn.parseFrom(dynColMetaDataProto))); + // Add the pair for the actual dynamic column. The column qualifier + // of the dynamic column is got by parsing out the known bytes from the shadow cell + // containing the metadata for that column i.e. + // DYN_COLS_METADATA_CELL_QUALIFIER + byte[] dynColQual = Arrays.copyOfRange(qual, + index + DYN_COLS_METADATA_CELL_QUALIFIER.length, qual.length); + dynColCellQualifiers.add( + new Pair<>(ByteBuffer.wrap(fam), ByteBuffer.wrap(dynColQual))); + } + } + } public static class ProjectedValueTuple extends BaseTuple { ImmutableBytesWritable keyPtr = new ImmutableBytesWritable(); @@ -207,6 +322,28 @@ public void getKey(ImmutableBytesWritable ptr) { ptr.set(keyPtr.get(), keyPtr.getOffset(), keyPtr.getLength()); } + @Override + public Cell mergeWithDynColsListBytesAndGetValue(int index, byte[] dynColsList) { + if (index != 0) { + throw new IndexOutOfBoundsException(Integer.toString(index)); + } + if (dynColsList == null || dynColsList.length == 0) { + return getValue(VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER); + } + // We put the known reserved bytes before the serialized list of dynamic column + // PColumns to easily parse out the column list on the client + byte[] concatBytes = ByteUtil.concat(projectedValue.get(), + DYN_COLS_METADATA_CELL_QUALIFIER, dynColsList); + ImmutableBytesWritable projectedValueWithDynColsListBytes = + new ImmutableBytesWritable(concatBytes); + keyValue = PhoenixKeyValueUtil.newKeyValue(keyPtr.get(), keyPtr.getOffset(), + keyPtr.getLength(), VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER, timestamp, + projectedValueWithDynColsListBytes.get(), + projectedValueWithDynColsListBytes.getOffset(), + projectedValueWithDynColsListBytes.getLength()); + return keyValue; + } + @Override public Cell getValue(int index) { if (index != 0) { @@ -305,8 +442,9 @@ public static void decodeProjectedValue(Tuple tuple, ImmutableBytesWritable ptr) if (!b) throw new IOException("Trying to decode a non-projected value."); } - public static ProjectedValueTuple mergeProjectedValue(ProjectedValueTuple dest, KeyValueSchema 
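The helpers above rely on the shadow-cell layout written for each dynamic column: a metadata cell in the same column family whose qualifier is DYN_COLS_METADATA_CELL_QUALIFIER followed by the dynamic column's own qualifier, and whose value is that column's serialized PColumn proto. A hedged sketch of recovering the real column's qualifier from such a cell (ShadowCellSketch and its method are hypothetical, mirroring what populateDynColsFromResult does rather than reproducing it):

    import java.util.Arrays;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.util.Bytes;

    final class ShadowCellSketch {
        /**
         * If the given cell is a dynamic-column metadata ("shadow") cell, return the
         * qualifier of the real dynamic column it describes; otherwise return null.
         * Shadow qualifier layout: <metadataPrefix> || <dynamic column qualifier>.
         */
        static byte[] dynColQualifierOrNull(Cell cell, byte[] metadataPrefix) {
            byte[] qual = CellUtil.cloneQualifier(cell);
            int index = Bytes.indexOf(qual, metadataPrefix);
            if (index == -1) {
                return null; // an ordinary cell, not dynamic-column metadata
            }
            // Strip the known prefix; what remains is the qualifier of the actual
            // dynamic column cell, whose PColumn proto is this cell's value.
            return Arrays.copyOfRange(qual, index + metadataPrefix.length, qual.length);
        }
    }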
destSchema, ValueBitSet destBitSet, - Tuple src, KeyValueSchema srcSchema, ValueBitSet srcBitSet, int offset, boolean useNewValueColumnQualifier) throws IOException { + public static ProjectedValueTuple mergeProjectedValue(ProjectedValueTuple dest, + ValueBitSet destBitSet, Tuple src, ValueBitSet srcBitSet, int offset, + boolean useNewValueColumnQualifier) throws IOException { ImmutableBytesWritable destValue = dest.getProjectedValue(); int origDestBitSetLen = dest.getBitSetLength(); destBitSet.clear(); diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java index e60bf00baaf..52db52c08be 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java @@ -80,6 +80,10 @@ public KeyValueSchema getSchema() { public int getPosition() { return position; } + + public Collection getColumns() { + return columns; + } @Override public String toString() { diff --git a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/KeyValueBuilder.java b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/KeyValueBuilder.java index c6967cb289d..7957774543e 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/KeyValueBuilder.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/KeyValueBuilder.java @@ -42,11 +42,11 @@ public abstract class KeyValueBuilder { * @throws RuntimeException if there is an IOException thrown from the underlying {@link Put} */ @SuppressWarnings("javadoc") - public static void addQuietly(Mutation m, KeyValueBuilder builder, KeyValue kv) { + public static void addQuietly(Mutation m, KeyValue kv) { byte [] family = CellUtil.cloneFamily(kv); List list = m.getFamilyCellMap().get(family); if (list == null) { - list = new ArrayList(); + list = new ArrayList<>(); m.getFamilyCellMap().put(family, list); } list.add(kv); diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java index a30a86b44c7..0cb6b141aea 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java @@ -23,6 +23,8 @@ import static org.apache.phoenix.coprocessor.BaseScannerRegionObserver.SCAN_STOP_ROW_SUFFIX; import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_FAILED_QUERY_COUNTER; import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_QUERY_TIMEOUT_COUNTER; +import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; +import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; import static org.apache.phoenix.schema.PTable.IndexType.LOCAL; import static org.apache.phoenix.schema.PTableType.INDEX; import static org.apache.phoenix.util.ByteUtil.EMPTY_BYTE_ARRAY; @@ -72,6 +74,7 @@ import org.apache.phoenix.compile.ScanRanges; import org.apache.phoenix.compile.StatementContext; import org.apache.phoenix.coprocessor.BaseScannerRegionObserver; +import org.apache.phoenix.coprocessor.DynamicColumnWildcard; import org.apache.phoenix.coprocessor.HashJoinCacheNotFoundException; import org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver; import 
org.apache.phoenix.exception.SQLExceptionCode; @@ -109,6 +112,7 @@ import org.apache.phoenix.schema.stats.GuidePostsInfo; import org.apache.phoenix.schema.stats.GuidePostsKey; import org.apache.phoenix.schema.stats.StatisticsUtil; +import org.apache.phoenix.schema.types.PDataType; import org.apache.phoenix.util.ByteUtil; import org.apache.phoenix.util.Closeables; import org.apache.phoenix.util.EncodedColumnsUtil; @@ -190,6 +194,9 @@ protected boolean useStats() { private static void initializeScan(QueryPlan plan, Integer perScanLimit, Integer offset, Scan scan) throws SQLException { StatementContext context = plan.getContext(); TableRef tableRef = plan.getTableRef(); + boolean wildcardIncludesDynamicCols = context.getConnection().getQueryServices() + .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, + DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB); PTable table = tableRef.getTable(); Map> familyMap = scan.getFamilyMap(); @@ -208,7 +215,8 @@ private static void initializeScan(QueryPlan plan, Integer perScanLimit, Integer FilterableStatement statement = plan.getStatement(); RowProjector projector = plan.getProjector(); boolean optimizeProjection = false; - boolean keyOnlyFilter = familyMap.isEmpty() && context.getWhereConditionColumns().isEmpty(); + boolean keyOnlyFilter = familyMap.isEmpty() && !wildcardIncludesDynamicCols && + context.getWhereConditionColumns().isEmpty(); if (!projector.projectEverything()) { // If nothing projected into scan and we only have one column family, just allow everything // to be projected and use a FirstKeyOnlyFilter to skip from row to row. This turns out to diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/RegionScannerFactory.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/RegionScannerFactory.java index 703c9dc25bd..3be68bb8b0a 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/RegionScannerFactory.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/RegionScannerFactory.java @@ -18,9 +18,14 @@ package org.apache.phoenix.iterate; +import static org.apache.phoenix.coprocessor.DynamicColumnWildcard.WILDCARD_SCAN_INCLUDES_DYNAMIC_COLUMNS; +import static org.apache.phoenix.schema.types.PDataType.TRUE_BYTES; + import com.google.common.collect.ImmutableList; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; @@ -31,6 +36,8 @@ import org.apache.hadoop.hbase.regionserver.ScannerContext; import org.apache.hadoop.hbase.regionserver.ScannerContextUtil; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos; import org.apache.phoenix.execute.TupleProjector; import org.apache.phoenix.expression.Expression; import org.apache.phoenix.expression.KeyValueColumnExpression; @@ -38,17 +45,21 @@ import org.apache.phoenix.index.IndexMaintainer; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.schema.KeyValueSchema; -import org.apache.phoenix.schema.PTable; +import org.apache.phoenix.schema.PColumn; +import org.apache.phoenix.schema.PColumnImpl; import org.apache.phoenix.schema.ValueBitSet; -import org.apache.phoenix.schema.tuple.*; +import org.apache.phoenix.schema.tuple.MultiKeyValueTuple; +import 
org.apache.phoenix.schema.tuple.PositionBasedResultTuple; +import org.apache.phoenix.schema.tuple.ResultTuple; +import org.apache.phoenix.schema.tuple.Tuple; import org.apache.phoenix.transaction.PhoenixTransactionContext; import org.apache.phoenix.util.EncodedColumnsUtil; import org.apache.phoenix.util.IndexUtil; import org.apache.phoenix.util.ScanUtil; import org.apache.phoenix.util.ServerUtil; -import org.apache.tephra.Transaction; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.ListIterator; import java.util.Set; @@ -183,11 +194,17 @@ public boolean nextRaw(List result) throws IOException { tupleProjector, dataRegion, indexMaintainer, viewConstants, ptr); } if (projector != null) { - Tuple toProject = useQualifierAsListIndex ? new PositionBasedResultTuple(result) : new ResultTuple( - Result.create(result)); - Tuple tuple = projector.projectResults(toProject, useNewValueColumnQualifier); + Tuple toProject = useQualifierAsListIndex ? new PositionBasedResultTuple(result) : + new ResultTuple(Result.create(result)); + + Pair mergedTupleDynColsPair = getTupleWithDynColsIfRequired(result, + projector.projectResults(toProject, useNewValueColumnQualifier)); + Tuple tupleWithDynColsIfReqd = mergedTupleDynColsPair.getFirst(); + byte[] serializedDynColsList = mergedTupleDynColsPair.getSecond(); + result.clear(); - result.add(tuple.getValue(0)); + result.add(tupleWithDynColsIfReqd.mergeWithDynColsListBytesAndGetValue(0, + serializedDynColsList)); if (arrayElementCell != null) { result.add(arrayElementCell); } @@ -200,6 +217,59 @@ public boolean nextRaw(List result) throws IOException { } } + /** + * Iterate over the list of cells returned from the scan and use the dynamic column metadata + * to create a tuple projector for dynamic columns. Finally, merge this with the projected + * values corresponding to the known columns + * @param result list of cells returned from the scan + * @param tuple projected value tuple from known schema/columns + * @return A pair, whose first part is a combined projected value tuple containing the + * known column values along with resolved dynamic column values and whose second part is + * the serialized list of dynamic column PColumns. In case dynamic columns are not + * to be exposed or are not present, this returns the original tuple and an empty byte array. + * @throws IOException Thrown if there is an error parsing protobuf or merging projected + * values + */ + private Pair getTupleWithDynColsIfRequired(List result, Tuple tuple) + throws IOException { + // We only care about dynamic column cells if the scan has this attribute set + if (Bytes.equals(scan.getAttribute(WILDCARD_SCAN_INCLUDES_DYNAMIC_COLUMNS), TRUE_BYTES)) { + List dynCols = new ArrayList<>(); + List dynColCells = new ArrayList<>(); + TupleProjector dynColTupleProj = TupleProjector.getDynamicColumnsTupleProjector(result, + dynCols, dynColCells); + if (dynColTupleProj != null) { + Tuple toProject = useQualifierAsListIndex ? new PositionBasedResultTuple(dynColCells) : + new ResultTuple(Result.create(dynColCells)); + Tuple dynColsProjectedTuple = dynColTupleProj + .projectResults(toProject, useNewValueColumnQualifier); + + ValueBitSet destBitSet = projector.getValueBitSet(); + // In case we are not projecting any non-row key columns, the field count for the + // current projector will be 0, so we simply use the dynamic column projector's + // value bitset as the destination bitset. 
+ if (projector.getSchema().getFieldCount() == 0) { + destBitSet = dynColTupleProj.getValueBitSet(); + } + // Add dynamic column data at the end of the projected tuple + Tuple mergedTuple = TupleProjector.mergeProjectedValue( + (TupleProjector.ProjectedValueTuple)tuple, destBitSet, dynColsProjectedTuple, + dynColTupleProj.getValueBitSet(), projector.getSchema().getFieldCount(), + useNewValueColumnQualifier); + + // We send the serialized list of PColumns for dynamic columns back to the client + // so that the client can process the corresponding projected values + DynamicColumnMetaDataProtos.DynamicColumnMetaData.Builder dynColsListBuilder = + DynamicColumnMetaDataProtos.DynamicColumnMetaData.newBuilder(); + for (PColumn dynCol : dynCols) { + dynColsListBuilder.addDynamicColumns(PColumnImpl.toProto(dynCol)); + } + return new Pair<>(mergedTuple, dynColsListBuilder.build().toByteArray()); + } + } + return new Pair<>(tuple, new byte[0]); + } + @Override public boolean nextRaw(List result, ScannerContext scannerContext) throws IOException { diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixResultSet.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixResultSet.java index 84816a0c17f..2778276c577 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixResultSet.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixResultSet.java @@ -17,6 +17,9 @@ */ package org.apache.phoenix.jdbc; + +import static org.apache.phoenix.coprocessor.DynamicColumnWildcard.DYN_COLS_METADATA_CELL_QUALIFIER; + import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; @@ -38,17 +41,32 @@ import java.sql.Time; import java.sql.Timestamp; import java.text.Format; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Calendar; +import java.util.List; import java.util.Map; +import com.google.common.primitives.Bytes; +import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.commons.lang.ArrayUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.util.Pair; import org.apache.phoenix.compile.ColumnProjector; +import org.apache.phoenix.compile.ExpressionProjector; import org.apache.phoenix.compile.RowProjector; import org.apache.phoenix.compile.StatementContext; +import org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos; +import org.apache.phoenix.coprocessor.generated.PTableProtos; import org.apache.phoenix.exception.SQLExceptionCode; import org.apache.phoenix.exception.SQLExceptionInfo; +import org.apache.phoenix.execute.TupleProjector; +import org.apache.phoenix.expression.Expression; +import org.apache.phoenix.expression.ProjectedColumnExpression; import org.apache.phoenix.iterate.ResultIterator; import org.apache.phoenix.log.QueryLogInfo; import org.apache.phoenix.log.QueryLogger; @@ -56,6 +74,8 @@ import org.apache.phoenix.monitoring.MetricType; import org.apache.phoenix.monitoring.OverAllQueryMetrics; import org.apache.phoenix.monitoring.ReadMetricQueue; +import org.apache.phoenix.schema.PColumn; +import org.apache.phoenix.schema.PColumnImpl; import org.apache.phoenix.schema.tuple.ResultTuple; import org.apache.phoenix.schema.tuple.Tuple; import org.apache.phoenix.schema.types.PBoolean; @@ -76,8 +96,10 @@ import com.google.common.annotations.VisibleForTesting; import 
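getTupleWithDynColsIfRequired hands back both the merged tuple and the serialized list of dynamic-column PColumns; mergeWithDynColsListBytesAndGetValue then carries that list back to the client inside the single projected-value cell by appending a known marker plus the serialized list after the projected bytes. A rough sketch of that wire layout (DynColValueLayoutSketch is a hypothetical helper; ByteUtil.concat is the Phoenix utility the patch itself uses):

    import org.apache.phoenix.util.ByteUtil;

    final class DynColValueLayoutSketch {
        /**
         * Append the dynamic-column metadata to the projected value:
         *   <projected value bytes> || <marker qualifier> || <serialized DynamicColumnMetaData>
         * The client later searches for the marker from the end of the value to split
         * the real cell data from the serialized PColumn list.
         */
        static byte[] appendDynColsList(byte[] projectedValue, byte[] marker,
                byte[] serializedDynColsList) {
            if (serializedDynColsList == null || serializedDynColsList.length == 0) {
                return projectedValue; // nothing to expose, keep the value untouched
            }
            return ByteUtil.concat(projectedValue, marker, serializedDynColsList);
        }
    }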
com.google.common.base.Throwables; +import org.apache.phoenix.util.SchemaUtil; - +import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; +import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; /** * @@ -121,7 +143,11 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable { private final ReadMetricQueue readMetricsQueue; private final OverAllQueryMetrics overAllQueryMetrics; private final ImmutableBytesWritable ptr = new ImmutableBytesWritable(); + private final boolean wildcardIncludesDynamicCols; + private final List staticColumns; + private final int startPositionForDynamicCols; + private RowProjector rowProjectorWithDynamicCols; private Tuple currentRow = BEFORE_FIRST; private boolean isClosed = false; private boolean wasNull = false; @@ -133,9 +159,8 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable { private Object exception; - - - public PhoenixResultSet(ResultIterator resultIterator, RowProjector rowProjector, StatementContext ctx) throws SQLException { + public PhoenixResultSet(ResultIterator resultIterator, RowProjector rowProjector, + StatementContext ctx) throws SQLException { this.rowProjector = rowProjector; this.scanner = resultIterator; this.context = ctx; @@ -143,6 +168,17 @@ public PhoenixResultSet(ResultIterator resultIterator, RowProjector rowProjector this.readMetricsQueue = context.getReadMetricsQueue(); this.overAllQueryMetrics = context.getOverallQueryMetrics(); this.queryLogger = context.getQueryLogger() != null ? context.getQueryLogger() : QueryLogger.NO_OP_INSTANCE; + this.wildcardIncludesDynamicCols = this.context.getConnection().getQueryServices() + .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, + DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB); + if (this.wildcardIncludesDynamicCols) { + Pair, Integer> res = getStaticColsAndStartingPosForDynCols(); + this.staticColumns = res.getFirst(); + this.startPositionForDynamicCols = res.getSecond(); + } else { + this.staticColumns = null; + this.startPositionForDynamicCols = 0; + } } @Override @@ -202,7 +238,7 @@ public void deleteRow() throws SQLException { @Override public int findColumn(String columnLabel) throws SQLException { - Integer index = rowProjector.getColumnIndex(columnLabel); + Integer index = getRowProjector().getColumnIndex(columnLabel); return index + 1; } @@ -216,7 +252,7 @@ public Array getArray(int columnIndex) throws SQLException { checkCursorState(); // Get the value using the expected type instead of trying to coerce to VARCHAR. // We can't coerce using our formatter because we don't have enough context in PDataType. 
- ColumnProjector projector = rowProjector.getColumnProjector(columnIndex-1); + ColumnProjector projector = getRowProjector().getColumnProjector(columnIndex-1); Array value = (Array)projector.getValue(currentRow, projector.getExpression().getDataType(), ptr); wasNull = (value == null); return value; @@ -255,8 +291,8 @@ private void checkCursorState() throws SQLException { @Override public BigDecimal getBigDecimal(int columnIndex) throws SQLException { checkCursorState(); - BigDecimal value = (BigDecimal)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, - PDecimal.INSTANCE, ptr); + BigDecimal value = (BigDecimal)getRowProjector().getColumnProjector(columnIndex-1) + .getValue(currentRow, PDecimal.INSTANCE, ptr); wasNull = (value == null); return value; } @@ -303,7 +339,7 @@ public Blob getBlob(String columnLabel) throws SQLException { @Override public boolean getBoolean(int columnIndex) throws SQLException { checkCursorState(); - ColumnProjector colProjector = rowProjector.getColumnProjector(columnIndex-1); + ColumnProjector colProjector = getRowProjector().getColumnProjector(columnIndex-1); PDataType type = colProjector.getExpression().getDataType(); Object value = colProjector.getValue(currentRow, type, ptr); wasNull = (value == null); @@ -332,8 +368,8 @@ public boolean getBoolean(String columnLabel) throws SQLException { @Override public byte[] getBytes(int columnIndex) throws SQLException { checkCursorState(); - byte[] value = (byte[])rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, - PVarbinary.INSTANCE, ptr); + byte[] value = (byte[])getRowProjector().getColumnProjector(columnIndex-1) + .getValue(currentRow, PVarbinary.INSTANCE, ptr); wasNull = (value == null); return value; } @@ -347,7 +383,7 @@ public byte[] getBytes(String columnLabel) throws SQLException { public byte getByte(int columnIndex) throws SQLException { // throw new SQLFeatureNotSupportedException(); checkCursorState(); - Byte value = (Byte)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, + Byte value = (Byte)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow, PTinyint.INSTANCE, ptr); wasNull = (value == null); if (value == null) { @@ -394,7 +430,7 @@ public String getCursorName() throws SQLException { @Override public Date getDate(int columnIndex) throws SQLException { checkCursorState(); - Date value = (Date)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, + Date value = (Date)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow, PDate.INSTANCE, ptr); wasNull = (value == null); if (value == null) { @@ -411,7 +447,7 @@ public Date getDate(String columnLabel) throws SQLException { @Override public Date getDate(int columnIndex, Calendar cal) throws SQLException { checkCursorState(); - Date value = (Date)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, + Date value = (Date)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow, PDate.INSTANCE, ptr); wasNull = (value == null); if (wasNull) { @@ -429,8 +465,8 @@ public Date getDate(String columnLabel, Calendar cal) throws SQLException { @Override public double getDouble(int columnIndex) throws SQLException { checkCursorState(); - Double value = (Double)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, - PDouble.INSTANCE, ptr); + Double value = (Double)getRowProjector().getColumnProjector(columnIndex-1) + .getValue(currentRow, PDouble.INSTANCE, ptr); wasNull = (value == null); if (value == null) { return 0; @@ 
-456,8 +492,8 @@ public int getFetchSize() throws SQLException { @Override public float getFloat(int columnIndex) throws SQLException { checkCursorState(); - Float value = (Float)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, - PFloat.INSTANCE, ptr); + Float value = (Float)getRowProjector().getColumnProjector(columnIndex-1) + .getValue(currentRow, PFloat.INSTANCE, ptr); wasNull = (value == null); if (value == null) { return 0; @@ -478,8 +514,8 @@ public int getHoldability() throws SQLException { @Override public int getInt(int columnIndex) throws SQLException { checkCursorState(); - Integer value = (Integer)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, - PInteger.INSTANCE, ptr); + Integer value = (Integer)getRowProjector().getColumnProjector(columnIndex-1) + .getValue(currentRow, PInteger.INSTANCE, ptr); wasNull = (value == null); if (value == null) { return 0; @@ -495,7 +531,7 @@ public int getInt(String columnLabel) throws SQLException { @Override public long getLong(int columnIndex) throws SQLException { checkCursorState(); - Long value = (Long)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, + Long value = (Long)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow, PLong.INSTANCE, ptr); wasNull = (value == null); if (value == null) { @@ -511,7 +547,7 @@ public long getLong(String columnLabel) throws SQLException { @Override public ResultSetMetaData getMetaData() throws SQLException { - return new PhoenixResultSetMetaData(statement.getConnection(), rowProjector); + return new PhoenixResultSetMetaData(statement.getConnection(), getRowProjector()); } @Override @@ -547,7 +583,7 @@ public String getNString(String columnLabel) throws SQLException { @Override public Object getObject(int columnIndex) throws SQLException { checkCursorState(); - ColumnProjector projector = rowProjector.getColumnProjector(columnIndex-1); + ColumnProjector projector = getRowProjector().getColumnProjector(columnIndex-1); Object value = projector.getValue(currentRow, projector.getExpression().getDataType(), ptr); wasNull = (value == null); return value; @@ -607,7 +643,8 @@ public SQLXML getSQLXML(String columnLabel) throws SQLException { @Override public short getShort(int columnIndex) throws SQLException { checkCursorState(); - Short value = (Short)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, PSmallint.INSTANCE, ptr); + Short value = (Short)getRowProjector().getColumnProjector(columnIndex-1) + .getValue(currentRow, PSmallint.INSTANCE, ptr); wasNull = (value == null); if (value == null) { return 0; @@ -630,7 +667,7 @@ public String getString(int columnIndex) throws SQLException { checkCursorState(); // Get the value using the expected type instead of trying to coerce to VARCHAR. // We can't coerce using our formatter because we don't have enough context in PDataType. 
- ColumnProjector projector = rowProjector.getColumnProjector(columnIndex-1); + ColumnProjector projector = getRowProjector().getColumnProjector(columnIndex-1); PDataType type = projector.getExpression().getDataType(); Object value = projector.getValue(currentRow,type, ptr); if (wasNull = (value == null)) { @@ -651,7 +688,7 @@ public String getString(String columnLabel) throws SQLException { @Override public Time getTime(int columnIndex) throws SQLException { checkCursorState(); - Time value = (Time)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, + Time value = (Time)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow, PTime.INSTANCE, ptr); wasNull = (value == null); return value; @@ -665,7 +702,7 @@ public Time getTime(String columnLabel) throws SQLException { @Override public Time getTime(int columnIndex, Calendar cal) throws SQLException { checkCursorState(); - Time value = (Time)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, + Time value = (Time)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow, PTime.INSTANCE, ptr); wasNull = (value == null); if (value == null) { @@ -684,8 +721,8 @@ public Time getTime(String columnLabel, Calendar cal) throws SQLException { @Override public Timestamp getTimestamp(int columnIndex) throws SQLException { checkCursorState(); - Timestamp value = (Timestamp)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, - PTimestamp.INSTANCE, ptr); + Timestamp value = (Timestamp)getRowProjector().getColumnProjector(columnIndex-1) + .getValue(currentRow, PTimestamp.INSTANCE, ptr); wasNull = (value == null); return value; } @@ -713,7 +750,8 @@ public int getType() throws SQLException { @Override public URL getURL(int columnIndex) throws SQLException { checkCursorState(); - String value = (String)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, PVarchar.INSTANCE, ptr); + String value = (String)getRowProjector().getColumnProjector(columnIndex-1) + .getValue(currentRow, PVarchar.INSTANCE, ptr); wasNull = (value == null); if (value == null) { return null; @@ -807,8 +845,16 @@ public boolean next() throws SQLException { close(); }else{ count++; + // Reset this projector with each row + if (this.rowProjectorWithDynamicCols != null) { + this.rowProjectorWithDynamicCols = null; + } + processDynamicColumnsIfRequired(); } rowProjector.reset(); + if (rowProjectorWithDynamicCols != null) { + rowProjectorWithDynamicCols.reset(); + } } catch (RuntimeException e) { // FIXME: Expression.evaluate does not throw SQLException // so this will unwrap throws from that. 
@@ -1354,4 +1400,150 @@ public StatementContext getContext() { return context; } + /** + * Return the row projector to use + * @return the row projector including dynamic column projectors in case we are including + * dynamic columns, otherwise the regular row projector containing static column projectors + */ + private RowProjector getRowProjector() { + if (this.rowProjectorWithDynamicCols != null) { + return this.rowProjectorWithDynamicCols; + } + return this.rowProjector; + } + + /** + * Populate the static columns and the starting position for dynamic columns which we use when + * merging column projectors of static and dynamic columns + * @return Pair whose first part is the list of static column PColumns and the second part is + * the starting position for dynamic columns + */ + private Pair, Integer> getStaticColsAndStartingPosForDynCols(){ + List staticCols = new ArrayList<>(); + for (ColumnProjector cp : this.rowProjector.getColumnProjectors()) { + Expression exp = cp.getExpression(); + if (exp instanceof ProjectedColumnExpression) { + staticCols.addAll(((ProjectedColumnExpression) exp).getColumns()); + break; + } + } + int startingPosForDynCols = 0; + for (PColumn col : staticCols) { + if (!SchemaUtil.isPKColumn(col)) { + startingPosForDynCols++; + } + } + return new Pair<>(staticCols, startingPosForDynCols); + } + + /** + * Process the dynamic column metadata for the current row and store the complete projector for + * all static and dynamic columns for this row + */ + private void processDynamicColumnsIfRequired() { + if (!this.wildcardIncludesDynamicCols || this.currentRow == null || + !this.rowProjector.projectDynColsInWildcardQueries()) { + return; + } + List dynCols = getDynColsListAndSeparateFromActualData(); + if (dynCols == null) { + return; + } + + RowProjector rowProjectorWithDynamicColumns = null; + if (this.rowProjector.getColumnCount() > 0 && + dynCols.size() > 0) { + rowProjectorWithDynamicColumns = mergeRowProjectorWithDynColProjectors(dynCols, + this.rowProjector.getColumnProjector(0).getTableName()); + } + // Set the combined row projector + if (rowProjectorWithDynamicColumns != null) { + this.rowProjectorWithDynamicCols = rowProjectorWithDynamicColumns; + } + } + + /** + * Separate the actual cell data from the serialized list of dynamic column PColumns and + * return the deserialized list of dynamic column PColumns for the current row + * @return Deserialized list of dynamic column PColumns or null if there are no dynamic columns + */ + private List getDynColsListAndSeparateFromActualData() { + Cell base = this.currentRow.getValue(0); + final byte[] valueArray = CellUtil.cloneValue(base); + // We inserted the known byte array before appending the serialized list of dynamic columns + final byte[] anchor = Arrays.copyOf(DYN_COLS_METADATA_CELL_QUALIFIER, + DYN_COLS_METADATA_CELL_QUALIFIER.length); + // Reverse the arrays to find the last occurrence of the sub-array in the value array + ArrayUtils.reverse(valueArray); + ArrayUtils.reverse(anchor); + final int pos = valueArray.length - Bytes.indexOf(valueArray, anchor); + // There are no dynamic columns to process so return immediately + if (pos >= valueArray.length) { + return null; + } + ArrayUtils.reverse(valueArray); + + // Separate the serialized list of dynamic column PColumns from the actual cell data + byte[] actualCellDataBytes = Arrays.copyOfRange(valueArray, 0, + pos - DYN_COLS_METADATA_CELL_QUALIFIER.length); + ImmutableBytesWritable actualCellData = new ImmutableBytesWritable(actualCellDataBytes); + 
ImmutableBytesWritable key = new ImmutableBytesWritable(); + currentRow.getKey(key); + // Store only the actual cell data as part of the current row + this.currentRow = new TupleProjector.ProjectedValueTuple(key.get(), key.getOffset(), + key.getLength(), base.getTimestamp(), + actualCellData.get(), actualCellData.getOffset(), actualCellData.getLength(), 0); + + byte[] dynColsListBytes = Arrays.copyOfRange(valueArray, pos, valueArray.length); + List<PColumn> dynCols = new ArrayList<>(); + try { + List<PTableProtos.PColumn> dynColsProtos = DynamicColumnMetaDataProtos + .DynamicColumnMetaData.parseFrom(dynColsListBytes).getDynamicColumnsList(); + for (PTableProtos.PColumn colProto : dynColsProtos) { + dynCols.add(PColumnImpl.createFromProto(colProto)); + } + } catch (InvalidProtocolBufferException e) { + return null; + } + return dynCols; + } + + /** + * Add the dynamic column projectors at the end of the current row's row projector + * @param dynCols list of dynamic column PColumns for the current row + * @param tableName table name + * @return The combined row projector containing column projectors for both static and dynamic + * columns + */ + private RowProjector mergeRowProjectorWithDynColProjectors(List<PColumn> dynCols, + String tableName) { + List<ColumnProjector> allColumnProjectors = + new ArrayList<>(this.rowProjector.getColumnProjectors()); + List<PColumn> allCols = new ArrayList<>(); + if (this.staticColumns != null) { + allCols.addAll(this.staticColumns); + } + // Add dynamic columns to the end + allCols.addAll(dynCols); + + int startingPos = this.startPositionForDynamicCols; + // Get the ProjectedColumnExpressions for dynamic columns + for (PColumn currentDynCol : dynCols) { + // Note that we refer to all the existing static columns along with all dynamic columns + // in each of the newly added dynamic column projectors. 
+ // This is required for correctly building the schema for each of the dynamic columns + Expression exp = new ProjectedColumnExpression(currentDynCol, allCols, + startingPos++, currentDynCol.getName().getString()); + + ColumnProjector dynColProj = new ExpressionProjector( + currentDynCol.getName().getString(), tableName, exp, false); + allColumnProjectors.add(dynColProj); + } + + return new RowProjector(allColumnProjectors, this.rowProjector.getEstimatedRowByteSize(), + this.rowProjector.projectEveryRow(), this.rowProjector.hasUDFs(), + this.rowProjector.projectEverything(), + this.rowProjector.projectDynColsInWildcardQueries()); + } + } diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java index 8b8133e48ae..7e76d10739d 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java @@ -487,7 +487,10 @@ public QueryPlan compilePlan(PhoenixStatement stmt, Sequence.ValueOp seqAction) select = StatementNormalizer.normalize(transformedSelect, resolver); } - QueryPlan plan = new QueryCompiler(stmt, select, resolver, Collections.emptyList(), stmt.getConnection().getIteratorFactory(), new SequenceManager(stmt), true, false, null).compile(); + QueryPlan plan = new QueryCompiler(stmt, select, resolver, Collections.emptyList(), + stmt.getConnection().getIteratorFactory(), new SequenceManager(stmt), + true, false, null) + .compile(); plan.getContext().getSequenceManager().validateSequences(seqAction); return plan; } diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java index f33fb81b6f6..edf68d88446 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java @@ -60,6 +60,7 @@ import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RENEW_LEASE_THREAD_POOL_SIZE; import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RENEW_LEASE_THRESHOLD_MILLISECONDS; import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS; +import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB; import static org.apache.phoenix.util.UpgradeUtil.addParentToChildLinks; import static org.apache.phoenix.util.UpgradeUtil.addViewIndexToParentLinks; import static org.apache.phoenix.util.UpgradeUtil.getSysCatalogSnapshotName; @@ -121,6 +122,8 @@ import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.CoprocessorDescriptor; +import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Mutation; @@ -148,6 +151,7 @@ import org.apache.hadoop.hbase.zookeeper.ZKConfig; import org.apache.hadoop.ipc.RemoteException; import org.apache.phoenix.compile.MutationPlan; +import org.apache.phoenix.coprocessor.DynamicColumnWildcard; import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver; import org.apache.phoenix.coprocessor.MetaDataEndpointImpl; import 
org.apache.phoenix.coprocessor.MetaDataProtocol; @@ -934,6 +938,20 @@ private void addCoprocessors(byte[] tableName, TableDescriptorBuilder builder, P } } } + + boolean wildcardIncludesDynamicCols = props.getBoolean( + WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB); + // We register the coprocessor for dynamic columns to be exposed when issuing wildcard + // queries for PTableType.TABLE and if the corresponding config is true + if (tableType == PTableType.TABLE && wildcardIncludesDynamicCols && + !newDesc.hasCoprocessor(DynamicColumnWildcard.class.getName())) { + CoprocessorDescriptor cpDescDynColsWildCard = CoprocessorDescriptorBuilder + .newBuilder(DynamicColumnWildcard.class.getName()) + .setPriority(priority) + .build(); + builder.setCoprocessor(cpDescDynColsWildCard); + } + if ((SchemaUtil.isStatsTable(tableName) || SchemaUtil.isMetaTable(tableName)) && !newDesc.hasCoprocessor(MultiRowMutationEndpoint.class.getName())) { builder.addCoprocessor(MultiRowMutationEndpoint.class.getName(), diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServices.java b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServices.java index 8e177498512..eddff75df61 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServices.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServices.java @@ -319,6 +319,8 @@ public interface QueryServices extends SQLCloseable { // Whether to enable cost-based-decision in the query optimizer public static final String COST_BASED_OPTIMIZER_ENABLED = "phoenix.costbased.optimizer.enabled"; public static final String SMALL_SCAN_THRESHOLD_ATTRIB = "phoenix.query.smallScanThreshold"; + public static final String WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB = + "phoenix.query.wildcard.dynamicColumns"; public static final String LOG_LEVEL = "phoenix.log.level"; public static final String LOG_BUFFER_SIZE = "phoenix.log.buffer.size"; public static final String LOG_BUFFER_WAIT_STRATEGY = "phoenix.log.wait.strategy"; diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java index c5076302d6c..b8cfe1ff570 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java @@ -369,6 +369,7 @@ public class QueryServicesOptions { public static final boolean DEFAULT_ENABLE_SERVER_SIDE_MUTATIONS = true; public static final boolean DEFAULT_COST_BASED_OPTIMIZER_ENABLED = false; + public static final boolean DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB = false; public static final String DEFAULT_LOGGING_LEVEL = LogLevel.OFF.name(); public static final String DEFAULT_LOG_SAMPLE_RATE = "1.0"; public static final int DEFAULT_LOG_SALT_BUCKETS = 32; diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PRow.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PRow.java index fde83ba7c1e..1c2657c302c 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PRow.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PRow.java @@ -55,7 +55,15 @@ public interface PRow { * constraint */ public void setValue(PColumn col, byte[] value); - + + /** + * Set attributes for the Put operations involving dynamic columns. 
These attributes are + * persisted as cells under a reserved qualifier for the dynamic column metadata so that we + * can resolve them for wildcard queries without requiring the user to provide the data type + * of the dynamic columns. See PHOENIX-374 + */ + public void setAttributesForDynamicColumnsIfReqd(); + /** * Delete the row. Note that a delete take precedence over any * values that may have been set before or after the delete call. diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java index 6143bac0d46..e3d237a6cc1 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java @@ -20,6 +20,7 @@ import static org.apache.phoenix.hbase.index.util.KeyValueBuilder.addQuietly; import static org.apache.phoenix.hbase.index.util.KeyValueBuilder.deleteQuietly; import static org.apache.phoenix.schema.SaltingUtil.SALTING_COLUMN; +import static org.apache.phoenix.query.QueryConstants.NAME_SEPARATOR; import java.io.IOException; import java.sql.DriverManager; @@ -45,12 +46,15 @@ import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.phoenix.compile.ExpressionCompiler; import org.apache.phoenix.compile.StatementContext; +import org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos; import org.apache.phoenix.coprocessor.generated.PTableProtos; import org.apache.phoenix.exception.DataExceedsCapacityException; import org.apache.phoenix.expression.Expression; @@ -1124,9 +1128,11 @@ private class PRowImpl implements PRow { private final long ts; private final boolean hasOnDupKey; // map from column name to value - private Map columnToValueMap; + private Map columnToValueMap; + // map from the column family name to the list of dynamic columns in that column family + private Map> colFamToDynamicColumnsMapping; - public PRowImpl(KeyValueBuilder kvBuilder, ImmutableBytesWritable key, long ts, Integer bucketNum, boolean hasOnDupKey) { + PRowImpl(KeyValueBuilder kvBuilder, ImmutableBytesWritable key, long ts, Integer bucketNum, boolean hasOnDupKey) { this.kvBuilder = kvBuilder; this.ts = ts; this.hasOnDupKey = hasOnDupKey; @@ -1138,6 +1144,7 @@ public PRowImpl(KeyValueBuilder kvBuilder, ImmutableBytesWritable key, long ts, this.key = ByteUtil.copyKeyBytesIfNecessary(key); } this.columnToValueMap = Maps.newHashMapWithExpectedSize(1); + this.colFamToDynamicColumnsMapping = Maps.newHashMapWithExpectedSize(1); newMutations(); } @@ -1188,16 +1195,21 @@ public List toRowMutations() { ImmutableBytesWritable ptr = new ImmutableBytesWritable(); singleCellConstructorExpression.evaluate(null, ptr); ImmutableBytesPtr colFamilyPtr = new ImmutableBytesPtr(columnFamily); - addQuietly(put, kvBuilder, kvBuilder.buildPut(keyPtr, + addQuietly(put, kvBuilder.buildPut(keyPtr, colFamilyPtr, QueryConstants.SINGLE_KEYVALUE_COLUMN_QUALIFIER_BYTES_PTR, ts, ptr)); } + // Preserve the attributes of the original mutation + Map attrsMap = setValues.getAttributesMap(); setValues = put; + for (String attrKey : 
attrsMap.keySet()) { + setValues.setAttribute(attrKey, attrsMap.get(attrKey)); + } } // Because we cannot enforce a not null constraint on a KV column (since we don't know if the row exists when // we upsert it), so instead add a KV that is always empty. This allows us to imitate SQL semantics given the // way HBase works. Pair emptyKvInfo = EncodedColumnsUtil.getEmptyKeyValueInfo(PTableImpl.this); - addQuietly(setValues, kvBuilder, kvBuilder.buildPut(keyPtr, + addQuietly(setValues, kvBuilder.buildPut(keyPtr, SchemaUtil.getEmptyColumnFamilyPtr(PTableImpl.this), new ImmutableBytesPtr(emptyKvInfo.getFirst()), ts, new ImmutableBytesPtr(emptyKvInfo.getSecond()))); @@ -1270,10 +1282,40 @@ public void setValue(PColumn column, byte[] byteValue) { } else { removeIfPresent(unsetValues, family, qualifier); - addQuietly(setValues, kvBuilder, kvBuilder.buildPut(keyPtr, + addQuietly(setValues, kvBuilder.buildPut(keyPtr, column.getFamilyName().getBytesPtr(), qualifierPtr, ts, ptr)); } + String fam = Bytes.toString(family); + this.colFamToDynamicColumnsMapping.putIfAbsent(fam, new ArrayList<>()); + if (column.isDynamic()) { + this.colFamToDynamicColumnsMapping.get(fam).add(column); + } + } + } + + /** + * Add attributes to the Put mutations indicating that we need to add shadow cells to Puts + * to store dynamic column metadata. See + * {@link org.apache.phoenix.coprocessor.DynamicColumnWildcard#preBatchMutateWithExceptions} + */ + public void setAttributesForDynamicColumnsIfReqd() { + if (this.colFamToDynamicColumnsMapping == null) { + return; + } + for (Entry> colFamToDynColsList : + this.colFamToDynamicColumnsMapping.entrySet()) { + DynamicColumnMetaDataProtos.DynamicColumnMetaData.Builder builder = + DynamicColumnMetaDataProtos.DynamicColumnMetaData.newBuilder(); + for (PColumn dynCol : colFamToDynColsList.getValue()) { + builder.addDynamicColumns(PColumnImpl.toProto(dynCol)); + } + if (builder.getDynamicColumnsCount() != 0) { + // The attribute key is the column family name and the value is the + // serialized list of dynamic columns + setValues.setAttribute(colFamToDynColsList.getKey(), + builder.build().toByteArray()); + } } } diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/BaseTuple.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/BaseTuple.java index 8028eb22aae..058c0e48679 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/BaseTuple.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/BaseTuple.java @@ -38,6 +38,11 @@ public boolean isImmutable() { public void getKey(ImmutableBytesWritable ptr) { throw new UnsupportedOperationException(); } + + @Override + public Cell mergeWithDynColsListBytesAndGetValue(int index, byte[] dynColsList) { + throw new UnsupportedOperationException(); + } @Override public Cell getValue(int index) { diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/DelegateTuple.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/DelegateTuple.java index 3430f5b4437..7cd3acc6a7c 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/DelegateTuple.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/DelegateTuple.java @@ -44,6 +44,11 @@ public void getKey(ImmutableBytesWritable ptr) { delegate.getKey(ptr); } + @Override + public Cell mergeWithDynColsListBytesAndGetValue(int index, byte[] dynColsList) { + return delegate.mergeWithDynColsListBytesAndGetValue(index, dynColsList); + } + @Override public Cell getValue(int index) { return 
delegate.getValue(index); diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/Tuple.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/Tuple.java index e4a887bbe5e..d42cd2d3873 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/Tuple.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/Tuple.java @@ -51,7 +51,17 @@ public interface Tuple { * the key buffer. */ public void getKey(ImmutableBytesWritable ptr); - + + /** + * Get the KeyValue at the given index whose value is concatenated with the serialized list of + * dynamic column PColumns for that row key. + * @param index the zero-based KeyValue index between 0 and {@link #size()} exclusive + * @param dynColsList the serialized list of dynamic column PColumns + * @return the KeyValue at the given index + * @throws IndexOutOfBoundsException if an invalid index is used + */ + public Cell mergeWithDynColsListBytesAndGetValue(int index, byte[] dynColsList); + /** * Get the KeyValue at the given index. * @param index the zero-based KeyValue index between 0 and {@link #size()} exclusive diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/EncodedColumnsUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/EncodedColumnsUtil.java index e67ddebff07..d5e3db09d84 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/util/EncodedColumnsUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/util/EncodedColumnsUtil.java @@ -64,9 +64,9 @@ public static void setColumns(PColumn column, PTable table, Scan scan) { } } - public static final boolean useNewValueColumnQualifier(Scan s) { + public static boolean useNewValueColumnQualifier(Scan s) { // null check for backward compatibility - return s.getAttribute(BaseScannerRegionObserver.USE_NEW_VALUE_COLUMN_QUALIFIER) == null ? 
false : true; + return s.getAttribute(BaseScannerRegionObserver.USE_NEW_VALUE_COLUMN_QUALIFIER) != null; } public static QualifierEncodingScheme getQualifierEncodingScheme(Scan s) { diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/MetaDataUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/MetaDataUtil.java index a35409f750f..4b06d4650a2 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/util/MetaDataUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/util/MetaDataUtil.java @@ -460,7 +460,7 @@ public static boolean setMutationValue(Mutation headerRow, byte[] key, for (Cell cell : kvs) { KeyValue kv = org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValue(cell); if (builder.compareQualifier(kv, key, 0, key.length) ==0) { - KeyValueBuilder.addQuietly(headerRow, builder, keyValue); + KeyValueBuilder.addQuietly(headerRow, keyValue); return true; } } diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java index 85883ec86ef..d4f1177bb69 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java @@ -1154,7 +1154,7 @@ public static Object[] decodeColumnValues(Connection conn, String fullTableName, return values.toArray(); } - private static KeyValueSchema buildKeyValueSchema(List columns) { + public static KeyValueSchema buildKeyValueSchema(List columns) { KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(getMinNullableIndex(columns)); for (PColumn col : columns) { builder.addField(col); diff --git a/phoenix-core/src/test/java/org/apache/phoenix/util/MetaDataUtilTest.java b/phoenix-core/src/test/java/org/apache/phoenix/util/MetaDataUtilTest.java index 783417d0404..b16e401434c 100644 --- a/phoenix-core/src/test/java/org/apache/phoenix/util/MetaDataUtilTest.java +++ b/phoenix-core/src/test/java/org/apache/phoenix/util/MetaDataUtilTest.java @@ -174,7 +174,7 @@ public void testGetMutationKeyValue() throws Exception { KeyValue kv = builder.buildPut(wrap(ROW), wrap(TABLE_FAMILY_BYTES), wrap(QUALIFIER), wrap(ORIGINAL_VALUE)); Put put = new Put(ROW); - KeyValueBuilder.addQuietly(put, builder, kv); + KeyValueBuilder.addQuietly(put, kv); // read back out the value ImmutableBytesPtr ptr = new ImmutableBytesPtr(); @@ -189,7 +189,7 @@ public void testGetMutationKeyValue() throws Exception { byte[] value = Bytes.toBytes("client-value"); kv = builder.buildPut(wrap(ROW), wrap(TABLE_FAMILY_BYTES), wrap(QUALIFIER), wrap(value)); put = new Put(ROW); - KeyValueBuilder.addQuietly(put, builder, kv); + KeyValueBuilder.addQuietly(put, kv); // read back out the value assertTrue(MetaDataUtil.getMutationValue(put, QUALIFIER, builder, ptr)); @@ -244,7 +244,7 @@ private Put generateOriginalPut() { KeyValue kv = builder.buildPut(wrap(ROW), wrap(TABLE_FAMILY_BYTES), wrap(QUALIFIER), wrap(ORIGINAL_VALUE)); Put put = new Put(ROW); - KeyValueBuilder.addQuietly(put, builder, kv); + KeyValueBuilder.addQuietly(put, kv); return put; } diff --git a/phoenix-protocol/src/main/DynamicColumnMetaData.proto b/phoenix-protocol/src/main/DynamicColumnMetaData.proto new file mode 100644 index 00000000000..33466ee890b --- /dev/null +++ b/phoenix-protocol/src/main/DynamicColumnMetaData.proto @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +option java_package = "org.apache.phoenix.coprocessor.generated"; +option java_outer_classname = "DynamicColumnMetaDataProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "PTable.proto"; + +message DynamicColumnMetaData { + repeated PColumn dynamicColumns = 1; +} \ No newline at end of file
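[Editorial note] For orientation, a minimal round-trip sketch of the generated DynamicColumnMetaData message, tying the write path (PRowImpl.setAttributesForDynamicColumnsIfReqd serializing the per-family dynamic columns into a mutation attribute) to the read path (PhoenixResultSet deserializing the PColumn list that follows the DYN_COLS_METADATA_CELL_QUALIFIER marker in the projected value). The codec class and helper names below are illustrative; only the proto builder/parser calls and the PColumnImpl conversions come from the patch itself:

import java.util.ArrayList;
import java.util.List;

import org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos;
import org.apache.phoenix.coprocessor.generated.PTableProtos;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PColumnImpl;

import com.google.protobuf.InvalidProtocolBufferException;

// Illustrative helper, not part of the patch
final class DynColMetaDataCodec {

    // Write path: the bytes PRowImpl attaches as the mutation attribute value per column family
    static byte[] serialize(List<PColumn> dynCols) {
        DynamicColumnMetaDataProtos.DynamicColumnMetaData.Builder builder =
                DynamicColumnMetaDataProtos.DynamicColumnMetaData.newBuilder();
        for (PColumn dynCol : dynCols) {
            builder.addDynamicColumns(PColumnImpl.toProto(dynCol));
        }
        return builder.build().toByteArray();
    }

    // Read path: what PhoenixResultSet does with the serialized list it separates
    // from the actual cell data of the current row
    static List<PColumn> deserialize(byte[] dynColsListBytes) throws InvalidProtocolBufferException {
        List<PColumn> dynCols = new ArrayList<>();
        for (PTableProtos.PColumn colProto : DynamicColumnMetaDataProtos.DynamicColumnMetaData
                .parseFrom(dynColsListBytes).getDynamicColumnsList()) {
            dynCols.add(PColumnImpl.createFromProto(colProto));
        }
        return dynCols;
    }
}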