Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1,087 changes: 1,087 additions & 0 deletions 1

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion phoenix-assembly/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
<parent>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix</artifactId>
<version>5.0.0-HBase-2.0</version>
<version>5.0.0-HBase-2.1.0-cdh6.1.1</version>
</parent>
<artifactId>phoenix-assembly</artifactId>
<name>Phoenix Assembly</name>
Expand Down
2 changes: 1 addition & 1 deletion phoenix-client/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
<parent>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix</artifactId>
<version>5.0.0-HBase-2.0</version>
<version>5.0.0-HBase-2.1.0-cdh6.1.1</version>
</parent>
<artifactId>phoenix-client</artifactId>
<name>Phoenix Client</name>
Expand Down
6 changes: 5 additions & 1 deletion phoenix-core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix</artifactId>
<version>5.0.0-HBase-2.0</version>
<version>5.0.0-HBase-2.1.0-cdh6.1.1</version>
</parent>
<artifactId>phoenix-core</artifactId>
<name>Phoenix Core</name>
Expand Down Expand Up @@ -386,6 +386,10 @@
<groupId>xom</groupId>
<artifactId>xom</artifactId>
</exclusion>
<exclusion>
<groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- TODO remove after HBASE-19256 -->
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
import java.util.Map;
import java.util.Properties;

import org.apache.hadoop.hbase.util.Base64;
import java.util.Base64;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.query.QueryServices;
Expand Down Expand Up @@ -278,7 +278,7 @@ private String[] getRecordsOutofCursorTable(String tableOrViewName, boolean quer
values[i] = rs.getObject(i + 1);
}
conn = getTenantSpecificConnection(tenantId);
pkIds.add(Base64.encodeBytes(PhoenixRuntime.encodeColumnValues(conn, tableOrViewName.toUpperCase(), values, columns)));
pkIds.add(Base64.getEncoder().encodeToString(PhoenixRuntime.encodeColumnValues(conn, tableOrViewName.toUpperCase(), values, columns)));
}
return pkIds.toArray(new String[pkIds.size()]);
}
Expand All @@ -296,7 +296,7 @@ private List<String> doQueryMore(boolean queryAgainstTenantView, String tenantId
PreparedStatement stmt = conn.prepareStatement(query);
int bindCounter = 1;
for (int i = 0; i < cursorIds.length; i++) {
Object[] pkParts = PhoenixRuntime.decodeColumnValues(conn, tableName.toUpperCase(), Base64.decode(cursorIds[i]), columns);
Object[] pkParts = PhoenixRuntime.decodeColumnValues(conn, tableName.toUpperCase(), Base64.getDecoder().decode(cursorIds[i]), columns);
for (int j = 0; j < pkParts.length; j++) {
stmt.setObject(bindCounter++, pkParts[j]);
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.hbase.index.covered.data;

import java.util.Comparator;
import java.util.Objects;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;

/**
 * A {@link CellComparator} that forwards every operation to a wrapped delegate
 * comparator.
 *
 * <p>Intended as a convenient base class: callers subclass it and override a
 * single method (for example {@link #compare(Cell, Cell)}) while every other
 * comparison keeps the delegate's semantics. Thread-safety is that of the
 * wrapped delegate; this wrapper itself holds no mutable state.
 */
public class DelegateComparator implements CellComparator {

  /** Comparator all calls are forwarded to; never null. */
  private final CellComparator delegate;

  /**
   * Creates a comparator forwarding to {@code delegate}.
   *
   * @param delegate the comparator to forward to, must not be null
   * @throws NullPointerException if {@code delegate} is null
   */
  public DelegateComparator(CellComparator delegate) {
    // Fail fast here rather than with a delayed NPE on first comparison.
    this.delegate = Objects.requireNonNull(delegate, "delegate");
  }

  @Override
  public int compare(Cell leftCell, Cell rightCell) {
    return delegate.compare(leftCell, rightCell);
  }

  @Override
  public int compareRows(Cell leftCell, Cell rightCell) {
    return delegate.compareRows(leftCell, rightCell);
  }

  @Override
  public int compareRows(Cell cell, byte[] bytes, int offset, int length) {
    return delegate.compareRows(cell, bytes, offset, length);
  }

  @Override
  public int compareWithoutRow(Cell leftCell, Cell rightCell) {
    return delegate.compareWithoutRow(leftCell, rightCell);
  }

  @Override
  public int compareFamilies(Cell leftCell, Cell rightCell) {
    return delegate.compareFamilies(leftCell, rightCell);
  }

  @Override
  public int compareQualifiers(Cell leftCell, Cell rightCell) {
    return delegate.compareQualifiers(leftCell, rightCell);
  }

  @Override
  public int compareTimestamps(Cell leftCell, Cell rightCell) {
    return delegate.compareTimestamps(leftCell, rightCell);
  }

  @Override
  public int compareTimestamps(long leftCellts, long rightCellts) {
    return delegate.compareTimestamps(leftCellts, rightCellts);
  }

  @Override
  public int compare(Cell leftCell, Cell rightCell, boolean ignoreSequenceid) {
    return delegate.compare(leftCell, rightCell, ignoreSequenceid);
  }

  @Override
  public Comparator getSimpleComparator() {
    // Raw Comparator return type mirrors the CellComparator interface
    // declaration in HBase 2.x; do not generify without changing the interface.
    return delegate.getSimpleComparator();
  }

}
Original file line number Diff line number Diff line change
Expand Up @@ -79,10 +79,10 @@ public class IndexMemStore implements KeyValueStore {
private CellComparator comparator;

public IndexMemStore() {
this(new CellComparatorImpl(){
this(new DelegateComparator(new CellComparatorImpl()){
@Override
public int compare(Cell a, Cell b) {
return super.compare(a, b, true);
public int compare(Cell leftCell, Cell rightCell) {
return super.compare(leftCell, rightCell, true);
}
});
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.Base64;
import java.util.Base64;
import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.query.QueryServices;
Expand Down Expand Up @@ -68,7 +68,7 @@ public static void configurePreUpsertProcessor(Configuration conf,

@VisibleForTesting
static void setChar(Configuration conf, String confKey, char charValue) {
conf.set(confKey, Base64.encodeBytes(Character.toString(charValue).getBytes()));
conf.set(confKey, Base64.getEncoder().encodeToString(Character.toString(charValue).getBytes()));
}

@VisibleForTesting
Expand All @@ -77,7 +77,7 @@ static Character getCharacter(Configuration conf, String confKey) {
if (strValue == null) {
return null;
}
return new String(Base64.decode(strValue)).charAt(0);
return new String(Base64.getDecoder().decode(strValue)).charAt(0);
}

public static Path getOutputPath(Path outputdir, String tableName) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Base64;
import java.util.Base64;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
Expand Down Expand Up @@ -506,14 +506,14 @@ public static ImportPreUpsertKeyValueProcessor loadPreUpsertProcessor(Configurat

public static byte[] getIndexMaintainers(final Configuration configuration){
Preconditions.checkNotNull(configuration);
return Base64.decode(configuration.get(INDEX_MAINTAINERS));
return Base64.getDecoder().decode(configuration.get(INDEX_MAINTAINERS));
}

public static void setIndexMaintainers(final Configuration configuration,
final ImmutableBytesWritable indexMetaDataPtr) {
Preconditions.checkNotNull(configuration);
Preconditions.checkNotNull(indexMetaDataPtr);
configuration.set(INDEX_MAINTAINERS, Base64.encodeBytes(indexMetaDataPtr.get()));
configuration.set(INDEX_MAINTAINERS, Base64.getEncoder().encodeToString(indexMetaDataPtr.get()));
}

public static void setDisableIndexes(Configuration configuration, String indexName) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
import java.sql.Types;
import java.text.Format;

import org.apache.hadoop.hbase.util.Base64;
import java.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.util.ByteUtil;
Expand Down Expand Up @@ -131,7 +131,7 @@ public Object toObject(String value) {
if (value == null || value.length() == 0) {
return null;
}
Object object = Base64.decode(value);
Object object = Base64.getDecoder().decode(value);
if (object == null) { throw newIllegalDataException(
"Input: [" + value + "] is not base64 encoded"); }
return object;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
import javax.annotation.Nullable;

import org.apache.commons.csv.CSVRecord;
import org.apache.hadoop.hbase.util.Base64;
import java.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.expression.function.EncodeFormat;
import org.apache.phoenix.query.QueryServices;
Expand Down Expand Up @@ -189,7 +189,7 @@ public Object apply(@Nullable String input) {
Object object = null;
switch (format) {
case BASE64:
object = Base64.decode(input);
object = Base64.getDecoder().decode(input);
if (object == null) { throw new IllegalDataException(
"Input: [" + input + "] is not base64 encoded"); }
break;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@

import javax.annotation.Nullable;

import org.apache.hadoop.hbase.util.Base64;
import java.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.expression.function.EncodeFormat;
import org.apache.phoenix.query.QueryServices;
Expand Down Expand Up @@ -212,7 +212,7 @@ public Object apply(@Nullable Object input) {
Object object = null;
switch (format) {
case BASE64:
object = Base64.decode(input.toString());
object = Base64.getDecoder().decode(input.toString());
if (object == null) { throw new IllegalDataException(
"Input: [" + input + "] is not base64 encoded"); }
break;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,10 +39,10 @@ public class TestIndexMemStore {

@Test
public void testCorrectOverwritting() throws Exception {
IndexMemStore store = new IndexMemStore(new CellComparatorImpl(){
IndexMemStore store = new IndexMemStore(new DelegateComparator(new CellComparatorImpl()){
@Override
public int compare(Cell a, Cell b) {
return super.compare(a, b, true);
public int compare(Cell leftCell, Cell rightCell) {
return super.compare(leftCell, rightCell, true);
}
});
long ts = 10;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.hbase.util.Base64;
import java.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.query.BaseConnectionlessQueryTest;
import org.apache.phoenix.query.QueryServices;
Expand Down Expand Up @@ -81,7 +81,7 @@ public void tearDown() throws SQLException {
@Test
public void testExecute() throws Exception {
byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue();
String encodedBinaryData = Base64.encodeBytes(binaryData);
String encodedBinaryData = Base64.getEncoder().encodeToString(binaryData);
getUpsertExecutor().execute(createRecord(123L, "NameValue", 42,
Arrays.asList(1, 2, 3), true, encodedBinaryData));

Expand Down Expand Up @@ -110,7 +110,7 @@ public void testExecute_TooFewFields() throws Exception {
@Test
public void testExecute_TooManyFields() throws Exception {
byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue();
String encodedBinaryData = Base64.encodeBytes(binaryData);
String encodedBinaryData = Base64.getEncoder().encodeToString(binaryData);
R recordWithTooManyFields = createRecord(123L, "NameValue", 42, Arrays.asList(1, 2, 3),
true, encodedBinaryData, "garbage");
getUpsertExecutor().execute(recordWithTooManyFields);
Expand All @@ -131,7 +131,7 @@ public void testExecute_TooManyFields() throws Exception {
@Test
public void testExecute_NullField() throws Exception {
byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue();
String encodedBinaryData = Base64.encodeBytes(binaryData);
String encodedBinaryData = Base64.getEncoder().encodeToString(binaryData);
getUpsertExecutor().execute(createRecord(123L, "NameValue", null,
Arrays.asList(1, 2, 3), false, encodedBinaryData));

Expand All @@ -151,7 +151,7 @@ public void testExecute_NullField() throws Exception {
@Test
public void testExecute_InvalidType() throws Exception {
byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue();
String encodedBinaryData = Base64.encodeBytes(binaryData);
String encodedBinaryData = Base64.getEncoder().encodeToString(binaryData);
R recordWithInvalidType = createRecord(123L, "NameValue", "ThisIsNotANumber",
Arrays.asList(1, 2, 3), true, encodedBinaryData);
getUpsertExecutor().execute(recordWithInvalidType);
Expand All @@ -163,7 +163,7 @@ public void testExecute_InvalidType() throws Exception {
@Test
public void testExecute_InvalidBoolean() throws Exception {
byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue();
String encodedBinaryData = Base64.encodeBytes(binaryData);
String encodedBinaryData = Base64.getEncoder().encodeToString(binaryData);
R csvRecordWithInvalidType = createRecord("123,NameValue,42,1:2:3,NotABoolean,"+encodedBinaryData);
getUpsertExecutor().execute(csvRecordWithInvalidType);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
import static org.junit.Assert.fail;

import java.sql.SQLException;
import org.apache.hadoop.hbase.util.Base64;
import java.util.Base64;
import java.util.Collection;
import java.util.List;

Expand Down Expand Up @@ -201,7 +201,7 @@ public static Collection<Object[]> data() {

//Binary
byte[] bytes = new byte[] {0, 1, 2, 3};
String byteString = new String( Base64.encodeBytes(bytes) );
String byteString = new String( Base64.getEncoder().encode(bytes) );
testCases.add(new Object[] {
getDataSchema(PBinary.INSTANCE, SortOrder.getDefault()),
false,
Expand Down
2 changes: 1 addition & 1 deletion phoenix-flume/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
<parent>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix</artifactId>
<version>5.0.0-HBase-2.0</version>
<version>5.0.0-HBase-2.1.0-cdh6.1.1</version>
</parent>
<artifactId>phoenix-flume</artifactId>
<name>Phoenix - Flume</name>
Expand Down
2 changes: 1 addition & 1 deletion phoenix-hive/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
<parent>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix</artifactId>
<version>5.0.0-HBase-2.0</version>
<version>5.0.0-HBase-2.1.0-cdh6.1.1</version>
</parent>
<artifactId>phoenix-hive</artifactId>
<name>Phoenix - Hive</name>
Expand Down
2 changes: 1 addition & 1 deletion phoenix-kafka/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
<parent>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix</artifactId>
<version>5.0.0-HBase-2.0</version>
<version>5.0.0-HBase-2.1.0-cdh6.1.1</version>
</parent>
<artifactId>phoenix-kafka</artifactId>
<name>Phoenix - Kafka</name>
Expand Down
2 changes: 1 addition & 1 deletion phoenix-load-balancer/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
<parent>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix</artifactId>
<version>5.0.0-HBase-2.0</version>
<version>5.0.0-HBase-2.1.0-cdh6.1.1</version>
</parent>
<artifactId>phoenix-load-balancer</artifactId>
<name>Phoenix Load Balancer</name>
Expand Down
Loading