Skip to content

Commit

Permalink
PHOENIX-1739 Ensure RuntimeException nests SQLException so that prope…
Browse files Browse the repository at this point in the history
…r exception gets thrown
  • Loading branch information
jtaylor-sfdc committed Mar 17, 2015
1 parent e14fd32 commit 86ed110
Show file tree
Hide file tree
Showing 16 changed files with 62 additions and 54 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,14 @@

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.phoenix.schema.IllegalDataException;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.junit.Test;

/**
Expand Down Expand Up @@ -115,7 +117,7 @@ public void nullInSecondTimezoneParameter() throws Exception {
assertTrue(rs.wasNull());
}

@Test(expected=IllegalDataException.class)
@Test
public void unknownTimezone() throws Exception {
Connection conn = DriverManager.getConnection(getUrl());
String ddl = "CREATE TABLE IF NOT EXISTS TIMEZONE_OFFSET_TEST (k1 INTEGER NOT NULL, dates DATE CONSTRAINT pk PRIMARY KEY (k1))";
Expand All @@ -124,11 +126,15 @@ public void unknownTimezone() throws Exception {
conn.createStatement().execute(dml);
conn.commit();

ResultSet rs = conn.createStatement().executeQuery(
"SELECT k1, dates, CONVERT_TZ(dates, 'UNKNOWN_TIMEZONE', 'America/Adak') FROM TIMEZONE_OFFSET_TEST");

rs.next();

rs.getDate(3).getTime();
try {
ResultSet rs = conn.createStatement().executeQuery(
"SELECT k1, dates, CONVERT_TZ(dates, 'UNKNOWN_TIMEZONE', 'America/Adak') FROM TIMEZONE_OFFSET_TEST");

rs.next();
rs.getDate(3).getTime();
fail();
} catch (SQLException e) {
assertEquals(SQLExceptionCode.ILLEGAL_DATA.getErrorCode(), e.getErrorCode());
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,7 @@
import java.util.Collections;
import java.util.List;

import org.apache.phoenix.expression.function.EncodeFunction;
import org.apache.phoenix.schema.IllegalDataException;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.util.TestUtil;
import org.junit.Test;

Expand Down Expand Up @@ -120,8 +119,8 @@ public void testNullEncodingType() throws Exception {
try {
conn.createStatement().executeQuery("SELECT * FROM TEST_TABLE WHERE pk = ENCODE(1, NULL)");
fail();
} catch (IllegalDataException e) {
assertEquals("Unexpected exception message", e.getMessage(), EncodeFunction.getMissingEncodeFormatMsg());
} catch (SQLException e) {
assertEquals(SQLExceptionCode.ILLEGAL_DATA.getErrorCode(), e.getErrorCode());
}
}

Expand All @@ -134,9 +133,8 @@ public void testUnsupportedEncodingType() throws Exception {
try {
conn.createStatement().executeQuery("SELECT * FROM TEST_TABLE WHERE pk = ENCODE(1, 'HEX')");
fail();
} catch (IllegalDataException e) {
assertEquals("Unexpected exception message", e.getMessage(),
EncodeFunction.getUnsupportedEncodeFormatMsg("HEX"));
} catch (SQLException e) {
assertEquals(SQLExceptionCode.ILLEGAL_DATA.getErrorCode(), e.getErrorCode());
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,12 @@
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.TimeZone;

import org.apache.phoenix.schema.IllegalDataException;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.junit.Test;


Expand Down Expand Up @@ -74,12 +75,9 @@ public void testUnknownTimezone() throws Exception {
rs.next();
assertEquals(0, rs.getInt(3));
fail();
} catch (IllegalDataException e) {
assertTrue(true);
return;
}
fail();

} catch (SQLException e) {
assertEquals(SQLExceptionCode.ILLEGAL_DATA.getErrorCode(), e.getErrorCode());
}
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -884,8 +884,8 @@ public void testMissingPKColumn() throws Exception {
try {
stmt.execute("upsert into PTSDB(INST,HOST,VAL) VALUES ('abc', 'abc-def-ghi', 0.5)");
fail();
} catch (ConstraintViolationException e) {
assertTrue(e.getMessage().contains("may not be null"));
} catch (SQLException e) {
assertEquals(SQLExceptionCode.CONSTRAINT_VIOLATION.getErrorCode(), e.getErrorCode());
} finally {
conn.close();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.WritableUtils;
import org.apache.phoenix.coprocessor.generated.PTableProtos;
import org.apache.phoenix.exception.ValueTypeIncompatibleException;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.execute.TupleProjector;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.expression.ExpressionType;
Expand Down Expand Up @@ -322,7 +322,7 @@ protected RegionScanner doPostScannerOpen(final ObserverContext<RegionCoprocesso
column.getDataType(), expression.getMaxLength(),
expression.getScale(), column.getMaxLength(),
column.getScale())) {
throw new ValueTypeIncompatibleException(
throw new DataExceedsCapacityException(
column.getDataType(), column.getMaxLength(),
column.getScale());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,16 @@
import org.apache.phoenix.schema.types.PDataType;


public class ValueTypeIncompatibleException extends IllegalDataException {
public class DataExceedsCapacityException extends IllegalDataException {
private static final long serialVersionUID = 1L;
private static SQLExceptionCode code = SQLExceptionCode.DATA_EXCEEDS_MAX_CAPACITY;

public ValueTypeIncompatibleException(PDataType type, Integer precision, Integer scale) {
super(new SQLExceptionInfo.Builder(code).setMessage(getTypeDisplayString(type, precision, scale))
public DataExceedsCapacityException(String message) {
super(new SQLExceptionInfo.Builder(
SQLExceptionCode.DATA_EXCEEDS_MAX_CAPACITY).setMessage(message).build().buildException());
}

public DataExceedsCapacityException(PDataType type, Integer precision, Integer scale) {
super(new SQLExceptionInfo.Builder(SQLExceptionCode.DATA_EXCEEDS_MAX_CAPACITY).setMessage(getTypeDisplayString(type, precision, scale))
.build().buildException());
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ public SQLException newException(SQLExceptionInfo info) {
SINGLE_ROW_SUBQUERY_RETURNS_MULTIPLE_ROWS(215, "22015", "Single-row sub-query returns more than one row."),
SUBQUERY_RETURNS_DIFFERENT_NUMBER_OF_FIELDS(216, "22016", "Sub-query must return the same number of fields as the left-hand-side expression of 'IN'."),
AMBIGUOUS_JOIN_CONDITION(217, "22017", "Ambiguous or non-equi join condition specified. Consider using table list with where clause."),
CONSTRAINT_VIOLATION(218, "22018", "Constraint violation."),

/**
* Constraint Violation (errorcode 03, sqlstate 23)
Expand Down Expand Up @@ -313,7 +314,7 @@ public SQLException newException(SQLExceptionInfo info) {
return new SQLTimeoutException(OPERATION_TIMED_OUT.getMessage(),
OPERATION_TIMED_OUT.getSQLState(), OPERATION_TIMED_OUT.getErrorCode());
}
})
}),
;

private final int errorCode;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import java.util.List;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.exception.ValueTypeIncompatibleException;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PDataType;
Expand Down Expand Up @@ -74,7 +74,7 @@ public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
result = NumberUtil.setDecimalWidthAndScale(result, maxLength, scale);
}
if (result == null) {
throw new ValueTypeIncompatibleException(PDecimal.INSTANCE, maxLength, scale);
throw new DataExceedsCapacityException(PDecimal.INSTANCE, maxLength, scale);
}
ptr.set(PDecimal.INSTANCE.toBytes(result));
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import java.util.List;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.exception.ValueTypeIncompatibleException;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PDataType;
Expand Down Expand Up @@ -64,7 +64,7 @@ public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
result = NumberUtil.setDecimalWidthAndScale(result, getMaxLength(), getScale());
}
if (result == null) {
throw new ValueTypeIncompatibleException(PDecimal.INSTANCE, getMaxLength(), getScale());
throw new DataExceedsCapacityException(PDecimal.INSTANCE, getMaxLength(), getScale());
}
ptr.set(PDecimal.INSTANCE.toBytes(result));
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import java.util.List;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.exception.ValueTypeIncompatibleException;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PDataType;
Expand Down Expand Up @@ -64,7 +64,7 @@ public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
result = NumberUtil.setDecimalWidthAndScale(result, getMaxLength(), getScale());
}
if (result == null) {
throw new ValueTypeIncompatibleException(PDecimal.INSTANCE, getMaxLength(), getScale());
throw new DataExceedsCapacityException(PDecimal.INSTANCE, getMaxLength(), getScale());
}
ptr.set(PDecimal.INSTANCE.toBytes(result));
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import java.util.List;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.exception.ValueTypeIncompatibleException;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PDataType;
Expand Down Expand Up @@ -92,7 +92,7 @@ public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
result = NumberUtil.setDecimalWidthAndScale(result, maxLength, scale);
}
if (result == null) {
throw new ValueTypeIncompatibleException(PDecimal.INSTANCE, maxLength, scale);
throw new DataExceedsCapacityException(PDecimal.INSTANCE, maxLength, scale);
}
ptr.set(PDecimal.INSTANCE.toBytes(result));
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ public ConstraintViolationException() {

public ConstraintViolationException(String message) {
super(new SQLExceptionInfo.Builder(
SQLExceptionCode.DATA_EXCEEDS_MAX_CAPACITY).setMessage(message).build().buildException());
SQLExceptionCode.CONSTRAINT_VIOLATION).setMessage(message).build().buildException());
}

public ConstraintViolationException(Throwable cause) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
import org.apache.phoenix.coprocessor.generated.PGuidePostsProtos;
import org.apache.phoenix.coprocessor.generated.PGuidePostsProtos.PGuidePosts;
import org.apache.phoenix.coprocessor.generated.PTableProtos;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
import org.apache.phoenix.index.IndexMaintainer;
Expand Down Expand Up @@ -521,7 +522,7 @@ public int newKey(ImmutableBytesWritable key, byte[][] values) {
if (maxLength != null && type.isFixedWidth() && byteValue.length <= maxLength) {
byteValue = StringUtil.padChar(byteValue, maxLength);
} else if (maxLength != null && byteValue.length > maxLength) {
throw new ConstraintViolationException(name.getString() + "." + column.getName().getString() + " may not exceed " + maxLength + " bytes (" + SchemaUtil.toString(type, byteValue) + ")");
throw new DataExceedsCapacityException(name.getString() + "." + column.getName().getString() + " may not exceed " + maxLength + " bytes (" + SchemaUtil.toString(type, byteValue) + ")");
}
os.write(byteValue, 0, byteValue.length);
}
Expand Down Expand Up @@ -704,7 +705,7 @@ public void setValue(PColumn column, byte[] byteValue) {
if (ptr.getLength() <= maxLength) {
type.pad(ptr, maxLength);
} else if (ptr.getLength() > maxLength) {
throw new ConstraintViolationException(name.getString() + "." + column.getName().getString() + " may not exceed " + maxLength + " bytes (" + type.toObject(byteValue) + ")");
throw new DataExceedsCapacityException(name.getString() + "." + column.getName().getString() + " may not exceed " + maxLength + " bytes (" + type.toObject(byteValue) + ")");
}
}
removeIfPresent(unsetValues, family, qualifier);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.exception.ValueTypeIncompatibleException;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.schema.SortOrder;

public class PBinary extends PDataType<byte[]> {
Expand Down Expand Up @@ -54,7 +54,7 @@ public Object pad(Object object, Integer maxLength) {
return object;
}
if (b.length > maxLength) {
throw new ValueTypeIncompatibleException(this, maxLength, null);
throw new DataExceedsCapacityException(this, maxLength, null);
}
byte[] newBytes = new byte[maxLength];
System.arraycopy(b, 0, newBytes, 0, b.length);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.exception.ValueTypeIncompatibleException;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.util.StringUtil;

Expand Down Expand Up @@ -61,7 +61,7 @@ public Object pad(Object object, Integer maxLength) {
return object;
}
if (s.length() > maxLength) {
throw new ValueTypeIncompatibleException(this,maxLength,null);
throw new DataExceedsCapacityException(this,maxLength,null);
}
return Strings.padEnd(s, maxLength, ' ');
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
import java.util.List;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.exception.ValueTypeIncompatibleException;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.expression.function.RandomFunction;
import org.apache.phoenix.expression.visitor.CloneExpressionVisitor;
import org.apache.phoenix.schema.types.PDataType;
Expand Down Expand Up @@ -68,7 +68,7 @@ public void testDecimalAddition() throws Exception {
try {
e.evaluate(null, new ImmutableBytesWritable());
fail("Evaluation should have failed");
} catch (ValueTypeIncompatibleException ex) {
} catch (DataExceedsCapacityException ex) {
}

// Pass since we roll out imposing precision and scale.
Expand All @@ -87,7 +87,7 @@ public void testDecimalAddition() throws Exception {
try {
e.evaluate(null, new ImmutableBytesWritable());
fail("Evaluation should have failed");
} catch (ValueTypeIncompatibleException ex) {
} catch (DataExceedsCapacityException ex) {
}

// Decimal with no precision and scale.
Expand Down Expand Up @@ -141,7 +141,7 @@ public void testDecimalSubtraction() throws Exception {
try {
e.evaluate(null, new ImmutableBytesWritable());
fail("Evaluation should have failed");
} catch (ValueTypeIncompatibleException ex) {
} catch (DataExceedsCapacityException ex) {
}

// Pass since we roll up precision and scale imposing.
Expand All @@ -160,7 +160,7 @@ public void testDecimalSubtraction() throws Exception {
try {
e.evaluate(null, new ImmutableBytesWritable());
fail("Evaluation should have failed");
} catch (ValueTypeIncompatibleException ex) {
} catch (DataExceedsCapacityException ex) {
}

// Decimal with no precision and scale.
Expand Down Expand Up @@ -194,7 +194,7 @@ public void testDecimalMultiplication() throws Exception {
try {
e.evaluate(null, new ImmutableBytesWritable());
fail("Evaluation should have failed");
} catch (ValueTypeIncompatibleException ex) {
} catch (DataExceedsCapacityException ex) {
}

// Values exceeds scale.
Expand All @@ -205,7 +205,7 @@ public void testDecimalMultiplication() throws Exception {
try {
e.evaluate(null, new ImmutableBytesWritable());
fail("Evaluation should have failed");
} catch (ValueTypeIncompatibleException ex) {
} catch (DataExceedsCapacityException ex) {
}

// Decimal with no precision and scale.
Expand Down Expand Up @@ -243,7 +243,7 @@ public void testDecimalDivision() throws Exception {
try {
e.evaluate(null, new ImmutableBytesWritable());
fail("Evaluation should have failed");
} catch (ValueTypeIncompatibleException ex) {
} catch (DataExceedsCapacityException ex) {
}

// Decimal with no precision and scale.
Expand Down

0 comments on commit 86ed110

Please sign in to comment.