Skip to content

Commit

Permalink
SQL: Adjust the precision and scale for drivers (#40467)
Browse files Browse the repository at this point in the history
Fix #40357
  • Loading branch information
costin committed Mar 27, 2019
1 parent fed3580 commit 1557d77
Show file tree
Hide file tree
Showing 16 changed files with 62 additions and 62 deletions.
6 changes: 3 additions & 3 deletions docs/reference/sql/language/data-types.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,9 @@ s|SQL precision
| <<number, `long`>> | long | BIGINT | 19
| <<number, `double`>> | double | DOUBLE | 15
| <<number, `float`>> | float | REAL | 7
| <<number, `half_float`>> | half_float | FLOAT | 16
| <<number, `scaled_float`>> | scaled_float | FLOAT | 19
| <<keyword, `keyword`>> | keyword | VARCHAR | based on <<ignore-above>>
| <<number, `half_float`>> | half_float | FLOAT | 3
| <<number, `scaled_float`>> | scaled_float | DOUBLE | 15
| <<keyword, `keyword`>> | keyword | VARCHAR | 32,766
| <<text, `text`>> | text | VARCHAR | 2,147,483,647
| <<binary, `binary`>> | binary | VARBINARY | 2,147,483,647
| <<date, `date`>> | datetime | TIMESTAMP | 24
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,10 @@
import java.util.Map;
import java.util.stream.Collectors;

import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.SQL_QUERY_REST_ENDPOINT;
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.columnInfo;
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.mode;
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.randomMode;
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.SQL_QUERY_REST_ENDPOINT;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
Expand Down Expand Up @@ -101,9 +101,9 @@ public void expectDescribe(Map<String, List<String>> columns, String user) throw
String mode = randomMode();
Map<String, Object> expected = new HashMap<>(3);
expected.put("columns", Arrays.asList(
columnInfo(mode, "column", "keyword", JDBCType.VARCHAR, 0),
columnInfo(mode, "type", "keyword", JDBCType.VARCHAR, 0),
columnInfo(mode, "mapping", "keyword", JDBCType.VARCHAR, 0)));
columnInfo(mode, "column", "keyword", JDBCType.VARCHAR, 32766),
columnInfo(mode, "type", "keyword", JDBCType.VARCHAR, 32766),
columnInfo(mode, "mapping", "keyword", JDBCType.VARCHAR, 32766)));
List<List<String>> rows = new ArrayList<>(columns.size());
for (Map.Entry<String, List<String>> column : columns.entrySet()) {
List<String> cols = new ArrayList<>();
Expand All @@ -120,8 +120,8 @@ public void expectDescribe(Map<String, List<String>> columns, String user) throw
public void expectShowTables(List<String> tables, String user) throws Exception {
String mode = randomMode();
List<Object> columns = new ArrayList<>();
columns.add(columnInfo(mode, "name", "keyword", JDBCType.VARCHAR, 0));
columns.add(columnInfo(mode, "type", "keyword", JDBCType.VARCHAR, 0));
columns.add(columnInfo(mode, "name", "keyword", JDBCType.VARCHAR, 32766));
columns.add(columnInfo(mode, "type", "keyword", JDBCType.VARCHAR, 32766));
Map<String, Object> expected = new HashMap<>();
expected.put("columns", columns);
List<List<String>> rows = new ArrayList<>();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,10 @@
import java.util.List;
import java.util.Map;

import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.SQL_QUERY_REST_ENDPOINT;
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.columnInfo;
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.mode;
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.randomMode;
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.SQL_QUERY_REST_ENDPOINT;

public class UserFunctionIT extends ESRestTestCase {

Expand Down Expand Up @@ -81,7 +81,7 @@ public void testSingleRandomUser() throws IOException {

Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 0)));
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName)));
Map<String, Object> actual = runSql(randomUserName, mode, SQL);

Expand All @@ -97,7 +97,7 @@ public void testSingleRandomUserWithWhereEvaluatingTrue() throws IOException {

Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 0)));
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName),
Arrays.asList(randomUserName),
Arrays.asList(randomUserName)));
Expand All @@ -114,7 +114,7 @@ public void testSingleRandomUserWithWhereEvaluatingFalse() throws IOException {

Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 0)));
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Collections.<ArrayList<String>>emptyList());
String anotherRandomUserName = randomValueOtherThan(randomUserName, () -> randomAlphaOfLengthBetween(1, 15));
Map<String, Object> actual = runSql(randomUserName, mode, SQL + " FROM test WHERE USER()='" + anotherRandomUserName + "' LIMIT 3");
Expand All @@ -129,7 +129,7 @@ public void testMultipleRandomUsersAccess() throws IOException {
Map<String, Object> expected = new HashMap<>();

expected.put("columns", Arrays.asList(
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 0)));
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Arrays.asList(Arrays.asList(randomlyPickedUsername)));
Map<String, Object> actual = runSql(randomlyPickedUsername, mode, SQL);

Expand All @@ -147,7 +147,7 @@ public void testSingleUserSelectFromIndex() throws IOException {

Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 0)));
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName),
Arrays.asList(randomUserName),
Arrays.asList(randomUserName)));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,10 @@
import java.util.Locale;
import java.util.Map;

import static org.elasticsearch.xpack.sql.proto.Mode.CLI;
import static org.elasticsearch.xpack.sql.proto.Protocol.SQL_QUERY_REST_ENDPOINT;
import static org.elasticsearch.xpack.sql.proto.RequestInfo.CLIENT_IDS;
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.mode;
import static org.elasticsearch.xpack.sql.proto.Mode.CLI;

public abstract class SqlProtocolTestCase extends ESRestTestCase {

Expand Down Expand Up @@ -62,7 +62,7 @@ public void testNumericTypes() throws IOException {
}

/**
 * Verifies that a literal string is reported to clients as the ES {@code keyword}
 * type mapped to VARCHAR, with the expected display size.
 */
public void testTextualType() throws IOException {
// NOTE(review): both the pre-change expectation (display size 0) and the
// post-change one (32766, the keyword byte limit per ignore_above docs) appear
// here — this looks like diff-merge residue; confirm only the 32766 line should remain.
assertQuery("SELECT 'abc123'", "'abc123'", "keyword", "abc123", 0);
assertQuery("SELECT 'abc123'", "'abc123'", "keyword", "abc123", 32766);
}

public void testDateTimes() throws IOException {
Expand Down Expand Up @@ -141,7 +141,7 @@ private void assertQuery(String sql, String columnName, String columnType, Objec
List<Object> row = (ArrayList<Object>) rows.get(0);
assertEquals(1, row.size());

// from xcontent we can get float or double, depending on the conversion
// from xcontent we can get float or double, depending on the conversion
// method of the specific xcontent format implementation
if (columnValue instanceof Float && row.get(0) instanceof Double) {
assertEquals(columnValue, (float)((Number) row.get(0)).doubleValue());
Expand Down Expand Up @@ -209,7 +209,7 @@ private Map<String, Object> runSql(String mode, String sql, boolean columnar) th
return XContentHelper.convertToMap(SmileXContent.smileXContent, content, false);
}
default:
return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false);
return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ public void testBasicQuery() throws IOException {
String mode = randomMode();
boolean columnar = randomBoolean();

expected.put("columns", singletonList(columnInfo(mode, "test", "text", JDBCType.VARCHAR, 0)));
expected.put("columns", singletonList(columnInfo(mode, "test", "text", JDBCType.VARCHAR, Integer.MAX_VALUE)));
if (columnar) {
expected.put("values", singletonList(Arrays.asList("test", "test")));
} else {
Expand Down Expand Up @@ -118,7 +118,7 @@ public void testNextPage() throws IOException {
Map<String, Object> expected = new HashMap<>();
if (i == 0) {
expected.put("columns", Arrays.asList(
columnInfo(mode, "text", "text", JDBCType.VARCHAR, 0),
columnInfo(mode, "text", "text", JDBCType.VARCHAR, Integer.MAX_VALUE),
columnInfo(mode, "number", "long", JDBCType.BIGINT, 20),
columnInfo(mode, "s", "double", JDBCType.DOUBLE, 25),
columnInfo(mode, "SCORE()", "float", JDBCType.REAL, 15)));
Expand Down Expand Up @@ -184,7 +184,7 @@ public void testScoreWithFieldNamedScore() throws IOException {
Map<String, Object> expected = new HashMap<>();
boolean columnar = randomBoolean();
expected.put("columns", Arrays.asList(
columnInfo(mode, "name", "text", JDBCType.VARCHAR, 0),
columnInfo(mode, "name", "text", JDBCType.VARCHAR, Integer.MAX_VALUE),
columnInfo(mode, "score", "long", JDBCType.BIGINT, 20),
columnInfo(mode, "SCORE()", "float", JDBCType.REAL, 15)));
if (columnar) {
Expand Down Expand Up @@ -427,7 +427,7 @@ public void testBasicQueryWithFilter() throws IOException {
"{\"test\":\"bar\"}");

Map<String, Object> expected = new HashMap<>();
expected.put("columns", singletonList(columnInfo(mode, "test", "text", JDBCType.VARCHAR, 0)));
expected.put("columns", singletonList(columnInfo(mode, "test", "text", JDBCType.VARCHAR, Integer.MAX_VALUE)));
expected.put("rows", singletonList(singletonList("foo")));
assertResponse(expected, runSql(new StringEntity("{\"query\":\"SELECT * FROM test\", " +
"\"filter\":{\"match\": {\"test\": \"foo\"}}" + mode(mode) + "}",
Expand All @@ -442,7 +442,7 @@ public void testBasicQueryWithParameters() throws IOException {

Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo(mode, "test", "text", JDBCType.VARCHAR, 0),
columnInfo(mode, "test", "text", JDBCType.VARCHAR, Integer.MAX_VALUE),
columnInfo(mode, "param", "integer", JDBCType.INTEGER, 11)
));
if (columnar) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,12 +23,12 @@ CREATE TABLE mock (
IS_AUTOINCREMENT VARCHAR,
IS_GENERATEDCOLUMN VARCHAR
) AS
SELECT null, 'test1', 'name', 12, 'TEXT', 0, 2147483647, null, null,
SELECT null, 'test1', 'name', 12, 'TEXT', 2147483647, 2147483647, null, null,
1, -- columnNullable
null, null, 12, 0, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO'
FROM DUAL
UNION ALL
SELECT null, 'test1', 'name.keyword', 12, 'KEYWORD', 0, 2147483647, null, null,
SELECT null, 'test1', 'name.keyword', 12, 'KEYWORD', 32766, 2147483647, null, null,
1, -- columnNullable
null, null, 12, 0, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO'
FROM DUAL
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ public boolean equals(Object o) {
return false;
}
ColumnInfo that = (ColumnInfo) o;
return displaySize == that.displaySize &&
return Objects.equals(displaySize, that.displaySize) &&
Objects.equals(table, that.table) &&
Objects.equals(name, that.name) &&
Objects.equals(esType, that.esType);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,8 +82,7 @@ public final void execute(SqlSession session, ActionListener<SchemaRowSet> liste
.sorted(Comparator.comparing((DataType t) -> t.sqlType.getVendorTypeNumber()).thenComparing(DataType::sqlName))
.map(t -> asList(t.toString(),
t.sqlType.getVendorTypeNumber(),
//https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/column-size?view=sql-server-2017
t.defaultPrecision,
DataTypes.precision(t),
"'",
"'",
null,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,19 +36,19 @@ public enum DataType {
DOUBLE( "double", JDBCType.DOUBLE, Double.BYTES, 15, 25, false, true, true),
// 24 bits defaultPrecision - 24*log10(2) =~ 7 (7.22)
FLOAT( "float", JDBCType.REAL, Float.BYTES, 7, 15, false, true, true),
HALF_FLOAT( "half_float", JDBCType.FLOAT, Double.BYTES, 16, 25, false, true, true),
HALF_FLOAT( "half_float", JDBCType.FLOAT, Float.BYTES, 3, 25, false, true, true),
// precision is based on long
SCALED_FLOAT( "scaled_float", JDBCType.FLOAT, Double.BYTES, 19, 25, false, true, true),
KEYWORD( "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE, 256, 0, false, false, true),
TEXT( "text", JDBCType.VARCHAR, Integer.MAX_VALUE, Integer.MAX_VALUE, 0, false, false, false),
SCALED_FLOAT( "scaled_float", JDBCType.DOUBLE, Long.BYTES, 15, 25, false, true, true),
KEYWORD( "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE, 32766, 32766, false, false, true),
TEXT( "text", JDBCType.VARCHAR, Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, false, false, false),
OBJECT( "object", JDBCType.STRUCT, -1, 0, 0, false, false, false),
NESTED( "nested", JDBCType.STRUCT, -1, 0, 0, false, false, false),
BINARY( "binary", JDBCType.VARBINARY, -1, Integer.MAX_VALUE, 0, false, false, false),
BINARY( "binary", JDBCType.VARBINARY, -1, Integer.MAX_VALUE, Integer.MAX_VALUE, false, false, false),
DATE( JDBCType.DATE, Long.BYTES, 24, 24, false, false, true),
// since ODBC and JDBC interpret precision for Date as display size
// the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone)
// see https://github.com/elastic/elasticsearch/issues/30386#issuecomment-386807288
DATETIME( "date", JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true),
DATETIME( "date", JDBCType.TIMESTAMP, Long.BYTES, 3, 24, false, false, true),
//
// specialized types
//
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@ public static Integer metaSqlDataType(DataType t) {
}

// https://github.com/elastic/elasticsearch/issues/30386
// https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function?view=sql-server-2017
// https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function
public static Integer metaSqlDateTimeSub(DataType t) {
if (t == DATETIME) {
// ODBC SQL_CODE_TIMESTAMP
Expand All @@ -185,42 +185,44 @@ public static Integer metaSqlDateTimeSub(DataType t) {
return 0;
}

// https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/decimal-digits?view=sql-server-2017
public static Short metaSqlMinimumScale(DataType t) {
// TODO: return info for HALF/SCALED_FLOATS (should be based on field not type)
if (t == DATETIME) {
return Short.valueOf((short) 3);
}
if (t.isInteger()) {
return Short.valueOf((short) 0);
}
// minimum scale?
if (t.isRational()) {
return Short.valueOf((short) 0);
}
return null;
return metaSqlSameScale(t);
}

/**
 * Maximum number of decimal digits (scale) reported for a type in ODBC/JDBC
 * type metadata (SQLGetTypeInfo / DatabaseMetaData.getTypeInfo).
 *
 * @param t the SQL data type being described
 * @return the maximum scale as a Short, or null when scale is not applicable
 */
public static Short metaSqlMaximumScale(DataType t) {
// TODO: return info for HALF/SCALED_FLOATS (should be based on field not type)
// NOTE(review): this hunk interleaves pre- and post-change lines; the DATETIME
// special case below may have been folded into metaSqlSameScale by the commit —
// confirm against the merged source before relying on this branch.
if (t == DATETIME) {
return Short.valueOf((short) 3);
}
return metaSqlSameScale(t);
}

// https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/decimal-digits
// https://github.com/elastic/elasticsearch/issues/40357
// since the scale is fixed, minimum and maximum should return the same value
// hence why this method exists
private static Short metaSqlSameScale(DataType t) {
// TODO: return info for SCALED_FLOATS (should be based on field not type)
if (t.isInteger()) {
return Short.valueOf((short) 0);
}
if (t.isRational()) {
if (t.isDateBased() || t.isRational()) {
return Short.valueOf((short) t.defaultPrecision);
}
return null;
}

// https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function?view=sql-server-2017
// https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function
/**
 * Radix used to interpret COLUMN_SIZE and DECIMAL_DIGITS in ODBC/JDBC type
 * metadata: 10 means the values count decimal digits, 2 means they count bits,
 * and null means radix does not apply to the type.
 *
 * @param t the SQL data type being described
 * @return 10 for integral types, 2 for rational (floating-point) types, null otherwise
 */
public static Integer metaSqlRadix(DataType t) {
// Integral types are sized in decimal digits.
if (t.isInteger()) {
return Integer.valueOf(10);
}
// Floating-point types are sized in bits.
if (t.isRational()) {
return Integer.valueOf(2);
}
// Non-numeric types have no meaningful radix.
return null;
}

//https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function#comments
//https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/column-size
/**
 * Precision advertised for a type in driver metadata. Numeric types report
 * their default decimal-digit precision; all other types fall back to the
 * display size, matching ODBC column-size semantics.
 *
 * @param t the SQL data type being described
 * @return the precision for numeric types, otherwise the display size
 */
public static Integer precision(DataType t) {
return t.isNumeric() ? t.defaultPrecision : t.displaySize;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ public void testSqlAction() {
assertThat(response.columns(), hasSize(2));
int dataIndex = dataBeforeCount ? 0 : 1;
int countIndex = dataBeforeCount ? 1 : 0;
assertEquals(new ColumnInfo("", "data", "text", 0), response.columns().get(dataIndex));
assertEquals(new ColumnInfo("", "data", "text", 2147483647), response.columns().get(dataIndex));
assertEquals(new ColumnInfo("", "count", "long", 20), response.columns().get(countIndex));

assertThat(response.rows(), hasSize(2));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ private Tuple<Command, SqlSession> sql(String sql) {
public void testSysTypes() throws Exception {
Command cmd = sql("SYS TYPES").v1();

List<String> names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE",
List<String> names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "FLOAT", "DOUBLE", "SCALED_FLOAT",
"KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE", "DATETIME",
"INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND",
"INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND",
Expand Down
Loading

0 comments on commit 1557d77

Please sign in to comment.