JDBC-552 Implement DatabaseMetaData.getFunctionColumns
mrotteveel committed Sep 8, 2019
1 parent 7d7db6a commit 62ed11f
Showing 16 changed files with 1,623 additions and 211 deletions.
2 changes: 1 addition & 1 deletion devdoc/jdp/jdp-2019-05-database-metadata-implementation.md
@@ -85,7 +85,7 @@ Jaybird 5 (and maybe Jaybird 6).

For Jaybird 4, the implementation of `getFunctions` and `getFunctionColumns` is
used as a testbed for the result set producing metadata methods. In addition,
one or two simple metadata methods will be moved to FirebirdVersionMetaData.
one or two simple metadata methods will be moved to `FirebirdVersionMetaData`.

For Jaybird 5, the remaining result set producing metadata methods and version
dependent simple metadata methods will be moved. This should not be done
26 changes: 26 additions & 0 deletions src/documentation/release_notes.md
@@ -22,6 +22,8 @@ The following has been changed or fixed since Jaybird 4.0.0-beta-1

- New feature: support for `DatabaseMetaData.getFunctions` ([JDBC-552](http://tracker.firebirdsql.org/browse/JDBC-552)) \
See also [JDBC DatabaseMetaData.getFunctions implemented].
- New feature: support for `DatabaseMetaData.getFunctionColumns` ([JDBC-552](http://tracker.firebirdsql.org/browse/JDBC-552)) \
See also [JDBC DatabaseMetaData.getFunctionColumns implemented].
- Fixed: Connection property `defaultIsolation`/`isolation` did not work
through `DriverManager`, but only on `DataSource` implementations. ([JDBC-584](http://tracker.firebirdsql.org/browse/JDBC-584))
- Fixed: attempts to use a blob after it was freed or after transaction end
@@ -93,6 +95,7 @@ The main new features are:
- [JDBC DatabaseMetaData.getPseudoColumns implemented]
- [JDBC DatabaseMetaData.getVersionColumns implemented]
- [JDBC DatabaseMetaData.getFunctions implemented] (since Jaybird 4.0.0-beta-2)
- [JDBC DatabaseMetaData.getFunctionColumns implemented] (since Jaybird 4.0.0-beta-2)
- [Improved JDBC function escape support]
- [New JDBC protocol prefix jdbc:firebird:]
- [Generated keys support improvements]
@@ -1243,6 +1246,29 @@ or `"UDR"` (Firebird 3+)
- `JB_ENGINE_NAME` - Value of `RDB$ENGINE_NAME`, is `null` for UDF and PSQL
  functions
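
As a quick illustration of reading these Jaybird-specific columns, the sketch
below lists the user-defined functions returned by `getFunctions`. The class
and method names are hypothetical; only the JDBC and `JB_*` column names come
from the list above.

```java
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical helper class, purely for illustration
public class ListFunctionsExample {

    // Prints each user-defined function with the Jaybird-specific columns above
    static void listFunctions(Connection connection) throws SQLException {
        try (ResultSet functions = connection.getMetaData().getFunctions(null, null, "%")) {
            while (functions.next()) {
                // JB_ENTRYPOINT is null for PSQL functions;
                // JB_ENGINE_NAME is null for UDF and PSQL functions
                System.out.printf("%s entrypoint=%s engine=%s%n",
                        functions.getString("FUNCTION_NAME"),
                        functions.getString("JB_ENTRYPOINT"),
                        functions.getString("JB_ENGINE_NAME"));
            }
        }
    }
}
```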

JDBC DatabaseMetaData.getFunctionColumns implemented
----------------------------------------------------

The `DatabaseMetaData.getFunctionColumns` method has now been implemented.

The JDBC API specifies this method as:

> Retrieves a description of the given catalog's system or user function
> parameters and return type.

The implementation only returns columns of functions that are available from
the `RDB$FUNCTIONS` table. This means that the built-in functions are not
included in the result of this method.

For Firebird 3 and higher, the result includes native UDF, PSQL and UDR
functions. The result does not include functions defined in packages, as JDBC
does not provide support for packages.

Where Firebird provides no column name, Jaybird generates one by combining
the string `PARAM_` with the value of `RDB$ARGUMENT_POSITION`. Names are not
available for the parameters of legacy UDF functions, nor for the return value
of any function.
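
A minimal usage sketch follows. It assumes an open `Connection`; the class and
method names are purely illustrative, while the column names and the
`DatabaseMetaData.functionReturn` constant are part of the standard JDBC API.

```java
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical helper class, purely for illustration
public class FunctionColumnsExample {

    // Prints the parameters and return value of the functions matching the pattern
    static void printFunctionColumns(Connection connection, String functionNamePattern)
            throws SQLException {
        DatabaseMetaData md = connection.getMetaData();
        try (ResultSet columns = md.getFunctionColumns(null, null, functionNamePattern, "%")) {
            while (columns.next()) {
                // COLUMN_NAME is the declared parameter name, or the generated
                // PARAM_<position> name described above when Firebird provides none;
                // COLUMN_TYPE distinguishes parameters from the return value
                boolean isReturn =
                        columns.getShort("COLUMN_TYPE") == DatabaseMetaData.functionReturn;
                System.out.printf("%s: %s %s (%s)%n",
                        columns.getString("FUNCTION_NAME"),
                        columns.getString("COLUMN_NAME"),
                        columns.getString("TYPE_NAME"),
                        isReturn ? "return value" : "parameter");
            }
        }
    }
}
```

For example, `printFunctionColumns(connection, "%")` would list the columns of
all functions available through `RDB$FUNCTIONS`.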

Improved JDBC function escape support
-------------------------------------

183 changes: 25 additions & 158 deletions src/main/org/firebirdsql/jdbc/FBDatabaseMetaData.java
@@ -31,7 +31,6 @@
import org.firebirdsql.jaybird.Version;
import org.firebirdsql.jca.FBManagedConnectionFactory;
import org.firebirdsql.jdbc.escape.FBEscapedFunctionHelper;
import org.firebirdsql.jdbc.field.JdbcTypeConverter;
import org.firebirdsql.jdbc.metadata.*;
import org.firebirdsql.logging.Logger;
import org.firebirdsql.logging.LoggerFactory;
@@ -45,6 +44,9 @@
import java.util.*;

import static org.firebirdsql.gds.ISCConstants.*;
import static org.firebirdsql.jdbc.metadata.FbMetadataConstants.*;
import static org.firebirdsql.jdbc.metadata.TypeMetadata.getDataType;
import static org.firebirdsql.jdbc.metadata.TypeMetadata.getDataTypeName;
import static org.firebirdsql.util.FirebirdSupportInfo.supportInfoFor;

/**
@@ -67,9 +69,6 @@ public class FBDatabaseMetaData implements FirebirdDatabaseMetaData {
private static final String OBJECT_NAME_TYPE = "varchar(" + OBJECT_NAME_LENGTH + ")";
private static final String OBJECT_NAME_PARAMETER = "cast(? as varchar(" + OBJECT_NAME_PARAMETER_LENGTH + ")) ";

private static final int SUBTYPE_NUMERIC = 1;
private static final int SUBTYPE_DECIMAL = 2;

protected static final DatatypeCoder datatypeCoder =
DefaultDatatypeCoder.forEncodingFactory(EncodingFactory.createInstance(StandardCharsets.UTF_8));

@@ -102,8 +101,6 @@ public class FBDatabaseMetaData implements FirebirdDatabaseMetaData {
private static final byte[] PROCEDURE_NULLABLE = createShort(DatabaseMetaData.procedureNullable);
private static final byte[] PROCEDURE_COLUMN_IN = createShort(DatabaseMetaData.procedureColumnIn);
private static final byte[] PROCEDURE_COLUMN_OUT = createShort(DatabaseMetaData.procedureColumnOut);
private static final byte[] FLOAT_PRECISION = createInt(7);
private static final byte[] DOUBLE_PRECISION = createInt(15);
private static final byte[] BIGINT_PRECISION = createInt(19);
private static final byte[] INTEGER_PRECISION = createInt(10);
private static final byte[] SMALLINT_PRECISION = createInt(5);
@@ -125,8 +122,6 @@ public class FBDatabaseMetaData implements FirebirdDatabaseMetaData {
private static final byte[] TABLE_INDEX_OTHER = createShort(DatabaseMetaData.tableIndexOther);
private static final byte[] ASC_BYTES = getBytes("A");
private static final byte[] DESC_BYTES = getBytes("D");
private static final int FLOAT_BINARY_PRECISION = 24;
private static final int DOUBLE_BINARY_PRECISION = 53;

private GDSHelper gdsHelper;
private FBConnection connection;
@@ -135,7 +130,6 @@ public class FBDatabaseMetaData implements FirebirdDatabaseMetaData {
private static final int STATEMENT_CACHE_SIZE = 12;
private final Map<String, FBPreparedStatement> statements = new LruPreparedStatementCache(STATEMENT_CACHE_SIZE);
private final FirebirdVersionMetaData versionMetaData;
private final DbMetadataMediator dbMetadataMediator = new DbMetadataMediatorImpl();

protected FBDatabaseMetaData(FBConnection c) throws SQLException {
this.gdsHelper = c.getGDSHelper();
@@ -1385,7 +1379,7 @@ public ResultSet getProcedureColumns(String catalog, String schemaPattern, Strin
.at(7).set(createInt(FLOAT_BINARY_PRECISION))
.at(10).set(RADIX_BINARY_SHORT);
} else {
valueBuilder.at(7).set(FLOAT_PRECISION);
valueBuilder.at(7).set(createInt(FLOAT_DECIMAL_PRECISION));
}
break;
case Types.DOUBLE:
@@ -1394,7 +1388,7 @@ public ResultSet getProcedureColumns(String catalog, String schemaPattern, Strin
.at(7).set(createInt(DOUBLE_BINARY_PRECISION))
.at(10).set(RADIX_BINARY_SHORT);
} else {
valueBuilder.at(7).set(DOUBLE_PRECISION);
valueBuilder.at(7).set(createInt(DOUBLE_DECIMAL_PRECISION));
}
break;
case Types.BIGINT:
@@ -1885,7 +1879,7 @@ public ResultSet getColumns(String catalog, String schemaPattern, String tableNa
.at(6).set(createInt(FLOAT_BINARY_PRECISION))
.at(9).set(RADIX_BINARY);
} else {
valueBuilder.at(6).set(FLOAT_PRECISION);
valueBuilder.at(6).set(createInt(FLOAT_DECIMAL_PRECISION));
}
break;
case Types.DOUBLE:
@@ -1894,7 +1888,7 @@ public ResultSet getColumns(String catalog, String schemaPattern, String tableNa
.at(6).set(createInt(DOUBLE_BINARY_PRECISION))
.at(9).set(RADIX_BINARY);
} else {
valueBuilder.at(6).set(DOUBLE_PRECISION);
valueBuilder.at(6).set(createInt(DOUBLE_DECIMAL_PRECISION));
}
break;
case Types.BIGINT:
@@ -2034,128 +2028,6 @@ private String getScopeCatalogColumnName() {
return scopeCatalog;
}

// TODO Duplicates JdbcTypeConverter (and probably BlrConstants)
private static final int smallint_type = 7;
private static final int integer_type = 8;
private static final int quad_type = 9;
private static final int float_type = 10;
private static final int d_float_type = 11;
private static final int date_type = 12;
private static final int time_type = 13;
private static final int char_type = 14;
private static final int int64_type = 16;
private static final int dec16_type = 24;
private static final int dec34_type = 25;
private static final int dec_fixed_type = 26;
private static final int double_type = 27;
private static final int timestamp_type = 35;
private static final int varchar_type = 37;
// private static final int cstring_type = 40;
private static final int blob_type = 261;
private static final int boolean_type = 23;
private static final int time_tz_type = 28;
private static final int timestamp_tz_type = 29;

private static int getDataType(int fieldType, int fieldSubType, int fieldScale, int characterSetId) {
// TODO Preserved for backwards compatibility, is this really necessary?
if (fieldType == blob_type && fieldSubType > 1) {
return Types.OTHER;
}
final int jdbcType = JdbcTypeConverter.fromMetaDataToJdbcType(fieldType, fieldSubType, fieldScale);
// Metadata from RDB$ tables does not contain character set in subtype, manual fixup
if (characterSetId == CS_BINARY) {
if (jdbcType == Types.CHAR) {
return Types.BINARY;
} else if (jdbcType == Types.VARCHAR) {
return Types.VARBINARY;
}
}
return jdbcType;
}

// TODO Unify with AbstractFieldMetadata
private static String getDataTypeName(int sqltype, int sqlsubtype, int sqlscale) {
switch (sqltype) {
case smallint_type:
if (sqlsubtype == SUBTYPE_NUMERIC || (sqlsubtype == 0 && sqlscale < 0)) {
return "NUMERIC";
} else if (sqlsubtype == SUBTYPE_DECIMAL) {
return "DECIMAL";
} else {
return "SMALLINT";
}
case integer_type:
if (sqlsubtype == SUBTYPE_NUMERIC || (sqlsubtype == 0 && sqlscale < 0)) {
return "NUMERIC";
} else if (sqlsubtype == SUBTYPE_DECIMAL) {
return "DECIMAL";
} else {
return "INTEGER";
}
case double_type:
case d_float_type:
if (sqlsubtype == SUBTYPE_NUMERIC || (sqlsubtype == 0 && sqlscale < 0)) {
return "NUMERIC";
} else if (sqlsubtype == SUBTYPE_DECIMAL) {
return "DECIMAL";
} else {
return "DOUBLE PRECISION";
}
case float_type:
return "FLOAT";
case char_type:
return "CHAR";
case varchar_type:
return "VARCHAR";
case timestamp_type:
return "TIMESTAMP";
case time_type:
return "TIME";
case date_type:
return "DATE";
case time_tz_type:
return "TIME WITH TIME ZONE";
case timestamp_tz_type:
return "TIMESTAMP WITH TIME ZONE";
case int64_type:
if (sqlsubtype == SUBTYPE_NUMERIC || (sqlsubtype == 0 && sqlscale < 0)) {
return "NUMERIC";
} else if (sqlsubtype == SUBTYPE_DECIMAL) {
return "DECIMAL";
} else {
return "BIGINT";
}
case blob_type:
if (sqlsubtype < 0) {
// TODO Include actual subtype?
return "BLOB SUB_TYPE <0";
} else if (sqlsubtype == BLOB_SUB_TYPE_BINARY) {
return "BLOB SUB_TYPE 0";
} else if (sqlsubtype == BLOB_SUB_TYPE_TEXT) {
return "BLOB SUB_TYPE 1";
} else {
return "BLOB SUB_TYPE " + sqlsubtype;
}
case quad_type:
return "ARRAY";
case boolean_type:
return "BOOLEAN";
case dec_fixed_type:
switch (sqlsubtype) {
case SUBTYPE_DECIMAL:
return "DECIMAL";
case SUBTYPE_NUMERIC:
default:
return "NUMERIC";
}
case dec16_type:
case dec34_type:
return "DECFLOAT";
default:
return "NULL";
}
}

private static final String GET_COLUMN_PRIVILEGES_START = "select "
+ "cast(RF.RDB$RELATION_NAME as " + OBJECT_NAME_TYPE + ") as TABLE_NAME,"
+ "cast(RF.RDB$FIELD_NAME as " + OBJECT_NAME_TYPE + ") as COLUMN_NAME,"
@@ -3359,31 +3231,18 @@ public ResultSet getClientInfoProperties() throws SQLException {
return new FBResultSet(rowDescriptor, Collections.<RowValue>emptyList());
}

/**
* {@inheritDoc}
*
* <p>
* This method does not return columns of functions defined in packages.
* </p>
*/
@Override
public ResultSet getFunctionColumns(String catalog, String schemaPattern, String functionNamePattern,
String columnNamePattern) throws SQLException {
// FIXME implement this method to return actual result
final RowDescriptor rowDescriptor = new RowDescriptorBuilder(17, datatypeCoder)
.at(0).simple(SQL_VARYING, OBJECT_NAME_LENGTH, "FUNCTION_CAT", "FUNCTION_COLUMNS").addField()
.at(1).simple(SQL_VARYING, OBJECT_NAME_LENGTH, "FUNCTION_SCHEM", "FUNCTION_COLUMNS").addField()
.at(2).simple(SQL_VARYING, OBJECT_NAME_LENGTH, "FUNCTION_NAME", "FUNCTION_COLUMNS").addField()
.at(3).simple(SQL_VARYING, OBJECT_NAME_LENGTH, "COLUMN_NAME", "FUNCTION_COLUMNS").addField()
.at(4).simple(SQL_SHORT, 0, "COLUMN_TYPE", "FUNCTION_COLUMNS").addField()
.at(5).simple(SQL_LONG, 0, "DATA_TYPE", "FUNCTION_COLUMNS").addField()
.at(6).simple(SQL_VARYING, 31, "TYPE_NAME", "FUNCTION_COLUMNS").addField()
.at(7).simple(SQL_LONG, 0, "PRECISION", "FUNCTION_COLUMNS").addField()
.at(8).simple(SQL_LONG, 0, "LENGTH", "FUNCTION_COLUMNS").addField()
.at(9).simple(SQL_SHORT, 0, "SCALE", "FUNCTION_COLUMNS").addField()
.at(10).simple(SQL_SHORT, 0, "RADIX", "FUNCTION_COLUMNS").addField()
.at(11).simple(SQL_SHORT, 0, "NULLABLE", "FUNCTION_COLUMNS").addField()
.at(12).simple(SQL_VARYING, 80, "REMARKS", "FUNCTION_COLUMNS").addField()
.at(13).simple(SQL_LONG, 0, "CHAR_OCTET_LENGTH", "FUNCTION_COLUMNS").addField()
.at(14).simple(SQL_LONG, 0, "ORDINAL_POSITION", "FUNCTION_COLUMNS").addField()
.at(15).simple(SQL_VARYING, 31, "IS_NULLABLE", "FUNCTION_COLUMNS").addField()
.at(16).simple(SQL_VARYING, OBJECT_NAME_LENGTH, "SPECIFIC_NAME", "FUNCTION_COLUMNS").addField()
.toRowDescriptor();

return new FBResultSet(rowDescriptor, Collections.<RowValue>emptyList());
return GetFunctionColumns.create(getDbMetadataMediator())
.getFunctionColumns(catalog, schemaPattern, functionNamePattern, columnNamePattern);
}

/**
Expand All @@ -3401,11 +3260,15 @@ public ResultSet getFunctionColumns(String catalog, String schemaPattern, String
* <li><b>JB_ENTRYPOINT</b> String => Value of {@code RDB$ENTRYPOINT} (is {@code null} for PSQL)</li>
* <li><b>JB_ENGINE_NAME</b> String => Value of {@code RDB$ENGINE_NAME} (is {@code null} for UDF and PSQL)</li>
* </ol>
* </p>
* <p>
* This method does not return functions defined in packages.
* </p>
*/
@Override
public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern)
throws SQLException {
return GetFunctions.create(dbMetadataMediator).getFunctions(catalog, schemaPattern, functionNamePattern);
return GetFunctions.create(getDbMetadataMediator()).getFunctions(catalog, schemaPattern, functionNamePattern);
}

@Override
@@ -3858,6 +3721,10 @@ protected boolean removeEldestEntry(Map.Entry<String, FBPreparedStatement> eldes
}
}

protected DbMetadataMediator getDbMetadataMediator() {
return new DbMetadataMediatorImpl();
}

private class DbMetadataMediatorImpl extends DbMetadataMediator {

@Override