Skip to content

Commit

Permalink
SNOW-1157904 write and bindings structured types (#1727)
Browse files Browse the repository at this point in the history
* Structured types: write support via bindings
  • Loading branch information
sfc-gh-pmotacki committed Apr 26, 2024
1 parent 804ef67 commit 8dcd217
Show file tree
Hide file tree
Showing 30 changed files with 1,492 additions and 80 deletions.
96 changes: 96 additions & 0 deletions src/main/java/net/snowflake/client/core/FieldSchemaCreator.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
package net.snowflake.client.core;

import java.sql.SQLException;
import java.sql.Types;
import java.util.Optional;
import net.snowflake.client.jdbc.BindingParameterMetadata;
import net.snowflake.client.jdbc.SnowflakeColumn;
import net.snowflake.client.jdbc.SnowflakeType;
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;

@SnowflakeJdbcInternalApi
public class FieldSchemaCreator {
  static final SFLogger logger = SFLoggerFactory.getLogger(FieldSchemaCreator.class);

  /** Maximum length (in characters) of a Snowflake text column: 128 MB. */
  public static final int MAX_TEXT_COLUMN_SIZE = 134217728;

  /** Maximum length (in bytes) of a Snowflake binary column: 64 MB. */
  public static final int MAX_BINARY_COLUMN_SIZE = 67108864;

  // Utility class — static factory methods only, no instances.
  private FieldSchemaCreator() {}

  /**
   * Builds binding metadata for a text field.
   *
   * <p>Values from {@code maybeColumn} (a {@link SnowflakeColumn} annotation, when present)
   * override the defaults: type {@code "text"}, length {@link #MAX_TEXT_COLUMN_SIZE}, and the
   * supplied {@code fieldName}.
   *
   * @param fieldName default field name, used when the annotation supplies no non-empty name
   * @param maybeColumn optional column annotation whose non-empty/positive values take precedence
   * @return metadata describing a text binding
   */
  public static BindingParameterMetadata buildSchemaForText(
      String fieldName, Optional<SnowflakeColumn> maybeColumn) {
    return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata()
        .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse("text"))
        .withLength(maybeColumn.map(cl -> cl.length()).orElse(MAX_TEXT_COLUMN_SIZE))
        .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName))
        .build();
  }

  /**
   * Builds binding metadata for a binary field.
   *
   * @param fieldName default field name, used when the annotation supplies no non-empty name
   * @param maybeColumn optional column annotation whose non-empty values take precedence
   * @return metadata describing a binary binding
   */
  public static BindingParameterMetadata buildSchemaForBytesType(
      String fieldName, Optional<SnowflakeColumn> maybeColumn) {
    return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata()
        .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse("binary"))
        .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName))
        // NOTE(review): default length falls back to MAX_TEXT_COLUMN_SIZE (128 MB) even though
        // this is a binary column whose byte length below is capped at MAX_BINARY_COLUMN_SIZE
        // (64 MB) — confirm whether MAX_BINARY_COLUMN_SIZE was intended here.
        .withLength(maybeColumn.map(cl -> cl.precision()).orElse(MAX_TEXT_COLUMN_SIZE))
        .withByteLength(maybeColumn.map(cl -> cl.byteLength()).orElse(MAX_BINARY_COLUMN_SIZE))
        .build();
  }

  /**
   * Builds binding metadata carrying only a type and a name (no length/scale/precision), e.g. for
   * booleans and dates.
   *
   * @param fieldName default field name, used when the annotation supplies no non-empty name
   * @param type default logical type, used when the annotation supplies no non-empty type
   * @param maybeColumn optional column annotation whose non-empty values take precedence
   * @return metadata with type and name populated
   */
  public static BindingParameterMetadata buildSchemaTypeAndNameOnly(
      String fieldName, String type, Optional<SnowflakeColumn> maybeColumn) {
    return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata()
        .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse(type))
        .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName))
        .build();
  }

  /**
   * Builds binding metadata for numeric/timestamp fields that carry scale and precision.
   *
   * <p>Annotation values are used only when positive; otherwise the supplied defaults apply.
   *
   * @param fieldName default field name, used when the annotation supplies no non-empty name
   * @param type default logical type, used when the annotation supplies no non-empty type
   * @param scale default scale, used when the annotation's scale is not &gt; 0
   * @param precision default precision, used when the annotation's precision is not &gt; 0
   * @param maybeColumn optional column annotation whose values take precedence
   * @return metadata with type, name, scale, and precision populated
   */
  public static BindingParameterMetadata buildSchemaWithScaleAndPrecision(
      String fieldName,
      String type,
      int scale,
      int precision,
      Optional<SnowflakeColumn> maybeColumn) {
    return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata()
        .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse(type))
        .withScale(maybeColumn.map(cl -> cl.scale()).filter(i -> i > 0).orElse(scale))
        .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName))
        .withPrecision(maybeColumn.map(cl -> cl.precision()).filter(i -> i > 0).orElse(precision))
        .build();
  }

  /**
   * Builds binding metadata for a {@link java.sql.Types} constant, naming the field after the
   * corresponding {@link SnowflakeType}.
   *
   * @param baseType a {@link java.sql.Types} constant
   * @return metadata with defaults appropriate to the JDBC type
   * @throws SQLException when {@code baseType} has no supported mapping
   */
  public static BindingParameterMetadata buildBindingSchemaForType(int baseType)
      throws SQLException {
    return buildBindingSchemaForType(baseType, true);
  }

  /**
   * Builds binding metadata for a {@link java.sql.Types} constant.
   *
   * @param baseType a {@link java.sql.Types} constant
   * @param addName when {@code true}, the field name is derived from the matching
   *     {@link SnowflakeType}; when {@code false}, the name is left {@code null}
   * @return metadata with defaults appropriate to the JDBC type
   * @throws SQLException when {@code baseType} has no supported mapping
   */
  public static BindingParameterMetadata buildBindingSchemaForType(int baseType, boolean addName)
      throws SQLException {
    String name = addName ? SnowflakeType.javaTypeToSFType(baseType, null).name() : null;
    switch (baseType) {
      case Types.VARCHAR:
      case Types.CHAR:
        return FieldSchemaCreator.buildSchemaForText(name, Optional.empty());
      case Types.FLOAT:
      case Types.DOUBLE:
      case Types.DECIMAL:
        return FieldSchemaCreator.buildSchemaWithScaleAndPrecision(
            name, "real", 9, 38, Optional.empty());
      case Types.NUMERIC:
      case Types.INTEGER:
      case Types.SMALLINT:
      case Types.TINYINT:
      case Types.BIGINT:
        // Pass the resolved name (was hard-coded null) so that addName is honored
        // consistently with every other branch of this switch.
        return FieldSchemaCreator.buildSchemaWithScaleAndPrecision(
            name, "fixed", 0, 38, Optional.empty());
      case Types.BOOLEAN:
        return FieldSchemaCreator.buildSchemaTypeAndNameOnly(name, "boolean", Optional.empty());
      case Types.DATE:
        return FieldSchemaCreator.buildSchemaTypeAndNameOnly(name, "date", Optional.empty());
      case Types.TIMESTAMP:
      case Types.TIME:
        // NOTE(review): scale=9, precision=0 — scale and precision look swapped relative to the
        // numeric branches; confirm the server expects this ordering for timestamps.
        return FieldSchemaCreator.buildSchemaWithScaleAndPrecision(
            name, "timestamp", 9, 0, Optional.empty());
      default:
        logger.error("Could not create schema for type : " + baseType);
        throw new SQLException("Could not create schema for type : " + baseType);
    }
  }
}
16 changes: 12 additions & 4 deletions src/main/java/net/snowflake/client/core/JsonSqlInput.java
Original file line number Diff line number Diff line change
Expand Up @@ -234,8 +234,12 @@ public <T> List<T> readList(Class<T> type) throws SQLException {
List<T> result = new ArrayList();
if (ArrayNode.class.isAssignableFrom(value.getClass())) {
for (JsonNode node : (ArrayNode) value) {

result.add(convertObject(type, TimeZone.getDefault(), getValue(node), fieldMetadata));
result.add(
convertObject(
type,
TimeZone.getDefault(),
getValue(node),
fieldMetadata.getFields().get(0)));
}
return result;
} else {
Expand All @@ -259,7 +263,11 @@ public <T> T[] readArray(Class<T> type) throws SQLException {
int counter = 0;
for (JsonNode node : valueNodes) {
array[counter++] =
convertObject(type, TimeZone.getDefault(), getValue(node), fieldMetadata);
convertObject(
type,
TimeZone.getDefault(),
getValue(node),
fieldMetadata.getFields().get(0));
}
return array;
} else {
Expand Down Expand Up @@ -306,7 +314,7 @@ private Timestamp convertTimestamp(TimeZone tz, Object value, FieldMetadata fiel
int columnSubType = fieldMetadata.getType();
int scale = fieldMetadata.getScale();
Timestamp result =
SqlInputTimestampUtil.getTimestampFromType(
SfTimestampUtil.getTimestampFromType(
columnSubType, (String) value, session, sessionTimeZone, tz);
if (result != null) {
return result;
Expand Down
Loading

0 comments on commit 8dcd217

Please sign in to comment.