Avoid using S3 multipart upload for a single part #6201

Merged · 5 commits · Dec 9, 2020
Changes from all commits
@@ -23,7 +23,7 @@

 /**
  * Translate statements in Hive QL to Presto SQL.
- *
+ * <p>
  * Only translation of quoted literals is currently included.
  */
 public final class HiveQlToPrestoTranslator
@@ -23,9 +23,9 @@

 /**
  * Stores a mapping between
- * - the projected columns required by a connector level pagesource and
- * - the columns supplied by format-specific page source
- *
+ * - the projected columns required by a connector level pagesource and
+ * - the columns supplied by format-specific page source
+ * <p>
  * Currently used in {@link HivePageSource}.
  */
 public class ReaderColumns

Large diffs are not rendered by default.
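The file carrying the change named in the PR title is presumably in this unrendered diff. As a rough sketch of the general technique only, assuming an AWS SDK v1 style client (the class and field names below are made up for illustration and are not the PR's code):

```java
// Hypothetical illustration, not the PR's implementation. The idea: when the
// whole object fits in a single part, upload it with one PutObject call instead
// of the multipart protocol (initiate / upload parts / complete), saving round
// trips and avoiding multipart bookkeeping.
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;

import java.io.ByteArrayInputStream;

class SingleOrMultipartUpload
{
    // 5 MB is the S3 minimum part size, a natural single-part threshold.
    private static final long PART_SIZE = 5 * 1024 * 1024;

    private final AmazonS3 s3;

    SingleOrMultipartUpload(AmazonS3 s3)
    {
        this.s3 = s3;
    }

    void upload(String bucket, String key, byte[] data, int length)
    {
        if (length <= PART_SIZE) {
            // Single part: one PutObject request, no CreateMultipartUpload,
            // UploadPart or CompleteMultipartUpload calls.
            ObjectMetadata metadata = new ObjectMetadata();
            metadata.setContentLength(length);
            s3.putObject(new PutObjectRequest(bucket, key, new ByteArrayInputStream(data, 0, length), metadata));
            return;
        }
        // Larger objects still go through the multipart protocol (omitted here).
        throw new UnsupportedOperationException("multipart path omitted in this sketch");
    }
}
```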

@@ -104,7 +104,9 @@ public class TestHiveQlTranslation
 "'double: two singles'''''")
 .build();

- /** Prepare all combinations of {@code n} of the given columns. */
+ /**
+  * Prepare all combinations of {@code n} of the given columns.
+  */
 private static Iterator<Object[]> getNColumns(int n, Map<String, String> columns)
 {
 Stream<String> hiveNames =
@@ -115,7 +117,7 @@ private static Iterator<Object[]> getNColumns(int n, Map<String, String> columns)
 Lists.cartesianProduct(nCopies(n, List.copyOf(columns.values()))).stream()
 .map(names -> join(", ", names));

- return Streams.zip(hiveNames, prestoNames, (h, p) -> new Object[]{h, p}).iterator();
+ return Streams.zip(hiveNames, prestoNames, (h, p) -> new Object[] {h, p}).iterator();
 }

 @DataProvider(name = "simple_hive_translation_columns")
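An aside on the test helper above (not part of the diff): `getNColumns` builds its data provider by taking the n-fold cartesian product of the column definitions with Guava's `Lists.cartesianProduct` and pairing the Hive and Presto variants positionally with `Streams.zip`. A small self-contained sketch of those two calls, with made-up column names:

```java
import com.google.common.collect.Lists;
import com.google.common.collect.Streams;

import java.util.List;
import java.util.stream.Stream;

import static java.lang.String.join;
import static java.util.Collections.nCopies;

public class CartesianDemo
{
    public static void main(String[] args)
    {
        // Hypothetical column definitions in both dialects, kept in the same order.
        List<String> hive = List.of("c1 string", "c2 int");
        List<String> presto = List.of("c1 varchar", "c2 integer");

        // Every ordered combination of n = 2 columns, joined into one column list.
        Stream<String> hiveCombos = Lists.cartesianProduct(nCopies(2, hive)).stream()
                .map(names -> join(", ", names));
        Stream<String> prestoCombos = Lists.cartesianProduct(nCopies(2, presto)).stream()
                .map(names -> join(", ", names));

        // Pair the combinations positionally, as the data provider does with Object[].
        Streams.zip(hiveCombos, prestoCombos, (h, p) -> h + "  ->  " + p)
                .forEach(System.out::println);
        // Prints 4 lines, e.g. "c1 string, c2 int  ->  c1 varchar, c2 integer".
    }
}
```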
@@ -163,7 +163,8 @@ private static TestingConnectorSession getSession(HiveConfig config)

 private static DynamicFilter getDynamicFilter(TupleDomain<ColumnHandle> tupleDomain)
 {
- return new DynamicFilter() {
+ return new DynamicFilter()
+ {
 @Override
 public CompletableFuture<?> isBlocked()
 {
@@ -119,8 +119,8 @@ public class TestRecordingHiveMetastore
 private static final List<String> PARTITION_COLUMN_NAMES = ImmutableList.of(TABLE_COLUMN.getName());
 private static final Domain PARTITION_COLUMN_EQUAL_DOMAIN = Domain.singleValue(createUnboundedVarcharType(), Slices.utf8Slice("value1"));
 private static final TupleDomain<String> TUPLE_DOMAIN = TupleDomain.withColumnDomains(ImmutableMap.<String, Domain>builder()
- .put(TABLE_COLUMN.getName(), PARTITION_COLUMN_EQUAL_DOMAIN)
- .build());
+ .put(TABLE_COLUMN.getName(), PARTITION_COLUMN_EQUAL_DOMAIN)
+ .build());

 @Test
 public void testRecordingHiveMetastore()
@@ -131,17 +131,17 @@ public Optional<Table> getTable(HiveIdentity identity, String databaseName, String tableName)
 {
 if (databaseName.equals("database")) {
 return Optional.of(new Table(
- "database",
- tableName,
- "owner",
- "table_type",
- TABLE_STORAGE,
- ImmutableList.of(TABLE_COLUMN),
- ImmutableList.of(TABLE_COLUMN),
- ImmutableMap.of("param", "value3"),
- Optional.of("original_text"),
- Optional.of("expanded_text"),
- OptionalLong.empty()));
+ "database",
+ tableName,
+ "owner",
+ "table_type",
+ TABLE_STORAGE,
+ ImmutableList.of(TABLE_COLUMN),
+ ImmutableList.of(TABLE_COLUMN),
+ ImmutableMap.of("param", "value3"),
+ Optional.of("original_text"),
+ Optional.of("expanded_text"),
+ OptionalLong.empty()));
 }
 return Optional.empty();
 }
@@ -140,7 +140,7 @@ public void testHashingCompare()
 for (BucketingVersion version : BucketingVersion.values()) {
 List<TypeInfo> typeInfos = ImmutableList.of(timestampTypeInfo);

- assertThatThrownBy(() -> version.getBucketHashCode(typeInfos, new Object[]{0}))
+ assertThatThrownBy(() -> version.getBucketHashCode(typeInfos, new Object[] {0}))
 .hasMessage("Computation of Hive bucket hashCode is not supported for Hive primitive category: TIMESTAMP");

 TimestampType timestampType = createTimestampType(3);
@@ -176,11 +176,12 @@ public void close()
 return;
 }
 closed = true;
- columnWriters.forEach(ColumnWriter::close);

- flush();
- writeFooter();
- outputStream.close();
+ try (outputStream) {
+ columnWriters.forEach(ColumnWriter::close);
+ flush();
+ writeFooter();
+ }
 }

 // Parquet File Layout:
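A note on the `close()` change above (an observation about the pattern, not text from the PR): since Java 9, try-with-resources accepts an existing final or effectively final variable, so `try (outputStream) { ... }` closes the stream even when `flush()` or `writeFooter()` throws, which the previous sequence of calls ending in `outputStream.close()` did not guarantee. A minimal standalone sketch of that pattern, with made-up names:

```java
import java.io.IOException;
import java.io.OutputStream;

class FooterWritingCloser
{
    private final OutputStream outputStream;
    private boolean closed;

    FooterWritingCloser(OutputStream outputStream)
    {
        this.outputStream = outputStream;
    }

    public void close()
            throws IOException
    {
        if (closed) {
            return;
        }
        closed = true;
        // Java 9+: try-with-resources over an existing final variable.
        // outputStream.close() runs even if writeFooter() throws.
        try (outputStream) {
            writeFooter();
        }
    }

    private void writeFooter()
            throws IOException
    {
        // Stand-in for the real footer logic.
        outputStream.write(new byte[] {0});
    }
}
```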