Remove unused parameter from InternalHiveSplitFactory
tangjiangling authored and losipiuk committed Aug 16, 2022
1 parent fe3cf0d commit 28002f0
Showing 4 changed files with 0 additions and 15 deletions.
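
The four diffs below delete a dead data path: HiveSplitManager passed hiveTable.getTransaction() into BackgroundHiveSplitLoader, which stored the AcidTransaction only to forward it to InternalHiveSplitFactory, whose constructor accepted it without ever reading it. As a minimal sketch of the pattern being applied (hypothetical names, not Trino code): a constructor parameter that is never read can be removed together with the field that held it and the matching argument at every call site.

import static java.util.Objects.requireNonNull;

// Minimal sketch of the refactoring pattern (hypothetical names, not Trino code):
// a constructor parameter that is never read is deleted, together with the field
// that held it and the matching argument at every call site.
final class SplitFactory
{
    private final String tableName;
    // Before the cleanup there would also have been something like:
    //     private final Transaction transaction;  // assigned once, never read

    SplitFactory(String tableName) // formerly: SplitFactory(String tableName, Transaction transaction)
    {
        this.tableName = requireNonNull(tableName, "tableName is null");
    }

    String tableName()
    {
        return tableName;
    }

    public static void main(String[] args)
    {
        // Every call site shrinks by one argument.
        System.out.println(new SplitFactory("orders").tableName());
    }
}

Because the parameter was threaded through one field and several construction sites, the cleanup removes fifteen lines with no behavior change, as the 0 additions / 15 deletions summary above reflects.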
BackgroundHiveSplitLoader.java

@@ -28,7 +28,6 @@
 import io.trino.hdfs.HdfsEnvironment;
 import io.trino.plugin.hive.HiveSplit.BucketConversion;
 import io.trino.plugin.hive.HiveSplit.BucketValidation;
-import io.trino.plugin.hive.acid.AcidTransaction;
 import io.trino.plugin.hive.fs.DirectoryLister;
 import io.trino.plugin.hive.fs.HiveFileIterator;
 import io.trino.plugin.hive.metastore.Column;
@@ -148,7 +147,6 @@ public class BackgroundHiveSplitLoader
     private static final ListenableFuture<Void> COMPLETED_FUTURE = immediateVoidFuture();
 
     private final Table table;
-    private final AcidTransaction transaction;
     private final TupleDomain<? extends ColumnHandle> compactEffectivePredicate;
     private final DynamicFilter dynamicFilter;
     private final long dynamicFilteringWaitTimeoutMillis;
@@ -192,7 +190,6 @@ public class BackgroundHiveSplitLoader
 
     public BackgroundHiveSplitLoader(
             Table table,
-            AcidTransaction transaction,
             Iterable<HivePartitionMetadata> partitions,
             TupleDomain<? extends ColumnHandle> compactEffectivePredicate,
             DynamicFilter dynamicFilter,
@@ -212,7 +209,6 @@ public BackgroundHiveSplitLoader(
             Optional<Long> maxSplitFileSize)
     {
         this.table = table;
-        this.transaction = requireNonNull(transaction, "transaction is null");
         this.compactEffectivePredicate = compactEffectivePredicate;
         this.dynamicFilter = dynamicFilter;
         this.dynamicFilteringWaitTimeoutMillis = dynamicFilteringWaitTimeout.toMillis();
@@ -473,7 +469,6 @@ private ListenableFuture<Void> loadPartition(HivePartitionMetadata partition)
                 getMaxInitialSplitSize(session),
                 isForceLocalScheduling(session),
                 s3SelectPushdownEnabled,
-                transaction,
                 maxSplitFileSize);
 
         // To support custom input formats, we want to call getSplits()
@@ -662,7 +657,6 @@ private ListenableFuture<Void> createHiveSymlinkSplits(
                     getMaxInitialSplitSize(session),
                     isForceLocalScheduling(session),
                     s3SelectPushdownEnabled,
-                    transaction,
                     maxSplitFileSize);
             lastResult = addSplitsToSource(targetSplits, splitFactory);
             if (stopped) {
@@ -719,7 +713,6 @@ Optional<Iterator<InternalHiveSplit>> buildManifestFileIterator(
                 getMaxInitialSplitSize(session),
                 isForceLocalScheduling(session),
                 s3SelectPushdownEnabled,
-                transaction,
                 maxSplitFileSize);
         return Optional.of(locatedFileStatuses.stream()
                 .map(locatedFileStatus -> splitFactory.createInternalHiveSplit(locatedFileStatus, OptionalInt.empty(), OptionalInt.empty(), splittable, Optional.empty()))
HiveSplitManager.java

@@ -238,7 +238,6 @@ public ConnectorSplitSource getSplits(
         int concurrency = isTransactionalTable(table.getParameters()) ? splitLoaderConcurrency : min(splitLoaderConcurrency, partitions.size());
         HiveSplitLoader hiveSplitLoader = new BackgroundHiveSplitLoader(
                 table,
-                hiveTable.getTransaction(),
                 hivePartitions,
                 hiveTable.getCompactEffectivePredicate(),
                 dynamicFilter,
InternalHiveSplitFactory.java

@@ -23,7 +23,6 @@
 import io.trino.plugin.hive.InternalHiveSplit;
 import io.trino.plugin.hive.InternalHiveSplit.InternalHiveBlock;
 import io.trino.plugin.hive.TableToPartitionMapping;
-import io.trino.plugin.hive.acid.AcidTransaction;
 import io.trino.plugin.hive.s3select.S3SelectPushdown;
 import io.trino.spi.HostAddress;
 import io.trino.spi.predicate.Domain;
@@ -88,7 +87,6 @@ public InternalHiveSplitFactory(
             DataSize minimumTargetSplitSize,
             boolean forceLocalScheduling,
             boolean s3SelectPushdownEnabled,
-            AcidTransaction transaction,
             Optional<Long> maxSplitFileSize)
     {
         this.fileSystem = requireNonNull(fileSystem, "fileSystem is null");
TestBackgroundHiveSplitLoader.java

@@ -119,7 +119,6 @@
 import static io.trino.plugin.hive.HiveTimestampPrecision.DEFAULT_PRECISION;
 import static io.trino.plugin.hive.HiveType.HIVE_INT;
 import static io.trino.plugin.hive.HiveType.HIVE_STRING;
-import static io.trino.plugin.hive.acid.AcidTransaction.NO_ACID_TRANSACTION;
 import static io.trino.plugin.hive.util.HiveBucketing.BucketingVersion.BUCKETING_V1;
 import static io.trino.plugin.hive.util.HiveUtil.getRegularColumnHandles;
 import static io.trino.spi.predicate.TupleDomain.withColumnDomains;
@@ -520,7 +519,6 @@ public void testPropagateException(boolean error, int threads)
 
         BackgroundHiveSplitLoader backgroundHiveSplitLoader = new BackgroundHiveSplitLoader(
                 SIMPLE_TABLE,
-                NO_ACID_TRANSACTION,
                 () -> new Iterator<>()
                 {
                     private boolean threw;
@@ -1074,7 +1072,6 @@ private BackgroundHiveSplitLoader backgroundHiveSplitLoader(
 
         return new BackgroundHiveSplitLoader(
                 table,
-                NO_ACID_TRANSACTION,
                 hivePartitionMetadatas,
                 compactEffectivePredicate,
                 dynamicFilter,
@@ -1107,7 +1104,6 @@ private BackgroundHiveSplitLoader backgroundHiveSplitLoader(List<LocatedFileStat
 
         return new BackgroundHiveSplitLoader(
                 SIMPLE_TABLE,
-                NO_ACID_TRANSACTION,
                 hivePartitionMetadatas,
                 TupleDomain.none(),
                 DynamicFilter.EMPTY,
@@ -1134,7 +1130,6 @@ private static BackgroundHiveSplitLoader backgroundHiveSplitLoaderOfflinePartiti
 
         return new BackgroundHiveSplitLoader(
                 SIMPLE_TABLE,
-                NO_ACID_TRANSACTION,
                 createPartitionMetadataWithOfflinePartitions(),
                 TupleDomain.all(),
                 DynamicFilter.EMPTY,
