diff --git a/db/arena_wrapped_db_iter.cc b/db/arena_wrapped_db_iter.cc index 919ea566502..83cc3cfa503 100644 --- a/db/arena_wrapped_db_iter.cc +++ b/db/arena_wrapped_db_iter.cc @@ -138,10 +138,10 @@ Status ArenaWrappedDBIter::Refresh(const Snapshot* snapshot) { reinit_internal_iter(); break; } else { - delete *memtable_range_tombstone_iter_; - *memtable_range_tombstone_iter_ = new TruncatedRangeDelIterator( - std::unique_ptr<FragmentedRangeTombstoneIterator>(t), - &cfd->internal_comparator(), nullptr, nullptr); + *memtable_range_tombstone_iter_ = + std::make_unique<TruncatedRangeDelIterator>( + std::unique_ptr<FragmentedRangeTombstoneIterator>(t), + &cfd->internal_comparator(), nullptr, nullptr); } } db_impl->ReturnAndCleanupSuperVersion(cfd, sv); diff --git a/db/arena_wrapped_db_iter.h b/db/arena_wrapped_db_iter.h index 5c4d2db7776..a3b28556456 100644 --- a/db/arena_wrapped_db_iter.h +++ b/db/arena_wrapped_db_iter.h @@ -55,7 +55,8 @@ class ArenaWrappedDBIter : public Iterator { db_iter_->SetIter(iter); } - void SetMemtableRangetombstoneIter(TruncatedRangeDelIterator** iter) { + void SetMemtableRangetombstoneIter( + std::unique_ptr<TruncatedRangeDelIterator>* iter) { memtable_range_tombstone_iter_ = iter; } @@ -110,7 +111,8 @@ class ArenaWrappedDBIter : public Iterator { bool allow_refresh_ = true; // If this is nullptr, it means the mutable memtable does not contain range // tombstone when added under this DBIter. - TruncatedRangeDelIterator** memtable_range_tombstone_iter_ = nullptr; + std::unique_ptr<TruncatedRangeDelIterator>* memtable_range_tombstone_iter_ = + nullptr; }; // Generate the arena wrapped iterator class. 
diff --git a/db/db_impl/db_impl.cc b/db/db_impl/db_impl.cc index b67539352ae..f58f615f5fd 100644 --- a/db/db_impl/db_impl.cc +++ b/db/db_impl/db_impl.cc @@ -2064,19 +2064,19 @@ InternalIterator* DBImpl::NewInternalIterator( read_options, super_version->GetSeqnoToTimeMapping(), arena); Status s; if (!read_options.ignore_range_deletions) { - TruncatedRangeDelIterator* mem_tombstone_iter = nullptr; + std::unique_ptr<TruncatedRangeDelIterator> mem_tombstone_iter; auto range_del_iter = super_version->mem->NewRangeTombstoneIterator( read_options, sequence, false /* immutable_memtable */); if (range_del_iter == nullptr || range_del_iter->empty()) { delete range_del_iter; } else { - mem_tombstone_iter = new TruncatedRangeDelIterator( + mem_tombstone_iter = std::make_unique<TruncatedRangeDelIterator>( std::unique_ptr<FragmentedRangeTombstoneIterator>(range_del_iter), &cfd->ioptions()->internal_comparator, nullptr /* smallest */, nullptr /* largest */); } - merge_iter_builder.AddPointAndTombstoneIterator(mem_iter, - mem_tombstone_iter); + merge_iter_builder.AddPointAndTombstoneIterator( + mem_iter, std::move(mem_tombstone_iter)); } else { merge_iter_builder.AddIterator(mem_iter); } diff --git a/db/memtable_list.cc b/db/memtable_list.cc index ffa9de111fc..07487ca0bbe 100644 --- a/db/memtable_list.cc +++ b/db/memtable_list.cc @@ -235,19 +235,19 @@ void MemTableListVersion::AddIterators( SequenceNumber read_seq = options.snapshot != nullptr ? 
options.snapshot->GetSequenceNumber() : kMaxSequenceNumber; - TruncatedRangeDelIterator* mem_tombstone_iter = nullptr; + std::unique_ptr<TruncatedRangeDelIterator> mem_tombstone_iter; auto range_del_iter = m->NewRangeTombstoneIterator( options, read_seq, true /* immutale_memtable */); if (range_del_iter == nullptr || range_del_iter->empty()) { delete range_del_iter; } else { - mem_tombstone_iter = new TruncatedRangeDelIterator( + mem_tombstone_iter = std::make_unique<TruncatedRangeDelIterator>( std::unique_ptr<FragmentedRangeTombstoneIterator>(range_del_iter), &m->GetInternalKeyComparator(), nullptr /* smallest */, nullptr /* largest */); } - merge_iter_builder->AddPointAndTombstoneIterator(mem_iter, - mem_tombstone_iter); + merge_iter_builder->AddPointAndTombstoneIterator( + mem_iter, std::move(mem_tombstone_iter)); } } } diff --git a/db/table_cache.cc b/db/table_cache.cc index dc9e2f4b61d..71fc29c3227 100644 --- a/db/table_cache.cc +++ b/db/table_cache.cc @@ -230,7 +230,7 @@ InternalIterator* TableCache::NewIterator( const InternalKey* smallest_compaction_key, const InternalKey* largest_compaction_key, bool allow_unprepared_value, uint8_t block_protection_bytes_per_key, const SequenceNumber* read_seqno, - TruncatedRangeDelIterator** range_del_iter) { + std::unique_ptr<TruncatedRangeDelIterator>* range_del_iter) { PERF_TIMER_GUARD(new_table_iterator_nanos); Status s; @@ -285,7 +285,7 @@ InternalIterator* TableCache::NewIterator( delete new_range_del_iter; *range_del_iter = nullptr; } else { - *range_del_iter = new TruncatedRangeDelIterator( + *range_del_iter = std::make_unique<TruncatedRangeDelIterator>( std::unique_ptr<FragmentedRangeTombstoneIterator>( new_range_del_iter), &icomparator, &file_meta.smallest, &file_meta.largest); diff --git a/db/table_cache.h b/db/table_cache.h index 4dce0336791..f77d74bbe1e 100644 --- a/db/table_cache.h +++ b/db/table_cache.h @@ -100,7 +100,7 @@ class TableCache { const InternalKey* largest_compaction_key, bool allow_unprepared_value, uint8_t protection_bytes_per_key, const SequenceNumber* range_del_read_seqno = nullptr, - TruncatedRangeDelIterator** range_del_iter = nullptr); + std::unique_ptr<TruncatedRangeDelIterator>* 
range_del_iter = nullptr); // If a seek to internal key "k" in specified file finds an entry, // call get_context->SaveValue() repeatedly until diff --git a/db/version_set.cc b/db/version_set.cc index 454fca0503e..32de4f637ae 100644 --- a/db/version_set.cc +++ b/db/version_set.cc @@ -976,7 +976,8 @@ class LevelIterator final : public InternalIterator { const std::vector<AtomicCompactionUnitBoundary>* compaction_boundaries = nullptr, bool allow_unprepared_value = false, - TruncatedRangeDelIterator**** range_tombstone_iter_ptr_ = nullptr) + std::unique_ptr<TruncatedRangeDelIterator>*** range_tombstone_iter_ptr_ = + nullptr) : table_cache_(table_cache), read_options_(read_options), file_options_(file_options), @@ -1116,9 +1117,8 @@ class LevelIterator final : public InternalIterator { } void ClearRangeTombstoneIter() { - if (range_tombstone_iter_ && *range_tombstone_iter_) { - delete *range_tombstone_iter_; - *range_tombstone_iter_ = nullptr; + if (range_tombstone_iter_) { + range_tombstone_iter_->reset(); } } @@ -1201,7 +1201,7 @@ class LevelIterator final : public InternalIterator { // iterator end). 
// // *range_tombstone_iter_ points to range tombstones of the current SST file - TruncatedRangeDelIterator** range_tombstone_iter_; + std::unique_ptr<TruncatedRangeDelIterator>* range_tombstone_iter_; // The sentinel key to be returned Slice sentinel_; @@ -1929,7 +1929,7 @@ InternalIterator* Version::TEST_GetLevelIterator( int level, bool allow_unprepared_value) { auto* arena = merge_iter_builder->GetArena(); auto* mem = arena->AllocateAligned(sizeof(LevelIterator)); - TruncatedRangeDelIterator*** tombstone_iter_ptr = nullptr; + std::unique_ptr<TruncatedRangeDelIterator>** tombstone_iter_ptr = nullptr; auto level_iter = new (mem) LevelIterator( cfd_->table_cache(), read_options, file_options_, cfd_->internal_comparator(), &storage_info_.LevelFilesBrief(level), @@ -2029,7 +2029,7 @@ void Version::AddIteratorsForLevel(const ReadOptions& read_options, auto* arena = merge_iter_builder->GetArena(); if (level == 0) { // Merge all level zero files together since they may overlap - TruncatedRangeDelIterator* tombstone_iter = nullptr; + std::unique_ptr<TruncatedRangeDelIterator> tombstone_iter = nullptr; for (size_t i = 0; i < storage_info_.LevelFilesBrief(0).num_files; i++) { const auto& file = storage_info_.LevelFilesBrief(0).files[i]; auto table_iter = cfd_->table_cache()->NewIterator( @@ -2046,8 +2046,8 @@ void Version::AddIteratorsForLevel(const ReadOptions& read_options, if (read_options.ignore_range_deletions) { merge_iter_builder->AddIterator(table_iter); } else { - merge_iter_builder->AddPointAndTombstoneIterator(table_iter, - tombstone_iter); + merge_iter_builder->AddPointAndTombstoneIterator( + table_iter, std::move(tombstone_iter)); } } if (should_sample) { @@ -2064,7 +2064,7 @@ void Version::AddIteratorsForLevel(const ReadOptions& read_options, // walks through the non-overlapping files in the level, opening them // lazily. 
auto* mem = arena->AllocateAligned(sizeof(LevelIterator)); - TruncatedRangeDelIterator*** tombstone_iter_ptr = nullptr; + std::unique_ptr<TruncatedRangeDelIterator>** tombstone_iter_ptr = nullptr; auto level_iter = new (mem) LevelIterator( cfd_->table_cache(), read_options, soptions, cfd_->internal_comparator(), &storage_info_.LevelFilesBrief(level), @@ -7078,8 +7078,8 @@ InternalIterator* VersionSet::MakeInputIterator( // that will be initialized to where CompactionMergingIterator stores // pointer to its range tombstones. This is used by LevelIterator // to update pointer to range tombstones as it traverse different SST files. - std::vector< - std::pair<TruncatedRangeDelIterator*, TruncatedRangeDelIterator***>> + std::vector<std::pair<std::unique_ptr<TruncatedRangeDelIterator>, + std::unique_ptr<TruncatedRangeDelIterator>**>> range_tombstones; size_t num = 0; for (size_t which = 0; which < c->num_input_levels(); which++) { @@ -7101,7 +7101,8 @@ InternalIterator* VersionSet::MakeInputIterator( *end, fmd.smallest.user_key()) < 0) { continue; } - TruncatedRangeDelIterator* range_tombstone_iter = nullptr; + std::unique_ptr<TruncatedRangeDelIterator> range_tombstone_iter = + nullptr; list[num++] = cfd->table_cache()->NewIterator( read_options, file_options_compactions, cfd->internal_comparator(), fmd, range_del_agg, @@ -7118,11 +7119,13 @@ InternalIterator* VersionSet::MakeInputIterator( c->mutable_cf_options()->block_protection_bytes_per_key, /*range_del_read_seqno=*/nullptr, /*range_del_iter=*/&range_tombstone_iter); - range_tombstones.emplace_back(range_tombstone_iter, nullptr); + range_tombstones.emplace_back(std::move(range_tombstone_iter), + nullptr); } } else { // Create concatenating iterator for the files from this level - TruncatedRangeDelIterator*** tombstone_iter_ptr = nullptr; + std::unique_ptr<TruncatedRangeDelIterator>** tombstone_iter_ptr = + nullptr; list[num++] = new LevelIterator( cfd->table_cache(), read_options, file_options_compactions, cfd->internal_comparator(), c->input_levels(which), diff --git a/table/compaction_merging_iterator.cc b/table/compaction_merging_iterator.cc index 98581b16d76..de354ee7211 100644 --- a/table/compaction_merging_iterator.cc +++ 
b/table/compaction_merging_iterator.cc @@ -11,8 +11,8 @@ class CompactionMergingIterator : public InternalIterator { CompactionMergingIterator( const InternalKeyComparator* comparator, InternalIterator** children, int n, bool is_arena_mode, - std::vector< - std::pair<TruncatedRangeDelIterator*, TruncatedRangeDelIterator***>> + std::vector<std::pair<std::unique_ptr<TruncatedRangeDelIterator>, + std::unique_ptr<TruncatedRangeDelIterator>**>>& range_tombstones) : is_arena_mode_(is_arena_mode), comparator_(comparator), @@ -27,7 +27,7 @@ class CompactionMergingIterator : public InternalIterator { } assert(range_tombstones.size() == static_cast<size_t>(n)); for (auto& p : range_tombstones) { - range_tombstone_iters_.push_back(p.first); + range_tombstone_iters_.push_back(std::move(p.first)); } pinned_heap_item_.resize(n); for (int i = 0; i < n; ++i) { @@ -47,10 +47,7 @@ class CompactionMergingIterator : public InternalIterator { } ~CompactionMergingIterator() override { - // TODO: use unique_ptr for range_tombstone_iters_ - for (auto child : range_tombstone_iters_) { - delete child; - } + range_tombstone_iters_.clear(); for (auto& child : children_) { child.iter.DeleteIter(is_arena_mode_); @@ -197,7 +194,8 @@ class CompactionMergingIterator : public InternalIterator { // nullptr means the sorted run of children_[i] does not have range // tombstones (or the current SSTable does not have range tombstones in the // case of LevelIterator). 
- std::vector<TruncatedRangeDelIterator*> range_tombstone_iters_; + std::vector<std::unique_ptr<TruncatedRangeDelIterator>> + range_tombstone_iters_; // Used as value for range tombstone keys std::string dummy_tombstone_val{}; @@ -349,8 +347,9 @@ void CompactionMergingIterator::AddToMinHeapOrCheckStatus(HeapItem* child) { InternalIterator* NewCompactionMergingIterator( const InternalKeyComparator* comparator, InternalIterator** children, int n, - std::vector<std::pair<TruncatedRangeDelIterator*, TruncatedRangeDelIterator***>>& range_tombstone_iters, + std::vector<std::pair<std::unique_ptr<TruncatedRangeDelIterator>, + std::unique_ptr<TruncatedRangeDelIterator>**>>& + range_tombstone_iters, Arena* arena) { assert(n >= 0); if (n == 0) { diff --git a/table/compaction_merging_iterator.h b/table/compaction_merging_iterator.h index e3fd7797fd8..845e49a4ee3 100644 --- a/table/compaction_merging_iterator.h +++ b/table/compaction_merging_iterator.h @@ -38,7 +38,8 @@ class CompactionMergingIterator; InternalIterator* NewCompactionMergingIterator( const InternalKeyComparator* comparator, InternalIterator** children, int n, - std::vector<std::pair<TruncatedRangeDelIterator*, TruncatedRangeDelIterator***>>& range_tombstone_iters, + std::vector<std::pair<std::unique_ptr<TruncatedRangeDelIterator>, + std::unique_ptr<TruncatedRangeDelIterator>**>>& + range_tombstone_iters, Arena* arena = nullptr); } // namespace ROCKSDB_NAMESPACE diff --git a/table/merging_iterator.cc b/table/merging_iterator.cc index 833c6123eee..375c811c59f 100644 --- a/table/merging_iterator.cc +++ b/table/merging_iterator.cc @@ -96,8 +96,9 @@ class MergingIterator : public InternalIterator { // could be updated. In that case, this merging iterator is only responsible // for freeing the new range tombstone iterator that it has pointers to in // range_tombstone_iters_. 
- void AddRangeTombstoneIterator(TruncatedRangeDelIterator* iter) { - range_tombstone_iters_.emplace_back(iter); + void AddRangeTombstoneIterator( + std::unique_ptr<TruncatedRangeDelIterator>&& iter) { + range_tombstone_iters_.emplace_back(std::move(iter)); } // Called by MergingIteratorBuilder when all point iterators and range @@ -125,9 +126,7 @@ class MergingIterator : public InternalIterator { } ~MergingIterator() override { - for (auto child : range_tombstone_iters_) { - delete child; - } + range_tombstone_iters_.clear(); for (auto& child : children_) { child.iter.DeleteIter(is_arena_mode_); @@ -624,7 +623,8 @@ class MergingIterator : public InternalIterator { // Invariant(rti): pinned_heap_item_[i] is in minHeap_ iff // range_tombstone_iters_[i]->Valid() and at most one pinned_heap_item_[i] is // in minHeap_. - std::vector<TruncatedRangeDelIterator*> range_tombstone_iters_; + std::vector<std::unique_ptr<TruncatedRangeDelIterator>> + range_tombstone_iters_; // Levels (indices into range_tombstone_iters_/children_ ) that currently have // "active" range tombstones. See comments above MergingIterator for meaning @@ -841,7 +841,8 @@ void MergingIterator::SeekImpl(const Slice& target, size_t starting_level, prefetched_target.emplace_back( level, current_search_key.GetInternalKey().ToString()); } - auto range_tombstone_iter = range_tombstone_iters_[level]; + UnownedPtr<TruncatedRangeDelIterator> range_tombstone_iter = + range_tombstone_iters_[level].get(); if (range_tombstone_iter) { range_tombstone_iter->SeekInternalKey( current_search_key.GetInternalKey()); @@ -1125,7 +1126,8 @@ void MergingIterator::SeekForPrevImpl(const Slice& target, prefetched_target.emplace_back( level, current_search_key.GetInternalKey().ToString()); } - auto range_tombstone_iter = range_tombstone_iters_[level]; + UnownedPtr<TruncatedRangeDelIterator> range_tombstone_iter = + range_tombstone_iters_[level].get(); if (range_tombstone_iter) { range_tombstone_iter->SeekForPrev(current_search_key.GetUserKey()); if (range_tombstone_iter->Valid()) { @@ -1349,7 +1351,8 @@ void MergingIterator::SwitchToForward() { ParseInternalKey(target, &pik, 
false /* log_err_key */) .PermitUncheckedError(); for (size_t i = 0; i < range_tombstone_iters_.size(); ++i) { - auto iter = range_tombstone_iters_[i]; + UnownedPtr<TruncatedRangeDelIterator> iter = + range_tombstone_iters_[i].get(); if (iter) { iter->Seek(pik.user_key); // The while loop is needed as the Seek() call above is only for user @@ -1395,7 +1398,8 @@ void MergingIterator::SwitchToBackward() { ParseInternalKey(target, &pik, false /* log_err_key */) .PermitUncheckedError(); for (size_t i = 0; i < range_tombstone_iters_.size(); ++i) { - auto iter = range_tombstone_iters_[i]; + UnownedPtr<TruncatedRangeDelIterator> iter = + range_tombstone_iters_[i].get(); if (iter) { iter->SeekForPrev(pik.user_key); // Since the SeekForPrev() call above is only for user key, @@ -1690,8 +1694,9 @@ void MergeIteratorBuilder::AddIterator(InternalIterator* iter) { } void MergeIteratorBuilder::AddPointAndTombstoneIterator( - InternalIterator* point_iter, TruncatedRangeDelIterator* tombstone_iter, - TruncatedRangeDelIterator*** tombstone_iter_ptr) { + InternalIterator* point_iter, + std::unique_ptr<TruncatedRangeDelIterator>&& tombstone_iter, + std::unique_ptr<TruncatedRangeDelIterator>** tombstone_iter_ptr) { // tombstone_iter_ptr != nullptr means point_iter is a LevelIterator. bool add_range_tombstone = tombstone_iter || !merge_iter->range_tombstone_iters_.empty() || @@ -1711,7 +1716,7 @@ void MergeIteratorBuilder::AddPointAndTombstoneIterator( merge_iter->children_.size() - 1) { merge_iter->AddRangeTombstoneIterator(nullptr); } - merge_iter->AddRangeTombstoneIterator(tombstone_iter); + merge_iter->AddRangeTombstoneIterator(std::move(tombstone_iter)); } if (tombstone_iter_ptr) { diff --git a/table/merging_iterator.h b/table/merging_iterator.h index 66351bcc39a..b21a3e8ef9f 100644 --- a/table/merging_iterator.h +++ b/table/merging_iterator.h @@ -70,8 +70,10 @@ class MergeIteratorBuilder { // point iterators are not LevelIterator, then range tombstone iterator is // only added to the merging iter if there is a non-null `tombstone_iter`. 
void AddPointAndTombstoneIterator( - InternalIterator* point_iter, TruncatedRangeDelIterator* tombstone_iter, - TruncatedRangeDelIterator*** tombstone_iter_ptr = nullptr); + InternalIterator* point_iter, + std::unique_ptr<TruncatedRangeDelIterator>&& tombstone_iter, + std::unique_ptr<TruncatedRangeDelIterator>** tombstone_iter_ptr = + nullptr); // Get arena used to build the merging iterator. It is called one a child // iterator needs to be allocated. @@ -91,7 +93,7 @@ class MergeIteratorBuilder { Arena* arena; // Used to set LevelIterator.range_tombstone_iter_. // See AddRangeTombstoneIterator() implementation for more detail. - std::vector<std::pair<size_t, TruncatedRangeDelIterator***>> + std::vector<std::pair<size_t, std::unique_ptr<TruncatedRangeDelIterator>**>> range_del_iter_ptrs_; };