Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Persistent collections updates (part 6) #180

Merged
merged 17 commits on Sep 20, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
17 commits
Select commit Hold shift + click to select a range
b3227e6
[PersistentCollections] New benchmarks measuring behavior on string data
lorentey Sep 20, 2022
53895c6
[PersistentCollections] Split up node mutations into multiple files
lorentey Sep 18, 2022
12e3867
[Collections] Allow determining if internal checking is enabled
lorentey Sep 20, 2022
9da672f
[PersistentDictionary] Fix index(forKey:) test
lorentey Sep 18, 2022
f7c419d
[PersistentDictionary] Rework allocations
lorentey Sep 19, 2022
d9cff57
[test][PersistentDictionary] Clean up fixture initialization
lorentey Sep 19, 2022
e11f923
[PersistentDictionary] Don’t bump version in uniqueKeys initializers
lorentey Sep 19, 2022
a2491aa
[PersistentDictionary] Improved copy & resize operations
lorentey Sep 19, 2022
310f801
[Utilities] Add UInt16 rank utilities
lorentey Sep 19, 2022
ea30715
[PersistentDictionary] Implement Keys and Values views
lorentey Sep 19, 2022
75edf6c
[PersistentDictionary] Add support for merging dictionaries & for in-…
lorentey Sep 19, 2022
6dc8de1
[PersistentDictionary] Add more initializers
lorentey Sep 19, 2022
71c881c
[PersistentDictionary] mapValues, compactMapValues, filter
lorentey Sep 19, 2022
b7bfc52
[test] Add Dictionary API checker protocol
lorentey Sep 19, 2022
8d7c777
[PersistentCollections] _children → children, itemEnd → itemsEndSlot,…
lorentey Sep 19, 2022
a28921b
Update Sources/PersistentCollections/PersistentDictionary/PersistentD…
lorentey Sep 20, 2022
39fc818
Update Sources/PersistentCollections/PersistentDictionary/PersistentD…
lorentey Sep 20, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Expand Up @@ -382,5 +382,75 @@ extension Benchmark {
blackHole(d)
}
}

// Benchmark: subscript insertion into a dictionary whose storage stays
// uniquely referenced, so each assignment can mutate in place.
// Only the insertion loop is timed; the precondition runs outside it.
self.add(
title: "PersistentDictionary<Large, Large> subscript, insert, unique",
input: [Large].self
) { input in
return { timer in
var d: PersistentDictionary<Large, Large> = [:]
timer.measure {
for value in input {
d[value] = value
}
}
precondition(d.count == input.count)
blackHole(d)
}
}

// Benchmark: subscript insertion, shared variant. `copy` keeps a second
// reference to the dictionary alive across each assignment, so updates go
// through the non-unique (shared-storage) path; `blackHole` prevents the
// optimizer from discarding the extra reference.
self.add(
title: "PersistentDictionary<Large, Large> subscript, insert, shared",
input: [Large].self
) { input in
return { timer in
var d: PersistentDictionary<Large, Large> = [:]
timer.measure {
for value in input {
let copy = d
d[value] = value
blackHole((copy, d))
}
}
precondition(d.count == input.count)
blackHole(d)
}
}

// Benchmark: removal of every existing key (assigning nil through the
// subscript) with uniquely referenced storage. `lookups` is presumably a
// permutation of `input`'s keys — the final precondition requires the
// dictionary to end up empty.
self.add(
title: "PersistentDictionary<Large, Large> subscript, remove existing, unique",
input: ([Large], [Large]).self
) { input, lookups in
return { timer in
var d = PersistentDictionary(
uniqueKeysWithValues: input.lazy.map { ($0, $0) })
timer.measure {
for key in lookups {
d[key] = nil
}
}
precondition(d.isEmpty)
blackHole(d)
}
}

// Benchmark: removal of every existing key, shared variant. As in the
// shared insertion case, `copy` forces each removal onto the
// shared-storage path.
self.add(
title: "PersistentDictionary<Large, Large> subscript, remove existing, shared",
input: ([Large], [Large]).self
) { input, lookups in
return { timer in
var d = PersistentDictionary(
uniqueKeysWithValues: input.lazy.map { ($0, $0) })
timer.measure {
for key in lookups {
let copy = d
d[key] = nil
blackHole((copy, d))
}
}
precondition(d.isEmpty)
blackHole(d)
}
}
}
}
13 changes: 13 additions & 0 deletions Benchmarks/Sources/benchmark-tool/main.swift
Expand Up @@ -11,8 +11,21 @@

import CollectionsBenchmark
import Benchmarks
import DequeModule

// Warn up front when the package was built with internal consistency
// checking enabled (see `Deque._isConsistencyCheckingEnabled`): invariant
// checks distort timings, so results from such a build are unreliable.
// The warning is non-fatal; the run proceeds anyway.
if Deque<Int>._isConsistencyCheckingEnabled {
complain("""
*** INTERNAL CONSISTENCY CHECKING IS ENABLED ***

Performance guarantees aren't valid in this configuration,
and benchmarking data will be largely useless. Proceed at
your own risk.

""")
}

// Assemble the benchmark suite: register custom input generators first,
// then each collection type's benchmark definitions.
var benchmark = Benchmark(title: "Collection Benchmarks")
benchmark.registerCustomGenerators()
benchmark.addArrayBenchmarks()
benchmark.addSetBenchmarks()
benchmark.addDictionaryBenchmarks()
Expand Down
6 changes: 6 additions & 0 deletions Sources/DequeModule/Deque+Testing.swift
Expand Up @@ -9,10 +9,16 @@
//
//===----------------------------------------------------------------------===//

import _CollectionsUtilities

// This file contains exported but non-public entry points to support clear box
// testing.

extension Deque {
/// Whether the package was compiled with internal consistency checking
/// enabled. Exported for clear box testing only; simply forwards the
/// package-wide `_isCollectionsInternalCheckingEnabled` flag.
public static var _isConsistencyCheckingEnabled: Bool {
  return _isCollectionsInternalCheckingEnabled
}

/// The maximum number of elements this deque is currently able to store
/// without reallocating its storage buffer.
///
Expand Down
Expand Up @@ -9,12 +9,18 @@
//
//===----------------------------------------------------------------------===//

import _CollectionsUtilities

extension OrderedSet._UnstableInternals {
  // Test-only (@_spi) windows onto the base set's internal hash-table
  // state; each property simply forwards the corresponding internal value.
  @_spi(Testing) public var capacity: Int { return base._capacity }
  @_spi(Testing) public var minimumCapacity: Int { return base._minimumCapacity }
  @_spi(Testing) public var scale: Int { return base._scale }
  @_spi(Testing) public var reservedScale: Int { return base._reservedScale }
  @_spi(Testing) public var bias: Int { return base._bias }

  /// Whether the package was compiled with internal consistency checking
  /// enabled; forwards the package-wide
  /// `_isCollectionsInternalCheckingEnabled` flag.
  public static var isConsistencyCheckingEnabled: Bool {
    return _isCollectionsInternalCheckingEnabled
  }
}

extension OrderedSet {
Expand Down
Expand Up @@ -27,7 +27,7 @@ extension _Node: CustomStringConvertible {
}
result += "\(key): \(value)"
}
for child in $0._children {
for child in $0.children {
if first {
first = false
} else {
Expand Down
6 changes: 3 additions & 3 deletions Sources/PersistentCollections/Node/_Node+Debugging.swift
Expand Up @@ -73,10 +73,10 @@ extension _Node.UnsafeHandle {
""")
guard limit > 0 else { return }
if iterationOrder {
for slot in stride(from: .zero, to: itemEnd, by: 1) {
for slot in stride(from: .zero, to: itemsEndSlot, by: 1) {
print(" \(restPrefix)[\(slot)] \(_itemString(at: slot))")
}
for slot in stride(from: .zero, to: childEnd, by: 1) {
for slot in stride(from: .zero, to: childrenEndSlot, by: 1) {
self[child: slot].dump(
iterationOrder: true,
limit: limit - 1,
Expand All @@ -86,7 +86,7 @@ extension _Node.UnsafeHandle {
}
}
else if isCollisionNode {
for slot in stride(from: .zero, to: itemEnd, by: 1) {
for slot in stride(from: .zero, to: itemsEndSlot, by: 1) {
print("\(restPrefix)[\(slot)] \(_itemString(at: slot))")
}
} else {
Expand Down
2 changes: 1 addition & 1 deletion Sources/PersistentCollections/Node/_Node+Equatable.swift
Expand Up @@ -42,7 +42,7 @@ extension _Node: Equatable where Value: Equatable {
guard l.reverseItems.elementsEqual(r.reverseItems, by: { $0 == $1 })
else { return false }

guard l._children.elementsEqual(r._children) else { return false }
guard l.children.elementsEqual(r.children) else { return false }
return true
}
}
Expand Down
16 changes: 8 additions & 8 deletions Sources/PersistentCollections/Node/_Node+Initializers.swift
Expand Up @@ -15,8 +15,7 @@ extension _Node {
_ item1: Element,
_ inserter2: (UnsafeMutablePointer<Element>) -> Void
) -> _Node {
var node = _Node(collisionCapacity: 2)
node.count = 2
var node = _Node(storage: Storage.allocate(itemCapacity: 2), count: 2)
node.update {
$0.collisionCount = 2
let byteCount = 2 * MemoryLayout<Element>.stride
Expand All @@ -38,8 +37,7 @@ extension _Node {
_ bucket2: _Bucket
) -> (node: _Node, slot1: _Slot, slot2: _Slot) {
assert(bucket1 != bucket2)
var node = _Node(itemCapacity: 2)
node.count = 2
var node = _Node(storage: Storage.allocate(itemCapacity: 2), count: 2)
let (slot1, slot2) = node.update {
$0.itemMap.insert(bucket1)
$0.itemMap.insert(bucket2)
Expand All @@ -59,8 +57,9 @@ extension _Node {
internal static func _regularNode(
_ child: _Node, _ bucket: _Bucket
) -> _Node {
var node = _Node(childCapacity: 1)
node.count = child.count
var node = _Node(
storage: Storage.allocate(childCapacity: 1),
count: child.count)
node.update {
$0.childMap.insert(bucket)
$0.bytesFree &-= MemoryLayout<_Node>.stride
Expand All @@ -78,8 +77,9 @@ extension _Node {
_ childBucket: _Bucket
) -> _Node {
assert(itemBucket != childBucket)
var node = _Node(itemCapacity: 1, childCapacity: 1)
node.count = child.count + 1
var node = _Node(
storage: Storage.allocate(itemCapacity: 1, childCapacity: 1),
count: child.count &+ 1)
node.update {
$0.itemMap.insert(itemBucket)
$0.childMap.insert(childBucket)
Expand Down
Expand Up @@ -36,7 +36,7 @@ extension _Node {
let itemBytes = $0.itemCount * MemoryLayout<Element>.stride
let childBytes = $0.childCount * MemoryLayout<_Node>.stride
assert(itemBytes + $0.bytesFree + childBytes == $0.byteCapacity)
let actualCount = $0._children.reduce($0.itemCount, { $0 + $1.count })
let actualCount = $0.children.reduce($0.itemCount, { $0 + $1.count })
assert(actualCount == self.count)
}
}
Expand Down
8 changes: 4 additions & 4 deletions Sources/PersistentCollections/Node/_Node+Lookups.swift
Expand Up @@ -54,7 +54,7 @@ extension _Node.UnsafeHandle {
}
// Note: this searches the items in reverse insertion order.
guard let slot = reverseItems.firstIndex(where: { $0.key == key })
else { return (1, self.itemEnd, _Hash(_value: 0)) }
else { return (1, self.itemsEndSlot, _Hash(_value: 0)) }
return (0, _Slot(itemCount &- 1 &- slot), _Hash(_value: 0))
}
}
Expand Down Expand Up @@ -142,7 +142,7 @@ extension _Node.UnsafeHandle {
return .found(.invalid, r.slot)
}
if r.code == 1 {
return .notFound(.invalid, self.itemEnd)
return .notFound(.invalid, self.itemsEndSlot)
}
assert(r.code == 2)
return .expansion(r.expansionHash)
Expand Down Expand Up @@ -214,7 +214,7 @@ extension _Node {
return nil
case .descend(_, let slot):
return read { h in
let children = h._children
let children = h.children
let p = children[slot.value]
.position(forKey: key, level.descend(), hash)
guard let p = p else { return nil }
Expand All @@ -234,7 +234,7 @@ extension _Node {
return $0[item: _Slot(itemsToSkip)]
}
itemsToSkip -= itemCount
let children = $0._children
let children = $0.children
for i in children.indices {
if itemsToSkip < children[i].count {
return children[i].item(position: itemsToSkip)
Expand Down