This repository has been archived by the owner on May 24, 2023. It is now read-only.

Stop ignoring null nodes in crawlAsync #59

Merged · 1 commit · Mar 16, 2021
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -20,7 +20,7 @@ jobs:
strategy:
fail-fast: false
matrix:
-sdk: [beta]
+sdk: [dev]
steps:
- uses: actions/checkout@v2
- uses: dart-lang/setup-dart@v0.3
@@ -47,7 +47,7 @@ jobs:
matrix:
# Add macos-latest and/or windows-latest if relevant for this package.
os: [ubuntu-latest]
-sdk: [beta]
+sdk: [2.12.0, dev]
steps:
- uses: actions/checkout@v2
- uses: dart-lang/setup-dart@v0.3
6 changes: 5 additions & 1 deletion CHANGELOG.md
@@ -1,5 +1,9 @@
-# 1.0.0
+# 2.0.0-dev

+- **Breaking**: `crawlAsync` will no longer ignore a node from the graph if the
+  `readNode` callback returns null.
+
+# 1.0.0

- Migrate to null safety.
- **Breaking**: Paths from `shortestPath[s]` are now returned as iterables to
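To illustrate the breaking change in this changelog entry, here is a minimal sketch of a crawl over a hypothetical asset graph, assuming the post-change `crawlAsync` from `package:graphs`; the `assets` map and node names are illustrative only. Before this PR a node whose `readNode` returned null was silently dropped; afterwards the null value is emitted and the caller decides how to handle it.

```dart
import 'package:graphs/graphs.dart';

// Hypothetical graph: 'missing' has no resolvable node.
const assets = {
  'a': ['b', 'missing'],
  'b': <String>[],
};

Future<void> main() async {
  final nodes = await crawlAsync<String, String?>(
    ['a'],
    (id) async => assets.containsKey(id) ? id : null, // readNode
    (id, node) async => assets[id] ?? const <String>[], // edges
  ).toList();
  // Old behavior: ['a', 'b'], the unresolvable node was silently ignored.
  // New behavior: ['a', 'b', null] (order may vary), the null is surfaced.
  print(nodes);
}
```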
13 changes: 4 additions & 9 deletions lib/src/crawl_async.dart
@@ -5,7 +5,7 @@
import 'dart:async';
import 'dart:collection';

-final _empty = Future<Null>.value(null);
+final _empty = Future<void>.value();

/// Finds and returns every node in a graph whose nodes and edges are
/// asynchronously resolved.
@@ -22,10 +22,6 @@ final _empty = Future<Null>.value(null);
/// performed at every node in an asynchronous graph, but does not give
/// guarantees that the work is done in topological order.
///
-/// If [readNode] returns null for any key it will be ignored from the rest of
-/// the graph. If missing nodes are important they should be tracked within the
-/// [readNode] callback.
-///
/// If either [readNode] or [edges] throws the error will be forwarded
/// through the result stream and no further nodes will be crawled, though some
/// work may have already been started.
@@ -54,7 +50,7 @@ class _CrawlAsync<K, V> {

/// Add all nodes in the graph to [result] and return a Future which fires
/// after all nodes have been seen.
-Future<Null> run() async {
+Future<void> run() async {
try {
await Future.wait(roots.map(_visit), eagerError: true);
await result.close();
@@ -66,9 +62,8 @@ class _CrawlAsync<K, V> {

/// Resolve the node at [key] and output it, then start crawling all of its
/// edges.
-Future<Null> _crawlFrom(K key) async {
+Future<void> _crawlFrom(K key) async {
var value = await readNode(key);
-if (value == null) return;
if (result.isClosed) return;
result.add(value);
var next = await edges(key, value);
@@ -81,7 +76,7 @@ class _CrawlAsync<K, V> {
/// The returned Future will complete only after the work for [key] and all
/// transitively reachable nodes has either been finished, or will be finished
/// by some other Future in [_seen].
-Future<Null> _visit(K key) {
+Future<void> _visit(K key) {
if (_seen.contains(key)) return _empty;
_seen.add(key);
return _crawlFrom(key);
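Callers that depended on the old filtering can restore it by dropping null nodes after the crawl. The wrapper below is a hypothetical helper (its name and signature are not part of the package), sketched against the `crawlAsync` API shown in this diff.

```dart
import 'package:graphs/graphs.dart';

/// Hypothetical helper: reproduces the pre-2.0 behavior of dropping nodes
/// whose `readNode` result is null, by filtering them out of the stream.
Stream<V> crawlAsyncSkippingMissing<K, V extends Object>(
  Iterable<K> roots,
  Future<V?> Function(K key) readNode,
  Future<Iterable<K>> Function(K key, V? node) edges,
) =>
    crawlAsync<K, V?>(roots, readNode, edges)
        .where((node) => node != null)
        .cast<V>();
```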
6 changes: 3 additions & 3 deletions pubspec.yaml
@@ -1,10 +1,10 @@
name: graphs
-version: 1.0.0
+version: 2.0.0-dev
description: Graph algorithms that operate on graphs in any representation
homepage: https://github.com/dart-lang/graphs
repository: https://github.com/dart-lang/graphs

environment:
-sdk: '>=2.12.0-0 <3.0.0'
+sdk: '>=2.12.0 <3.0.0'

dev_dependencies:
pedantic: ^1.10.0
4 changes: 2 additions & 2 deletions test/crawl_async_test.dart
@@ -73,13 +73,13 @@ void main() {
expect(result, allOf(contains('a'), contains('b')));
});

-test('ignores null nodes', () async {
+test('allows null nodes', () async {
var result = await crawl({
'a': ['b'],
}, [
'a'
]);
-expect(result, ['a']);
+expect(result, ['a', null]);
});

test('surfaces exceptions for crawling edges', () {
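The error-handling contract quoted in the doc comment above (an exception from `readNode` or `edges` is forwarded through the result stream and stops further crawling) can be seen in a small sketch; the graph, node names, and error type here are hypothetical.

```dart
import 'package:graphs/graphs.dart';

Future<void> main() async {
  final stream = crawlAsync<String, String>(
    ['a'],
    // readNode: throws for the unreadable node.
    (id) async => id == 'bad' ? throw StateError('cannot read $id') : id,
    // edges: 'a' points at the node that will fail to load.
    (id, node) async => id == 'a' ? const ['bad'] : const <String>[],
  );
  try {
    await for (final node in stream) {
      print('crawled $node');
    }
  } on StateError catch (e) {
    // The exception is delivered through the stream, not thrown eagerly.
    print('crawl failed: $e');
  }
}
```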