Skip to content

Commit

Permalink
[macros] JSON benchmarks, JsonBuffer experiment. (#3883)
Browse files Browse the repository at this point in the history
  • Loading branch information
davidmorgan committed Jun 6, 2024
1 parent dd11c4e commit 341764a
Show file tree
Hide file tree
Showing 11 changed files with 662 additions and 3 deletions.
6 changes: 5 additions & 1 deletion working/macros/dart_model/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ as a library;\
The "macros" referred to in this exploration are independent of the in-progress
macro implementation, hence the "scare quotes".

## Benchmarks
## End to End Benchmarks

`testing/benchmark` is a tool to assist in benchmarking; it creates codebases
of the specified size and codegen strategy.
Expand Down Expand Up @@ -65,3 +65,7 @@ $ dart bin/main.dart
# files to see how the analyzer responds; you can watch the macro host terminal
# to see when it is rewriting augmentation files.
```

## Serialization Benchmarks

`testing/json_benchmark` contains benchmarks related to JSON serialization.
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
import 'package:json_benchmark/json_benchmark.dart';

/// Entry point: runs the full JSON serialization benchmark suite.
Future<void> main() async {
  await JsonBenchmark().run();
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
import 'dart:io';

import 'json_buffer_subject.dart';
import 'json_subject.dart';

/// Micro-benchmarks comparing JSON serialization strategies.
///
/// Prints CSV rows to stdout: subject name, scenario name, serialized data
/// size in bytes, and mean wall-clock time per iteration in milliseconds.
class JsonBenchmark {
  /// How many times each scenario runs — once untimed for warmup, then once
  /// timed for measurement.
  static const _repetitions = 100;

  /// Scratch file used by the read/write scenarios.
  ///
  /// Placed under the system temp directory rather than a hard-coded `/tmp`
  /// so the tool also works on platforms without `/tmp` (e.g. Windows).
  final File _scratchFile = File('${Directory.systemTemp.path}/benchmark');

  Future<void> run() async {
    final jsonSubject = JsonSubject();

    print('Subject,Scenario,Data size/bytes,Time per/ms');
    // Five identical `JsonBufferSubject`s so run-to-run variance is visible
    // in the output; `JsonSubject` is currently disabled for comparison runs.
    for (final subject in [
      for (var i = 0; i != 5; ++i) JsonBufferSubject(),
      /*JsonSubject()*/
    ]) {
      for (final size in [256]) {
        // Build one fixture per size: neutral (plain map) data, the
        // subject's own in-memory representation of it, and its serialized
        // bytes. These are reused across scenarios so each scenario times
        // only its own work.
        final neutralData = jsonSubject.createData(libraryCount: size);
        final subjectData = subject.deepCopyIn(neutralData);
        final byteData = subject.serialize(subjectData);
        final byteLength = byteData.length;

        // Write-side scenarios.
        await benchmark(subject.name, 'create', byteLength,
            () => subject.createData(libraryCount: size));
        await benchmark(subject.name, 'deepCopyIn', byteLength,
            () => subject.deepCopyIn(neutralData));
        await benchmark(subject.name, 'serialize', byteLength,
            () => subject.serialize(subjectData));
        await benchmark(subject.name, 'writeSync', byteLength,
            () => _scratchFile.writeAsBytesSync(byteData));
        await benchmark(
            subject.name,
            'copySerializeWrite',
            byteLength,
            () => _scratchFile.writeAsBytesSync(
                subject.serialize(subject.deepCopyIn(neutralData))));
        await benchmark(
            subject.name,
            'createSerializeWrite',
            byteLength,
            () => _scratchFile.writeAsBytesSync(
                subject.serialize(subject.createData(libraryCount: size))));

        // Read-side scenarios.
        await benchmark(
            subject.name, 'process', byteLength, () => process(subjectData));
        await benchmark(subject.name, 'deepCopyOut', byteLength,
            () => subject.deepCopyOut(subjectData));
        await benchmark(subject.name, 'readSync', byteLength,
            () => _scratchFile.readAsBytesSync());
        await benchmark(subject.name, 'deserialize', byteLength,
            () => subject.deserialize(byteData));
        await benchmark(
            subject.name,
            'readDeserializeCopy',
            byteLength,
            () => subject
                .deepCopyOut(subject.deserialize(_scratchFile.readAsBytesSync())));
        await benchmark(
            subject.name,
            'readDeserializeProcess',
            byteLength,
            () =>
                process(subject.deserialize(_scratchFile.readAsBytesSync())));
      }
    }
  }

  /// Computes an order-independent XOR checksum over [data].
  ///
  /// Exists to force a full traversal of the data structure — recursing into
  /// nested maps — so "process" scenarios actually touch every key and value
  /// and the work cannot be optimized away.
  int process(Map<String, Object?> data) {
    var result = 0;
    for (final entry in data.entries) {
      result ^= entry.key.hashCode;
      final value = entry.value;
      if (value is Map<String, Object?>) {
        result ^= process(value);
      } else {
        result ^= value.hashCode;
      }
    }
    return result;
  }

  /// Times [subject] and prints one CSV row.
  ///
  /// Runs [subject] [_repetitions] times untimed as warmup, then
  /// [_repetitions] more inside a stopwatch, and reports the mean time per
  /// iteration in milliseconds. Timing uses microsecond resolution so that
  /// scenarios faster than one millisecond total don't all report 0.0.
  Future<void> benchmark(String subjectName, String scenarioName, int length,
      Function subject) async {
    // Warmup: let the VM JIT/compile the code path before measuring.
    for (var i = 0; i != _repetitions; ++i) {
      subject();
    }
    final stopwatch = Stopwatch()..start();
    for (var i = 0; i != _repetitions; ++i) {
      subject();
    }
    final elapsedMs = stopwatch.elapsedMicroseconds / 1000;
    print('$subjectName,$scenarioName,$length,${elapsedMs / _repetitions}');
  }
}
Loading

0 comments on commit 341764a

Please sign in to comment.