650 changes: 426 additions & 224 deletions dustmite.d
Original file line number Diff line number Diff line change
Expand Up @@ -25,23 +25,28 @@ alias std.string.join join;

string dir, resultDir, tester, globalCache;
size_t maxBreadth;
Entity[] set;

struct Times { Duration load, testSave, resultSave, test, clean, cacheHash, misc; }
Times times;
SysTime lastTime;
Duration elapsedTime() { auto c = Clock.currTime(); auto d = c - lastTime; lastTime = c; return d; }
void measure(string what)(void delegate() p) { times.misc += elapsedTime(); p(); mixin("times."~what~" += elapsedTime();"); }
Entity root;
int tests; bool foundAnything;
bool noSave;

struct Times { StopWatch total, load, testSave, resultSave, test, clean, cacheHash, globalCache, misc; }
Times times;
static this() { times.total.start(); times.misc.start(); }
void measure(string what)(void delegate() p)
{
times.misc.stop(); mixin("times."~what~".start();");
p();
mixin("times."~what~".stop();"); times.misc.start();
}

struct Reduction
{
enum Type { None, Remove, Unwrap, ReplaceWord }
Type type;

// Remove / Unwrap
size_t[] address;
Entity target;

// ReplaceWord
string from, to;
Expand All @@ -60,17 +65,18 @@ struct Reduction
case Reduction.Type.Remove:
case Reduction.Type.Unwrap:
string[] segments = new string[address.length];
Entity[] e = set;
real progress = 0.0, progressFraction = 100.0;
Entity e = root;
size_t progress;
bool binary = maxBreadth == 2;
foreach (i, a; address)
{
segments[i] = binary ? text(a) : format("%d/%d", e.length-a, e.length);
progressFraction /= e.length;
progress += progressFraction * (e.length-a-1);
e = e[a].children;
segments[i] = binary ? text(a) : format("%d/%d", e.children.length-a, e.children.length);
foreach (c; e.children[a+1..$])
progress += c.descendants;
progress++; // account for this node
e = e.children[a];
}
return format("[%5.1f%%] %s [%s]", progress, name, segments.join(binary ? "" : ", "));
return format("[%5.1f%%] %s [%s]", progress * 100.0 / root.descendants, name, segments.join(binary ? "" : ", "));
}
}
}
Expand All @@ -79,7 +85,7 @@ auto nullReduction = Reduction(Reduction.Type.None);

int main(string[] args)
{
bool force, dump, showTimes, stripComments, obfuscate, keepLength, showHelp;
bool force, dump, showTimes, stripComments, obfuscate, keepLength, showHelp, noOptimize;
string coverageDir;
string[] noRemoveStr;

Expand All @@ -93,14 +99,15 @@ int main(string[] args)
"dump", &dump,
"times", &showTimes,
"cache", &globalCache, // for research
"nosave", &noSave, // for research
"nosave|no-save", &noSave, // for research
"no-optimize", &noOptimize, // for research
"h|help", &showHelp
);

if (showHelp || args.length == 1 || args.length>3)
{
stderr.write(
"Usage: "~args[0]~" [OPTION]... PATH TESTER
stderr.writef(q"EOS
Usage: %s [OPTION]... PATH TESTER
PATH should be a directory containing a clean copy of the file-set to reduce.
A file path can also be specified. NAME.EXT will be treated like NAME/NAME.EXT.
TESTER should be a shell command which returns 0 for a correct reduction,
Expand All @@ -114,10 +121,29 @@ Supported options:
--obfuscate Instead of reducing, obfuscates the input by replacing
words with random substitutions
--keep-length Preserve word length when obfuscating
EOS", args[0]);

if (!showHelp)
{
stderr.write(q"EOS
--help Show this message and some less interesting options
EOS");
}
else
{
stderr.write(q"EOS
--help Show this message
Less interesting options:
--dump Dump parsed tree to DIR.dump file
--times Display verbose spent time breakdown
");
return 64; // EX_USAGE
--cache DIR Use DIR as persistent disk cache
(in addition to memory cache)
--no-save Disable saving in-progress result
--no-optimize Disable tree optimization step
(may be useful with --dump)
EOS");
}
return showHelp ? 0 : 64; // EX_USAGE
}

enforce(!(stripComments && coverageDir), "Sorry, --strip-comments is not compatible with --coverage");
Expand All @@ -137,21 +163,20 @@ Supported options:
return 1;
}

auto startTime = lastTime = Clock.currTime();

ParseOptions parseOptions;
parseOptions.stripComments = stripComments;
parseOptions.mode = obfuscate ? ParseOptions.Mode.Words : ParseOptions.Mode.Source;
measure!"load"({set = loadFiles(dir, parseOptions);});
enforce(set.length, "No files in specified directory");
measure!"load"({root = loadFiles(dir, parseOptions);});
enforce(root.children.length, "No files in specified directory");

if (!obfuscate)
optimize(set);
applyNoRemoveMagic();
applyNoRemoveRegex(noRemoveStr);
if (coverageDir)
loadCoverage(coverageDir);
maxBreadth = getMaxBreadth(set);
if (!obfuscate && !noOptimize)
optimize(root);
maxBreadth = getMaxBreadth(root);
countDescendants(root);

if (dump)
dumpSet(dir ~ ".dump");
Expand All @@ -165,7 +190,7 @@ Supported options:
resultDir = dir ~ ".reduced";
enforce(!exists(resultDir), "Result directory already exists");

if (!test(Reduction(Reduction.Type.None)))
if (!test(nullReduction))
throw new Exception("Initial test fails");

foundAnything = false;
Expand All @@ -174,7 +199,7 @@ Supported options:
else
reduce();

auto duration = Clock.currTime()-startTime;
auto duration = cast(Duration)times.total.peek();
duration = dur!"msecs"(duration.total!"msecs"); // truncate anything below ms, users aren't interested in that
if (foundAnything)
{
Expand All @@ -187,50 +212,52 @@ Supported options:

if (showTimes)
foreach (i, t; times.tupleof)
writefln("%s: %s", times.tupleof[i].stringof, times.tupleof[i]);
writefln("%s: %s", times.tupleof[i].stringof, cast(Duration)times.tupleof[i].peek());

return 0;
}

size_t getMaxBreadth(Entity[] set)
size_t getMaxBreadth(Entity e)
{
size_t breadth = set.length;
foreach (ref child; set)
size_t breadth = e.children.length;
foreach (child; e.children)
{
auto childBreadth = getMaxBreadth(child.children);
auto childBreadth = getMaxBreadth(child);
if (breadth < childBreadth)
breadth = childBreadth;
}
return breadth;
}

/// Recompute and cache the subtree size for `e` and every node below it.
/// Stores the result in each node's `descendants` field and returns the
/// size of `e`'s subtree (the node itself plus all of its descendants).
size_t countDescendants(Entity e)
{
	size_t total = 1; // count this node itself
	foreach (child; e.children)
		total += countDescendants(child);
	e.descendants = total;
	return total;
}

/// Debug check: verify that the cached `descendants` counts throughout
/// `e`'s subtree are consistent with the actual tree shape, without
/// mutating anything. Returns the subtree size (node plus descendants).
size_t checkDescendants(Entity e)
{
	size_t n = 1; // this node
	foreach (c; e.children)
		// Recurse via checkDescendants, NOT countDescendants: the latter
		// overwrites each child's cached count, which both mutates state
		// during a supposed read-only check and silently "repairs" any
		// stale count below this level before the assert can catch it.
		n += checkDescendants(c);
	assert(e.descendants == n);
	return n;
}

/// Try reductions at address. Edit set, save result and return true on successful reduction.
bool testAddress(size_t[] address)
{
bool reduceSet(size_t[] subAddress, ref Entity[] entities)
{
auto i = subAddress[0];
if (subAddress.length > 1)
return reduceSet(subAddress[1..$], entities[i].children);

if (test(Reduction(Reduction.Type.Remove, address)))
{
entities = remove(entities, i);
saveResult();
return true;
}
else
if (entities[i].head.length && entities[i].tail.length && test(Reduction(Reduction.Type.Unwrap, address)))
{
entities[i].head = entities[i].tail = null;
saveResult();
return true;
}
else
return false;
}
auto e = entityAt(address);

return reduceSet(address, set);
if (tryReduction(Reduction(Reduction.Type.Remove, address, e)))
return true;
else
if (e.head.length && e.tail.length && tryReduction(Reduction(Reduction.Type.Unwrap, address, e)))
return true;
else
return false;
}

void testLevel(int testDepth, out bool tested, out bool changed)
Expand All @@ -240,33 +267,33 @@ void testLevel(int testDepth, out bool tested, out bool changed)
enum MAX_DEPTH = 1024;
size_t[MAX_DEPTH] address;

void scan(ref Entity[] entities, int depth)
void scan(Entity e, int depth)
{
foreach_reverse (i; 0..entities.length)
if (depth < testDepth)
{
address[depth] = i;
if (depth < testDepth)
// recurse
foreach_reverse (i, c; e.children)
{
// recurse
scan(entities[i].children, depth+1);
}
else
if (entities[i].noRemove)
{
// skip, but don't stop going deeper
tested = true;
}
else
{
// test
tested = true;
if (testAddress(address[0..depth+1]))
changed = true;
address[depth] = i;
scan(c, depth+1);
}
}
else
if (e.noRemove)
{
// skip, but don't stop going deeper
tested = true;
}
else
{
// test
tested = true;
if (testAddress(address[0..depth]))
changed = true;
}
}

scan(set, 0);
scan(root, 0);

//writefln("Scan results: tested=%s, changed=%s", tested, changed);
}
Expand Down Expand Up @@ -352,37 +379,37 @@ void reduceInDepth()
enum MAX_DEPTH = 1024;
size_t[MAX_DEPTH] address;

void scan(ref Entity[] entities, int depth)
void scan(Entity e, int depth)
{
foreach_reverse (i; 0..entities.length)
if (e.noRemove)
{
address[depth] = i;
if (entities[i].noRemove)
{
// skip, but don't stop going deeper
}
else
// skip, but don't stop going deeper
}
else
{
// test
if (testAddress(address[0..depth]))
{
// test
if (testAddress(address[0..depth+1]))
{
changed = true;
continue;
}
changed = true;
return;
}
}

// recurse
scan(entities[i].children, depth+1);
// recurse
foreach_reverse (i, c; e.children)
{
address[depth] = i;
scan(c, depth+1);
}
}

scan(set, 0);
} while (changed); // stop when we couldn't reduce anything this iteration
scan(root, 0);
} while (changed && root.children.length); // stop when we couldn't reduce anything this iteration
}

void reduce()
{
//reduceIterative();
//reduceCareful();
//reduceLookback();
reduceInDepth();
}
Expand All @@ -392,9 +419,9 @@ void obfuscate(bool keepLength)
bool[string] wordSet;
string[] words; // preserve file order

foreach (f; set)
foreach (f; root.children)
{
foreach (ref entity; parseToWords(f.filename) ~ f.children)
foreach (entity; parseToWords(f.filename) ~ f.children)
if (entity.head.length && !isDigit(entity.head[0]))
if (entity.head !in wordSet)
{
Expand All @@ -418,8 +445,8 @@ void obfuscate(bool keepLength)
}
else
{
static int index;
index++;
static int n;
int index = n++;

string result;
result ~= first[index % $];
Expand All @@ -445,111 +472,205 @@ void obfuscate(bool keepLength)
while (r.to in wordSet && tries++ < 10);
wordSet[r.to] = true;

if (test(r))
{
foreach (ref f; set)
{
f.filename = applyReductionToPath(f.filename, r);
foreach (ref entity; f.children)
if (entity.head == r.from)
entity.head = r.to;
}
saveResult();
}
tryReduction(r);
}
}

void dump(Entity[] entities, ref Reduction reduction, void delegate(string) writer, bool fileLevel)
bool skipEntity(Entity e)
{
auto childReduction = reduction;
if (e.removed)
return true;
foreach (dependency; e.dependencies)
if (skipEntity(dependency))
return true;
return false;
}

foreach (i, ref e; entities)
void dump(Entity root, ref Reduction reduction, void delegate(string) handleFile, void delegate(string) handleText)
{
void dumpEntity(Entity e)
{
if (reduction.type == Reduction.Type.ReplaceWord)
{
if (fileLevel)
if (e.isFile)
{
if (e.filename)
{
writer(applyReductionToPath(e.filename, reduction));
dump(e.children, reduction, writer, false);
}
else
dump(e.children, reduction, writer, true);

assert(e.tail.length==0);
assert(e.head.length==0 && e.tail.length==0);
handleFile(applyReductionToPath(e.filename, reduction));
foreach (c; e.children)
dumpEntity(c);
}
else
if (e.head)
{
assert(e.children.length==0);
if (e.head == reduction.from)
writer(reduction.to);
handleText(reduction.to);
else
writer(e.head);
writer(e.tail);
handleText(e.head);
handleText(e.tail);
}
else
foreach (c; e.children)
dumpEntity(c);
}
else
if (reduction.address.length==1 && reduction.address[0]==i)
if (e is reduction.target)
{
final switch (reduction.type)
{
case Reduction.Type.None:
case Reduction.Type.ReplaceWord:
assert(0);
case Reduction.Type.Remove: // skip this entity
continue;
return;
case Reduction.Type.Unwrap: // skip head/tail
dump(e.children, nullReduction, writer, fileLevel && e.filename is null);
foreach (c; e.children)
dumpEntity(c);
break;
}
}
else
if (skipEntity(e))
return;
else
if (e.isFile)
{
if (e.head.length) writer(e.head);
childReduction.address = reduction.address.length>1 && reduction.address[0]==i ? reduction.address[1..$] : null;
dump(e.children, childReduction, writer, fileLevel && e.filename is null);
if (e.tail.length) writer(e.tail);
handleFile(e.filename);
foreach (c; e.children)
dumpEntity(c);
}
else
{
if (e.head.length) handleText(e.head);
foreach (c; e.children)
dumpEntity(c);
if (e.tail.length) handleText(e.tail);
}
}

debug verifyNotRemoved(root);
if (reduction.type == Reduction.Type.Remove)
markRemoved(reduction.target, true); // Needed for dependencies

dumpEntity(root);

if (reduction.type == Reduction.Type.Remove)
markRemoved(reduction.target, false);
debug verifyNotRemoved(root);
}

void save(Reduction reduction, string savedir)
{
enforce(!exists(savedir), "Directory already exists: " ~ savedir);
mkdirRecurse(savedir);
auto childReduction = reduction;

void saveFiles(Entity[] entities, size_t[] address)
File o;

void handleFile(string fn)
{
foreach (i, f; entities)
auto path = std.path.join(savedir, fn);
if (!exists(dirname(path)))
mkdirRecurse(dirname(path));

if (o.isOpen)
o.close();
o.open(path, "wb");
}

dump(root, reduction, &handleFile, &o.write!string);

if (o.isOpen)
o.close();
}

/// Resolve a tree address — a path of child indices starting at the
/// global `root` — to the entity it denotes.
/// An empty address yields `root` itself.
Entity entityAt(size_t[] address)
{
	auto node = root;
	size_t depth = 0;
	while (depth < address.length)
	{
		node = node.children[address[depth]];
		depth++;
	}
	return node;
}

/// Try specified reduction. If it succeeds, apply it permanently and save intermediate result.
bool tryReduction(Reduction r)
{
if (test(r))
{
foundAnything = true;
debug
auto hashBefore = hash(r);
applyReduction(r);
debug
{
if (address.length==1 && address[0]==i) // skip this file / file group
auto hashAfter = hash(nullReduction);
assert(hashBefore == hashAfter, "Reduction preview/application mismatch");
}
saveResult();
return true;
}
return false;
}

/// Debug helper: assert that no entity in `e`'s subtree still carries
/// the transient `removed` flag, i.e. any preview marking made while
/// evaluating a Remove reduction has been fully undone.
void verifyNotRemoved(Entity e)
{
	// Iterative depth-first walk with an explicit work list.
	Entity[] pending = [e];
	while (pending.length)
	{
		auto node = pending[$ - 1];
		pending = pending[0 .. $ - 1];
		assert(!node.removed);
		pending ~= node.children;
	}
}

/// Set the `removed` marker on `e` and its entire subtree to `value`.
/// The marker must currently hold the opposite value (asserted), which
/// keeps mark/unmark calls strictly paired.
void markRemoved(Entity e, bool value)
{
	assert(e.removed != value);
	e.removed = value;
	foreach (child; e.children)
		markRemoved(child, value);
}

/// Permanently apply specified reduction to set.
void applyReduction(ref Reduction r)
{
final switch (r.type)
{
case Reduction.Type.None:
return;
case Reduction.Type.ReplaceWord:
{
foreach (ref f; root.children)
{
assert(reduction.type == Reduction.Type.Remove);
continue;
f.filename = applyReductionToPath(f.filename, r);
foreach (ref entity; f.children)
if (entity.head == r.from)
entity.head = r.to;
}
return;
}
case Reduction.Type.Remove:
{
debug verifyNotRemoved(root);

auto nextAddress = address.length>1 && address[0]==i ? address[1..$] : null;
markRemoved(entityAt(r.address), true);

if (f.filename)
if (r.address.length)
{
auto casualties = entityAt(r.address).descendants;
foreach (n; 0..r.address.length)
entityAt(r.address[0..n]).descendants -= casualties;

auto path = std.path.join(savedir, applyReductionToPath(f.filename, reduction));
if (!exists(dirname(path)))
mkdirRecurse(dirname(path));

auto o = File(path, "wb");
childReduction.address = nextAddress;
dump(f.children, childReduction, &o.write!string, false);
o.close();
auto p = entityAt(r.address[0..$-1]);
p.children = remove(p.children, r.address[$-1]);
}
else
saveFiles(f.children, nextAddress);
root = new Entity();

debug verifyNotRemoved(root);
debug checkDescendants(root);
return;
}
case Reduction.Type.Unwrap:
with (entityAt(r.address))
head = tail = null;
return;
}

saveFiles(set, reduction.address);
}

string applyReductionToPath(string path, Reduction reduction)
Expand All @@ -558,7 +679,7 @@ string applyReductionToPath(string path, Reduction reduction)
{
Entity[] words = parseToWords(path);
string result;
foreach (i, ref word; words)
foreach (i, word; words)
{
if (i > 0 && i == words.length-1 && words[i-1].tail.endsWith("."))
result ~= word.head; // skip extension
Expand Down Expand Up @@ -595,7 +716,7 @@ void safeSave(string savedir)
{
auto tempdir = savedir ~ ".inprogress"; scope(failure) safeRmdirRecurse(tempdir);
if (exists(tempdir)) safeRmdirRecurse(tempdir);
save(Reduction(Reduction.Type.None), tempdir);
save(nullReduction, tempdir);
if (exists(savedir)) safeRmdirRecurse(savedir);
rename(tempdir, savedir);
}
Expand All @@ -620,7 +741,8 @@ version(HAVE_AE)
{
static StringBuffer sb;
sb.clear();
dump(set, reduction, &sb.put!string, true);
auto writer = &sb.put!string;
dump(root, reduction, writer, writer);
return murmurHash3_128(sb.get());
}

Expand All @@ -637,7 +759,8 @@ else
ubyte[16] digest;
MD5_CTX context;
context.start();
dump(set, reduction, cast(void delegate(string))&context.update, true);
auto writer = cast(void delegate(string))&context.update;
dump(root, reduction, writer, writer);
context.finish(digest);
return digest;
}
Expand Down Expand Up @@ -674,18 +797,22 @@ bool test(Reduction reduction)
if (globalCache)
{
string cacheBase = absolutePath(buildPath(globalCache, formatHash(digest))) ~ "-";
if (exists(cacheBase~"0"))
bool found;

measure!"globalCache"({ found = exists(cacheBase~"0"); });
if (found)
{
writeln("No (disk cache)");
return false;
}
if (exists(cacheBase~"1"))
measure!"globalCache"({ found = exists(cacheBase~"1"); });
if (found)
{
writeln("Yes (disk cache)");
return true;
}
auto result = fallback;
std.file.write(cacheBase ~ (result ? "1" : "0"), "");
measure!"globalCache"({ std.file.write(cacheBase ~ (result ? "1" : "0"), ""); });
return result;
}
else
Expand All @@ -706,10 +833,7 @@ bool test(Reduction reduction)
return result;
}

auto result = ramCached(diskCached(doTest()));
if (result)
foundAnything = true;
return result;
return ramCached(diskCached(doTest()));
}

void applyNoRemoveMagic()
Expand All @@ -730,66 +854,105 @@ void applyNoRemoveMagic()
return state;
}

bool scan(Entity[] set)
bool scan(Entity e)
{
bool result = false;
foreach (ref e; set)
{
bool removeThis;
removeThis = scanString(e.head);
removeThis |= scan(e.children);
removeThis |= scanString(e.tail);
e.noRemove |= removeThis;
result |= removeThis;
}
return result;
bool removeThis;
removeThis = scanString(e.head);
foreach (c; e.children)
removeThis |= scan(c);
removeThis |= scanString(e.tail);
e.noRemove |= removeThis;
return removeThis;
}

scan(set);
scan(root);
}

void applyNoRemoveRegex(string[] noRemoveStr)
{
auto noRemove = map!regex(noRemoveStr);
auto noRemove = array(map!((s) => regex(s, "mg"))(noRemoveStr));

void mark(Entity[] set)
void mark(Entity e)
{
foreach (ref e; set)
{
e.noRemove = true;
mark(e.children);
}
e.noRemove = true;
foreach (c; e.children)
mark(c);
}

bool scan(Entity[] set)
foreach (f; root.children)
{
bool found = false;
foreach (ref e; set)
if (canFind!((a){return !match(e.head, a).empty || !match(e.tail, a).empty;})(noRemove))
assert(f.isFile);
if (canFind!((a){return !match(f.filename, a).empty;})(noRemove))
{
mark(f);
root.noRemove = true;
continue;
}

immutable(char)*[] starts, ends;

foreach (r; noRemove)
foreach (c; match(f.contents, r))
{
e.noRemove = true;
mark(e.children);
found = true;
assert(c.hit.ptr >= f.contents.ptr && c.hit.ptr < f.contents.ptr+f.contents.length);
starts ~= c.hit.ptr;
ends ~= c.hit.ptr + c.hit.length;
}
else
if (scan(e.children))

starts.sort;
ends.sort;

int noRemoveLevel = 0;

bool scanString(string s)
{
if (!s.length)
return noRemoveLevel > 0;

auto start = s.ptr;
auto end = start + s.length;
assert(start >= f.contents.ptr && end <= f.contents.ptr+f.contents.length);

while (starts.length && starts[0] < end)
{
e.noRemove = true;
found = true;
noRemoveLevel++;
starts = starts[1..$];
}
return found;
}
bool result = noRemoveLevel > 0;
while (ends.length && ends[0] <= end)
{
noRemoveLevel--;
ends = ends[1..$];
}
return result;
}

scan(set);
bool scan(Entity e)
{
bool result = false;
if (scanString(e.head))
result = true;
foreach (c; e.children)
if (scan(c))
result = true;
if (scanString(e.tail))
result = true;
if (result)
e.noRemove = root.noRemove = true;
return result;
}

scan(f);
}
}

void loadCoverage(string dir)
{
foreach (ref f; set)
void scanFile(Entity f)
{
auto fn = std.path.join(dir, addExt(basename(f.filename), "lst"));
if (!exists(fn))
continue;
return;
writeln("Loading coverage file ", fn);

static bool covered(string line)
Expand Down Expand Up @@ -829,46 +992,85 @@ void loadCoverage(string dir)
foreach (ref c; f.children)
f.noRemove |= cover(c);
}

void scanFiles(Entity e)
{
if (e.isFile)
scanFile(e);
else
foreach (c; e.children)
scanFiles(c);
}

scanFiles(root);
}

void dumpSet(string fn)
{
auto f = File(fn, "wt");

string printable(string s, bool isFile)
string printable(string s) { return s is null ? "null" : `"` ~ s.replace("\\", `\\`).replace("\"", `\"`).replace("\r", `\r`).replace("\n", `\n`) ~ `"`; }
string printableFN(string s) { return "/*** " ~ s ~ " ***/"; }

int counter;
void assignID(Entity e)
{
if (isFile)
return "/*** " ~ s ~ " ***/";
else
return s is null ? "null" : `"` ~ s.replace("\\", `\\`).replace("\"", `\"`).replace("\r", `\r`).replace("\n", `\n`) ~ `"`;
e.id = counter++;
foreach (c; e.children)
assignID(c);
}
assignID(root);

bool[int] dependents;
void scanDependents(Entity e)
{
foreach (d; e.dependencies)
dependents[d.id] = true;
foreach (c; e.children)
scanDependents(c);
}
scanDependents(root);

void print(Entity[] entities, int depth, bool fileLevel)
void print(Entity e, int depth)
{
auto prefix = replicate(" ", depth);
foreach (e; entities)
{
bool isFile = fileLevel && e.filename;
bool inFiles = fileLevel && !e.filename;

// if (!fileLevel) { f.writeln(prefix, "[ ... ]"); continue; }
// if (!fileLevel) { f.writeln(prefix, "[ ... ]"); continue; }

if (e.children.length == 0)
{
f.writeln(prefix, "[", e.noRemove ? "!" : "", " ", e.head ? printable(e.head, isFile) ~ " " : null, e.tail ? printable(e.tail, isFile) ~ " " : null, "]");
}
else
{
f.writeln(prefix, "[", e.noRemove ? "!" : "", e.isPair ? " // Pair" : null);
if (e.head) f.writeln(prefix, " ", printable(e.head, isFile));
print(e.children, depth+1, inFiles);
if (e.tail) f.writeln(prefix, " ", printable(e.tail, isFile));
f.writeln(prefix, "]");
}
f.write(prefix);
if (e.id in dependents)
f.write(e.id, " ");
if (e.dependencies.length)
{
f.write(" => ");
foreach (d; e.dependencies)
f.write(d.id, " ");
}

if (e.children.length == 0)
{
f.writeln("[", e.noRemove ? "!" : "", " ", e.isFile ? e.filename ? printableFN(e.filename) ~ " " : null : e.head ? printable(e.head) ~ " " : null, e.tail ? printable(e.tail) ~ " " : null, "]");
}
else
{
f.writeln("[", e.noRemove ? "!" : "", e.isPair ? " // Pair" : null);
if (e.isFile) f.writeln(prefix, " ", printableFN(e.filename));
if (e.head) f.writeln(prefix, " ", printable(e.head));
foreach (c; e.children)
print(c, depth+1);
if (e.tail) f.writeln(prefix, " ", printable(e.tail));
f.writeln(prefix, "]");
}
}

print(set, 0, true);
print(root, 0);

f.close();
}

/// Write the tree's textual content to the file `fn`, with the given
/// reduction previewed (none by default). File-name callbacks from the
/// dumper are discarded — only text content reaches the output file.
void dumpText(string fn, ref Reduction r = nullReduction)
{
	auto sink = File(fn, "wt");
	dump(root, r, (string ignoredFilename) {}, &sink.write!string);
	sink.close();
}