use 5.006;
use strict;
use warnings;
use Test::More 0.92;
use File::Temp;
use Test::Deep qw/cmp_deeply/;
use File::pushd qw/pushd/;
use lib 't/lib';
use PCNTest;
use Path::Iterator::Rule;
#--------------------------------------------------------------------------#
# Layout of the scratch directory shared by every depth test below:
# files at depth 1 (aaaa, bbbb, gggg), depth 2 (dddd), depth 3 (ffff),
# and depth 6 (mmmm).
my @tree = (
    'aaaa.txt',
    'bbbb.txt',
    'cccc/dddd.txt',
    'cccc/eeee/ffff.txt',
    'gggg.txt',
    'hhhh/iiii/jjjj/kkkk/llll/mmmm.txt',
);

# make_tree() (from t/lib PCNTest) materializes the paths in a temp
# directory and returns its root.
my $td = make_tree(@tree);
{
    # min_depth(3): only files nested three or more directories deep
    # should survive the filter.
    my $rule = Path::Iterator::Rule->new->file->min_depth(3);

    my $want = [
        'cccc/eeee/ffff.txt',
        'hhhh/iiii/jjjj/kkkk/llll/mmmm.txt',
    ];

    # unixify() strips the temp-dir prefix and normalizes separators so
    # results are comparable across platforms.
    my @got = map { unixify( $_, $td ) } $rule->all($td);

    cmp_deeply( \@got, $want, "min_depth(3) test" )
      or diag explain { got => \@got, expected => $want };
}
{
    # max_depth(2): files deeper than two directory levels must be
    # excluded; expected order reflects the iterator's traversal order.
    my $rule = Path::Iterator::Rule->new->max_depth(2)->file;

    my $want = [
        'aaaa.txt',
        'bbbb.txt',
        'gggg.txt',
        'cccc/dddd.txt',
    ];

    my @got = map { unixify( $_, $td ) } $rule->all($td);

    cmp_deeply( \@got, $want, "max_depth(2) test" )
      or diag explain { got => \@got, expected => $want };
}
{
    # Combined bounds: min_depth(2) and max_depth(3) keep only files at
    # exactly depth 2 or 3, excluding both the top level and mmmm.txt
    # at depth 6.
    my $rule = Path::Iterator::Rule->new->file->min_depth(2)->max_depth(3);

    my $want = [
        'cccc/dddd.txt',
        'cccc/eeee/ffff.txt',
    ];

    my @got = map { unixify( $_, $td ) } $rule->all($td);

    cmp_deeply( \@got, $want, "min_depth(2)->max_depth(3) test" )
      or diag explain { got => \@got, expected => $want };
}
done_testing; # no fixed plan: the test count is whatever ran above
# COPYRIGHT