removing all of the pathologically commented warnings & dead code.
zpmorgan committed Aug 3, 2012
1 parent 8995990 commit 46bd741
Showing 4 changed files with 2 additions and 810 deletions.
112 changes: 1 addition & 111 deletions examples/digits/digits.pl
@@ -37,7 +37,6 @@ sub imag_theta1{
my $slice = $tmp_piddle->slice("$x:".($x+27).",$y:".($y+27));
$slice .= $t1->slice($i)->reshape(28,28);
}
#$t1 = $t1->transpose->reshape(28,28*10)->sever;
imag2d normlz $tmp_piddle;
}
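# (Annotation: imag_theta1 above tiles each hidden unit's 784 input
# weights into a 28x28 patch of one large piddle and displays the
# resulting grid with PDL::Graphics2D's imag2d, normalized by normlz.)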

@@ -67,10 +66,9 @@ sub y_to_vectors{
my $y = $id->range($labels->dummy(0))->transpose;
$y *= 2;
$y -= 1;
# die $y->slice("0:9,0:19");
return $y->transpose
}
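# (Annotation: y_to_vectors turns each digit label into a 10-element
# target vector scaled for a tanh output layer: 1 for the true class,
# -1 everywhere else.) A hedged one-label sketch, not part of this commit:
#   my $target = identity(10)->slice("($label)") * 2 - 1;
#   # e.g. label 3  =>  [-1 -1 -1 1 -1 -1 -1 -1 -1 -1]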
for(1..200){
for(1..20){
$nerl->train_batch(
x => $images->slice("0:799"),
y => y_to_vectors $labels->slice("0:799"),
@@ -86,111 +84,3 @@ sub y_to_vectors{
)->sum
);
imag_theta1 $nerl->model->theta1;
#$foo = $nerl->model->theta1->slice(4);
#imag_neuron ($foo);# * ($foo>1));

# a second __END__ :)
__END__
my $y = identity(10)->range($labels->transpose)->sever;
$y *= 2;
$y -= 1;
say 't10k data loaded';
my $nerl = AI::Nerl->new(
# type => image,dims=>[28,28],...
scale_input => 1/256,
# train_x => $images(0:99),
# train_y => $y(0:99),
# test_x => $images(8000:8999),
# test_y => $y(8000:8999),
# cv_x => $images(9000:9999),
# cv_y => $y(9000:9999),
);
$nerl->init_network(l1 => 784, l3=>10, l2=>80,alpha=>.45);#method=batch,hidden=>12345,etc
for(1..300){
my $n = int rand(8000);
my $m = $n+999;
my $ix = $images->slice("$n:$m");
my $iy = $y->slice("$n:$m");
$nerl->network->train($ix,$iy,passes=>5);
my ($cost,$nc) = $nerl->network->cost($images(9000:9999),$y(9000:9999));
print "cost:$cost\n,num correct: $nc / 1000\n";
print "example output, images 0 to 4\n";
print "Labels: " . $y(0:4) . "\n";
print $nerl->network->run($images(0:4));
$nerl->network->show_neuron($_) for (0..4);
}
__END__
#my $label_targets = identity(10)->($labels);
my $id = identity(10);
$images = $images(10:11);
show784($images(0));
show784($images(1));
$labels = $labels(10:11);
my $out_neurons = grandom(28*28,10) * .01;
# http://ufldl.stanford.edu/wiki/index.php/Backpropagation_Algorithm
# http://www.stanford.edu/class/cs294a/sparseAutoencoder_2011new.pdf
for my $pass(1..3){
my $delta = $out_neurons * 0;
for my $i(0..$images->dim(0)-1){
my $img = $images(($i));
my $a = sigmoid($out_neurons x $img->transpose); #(t,10)
#arn $out_neurons x $img->transpose if $pass > 1;; #(t,10)
$a = $a((0));
my $label = $labels(($i));
my $d= $id(($label)) - $a;
$d = -$d * $a * (1-$a); #(t,10)
$delta += $d->transpose x $img;
if (rand() < 1.002){
warn $d;
warn $a;
warn "$label -> " . $a->maximum_ind;
say "\n"x 2;
}
if ($pass%250==0 and $i<5){
warn $a;
warn $d;
warn "$label -> " . $a->maximum_ind;
}
}
$delta /= $images->dim(0);
$delta *= .2;
$out_neurons -= $delta;
if ($pass%200==0){
warn $delta(100:104);
warn $out_neurons(100:104);
}
show784($delta(:,0));
show784($delta(:,6));
show784($delta(:,4));
}
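# (Annotation: the pass loop above is plain batch gradient descent on a
# single sigmoid layer under squared error. Since sigmoid'(z) = a*(1-a)
# for a = sigmoid(z), the per-example output delta is
#   d = -(t - a) * a * (1 - a)
# and the outer products of d with each image are averaged over the
# batch, scaled by the 0.2 learning rate, and subtracted from
# $out_neurons.)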
#die join (',',$nncost->dims);
use PDL::Graphics2D;
sub show784{
my $w = shift;
$w = $w->squeeze;
my $min = $w->minimum;
$w -= $min;
my $max = $w->maximum;
$w /= $max;
$w = $w->reshape(28,28);
imag2d $w;
}
sub sigmoid{
my $foo = shift;
return 1/(1+E**-$foo);
}
sub logistic{
#find sigmoid before calling this.
#grad=logistic(sigmoid(foo))
my $foo = shift;
return $foo * (1-$foo);
}
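The removed sigmoid/logistic pair encodes the identity sigmoid'(z) = s*(1-s) with s = sigmoid(z), which is why logistic() expects the sigmoid's output rather than its input. A minimal standalone PDL sketch of the same pair (assumption: PDL's overloaded exp stands in for the dead code's E constant):

use PDL;

sub sigmoid      { my $z = shift; return 1 / (1 + exp(-$z)) }
sub sigmoid_grad { my $s = shift; return $s * (1 - $s) }  # takes sigmoid OUTPUT

my $z = pdl(-2, 0, 2);
my $s = sigmoid($z);         # approx [0.119 0.5  0.881]
my $g = sigmoid_grad($s);    # approx [0.105 0.25 0.105]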
160 changes: 0 additions & 160 deletions lib/AI/Nerl.pm
@@ -106,164 +106,4 @@ sub train_batch{
$self->model->train_batch(@_);
}

1;
__END__
#initialize $self->network, but don't train.
# any parameters AI::Nerl::Network takes are fine here.
sub init_network{
my $self = shift;
my %nn_params = @_;
#input layer size:
unless ($nn_params{l1}){
if ($self->basis){
$nn_params{l1} = $self->basis->network->l1 + $self->basis->network->l2;
} elsif($self->train_x) {
$nn_params{l1} ||= $self->train_x->dim(1);
}
}
#output layer size:
unless ($nn_params{l3}){
if ($self->basis){
$nn_params{l3} = $self->basis->network->l3;
} elsif($self->train_x) {
$nn_params{l3} ||= $self->train_y->dim(1);
}
}
$nn_params{l2} ||= $self->l2;
$nn_params{scale_input} ||= $self->scale_input;
my $nn = AI::Nerl::Network->new(
%nn_params
);
$self->network($nn);
}
sub resize_l2{
my $self = shift;
my $new_l2 = shift;
$self->l2($new_l2);
$self->network->resize_l2($new_l2);
}
sub init{
my $self = shift;
$self->build_network();
}
sub build_network{
my $self = shift;
my $l1 = $self->inputs // $self->test_x->dim(1);
my $l3 = $self->outputs // $self->test_y->dim(1);
my $nn = AI::Nerl::Network->new(
l1 => $l1,
l2 => $self->l2,
l3 => $l3,
scale_input => $self->scale_input,
);
$self->network($nn);
}
sub append_l2{
my ($self,$x) = @_;
if($self->basis){
$x = $self->basis->append_l2($x);
}
return $self->network->append_l2($x);
}
sub run{
my ($self,$x) = @_;
$x->sever;
if($self->basis){
$x = $self->basis->append_l2($x);
}
return $self->network->run($x);
}
sub train{
my ($self,$x,$y) = @_;
$x->sever;
if($self->basis){
$x = $self->basis->append_l2($x);
}
return $self->network->train($x,$y);
}
sub cost{
my ($self,$x,$y) = @_;
unless ($x and $y){
$x = $self->test_x->copy();
$y = $self->test_y->copy();
}
$x->sever();
if($self->basis){
$x = $self->basis->append_l2($x);
}
return $self->network->cost($x,$y);
}
# A nerl occupied a directory.
# its parameters occupy a .json file.
# its piddles occupy .fits files.
# Its network(s) occupy subdirectories,
# in which network piddles occupy .fits files
# and network params occupy another .json file.
use PDL::IO::FITS;
use File::Path;
my @props = qw/l2 test_x test_y inputs outputs train_x train_y cv_x cv_y scale_input
network basis/;
sub save{
my ($self,$dir) = @_;
my $top_json = {};
#die 'ugh. i dont like that nerl dir name' if $dir =~ /data|nerls$|\.|lib/;
rmtree $dir if -d $dir;
mkdir $dir;
for my $p (@props){
next unless defined $self->$p;
$top_json->{$p} = $self->$p;
if (ref $top_json->{$p} eq 'PDL'){
my $pfile = "$p.fits";
#switcharoo with file name
$top_json->{$p}->wfits("$dir/$pfile");
$top_json->{$p} = $pfile;
}
elsif (ref $top_json->{$p} eq 'AI::Nerl::Network'){
my $nn = $self->$p;
my $nndir = "$dir/$p";
$top_json->{$p} = "|AINN|$nndir";
$nn->save($nndir);
}
}
my $encoded_nerl = to_json($top_json);
write_file("$dir/attribs", $encoded_nerl);
}
sub load{
my $dir = shift;
$dir = shift if $dir eq 'AI::Nerl';
die 'symptom ai::nerl->load(lack of dir?)' unless $dir;
my $from_json = from_json(read_file("$dir/attribs"));
my %to_nerl;
for my $a (@props){
my $value = $from_json->{$a};
next unless defined $value;
$to_nerl{$a} = $value;
#special cases: ai::nerl::networks and piddles
if ($value =~ /\.fits$/){
my $piddle = rfits("$dir/$value");
$to_nerl{$a} = $piddle;
}
elsif ($value =~ /^\|AINN\|(.*)$/){ #load a AI::N::network
my $nn = AI::Nerl::Network->load($1);
$to_nerl{$a} = $nn;
}
}
my $self = AI::Nerl->new(%to_nerl);
return $self;
}
'a neural network has your dog.';
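The removed persistence code mapped a Nerl onto a directory: plain attributes in a JSON attribs file, piddles as .fits files via PDL::IO::FITS, and each AI::Nerl::Network in a subdirectory of its own (flagged in the JSON by a |AINN| prefix). A hedged usage sketch of that round trip (directory name hypothetical):

use AI::Nerl;

my $nerl = AI::Nerl->new(scale_input => 1/256);
# ... configure and train ...
$nerl->save('my-nerl-dir');    # writes my-nerl-dir/attribs (JSON),
                               # *.fits piddles, and a network/ subdir
my $restored = AI::Nerl->load('my-nerl-dir');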
47 changes: 1 addition & 46 deletions lib/AI/Nerl/Model/Perceptron3.pm
@@ -79,8 +79,6 @@ sub _build_act_sub{
return sub{
my $in = shift;
return $in->tanh;
# my $exp = exp($in*2);
# return (($exp-1)/($exp+1));
};
}
sub _build_act_deriv_sub{
@@ -187,15 +185,10 @@ sub train{
my $z3 = $theta2->transpose x $a2;
$z3->transpose->inplace->plus($b2,0);
my $a3 = $self->_act->($z3);
#die $a3->dims; #(cats,n)
#so far so good.
#die $z3->slice("0:5,0:5");

my $d3 = -($y-$a3)*$self->_act_deriv->($z3);
# warn $self->_act_deriv->($z3)->slice("0:5,0:3");;
# warn $x->slice("10:18,10:18");;
my $d2 = ($theta2 x $d3) * $self->_act_deriv->($z2);
#warn $self->_act_deriv->($z2->sever)->slice("0:3,0:3");

my $delta2 = $a2 x $d3->transpose;
my $delta1 = $x x $d2->transpose;
@@ -214,45 +207,7 @@ sub train{
$self->b1->inplace->minus($diffb1,0);

return;
#iterate over examples :(
=pod
for my $i (0..$num_examples-1){
my $a1 = $x(($i));
my $z2 = ($self->theta1 x $a1->transpose)->squeeze;
$z2 += $self->b1; #add bias.
my $a2 = $z2->tanh;
my $z3 = ($self->theta2 x $a2->transpose)->squeeze;
$z3 += $self->b2; #add bias.
my $a3 = $z3->tanh;
# warn $y(($i)) - $a3;
my $d3= -($y(($i)) - $a3) * tanhxderivative($a3);
#warn $d3;
$delta2 += $d3->transpose x $a2;
my $d2 = ($self->theta2->transpose x $d3->transpose)->squeeze * tanhxderivative($a2);
$delta1 += $d2->transpose x $a1;
#warn $delta2(4);
$deltab1 += $d2;
$deltab2 += $d3;
if($DEBUG==1){
warn "z2: $z2\n$z3: $z3\n";
warn "d3:$d3\n";
}
}
#warn $deltab1;
if($DEBUG==1){
warn "theta1: ". $self->theta1;#/ $num_examples;
warn "theta2: ". $self->theta2;
warn "delta1: $delta1\n";
warn "delta2: $delta2\n";
}
$self->{theta2} -= $alpha * ($delta2 / $num_examples + $theta2 * $lambda);
$self->{theta1} -= $alpha * ($delta1 / $num_examples + $theta1 * $lambda);
$self->{b1} -= $alpha * $deltab1 / $num_examples;
$self->{b2} -= $alpha * $deltab2 / $num_examples;
=cut
}

1;

'$nn->train($soviet_russian);'
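For reference, the vectorized pass kept in train() is the standard two-layer backprop that the removed per-example =pod loop duplicated. Writing f for the activation (tanh here) and f' for its derivative, each batch computes, in the code's own names and orientation (the $z2 line sits above the visible hunk; its form is inferred by symmetry with the shown $z3 step):

  $z2 = $theta1->transpose x $x  + $b1;   $a2 = f($z2);
  $z3 = $theta2->transpose x $a2 + $b2;   $a3 = f($z3);
  $d3 = -($y - $a3) * f'($z3);            # output delta
  $d2 = ($theta2 x $d3) * f'($z2);        # hidden delta
  $delta2 = $a2 x $d3->transpose;         # weight gradients
  $delta1 = $x  x $d2->transpose;

The updates then subtract alpha-scaled, lambda-regularized batch means of these gradients, with the bias diffs handled the same way (e.g. $self->b1->inplace->minus($diffb1,0)), producing the same result as the removed loop without iterating over examples.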
