diff --git a/experiments/omniglot/model.go b/experiments/omniglot/model.go
index acf65b5..12d5bfd 100644
--- a/experiments/omniglot/model.go
+++ b/experiments/omniglot/model.go
@@ -94,13 +94,13 @@ func normInputLayer(c anyvec.Creator, numOut, numPixels int) anyrnn.Block {
 		Scalers: anydiff.NewVar(c.MakeVector(numPixels + numOut)),
 		Biases:  anydiff.NewVar(c.MakeVector(numPixels + numOut)),
 	}
-	affine.Scalers.Vector.AddScaler(c.MakeNumeric(4))
+	affine.Scalers.Vector.AddScalar(c.MakeNumeric(4))
 
 	modified := affine.Scalers.Vector.Slice(numPixels, numPixels+numOut)
 	modified.Scale(c.MakeNumeric(4))
 
 	modified = affine.Biases.Vector.Slice(0, numPixels)
-	modified.AddScaler(c.MakeNumeric(-4 * 0.92))
+	modified.AddScalar(c.MakeNumeric(-4 * 0.92))
 
 	return &anyrnn.LayerBlock{Layer: affine}
 }
diff --git a/net_test.go b/net_test.go
index f52d838..0a51d2b 100644
--- a/net_test.go
+++ b/net_test.go
@@ -43,7 +43,7 @@ func TestNetTrain(t *testing.T) {
 	target := anydiff.NewVar(targetVec)
 
 	stepSize := c.MakeVector(1)
-	stepSize.AddScaler(c.MakeNumeric(0.1))
+	stepSize.AddScalar(c.MakeNumeric(0.1))
 	trained := virtualNet.Train(input, target, anydiff.NewConst(stepSize), 4, 2)
 
 	actual := trained.Parameters.Outputs()
@@ -136,7 +136,7 @@ func BenchmarkNetwork(b *testing.B) {
 	anyvec.Rand(target.Vector, anyvec.Normal, nil)
 
 	stepSize := anydiff.NewConst(c.MakeVector(1))
-	stepSize.Vector.AddScaler(float32(0.1))
+	stepSize.Vector.AddScalar(float32(0.1))
 
 	b.Run("Forward", func(b *testing.B) {
 		for i := 0; i < b.N; i++ {