Skip to content
Permalink
Browse files

chore: fix some documentation to make users (and linters) happy

  • Loading branch information...
owulveryck committed Sep 2, 2019
1 parent f563602 commit 781a21e00664bc15a098bc9a67da477a2c66ea9d
Showing with 59 additions and 26 deletions.
  1. +2 −1 device.go
  2. +12 −4 errors.go
  3. +6 −6 graph.go
  4. +1 −0 interfaces.go
  5. +4 −0 nn.go
  6. +1 −0 op.go
  7. +1 −1 operations.go
  8. +1 −1 release.go
  9. +4 −1 solvers.go
  10. +15 −7 type.go
  11. +0 −4 utils.go
  12. +1 −0 vm.go
  13. +8 −0 walker.go
  14. +3 −1 weights.go
@@ -8,7 +8,8 @@ import "gorgonia.org/tensor"
type Device int

const (
CPU Device = 0 // CPU the only device the graph will be executed on
// CPU the only device the graph will be executed on
CPU Device = 0
)

// String implements fmt.Stringer and runtime.Stringer
@@ -88,8 +88,16 @@ type SymDiffError struct {
err error
}

func (err SymDiffError) Error() string { return err.err.Error() }
func (err SymDiffError) Nodes() Nodes { return err.nodes }
func (err SymDiffError) Node() *Node { return err.single }
func (err SymDiffError) Error() string { return err.err.Error() }

// Nodes returns the nodes involved in the error
func (err SymDiffError) Nodes() Nodes { return err.nodes }

// Node returns a specific node involved in the error
func (err SymDiffError) Node() *Node { return err.single }

// Grads returns the grads involved in the error
func (err SymDiffError) Grads() map[*Node]Nodes { return err.gradMap }
func (err SymDiffError) Grad() *Node { return err.grad }

// Grad returns a specific grad involved in the error
func (err SymDiffError) Grad() *Node { return err.grad }
@@ -15,7 +15,7 @@ type ExprGraph struct {

all Nodes

byId map[int64]int
byID map[int64]int
byHash map[uint32]*Node
evac map[uint32]Nodes
to map[*Node]Nodes
@@ -40,7 +40,7 @@ func WithGraphName(name string) graphconopt {
// NewGraph creates a new graph. Duh
func NewGraph(opts ...graphconopt) *ExprGraph {
g := &ExprGraph{
byId: make(map[int64]int),
byID: make(map[int64]int),
byHash: make(map[uint32]*Node),
evac: make(map[uint32]Nodes),
to: make(map[*Node]Nodes),
@@ -90,7 +90,7 @@ func (g *ExprGraph) Clone() interface{} {
}
}

g2.byId = make(map[int64]int)
g2.byID = make(map[int64]int)
g2.byHash = make(map[uint32]*Node)
for k, v := range g.byHash {
g2.byHash[k] = mapping[v]
@@ -510,15 +510,15 @@ func (g *ExprGraph) Node(id int64) graph.Node {
}

func (g *ExprGraph) node(id int64) *Node {
if idx, ok := g.byId[id]; ok {
if idx, ok := g.byID[id]; ok {
if idx >= len(g.all) {
return nil
}
return g.all[idx]
}
for i, n := range g.all {
if n.id == id {
g.byId[id] = i
g.byID[id] = i
return n
}
}
@@ -673,7 +673,7 @@ func (g *ExprGraph) subgraph(ns Nodes, findMissing bool, opts ...Nodes) *ExprGra

retVal := &ExprGraph{
all: ns,
byId: make(map[int64]int),
byID: make(map[int64]int),
byHash: g.byHash,
evac: g.evac,
to: g.to,
@@ -10,6 +10,7 @@ import (
"gorgonia.org/tensor"
)

// Tensor is an interface that describes an ndarray
type Tensor interface {
// info about the ndarray
Shape() tensor.Shape
4 nn.go
@@ -356,10 +356,14 @@ func MaxPool2D(x *Node, kernel tensor.Shape, pad, stride []int) (*Node, error) {
return retVal, err
}

// MaxPool1D applies a maxpool on the node x.
func MaxPool1D(x *Node, kernel, pad, stride int) (*Node, error) {
return MaxPool2D(x, tensor.Shape{1, kernel}, []int{0, pad}, []int{1, stride})
}

// BatchNorm applies a batchnormalization. This operator can be used in forward pass or for training.
// In an evaluation only, the "op" output can be discarded.
// In training phase, γ, β can be discarded and the op should be used.
func BatchNorm(x, scale, bias *Node, momentum, epsilon float64) (retVal, γ, β *Node, op *BatchNormOp, err error) {
dt, err := dtypeOf(x.Type())
if err != nil {
1 op.go
@@ -154,6 +154,7 @@ type CLDoer interface {
CLDo(inputs ...Value) (Value, error)
}

// A CUDAADOp operation have a specific method to run with CUDA
type CUDAADOp interface {
ADOp
CUDADoDiff(extern External, dev Device, inputs Nodes, output *Node) error
@@ -567,7 +567,7 @@ func Tensordot(aAxes []int, bAxes []int, a, b *Node) (retVal *Node, err error) {

// Check if input tensors actually have dim >= 1
if (len(a.Shape()) < 1) || (len(b.Shape()) < 1) || (a.Dims() < 1) || (b.Dims() < 1) {
return nil, errors.New("Input Node's shape should have length at least 1!")
return nil, errors.New("Input Node's shape should have length at least 1")
}

// Check if number of specified axes for a and b matches
@@ -34,7 +34,7 @@ func cudaLogf(format string, attrs ...interface{}) {}
func allocatorLogf(format string, attr ...interface{}) {}
func recoverFrom(format string, attrs ...interface{}) {}

/* Graph Collision related debugging code */
// GraphCollisionStats returns the collisions in the graph only when built with the debug tag, otherwise it's a noop that returns 0
func GraphCollisionStats() (int, int, int) { return 0, 0, 0 }

func incrCC() {}
@@ -203,6 +203,7 @@ func WithRho(rho float64) SolverOpt {
return f
}

// WithMomentum sets the momentum of the solver. It is a no-op if the solver's type is not Momentum
func WithMomentum(momentum float64) SolverOpt {
f := func(s Solver) {
switch st := s.(type) {
@@ -1350,7 +1351,7 @@ func (s *AdaGradSolver) Step(model []ValueGrad) (err error) {
return
}

// Barzilai-Borwein performs Gradient Descent in steepest descent direction
// BarzilaiBorweinSolver / Barzilai-Borwein performs Gradient Descent in steepest descent direction
// Solves 0 = F(x), by
// x_{i+1} = x_i - eta * Grad(F)(x_i)
// Where the learn rate eta is calculated by the Barzilai-Borwein method:
@@ -1365,6 +1366,8 @@ type BarzilaiBorweinSolver struct {
prevDV []*dualValue // dual value for x_{i-1} step
}

// NewBarzilaiBorweinSolver creates a new Barzilai-Borwein solver with some default values:
// the learn rate is set to 0.001 and the solver does not use clipping.
func NewBarzilaiBorweinSolver(opts ...SolverOpt) *BarzilaiBorweinSolver {
s := &BarzilaiBorweinSolver{
eta: 0.001,
22 type.go
@@ -10,15 +10,23 @@ import (
var (
// Represents the types that Nodes can take in Gorgonia

// Float64 ...
Float64 = tensor.Float64
// Float32 ...
Float32 = tensor.Float32
Int = tensor.Int
Int64 = tensor.Int64
Int32 = tensor.Int32
Byte = tensor.Uint8
Bool = tensor.Bool

Ptr = tensor.UnsafePointer // equivalent to interface{}. Ugh Ugh Ugh
// Int ...
Int = tensor.Int
// Int64 ...
Int64 = tensor.Int64
// Int32 ...
Int32 = tensor.Int32
// Byte ...
Byte = tensor.Uint8
// Bool ...
Bool = tensor.Bool

// Ptr is equivalent to interface{}. Ugh Ugh Ugh
Ptr = tensor.UnsafePointer

vecF64 = &TensorType{Dims: 1, Of: tensor.Float64}
vecF32 = &TensorType{Dims: 1, Of: tensor.Float32}
@@ -139,10 +139,8 @@ func hasInf(v Value, dev Device) bool {
switch vt := v.(type) {
case *F64:
return false
return math.IsInf(float64(*vt), 0)
case *F32:
return false
return math32.IsInf(float32(*vt), 0)
case tensor.Tensor:
if e, ok := vt.Engine().(tensor.InfChecker); ok {
ok, _ := e.HasInf(vt) // BUG: errors not checked
@@ -182,10 +180,8 @@ func hasNaN(v Value, dev Device) bool {
switch vt := v.(type) {
case *F64:
return false
return math.IsNaN(float64(*vt))
case *F32:
return false
return math32.IsNaN(float32(*vt))
case tensor.Tensor:
if e, ok := vt.Engine().(tensor.NaNChecker); ok {
ok, _ := e.HasNaN(vt) // BUG: errors not checked
1 vm.go
@@ -284,6 +284,7 @@ func WithManualGradient() VMOpt {
return f
}

// WithEngine sets the tensor engine for computation inside the VM.
func WithEngine(e tensor.Engine) VMOpt {
f := func(m VM) {
switch v := m.(type) {
@@ -38,6 +38,9 @@ func walkGraph(start *Node, ch chan *Node, walked NodeSet) {
}

// Sort topologically sorts an ExprGraph: root of graph will be first
// nodes are sorted using gonum's SortStabilized function.
//
// see https://godoc.org/gonum.org/v1/gonum/graph/topo#SortStabilized for more info
func Sort(g *ExprGraph) (sorted Nodes, err error) {
var sortedNodes []graph.Node
// if sortedNodes, err = topo.Sort(g); err != nil {
@@ -49,6 +52,11 @@ func Sort(g *ExprGraph) (sorted Nodes, err error) {
return
}

// UnstableSort performs a topological sort of the directed graph g returning the 'from' to 'to'
// sort order. If a topological ordering is not possible, an Unorderable error is returned
// listing cyclic components in g with each cyclic component's members sorted by ID. When
// an Unorderable error is returned, each cyclic component's topological position within
// the sorted nodes is marked with a nil graph.Node.
func UnstableSort(g *ExprGraph) (sorted Nodes, err error) {
var sortedNodes []graph.Node
if sortedNodes, err = topo.Sort(g); err != nil {
@@ -4,7 +4,7 @@ import (
"math"
"time"

"github.com/leesper/go_rng"
rng "github.com/leesper/go_rng"
"github.com/pkg/errors"
"gorgonia.org/tensor"
)
@@ -37,6 +37,7 @@ func Zeroes() InitWFn {
return f
}

// Ones creates an InitWFn that populates a Value with ones. See Zeroes() for more explanation.
func Ones() InitWFn {
f := func(dt tensor.Dtype, s ...int) interface{} {
size := tensor.Shape(s).TotalSize()
@@ -76,6 +77,7 @@ func RangedFrom(start int) InitWFn {
return f
}

// ValuesOf creates an InitWFn that populates a Value with val. This function will cause a panic if val's type is incompatible with the value's type.
func ValuesOf(val interface{}) InitWFn {
f := func(dt tensor.Dtype, s ...int) interface{} {
size := tensor.Shape(s).TotalSize()

0 comments on commit 781a21e

Please sign in to comment.
You can’t perform that action at this time.