Skip to content
This repository has been archived by the owner on Dec 2, 2023. It is now read-only.

Commit

Permalink
Google/staging (#25)
Browse files Browse the repository at this point in the history
* Enable email notifications.

PiperOrigin-RevId: 175008054

* Use the appropriate shape function based on type. This may add some overhead at small array sizes.

PiperOrigin-RevId: 175023016

* Add a friendlier message to error for functions without source code. Fixes #10

PiperOrigin-RevId: 175024365
  • Loading branch information
Dan Moldovan committed Nov 8, 2017
1 parent 0ef0225 commit 6b4a4f5
Show file tree
Hide file tree
Showing 7 changed files with 71 additions and 11 deletions.
4 changes: 4 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,7 @@ script:
- py.test --cov=coverage --short tests
after_success:
- coveralls
notifications:
email:
on_success: change
on_failure: change
3 changes: 2 additions & 1 deletion tangent/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,15 +28,16 @@
from tangent.utils import balanced_eq
from tangent.utils import copy
from tangent.utils import grad_dot
from tangent.utils import insert_grad_of
from tangent.utils import init_grad
from tangent.utils import insert_grad_of
from tangent.utils import pop
from tangent.utils import pop_stack
from tangent.utils import push
from tangent.utils import push_stack
from tangent.utils import Stack
from tangent.utils import unbroadcast
from tangent.utils import unreduce
from tangent.utils import unreduce_like

# Imported last to avoid circular imports
from tangent.grad_util import grad
Expand Down
10 changes: 5 additions & 5 deletions tangent/grads.py
Original file line number Diff line number Diff line change
Expand Up @@ -304,23 +304,23 @@ def maximum(ans, x, y):


@adjoint(tangent.unreduce)
def aunreduce(y, x, shape, axis, keepdims):
  # Adjoint template for tangent.unreduce: undo the broadcast by
  # unbroadcasting the incoming gradient back against x.
  # (Removed stale pre-rename header `def unreduce(...)` that made this
  # hunk two consecutive `def` lines — a syntax error.)
  d[x] = tangent.unbroadcast(d[y], x)


@adjoint(tangent.unbroadcast)
def aunbroadcast(y, x, shape):
  # Adjoint template for tangent.unbroadcast: re-expand the incoming
  # gradient to match x via unreduce_like.
  # (Removed the stale pre-change header/body `def unbroadcast(...)` that
  # was left juxtaposed above this version — duplicate defs are a syntax
  # error.)
  d[x] = tangent.unreduce_like(d[y], x, None, False)


@adjoint(tangent.add_grad)
def aadd_grad(z, left, right):
  # Adjoint template for tangent.add_grad: the gradient flows to both
  # operands; unbroadcast restores each operand's own shape.
  # (Removed stale pre-rename header `def add_grad(...)` — two consecutive
  # `def` lines are a syntax error.)
  d[left] = tangent.unbroadcast(d[z], left)
  d[right] = tangent.unbroadcast(d[z], right)


@adjoint(tangent.astype)
def aastype(z, array, y):
  # Adjoint template for tangent.astype: cast the incoming gradient back
  # using `array` as the reference for the target type.
  # (Removed stale pre-rename header `def astype(...)` — duplicate `def`
  # lines are a syntax error.)
  d[array] = tangent.astype(d[z], array)


Expand Down
9 changes: 8 additions & 1 deletion tangent/quoting.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,14 @@ def to_source(node, indentation=' ' * 4):

def parse_function(fn):
"""Get the source of a function and return its AST."""
return parse_string(inspect.getsource(fn))
try:
return parse_string(inspect.getsource(fn))
except (IOError, OSError) as e:
raise ValueError(
'Cannot differentiate function: %s. Tangent must be able to access the '
'source code of the function. Functions defined in a Python '
'interpreter and functions backed by C extension modules do not '
'have accessible source code.' % e)


def parse_string(src):
Expand Down
9 changes: 7 additions & 2 deletions tangent/tf_extensions.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,15 @@

import numpy as np
from tangent import grads
from tangent import non_differentiable
from tangent import tangents
from tangent import utils
from tangent.grads import adjoint
from tangent.grads import DEFAULT
from tangent import non_differentiable
from tangent.tangents import tangent_
from tangent.utils import register_all_add_grad
from tangent.utils import register_init_grad
from tangent.utils import register_shape_function
from tangent.utils import register_unbroadcast
from tangent.utils import register_unreduce
import tensorflow as tf
Expand All @@ -43,6 +44,9 @@ def dtype(t):
def shape_as_list(t):
  """Return `t.shape` converted to a Python list of dimensions."""
  static_shape = t.shape
  return static_shape.as_list()

# Register shape_as_list as the shape extractor for TF eager tensors and
# for resource variables.
register_shape_function(ops.EagerTensor, shape_as_list)
register_shape_function(resource_variable_ops.ResourceVariable, shape_as_list)


non_differentiable.register_non_differentiable_functions(
tf.shape, tf.to_float, tf.equal, tf.constant,
Expand Down Expand Up @@ -210,6 +214,7 @@ def dtfreshape(y, x, shape):

@adjoint(tf.reduce_sum)
def dtfreduce_sum(y, x, axis=DEFAULT, keep_dims=DEFAULT):
  # Adjoint of tf.reduce_sum: expand the incoming gradient d[y] back out to
  # the static shape of the input x via tangent.unreduce.
  # NOTE(review): `d[...]` appears to be adjoint-template syntax consumed by
  # the tracing machinery rather than ordinary indexing — confirm against
  # the template handling in grads.py.
  # TODO: We may be able to assume unreduce_tensor works throughout.
  d[x] = tangent.unreduce(d[y], tangent.shape_as_list(x), axis, keep_dims)


Expand All @@ -226,7 +231,7 @@ def dtfreduce_max(y, x, axis=DEFAULT, keep_dims=DEFAULT):
tf.equal(
tangent.unreduce(y, tangent.shape_as_list(x), axis, keep_dims), x))
d[x] = tf.multiply(
tangent.unreduce(d[y], tangent.shape_as_list(x), axis, keep_dims), mask),
tangent.unreduce(d[y], tangent.shape_as_list(x), axis, keep_dims), mask)


@adjoint(tf.add)
Expand Down
45 changes: 45 additions & 0 deletions tangent/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,24 @@ def unreduce(array, shape, axis, keepdims):
return unreducer(array, shape, axis, keepdims)


def unreduce_like(array, original_array, axis, keepdims):
  """Reverse summing over a dimension.

  Args:
    array: The array that was reduced.
    original_array: An array whose shape to unreduce to.
    axis: The axis or axes that were summed.
    keepdims: Whether these axes were kept as singleton axes.

  Returns:
    An array with axes broadcast to match the shape of the original array.
  """
  unreducer = unreducers[type(array)]
  # Select the shape extractor by the type of the object whose shape is
  # actually read. Keying it on type(array) (as before) is wrong when the
  # reduced value has a different registered type than the original array —
  # e.g. a scalar's shape function returns (), discarding the true shape.
  shape = shape_functions[type(original_array)]
  return unreducer(array, shape(original_array), axis, keepdims)


def unreduce_array(array, shape, axis, keepdims):
"""Reverse summing over a dimension, NumPy implementation.
Expand All @@ -185,6 +203,33 @@ def unreduce_array(array, shape, axis, keepdims):
array = numpy.expand_dims(array, ax)
return numpy.broadcast_to(array, shape)


# The values are unary functions mapping an object of the keyed type to its
# shape (a tuple of zero or more dimensions).
shape_functions = {
    numpy.ndarray: numpy.shape,
    numpy.float32: numpy.shape,
    numpy.float64: numpy.shape,
}
# Python scalars carry no dimensions; their shape is always the empty tuple.
shape_functions.update((t, lambda _: ()) for t in (float, int, bool))


def register_shape_function(t, shape_function):
  """Register a new shape function.

  Shape functions extract the shape of an array-like object.

  Args:
    t: A Python type object. The data type supported by the shape function.
    shape_function: A unary function that returns a list or tuple with zero
      or more integers representing the dimensions of `t`.

  Raises:
    ValueError: If a shape function is already registered for `t`.
  """
  # Use an explicit check instead of `assert`: assertions are stripped when
  # Python runs with -O, which would let a duplicate registration silently
  # overwrite an existing shape function.
  if t in shape_functions:
    raise ValueError('A shape function for type %s is already registered.' % t)
  shape_functions[t] = shape_function


# The values are functions with signature like `unreduce_array`
unreducers = {
numpy.ndarray: unreduce_array,
Expand Down
2 changes: 0 additions & 2 deletions tests/test_hessian_vector_products.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,6 @@ def _test_hvp(func, optimized):


def _test_tf_hvp(func, optimized):
np.random.seed(0)
a = tf.random_normal(shape=(300,))
v = tf.reshape(a, shape=(-1,))

Expand All @@ -80,7 +79,6 @@ def _test_tf_hvp(func, optimized):
for mode2 in modes:
if mode1 == mode2 == 'forward':
continue
print(mode1, mode2)
df = tangent.grad(func, mode=mode1, motion='joint', optimized=optimized)
ddf = tangent.grad(df, mode=mode2, motion='joint', optimized=optimized)
dx = ddf(a, tf.constant(1.0), v)
Expand Down

0 comments on commit 6b4a4f5

Please sign in to comment.