Skip to content

Commit

Permalink
Merge pull request #6 from master/upgrade
Browse files Browse the repository at this point in the history
Upgrade to TensorFlow 2.11
  • Loading branch information
master committed Mar 21, 2023
2 parents 78d49b7 + 2c643c2 commit f84e520
Show file tree
Hide file tree
Showing 14 changed files with 78 additions and 59 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
Expand Up @@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.7, 3.8]
python-version: ["3.8", "3.9", "3.10"]

steps:
- uses: actions/checkout@v2
Expand Down
4 changes: 2 additions & 2 deletions examples/shared/utils.py
Expand Up @@ -16,7 +16,7 @@ def download_data(data_dir, url, unpack=True, block_size=10 * 1024):
print("{} already exists. Skipping download".format(filename))
return

print("Downloading {0} to {1}".format(url, filename))
print("Downloading {} to {}".format(url, filename))
response = requests.get(url, stream=True)
total = int(response.headers.get("content-length", 0))
progress_bar = tqdm.tqdm(total=total, unit="iB", unit_scale=True)
Expand All @@ -33,7 +33,7 @@ def download_data(data_dir, url, unpack=True, block_size=10 * 1024):
with open(filename, "rb") as f:
with zipfile.ZipFile(f) as zip_ref:
zip_ref.extractall(data_dir)
print("Unzipped {0} to {1}".format(filename, data_dir))
print("Unzipped {} to {}".format(filename, data_dir))


def load_matlab_data(key, data_dir, *folders):
Expand Down
17 changes: 12 additions & 5 deletions examples/tutorial.ipynb
Expand Up @@ -13,6 +13,7 @@
"%matplotlib inline\n",
"\n",
"import sys\n",
"\n",
"sys.path.append(\"../\")\n",
"import tensorflow_riemopt as riemopt"
]
Expand Down Expand Up @@ -119,7 +120,7 @@
"\n",
"opt = riemopt.optimizers.RiemannianAdam(learning_rate=0.2)\n",
"\n",
"npole = tf.constant([0., 1.])\n",
"npole = tf.constant([0.0, 1.0])\n",
"phi = np.linspace(-np.pi, np.pi, 100)\n",
"\n",
"for _ in range(STEPS):\n",
Expand All @@ -136,8 +137,14 @@
" plt.plot(np.cos(phi), np.sin(phi))\n",
" plt.plot(var_np[:, 0], var_np[:, 1], '+', color='black')\n",
" for i in range(len(egrad_np)):\n",
" plt.arrow(var_np[i][0], var_np[i][1], -egrad_np[i][0], -egrad_np[i][1],\n",
" width=0.01, color='green')\n",
" plt.arrow(\n",
" var_np[i][0],\n",
" var_np[i][1],\n",
" -egrad_np[i][0],\n",
" -egrad_np[i][1],\n",
" width=0.01,\n",
" color='green',\n",
" )\n",
" plt.plot(var_t_np[:, 0], var_t_np[:, 1], '+', color='red')\n",
" plt.show()"
]
Expand All @@ -152,7 +159,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
Expand All @@ -166,7 +173,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
"version": "3.10.6"
}
},
"nbformat": 4,
Expand Down
21 changes: 12 additions & 9 deletions examples/usage.ipynb
Expand Up @@ -14,6 +14,7 @@
"from mpl_toolkits.mplot3d import Axes3D\n",
"\n",
"import sys\n",
"\n",
"sys.path.append(\"../\")\n",
"import tensorflow_riemopt as riemopt"
]
Expand All @@ -32,15 +33,17 @@
" s_z = np.outer(np.abs(np.cos(phi)), np.ones_like(psi))\n",
" return ax.plot_wireframe(s_x, s_y, s_z, color=color, alpha=0.3)\n",
"\n",
"\n",
"def plot_vector(ax, x, u, color=\"darkorange\"):\n",
" return ax.quiver(*x, *u, length=0.6, normalize=True, color=color)\n",
"\n",
"\n",
"def plot_hyperplane(ax, p, u, v, color=\"limegreen\"):\n",
" xx = np.linspace(-0.05, 1., 10)\n",
" yy = np.linspace(-1., 0.1, 10)\n",
" xx = np.linspace(-0.05, 1.0, 10)\n",
" yy = np.linspace(-1.0, 0.1, 10)\n",
" x, y = np.meshgrid(xx, yy)\n",
" n = np.cross(u, v)\n",
" z = (- n[0] * x - n[1] * y + p.dot(n)) / n[2]\n",
" z = (-n[0] * x - n[1] * y + p.dot(n)) / n[2]\n",
" return ax.plot_wireframe(x, y, z, color=color, alpha=0.4)"
]
},
Expand All @@ -53,7 +56,7 @@
"S = riemopt.manifolds.Sphere()\n",
"\n",
"x = S.projx(tf.constant([0.1, -0.1, 0.1]))\n",
"u = S.proju(x, tf.constant([1., 1., 1.]))\n",
"u = S.proju(x, tf.constant([1.0, 1.0, 1.0]))\n",
"v = S.proju(x, tf.constant([-0.7, -1.4, 1.4]))\n",
"y = S.exp(x, v)\n",
"u_ = S.transp(x, y, u)\n",
Expand Down Expand Up @@ -866,9 +869,9 @@
"fig = plt.figure()\n",
"ax = fig.gca(projection=\"3d\")\n",
"ax.axis(\"off\")\n",
"ax.set_zlim3d(-1.5, 1.5) \n",
"ax.set_ylim3d(-1.5, 1.5) \n",
"ax.set_xlim3d(-1.5, 1.5) \n",
"ax.set_zlim3d(-1.5, 1.5)\n",
"ax.set_ylim3d(-1.5, 1.5)\n",
"ax.set_xlim3d(-1.5, 1.5)\n",
"\n",
"plot_halfsphere(ax)\n",
"plot_hyperplane(ax, x, u, v)\n",
Expand All @@ -885,7 +888,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
Expand All @@ -899,7 +902,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
"version": "3.10.6"
}
},
"nbformat": 4,
Expand Down
5 changes: 3 additions & 2 deletions requirements.txt
@@ -1,2 +1,3 @@
tensorflow==2.6.3
protobuf==3.19.6
tensorflow<2.12.0
keras<2.12.0
protobuf<3.20,>=3.9.2
8 changes: 5 additions & 3 deletions setup.py
Expand Up @@ -3,14 +3,14 @@

setup(
name="tensorflow-riemopt",
version="0.1.2",
version="0.2.0",
description="a library for optimization on Riemannian manifolds",
long_description=open("README.md", "r").read(),
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
author="Oleg Smirnov",
author_email="oleg.smirnov@gmail.com",
packages=find_packages(),
install_requires=["tensorflow==2.6.3", "protobuf==3.19.6"],
install_requires=["tensorflow<2.12.0", "keras<2.12.0", "protobuf<3.20,>=3.9.2"],
python_requires=">=3.6.0",
url="https://github.com/master/tensorflow-riemopt",
zip_safe=True,
Expand All @@ -22,6 +22,8 @@
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Topic :: Scientific/Engineering :: Mathematics",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Libraries",
Expand Down
2 changes: 1 addition & 1 deletion tensorflow_riemopt/manifolds/hyperboloid.py
Expand Up @@ -24,7 +24,7 @@ def __init__(self, k=1.0):
super().__init__()

def __repr__(self):
return "{0} (k={1}, ndims={2}) manifold".format(
return "{} (k={}, ndims={}) manifold".format(
self.name, self.k, self.ndims
)

Expand Down
2 changes: 1 addition & 1 deletion tensorflow_riemopt/manifolds/manifold.py
Expand Up @@ -8,7 +8,7 @@ class Manifold(metaclass=abc.ABCMeta):

def __repr__(self):
"""Returns a string representation of the particular manifold."""
return "{0} (ndims={1}) manifold".format(self.name, self.ndims)
return "{} (ndims={}) manifold".format(self.name, self.ndims)

def check_shape(self, shape_or_tensor):
"""Check if given shape is compatible with the manifold."""
Expand Down
18 changes: 9 additions & 9 deletions tensorflow_riemopt/manifolds/poincare.py
Expand Up @@ -31,7 +31,7 @@ def __init__(self, k=1.0):
super().__init__()

def __repr__(self):
return "{0} (k={1}, ndims={2}) manifold".format(
return "{} (k={}, ndims={}) manifold".format(
self.name, self.k, self.ndims
)

Expand All @@ -45,10 +45,10 @@ def _check_vector_on_tangent(self, x, u, atol, rtol):

def _mobius_add(self, x, y):
"""Compute the Möbius addition of :math:`x` and :math:`y` in
:math:`\mathcal{D}^{n}_{k}`
:math:`\\mathcal{D}^{n}_{k}`
:math:`x \oplus y = \frac{(1 + 2k\langle x, y\rangle + k||y||^2)x + (1
- k||x||^2)y}{1 + 2k\langle x,y\rangle + k^2||x||^2||y||^2}`
:math:`x \\oplus y = \frac{(1 + 2k\\langle x, y\rangle + k||y||^2)x + (1
- k||x||^2)y}{1 + 2k\\langle x,y\rangle + k^2||x||^2||y||^2}`
"""
x_2 = tf.reduce_sum(tf.math.square(x), axis=-1, keepdims=True)
y_2 = tf.reduce_sum(tf.math.square(y), axis=-1, keepdims=True)
Expand All @@ -59,11 +59,11 @@ def _mobius_add(self, x, y):
)

def _mobius_scal_mul(self, x, r):
"""Compute the Möbius scalar multiplication of :math:`x \in
\mathcal{D}^{n}_{k} \ {0}` by :math:`r`
"""Compute the Möbius scalar multiplication of :math:`x \\in
\\mathcal{D}^{n}_{k} \\ {0}` by :math:`r`
:math:`x \otimes r = (1/\sqrt{k})\tanh(r
\atanh(\sqrt{k}||x||))\frac{x}{||x||}`
:math:`x \\otimes r = (1/\\sqrt{k})\tanh(r
\atanh(\\sqrt{k}||x||))\frac{x}{||x||}`
"""
sqrt_k = tf.math.sqrt(tf.cast(self.k, x.dtype))
Expand All @@ -73,7 +73,7 @@ def _mobius_scal_mul(self, x, r):
return (1 / sqrt_k) * tf.math.tanh(r * tf.math.atanh(tan)) * x / norm_x

def _gyration(self, u, v, w):
"""Compute the gyration of :math:`u`, :math:`v`, :math:`w`:
r"""Compute the gyration of :math:`u`, :math:`v`, :math:`w`:
:math:`\operatorname{gyr}[u, v]w =
\ominus (u \oplus_\kappa v) \oplus (u \oplus_\kappa (v \oplus_\kappa w))`
Expand Down
16 changes: 9 additions & 7 deletions tensorflow_riemopt/optimizers/constrained_rmsprop.py
Expand Up @@ -15,7 +15,11 @@
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.training import gen_training_ops
from keras.optimizer_v2.optimizer_v2 import OptimizerV2

try:
from keras.optimizer_v2.optimizer_v2 import OptimizerV2
except ImportError:
from tensorflow.keras.optimizers.legacy import Optimizer as OptimizerV2

from tensorflow_riemopt.variable import get_manifold

Expand Down Expand Up @@ -66,7 +70,7 @@ def __init__(
allow time inverse decay of learning rate. `lr` is included for backward
compatibility, recommended to use `learning_rate` instead.
"""
super(ConstrainedRMSprop, self).__init__(name, **kwargs)
super().__init__(name, **kwargs)
self._set_hyper("learning_rate", kwargs.get("lr", learning_rate))
self._set_hyper("decay", self._initial_decay)
self._set_hyper("rho", rho)
Expand All @@ -83,9 +87,7 @@ def _create_slots(self, var_list):
self.add_slot(var, "mg")

def _prepare_local(self, var_device, var_dtype, apply_state):
super(ConstrainedRMSprop, self)._prepare_local(
var_device, var_dtype, apply_state
)
super()._prepare_local(var_device, var_dtype, apply_state)

rho = array_ops.identity(self._get_hyper("rho", var_dtype))
apply_state[(var_device, var_dtype)].update(
Expand Down Expand Up @@ -197,10 +199,10 @@ def set_weights(self, weights):
params = self.weights
if len(params) == len(weights) + 1:
weights = [np.array(0)] + weights
super(ConstrainedRMSprop, self).set_weights(weights)
super().set_weights(weights)

def get_config(self):
config = super(ConstrainedRMSprop, self).get_config()
config = super().get_config()
config.update(
{
"learning_rate": self._serialize_hyperparameter(
Expand Down
8 changes: 4 additions & 4 deletions tensorflow_riemopt/optimizers/constrained_rmsprop_test.py
Expand Up @@ -139,14 +139,14 @@ def testBasic(self):
self.assertAllCloseAccordingToType(
self.evaluate(var0_ref),
self.evaluate(var0),
rtol=1e-4,
atol=1e-4,
rtol=1e-3,
atol=1e-3,
)
self.assertAllCloseAccordingToType(
self.evaluate(var1_ref),
self.evaluate(var1),
rtol=1e-4,
atol=1e-4,
rtol=1e-2,
atol=1e-2,
)


Expand Down
16 changes: 9 additions & 7 deletions tensorflow_riemopt/optimizers/riemannian_adam.py
Expand Up @@ -12,7 +12,11 @@
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.training import gen_training_ops
from keras.optimizer_v2.optimizer_v2 import OptimizerV2

try:
from keras.optimizer_v2.optimizer_v2 import OptimizerV2
except ImportError:
from tensorflow.keras.optimizers.legacy import Optimizer as OptimizerV2

from tensorflow_riemopt.variable import get_manifold

Expand Down Expand Up @@ -69,7 +73,7 @@ def __init__(
"""

super(RiemannianAdam, self).__init__(name, **kwargs)
super().__init__(name, **kwargs)
self._set_hyper("learning_rate", kwargs.get("lr", learning_rate))
self._set_hyper("decay", self._initial_decay)
self._set_hyper("beta_1", beta_1)
Expand All @@ -88,9 +92,7 @@ def _create_slots(self, var_list):
self.add_slot(var, "vhat")

def _prepare_local(self, var_device, var_dtype, apply_state):
super(RiemannianAdam, self)._prepare_local(
var_device, var_dtype, apply_state
)
super()._prepare_local(var_device, var_dtype, apply_state)

local_step = math_ops.cast(self.iterations + 1, var_dtype)
beta_1_t = array_ops.identity(self._get_hyper("beta_1", var_dtype))
Expand Down Expand Up @@ -118,7 +120,7 @@ def set_weights(self, weights):
num_vars = int((len(params) - 1) / 2)
if len(weights) == 3 * num_vars + 1:
weights = weights[: len(params)]
super(RiemannianAdam, self).set_weights(weights)
super().set_weights(weights)

@def_function.function(experimental_compile=True)
def _resource_apply_dense(self, grad, var, apply_state=None):
Expand Down Expand Up @@ -215,7 +217,7 @@ def _stabilize(self, var):
m.assign(manifold.proju(var, m))

def get_config(self):
config = super(RiemannianAdam, self).get_config()
config = super().get_config()
config.update(
{
"learning_rate": self._serialize_hyperparameter(
Expand Down

0 comments on commit f84e520

Please sign in to comment.