Caching of numerical prefactors as tensors for beamsplitter (#8)
* modified TFBackend to cache non-variable parts of BS matrix

* added documentation for bs tensor caching

* swapped import order of functools
co9olguy authored and josh146 committed Apr 30, 2018
1 parent f3cd09a commit 6509d49
Showing 2 changed files with 17 additions and 7 deletions.
1 change: 1 addition & 0 deletions strawberryfields/backends/tfbackend/circuit.py
@@ -200,6 +200,7 @@ def reset(self, pure=True, graph=None):
         if graph != self._graph:
             del self._graph  # get rid of the old graph from memory
             self._graph = graph
+            ops.get_prefac_tensor.cache_clear()  # clear any cached tensors that may live on old graph
         self._make_vac_states()
         self._state_history = []
         self._cache = {}
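Why the cache must be cleared here: functools.lru_cache keys only on the function arguments, so a prefactor tensor created under the old tf.Graph would otherwise be returned after the backend switches to a fresh graph, where it is unusable. A minimal sketch of the failure mode (illustrative only, not part of this commit; cached_constant is a hypothetical stand-in for get_prefac_tensor):

import tensorflow as tf
from functools import lru_cache

@lru_cache()
def cached_constant(value):
    # the tensor is created on whichever graph is the default at first call
    return tf.constant(value)

g1 = tf.Graph()
with g1.as_default():
    t1 = cached_constant(1.0)

g2 = tf.Graph()
with g2.as_default():
    t2 = cached_constant(1.0)      # cache hit: same object, still lives on g1
    assert t2 is t1 and t2.graph is g1
    cached_constant.cache_clear()  # what reset() now does
    t3 = cached_constant(1.0)      # cache miss: fresh tensor on g2
    assert t3.graph is g2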
23 changes: 16 additions & 7 deletions strawberryfields/backends/tfbackend/ops.py
@@ -29,6 +29,7 @@
 # pylint: disable=too-many-arguments
 
 from string import ascii_lowercase as indices
+from functools import lru_cache
 
 import tensorflow as tf
 import numpy as np
@@ -87,6 +88,20 @@ def unravel_index(ind, tensor_shape):
     unraveled_coords = (ind % strides) // strides_shifted
     return tf.transpose(unraveled_coords)
 
+@lru_cache()
+def get_prefac_tensor(D, directory, save):
+    """Equivalent to the functionality of the bs_factors functions from shared_ops,
+    but caches the return value as a tensor. This allows us to re-use the same
+    prefactors and save space on the computational graph."""
+    try:
+        prefac = load_bs_factors(D, directory)
+    except FileNotFoundError:
+        prefac = generate_bs_factors(D)
+        if save:
+            save_bs_factors(prefac, directory)
+    prefac = tf.expand_dims(tf.cast(prefac[:D, :D, :D, :D, :D], def_type), 0)
+    return prefac
+
 ###################################################################
 
 # Matrices:
@@ -283,13 +298,7 @@ def beamsplitter_matrix(t, r, D, batched=False, save=False, directory=None):
     M_minus_n_plus_k = tf.where(tf.greater(M_minus_n_plus_k, 0), M_minus_n_plus_k, tf.zeros_like(M_minus_n_plus_k))
 
     # load parameter-independent prefactors
-    try:
-        prefac = load_bs_factors(D, directory)
-    except FileNotFoundError:
-        prefac = generate_bs_factors(D)
-        if save:
-            save_bs_factors(prefac, directory)
-    prefac = tf.expand_dims(tf.cast(prefac[:D, :D, :D, :D, :D], def_type), 0)
+    prefac = get_prefac_tensor(D, directory, save)
 
     powers = tf.cast(tf.pow(mag_t, k) * tf.pow(mag_r, n_minus_k) * tf.pow(mag_r, N_minus_k) * tf.pow(mag_t, M_minus_n_plus_k), def_type)
     phase = tf.exp(1j * tf.cast(phase_r * (n - N), def_type))
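The payoff: every beamsplitter built at the same cutoff D now reuses a single prefactor constant instead of adding a fresh node to the graph per gate. A hypothetical usage sketch (assuming prefactors for D=10 can be loaded or generated in the given directory; cache_info is the standard functools introspection helper):

from strawberryfields.backends.tfbackend import ops

p1 = ops.get_prefac_tensor(10, "/tmp", False)
p2 = ops.get_prefac_tensor(10, "/tmp", False)
assert p1 is p2                            # second call is a cache hit
print(ops.get_prefac_tensor.cache_info())  # CacheInfo(hits=1, misses=1, ...)

Note that lru_cache requires hashable arguments, which is why get_prefac_tensor takes D, directory, and save as plain Python values rather than tensors.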
