diff --git a/doc/releases/changelog-dev.md b/doc/releases/changelog-dev.md index ccbc02c2268..5e28d76b6b9 100644 --- a/doc/releases/changelog-dev.md +++ b/doc/releases/changelog-dev.md @@ -356,6 +356,9 @@ of shape ``(batch_size,)``:

Improvements

+* `@qml.beta.QNode` now supports the `qml.specs` transform. + [(#1739)](https://github.com/PennyLaneAI/pennylane/pull/1739) + * `qml.circuit_drawer.drawable_layers` and `qml.circuit_drawer.drawable_grid` process a list of operations to layer positions for drawing. [(#1639)](https://github.com/PennyLaneAI/pennylane/pull/1639) diff --git a/pennylane/gradients/parameter_shift.py b/pennylane/gradients/parameter_shift.py index 304f05a9ada..90df624e0c0 100644 --- a/pennylane/gradients/parameter_shift.py +++ b/pennylane/gradients/parameter_shift.py @@ -545,12 +545,15 @@ def param_shift( _gradient_analysis(tape) gradient_tapes = [] + if argnum is None and not tape.trainable_params: + return gradient_tapes, lambda _: np.zeros([tape.output_dim, len(tape.trainable_params)]) + # TODO: replace the JacobianTape._grad_method_validation # functionality before deprecation. method = "analytic" if fallback_fn is None else "best" diff_methods = tape._grad_method_validation(method) all_params_grad_method_zero = all(g == "0" for g in diff_methods) - if not tape.trainable_params or all_params_grad_method_zero: + if all_params_grad_method_zero: return gradient_tapes, lambda _: np.zeros([tape.output_dim, len(tape.trainable_params)]) # TODO: replace the JacobianTape._choose_params_with_methods diff --git a/pennylane/transforms/specs.py b/pennylane/transforms/specs.py index f4994ef76f2..b45ad501592 100644 --- a/pennylane/transforms/specs.py +++ b/pennylane/transforms/specs.py @@ -12,6 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. """Code for resource estimation""" +import inspect + +import pennylane as qml + + +def _get_absolute_import_path(fn): + return f"{inspect.getmodule(fn).__name__}.{fn.__name__}" def specs(qnode, max_expansion=None): @@ -58,6 +65,41 @@ def circuit(x, add_ry=True): 'device_name': 'default.qubit.autograd', 'diff_method': 'backprop'} + .. 
UsageDetails:: + + ``qml.specs`` can also be used with :class:`~.beta.qnode`: + + .. code-block:: python3 + + x = np.array([0.1, 0.2]) + + dev = qml.device('default.qubit', wires=2) + @qml.beta.qnode(dev, diff_method="parameter-shift", shift=np.pi / 4) + def circuit(x, add_ry=True): + qml.RX(x[0], wires=0) + qml.CNOT(wires=(0,1)) + if add_ry: + qml.RY(x[1], wires=1) + return qml.probs(wires=(0,1)) + + >>> qml.specs(circuit)(x, add_ry=False) + {'gate_sizes': defaultdict(int, {1: 1, 2: 1}), + 'gate_types': defaultdict(int, {'RX': 1, 'CNOT': 1}), + 'num_operations': 2, + 'num_observables': 1, + 'num_diagonalizing_gates': 0, + 'num_used_wires': 2, + 'depth': 2, + 'num_trainable_params': 1, + 'num_device_wires': 2, + 'device_name': 'default.qubit', + 'diff_method': 'parameter-shift', + 'expansion_strategy': 'gradient', + 'gradient_options': {'shift': 0.7853981633974483}, + 'interface': 'autograd', + 'gradient_fn': 'pennylane.gradients.parameter_shift.param_shift', + 'num_gradient_executions': 2} + """ def specs_qnode(*args, **kwargs): @@ -83,15 +125,44 @@ def specs_qnode(*args, **kwargs): Returns: dict[str, Union[defaultdict,int]]: dictionaries that contain QNode specifications """ - if max_expansion is not None: - initial_max_expansion = qnode.max_expansion - qnode.max_expansion = max_expansion - - qnode.construct(args, kwargs) + initial_max_expansion = qnode.max_expansion + qnode.max_expansion = max_expansion - if max_expansion is not None: + try: + qnode.construct(args, kwargs) + finally: qnode.max_expansion = initial_max_expansion - return qnode.specs + if isinstance(qnode, qml.QNode): + # TODO: remove when the old QNode is removed + return qnode.specs + + info = qnode.qtape.specs.copy() + + info["num_device_wires"] = qnode.device.num_wires + info["device_name"] = qnode.device.short_name + info["expansion_strategy"] = qnode.expansion_strategy + info["gradient_options"] = qnode.gradient_kwargs + info["interface"] = qnode.interface + info["diff_method"] = ( + 
_get_absolute_import_path(qnode.diff_method) +            if callable(qnode.diff_method) +            else qnode.diff_method +        ) + +        if isinstance(qnode.gradient_fn, qml.gradients.gradient_transform): +            info["gradient_fn"] = _get_absolute_import_path(qnode.gradient_fn) + +            try: +                info["num_gradient_executions"] = len(qnode.gradient_fn(qnode.qtape)[0]) +            except Exception as e:  # pylint: disable=broad-except +                # In the case of a broad exception, we don't want the `qml.specs` transform +                # to fail. Instead, we simply indicate that the number of gradient executions +                # is not supported for the reason specified. +                info["num_gradient_executions"] = f"NotSupported: {str(e)}" +        else: +            info["gradient_fn"] = qnode.gradient_fn + +        return info      return specs_qnode diff --git a/tests/gradients/test_parameter_shift.py b/tests/gradients/test_parameter_shift.py index 1b59d3e3ab6..167af364986 100644 --- a/tests/gradients/test_parameter_shift.py +++ b/tests/gradients/test_parameter_shift.py @@ -146,6 +146,23 @@ def test_behaviour(self):  class TestParamShift:     """Unit tests for the param_shift function""" +    def test_empty_circuit(self): +        """Test that an empty circuit works correctly""" +        with qml.tape.JacobianTape() as tape: +            qml.expval(qml.PauliZ(0)) + +        tapes, _ = qml.gradients.param_shift(tape) +        assert not tapes + +    def test_all_parameters_independent(self): +        """Test that a circuit where all parameters do not affect the output works correctly""" +        with qml.tape.JacobianTape() as tape: +            qml.RX(0.4, wires=0) +            qml.expval(qml.PauliZ(1)) + +        tapes, _ = qml.gradients.param_shift(tape) +        assert not tapes +     def test_state_non_differentiable_error(self):         """Test error raised if attempting to differentiate with respect to a state""" diff --git a/tests/transforms/test_specs.py b/tests/transforms/test_specs.py index 8380c2ef095..cfa3d2affb5 100644 --- a/tests/transforms/test_specs.py +++ b/tests/transforms/test_specs.py @@ -20,7 +20,8 @@  class TestSpecsTransform: -    """Tests for the transform specs""" +    """Tests for the 
transform specs using the old QNode. This can be +    removed when `qml.beta.QNode` is made default."""      @pytest.mark.parametrize(         "diff_method, len_info", [("backprop", 10), ("parameter-shift", 12), ("adjoint", 11)]     ) @@ -163,3 +164,171 @@ def circuit(params):         assert info["num_device_wires"] == 5         assert info["device_name"] == "default.qubit.autograd"         assert info["diff_method"] == "backprop" + + +class TestSpecsTransformBetaQNode: +    """Tests for the transform specs using the new QNode""" + +    @pytest.mark.parametrize( +        "diff_method, len_info", [("backprop", 15), ("parameter-shift", 16), ("adjoint", 15)] +    ) +    def test_empty(self, diff_method, len_info): + +        dev = qml.device("default.qubit", wires=1) + +        @qml.beta.qnode(dev, diff_method=diff_method) +        def circ(): +            return qml.expval(qml.PauliZ(0)) + +        info_func = qml.specs(circ) +        info = info_func() +        assert len(info) == len_info + +        assert info["gate_sizes"] == defaultdict(int) +        assert info["gate_types"] == defaultdict(int) +        assert info["num_observables"] == 1 +        assert info["num_operations"] == 0 +        assert info["num_diagonalizing_gates"] == 0 +        assert info["num_used_wires"] == 1 +        assert info["depth"] == 0 +        assert info["num_device_wires"] == 1 +        assert info["diff_method"] == diff_method +        assert info["num_trainable_params"] == 0 + +        if diff_method == "parameter-shift": +            assert info["num_gradient_executions"] == 0 +            assert info["gradient_fn"] == "pennylane.gradients.parameter_shift.param_shift" + +        if diff_method != "backprop": +            assert info["device_name"] == "default.qubit" +        else: +            assert info["device_name"] == "default.qubit.autograd" + +    @pytest.mark.parametrize( +        "diff_method, len_info", [("backprop", 15), ("parameter-shift", 16), ("adjoint", 15)] +    ) +    def test_specs(self, diff_method, len_info): +        """Test the specs transform works in standard situations""" +        dev = qml.device("default.qubit", wires=4) + +        @qml.beta.qnode(dev, diff_method=diff_method) +        def circuit(x, y, add_RY=True): +            qml.RX(x[0], wires=0) +            
qml.Toffoli(wires=(0, 1, 2)) +            qml.CRY(x[1], wires=(0, 1)) +            qml.Rot(x[2], x[3], y, wires=2) +            if add_RY: +                qml.RY(x[4], wires=1) +            return qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliX(1)) + +        x = np.array([0.05, 0.1, 0.2, 0.3, 0.5], requires_grad=True) +        y = np.array(0.1, requires_grad=False) + +        info_func = qml.specs(circuit) + +        info = info_func(x, y, add_RY=False) + +        circuit(x, y, add_RY=False) + +        assert len(info) == len_info + +        assert info["gate_sizes"] == defaultdict(int, {1: 2, 3: 1, 2: 1}) +        assert info["gate_types"] == defaultdict(int, {"RX": 1, "Toffoli": 1, "CRY": 1, "Rot": 1}) +        assert info["num_operations"] == 4 +        assert info["num_observables"] == 2 +        assert info["num_diagonalizing_gates"] == 1 +        assert info["num_used_wires"] == 3 +        assert info["depth"] == 3 +        assert info["num_device_wires"] == 4 +        assert info["diff_method"] == diff_method +        assert info["num_trainable_params"] == 4 + +        if diff_method == "parameter-shift": +            assert info["num_gradient_executions"] == 6 + +        if diff_method != "backprop": +            assert info["device_name"] == "default.qubit" +        else: +            assert info["device_name"] == "default.qubit.autograd" + +    @pytest.mark.parametrize( +        "diff_method, len_info", [("backprop", 15), ("parameter-shift", 16), ("adjoint", 15)] +    ) +    def test_specs_state(self, diff_method, len_info): +        """Test specs works when state returned""" + +        dev = qml.device("default.qubit", wires=2) + +        @qml.beta.qnode(dev, diff_method=diff_method) +        def circuit(): +            return qml.state() + +        info_func = qml.specs(circuit) +        info = info_func() +        assert len(info) == len_info + +        assert info["num_observables"] == 1 +        assert info["num_diagonalizing_gates"] == 0 + +    def test_max_expansion(self): +        """Test that a user can calculate specifications for a different max +        expansion parameter.""" + +        n_layers = 2 +        n_wires = 5 + +        dev = qml.device("default.qubit", wires=n_wires) + +        @qml.beta.qnode(dev) +        def circuit(params): +            qml.templates.BasicEntanglerLayers(params, 
wires=range(n_wires)) + return qml.expval(qml.PauliZ(0)) + + params_shape = qml.templates.BasicEntanglerLayers.shape(n_layers=n_layers, n_wires=n_wires) + rng = np.random.default_rng(seed=10) + params = rng.standard_normal(params_shape) + + assert circuit.max_expansion == 10 + info = qml.specs(circuit, max_expansion=0)(params) + assert circuit.max_expansion == 10 + + assert len(info) == 15 + + assert info["gate_sizes"] == defaultdict(int, {5: 1}) + assert info["gate_types"] == defaultdict(int, {"BasicEntanglerLayers": 1}) + assert info["num_operations"] == 1 + assert info["num_observables"] == 1 + assert info["num_used_wires"] == 5 + assert info["depth"] == 1 + assert info["num_device_wires"] == 5 + assert info["device_name"] == "default.qubit.autograd" + assert info["diff_method"] == "best" + assert info["gradient_fn"] == "backprop" + + def test_gradient_transform(self): + """Test that a gradient transform is properly labelled""" + dev = qml.device("default.qubit", wires=2) + + @qml.beta.qnode(dev, diff_method=qml.gradients.param_shift) + def circuit(): + return qml.probs(wires=0) + + info = qml.specs(circuit)() + assert info["diff_method"] == "pennylane.gradients.parameter_shift.param_shift" + assert info["gradient_fn"] == "pennylane.gradients.parameter_shift.param_shift" + + def test_custom_gradient_transform(self): + """Test that a custom gradient transform is properly labelled""" + dev = qml.device("default.qubit", wires=2) + + @qml.gradients.gradient_transform + def my_transform(tape): + return tape, None + + @qml.beta.qnode(dev, diff_method=my_transform) + def circuit(): + return qml.probs(wires=0) + + info = qml.specs(circuit)() + assert info["diff_method"] == "test_specs.my_transform" + assert info["gradient_fn"] == "test_specs.my_transform"