@@ -10,6 +10,7 @@
 from executorch.backends.nxp.tests.executorch_pipeline import to_edge_program
 from executorch.backends.nxp.tests.executors import convert_run_compare
 from executorch.backends.nxp.tests.models import LinearModule
+from executorch.exir.dialects._ops import ops as exir_ops
 
 
 @pytest.fixture(autouse=True)
@@ -26,15 +27,23 @@ def test_linear_conversion__with_bias():

     input_data = np.random.random(input_shape).astype(np.float32)
 
-    convert_run_compare(edge_program, input_data=input_data, atol=1.0e-6)
+    nodes = list(edge_program.graph.nodes)
+    assert nodes[4].target == exir_ops.edge.aten.addmm.default
+    assert len(nodes[4].args) == 3  # Has bias.
+
+    convert_run_compare(edge_program, input_data=input_data)
 
 
 def test_linear_conversion__without_bias():
     input_shape = (10, 32)
     edge_program = to_edge_program(
-        LinearModule(bias=True), input_shape
+        LinearModule(bias=False), input_shape

Inline review comment on the LinearModule(bias=False) line:

Contributor: Curious, how did we not test this?

Collaborator (author): We must have missed it in the rapid development during the early stages. I have updated the tests again to make sure the bias is (or is not) present.

     ).exported_program()
 
     input_data = np.random.random(input_shape).astype(np.float32)
 
-    convert_run_compare(edge_program, input_data=input_data, atol=1.0e-6)
+    nodes = list(edge_program.graph.nodes)
+    assert nodes[3].target == exir_ops.edge.aten.mm.default
+    assert len(nodes[3].args) == 2  # No bias.
+
+    convert_run_compare(edge_program, input_data=input_data)
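
Following the review exchange above, here is a minimal standalone sketch of the bias check (not part of the PR diff; it assumes the to_edge_program and LinearModule test helpers imported in this file behave as in the tests above, and the helper name lowered_linear_op is made up for illustration). A linear layer with a bias lowers to the edge op aten.addmm.default, which takes the bias as a third argument, while a bias-free layer lowers to aten.mm.default with only two arguments.

# Sketch only: checks which edge op a LinearModule lowers to, depending on bias.
from executorch.backends.nxp.tests.executorch_pipeline import to_edge_program
from executorch.backends.nxp.tests.models import LinearModule
from executorch.exir.dialects._ops import ops as exir_ops


def lowered_linear_op(bias: bool, input_shape=(10, 32)):
    # Export the module to an edge program and collect its graph nodes.
    edge_program = to_edge_program(
        LinearModule(bias=bias), input_shape
    ).exported_program()
    targets = [node.target for node in edge_program.graph.nodes]

    # addmm (with bias) or mm (without bias) should appear in the graph.
    expected = (
        exir_ops.edge.aten.addmm.default if bias else exir_ops.edge.aten.mm.default
    )
    assert expected in targets
    return expected

Searching the node targets by membership, rather than a fixed index as in the tests (nodes[3] / nodes[4]), keeps the check independent of how many placeholder nodes precede the op.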