diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 23ac21a3..d3faa48e 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -51,7 +51,7 @@ jobs:
       - name: Pull Test Data
        run: git lfs pull
      - name: Run tests
-        run: nox -t ${{ matrix.nox-tag }} --forcecolor -- --cov=onnx_ir --cov-report=xml --cov-append --cov-branch -n=auto --junitxml junit.xml
+        run: nox -t ${{ matrix.nox-tag }} --forcecolor -- --cov=src/onnx_ir --cov-report=xml --cov-append --cov-branch -n=auto --junitxml junit.xml
      - name: Upload coverage to Codecov
        if: always()
        uses: codecov/codecov-action@v5
diff --git a/REUSE.toml b/REUSE.toml
index 521e4c37..93312d04 100644
--- a/REUSE.toml
+++ b/REUSE.toml
@@ -14,6 +14,7 @@ path = [
     "**/*.ipynb",
     "**/*.md",
     "**/*.rst",
+    "**/*.textproto",
     "**/*.toml",
     "**/*.yml",
     "CODEOWNERS",
diff --git a/testdata/e2e_models/README.md b/testdata/e2e_models/README.md
new file mode 100644
index 00000000..2a435ede
--- /dev/null
+++ b/testdata/e2e_models/README.md
@@ -0,0 +1,5 @@
+# Models for end-to-end testing
+
+The models under this directory are generated with [tools/create_test_model.py](/tools/create_test_model.py). ONNX models have all initializer data stripped and are saved in the textproto format.
+
+If a particular test requires the initializer data to be present, the test should create random weights based on each tensor's dtype/shape and add them to the `TensorProto`s in `model.graph.initializer`.
diff --git a/testdata/e2e_models/Speech2Text2ForCausalLM/Speech2Text2ForCausalLM_dynamo.textproto b/testdata/e2e_models/Speech2Text2ForCausalLM/Speech2Text2ForCausalLM_dynamo.textproto
new file mode 100644
index 00000000..34bcaa02
--- /dev/null
+++ b/testdata/e2e_models/Speech2Text2ForCausalLM/Speech2Text2ForCausalLM_dynamo.textproto
@@ -0,0 +1,24838 @@
+ir_version: 8 +producer_name: "pytorch" +producer_version: "2.2.0" +graph { + node { + input: "l_input_ids_" + input: "model.decoder.embed_tokens.weight" + input: "model.decoder.embed_positions.weights" + input: "model.decoder.layers.0.self_attn.q_proj.weight" + input: "model.decoder.layers.0.self_attn.q_proj.bias" + input: "model.decoder.layers.0.self_attn.k_proj.weight" + input: "model.decoder.layers.0.self_attn.k_proj.bias" + input: "model.decoder.layers.0.self_attn.v_proj.weight" + input: "model.decoder.layers.0.self_attn.v_proj.bias" + input: "model.decoder.layers.0.self_attn.out_proj.weight" + input: "model.decoder.layers.0.self_attn.out_proj.bias" + input: "model.decoder.layers.0.self_attn_layer_norm.weight" + input: "model.decoder.layers.0.self_attn_layer_norm.bias" + input: "model.decoder.layers.0.fc1.weight" + input: "model.decoder.layers.0.fc1.bias" + input: "model.decoder.layers.0.fc2.weight" + input: "model.decoder.layers.0.fc2.bias" + input: "model.decoder.layers.0.final_layer_norm.weight" + input: "model.decoder.layers.0.final_layer_norm.bias" + input: "model.decoder.layers.1.self_attn.q_proj.weight" + input: "model.decoder.layers.1.self_attn.q_proj.bias" + input: "model.decoder.layers.1.self_attn.k_proj.weight" + input: "model.decoder.layers.1.self_attn.k_proj.bias" + input: "model.decoder.layers.1.self_attn.v_proj.weight" + input: "model.decoder.layers.1.self_attn.v_proj.bias" + input: "model.decoder.layers.1.self_attn.out_proj.weight" + input: "model.decoder.layers.1.self_attn.out_proj.bias" + input: "model.decoder.layers.1.self_attn_layer_norm.weight" + input: "model.decoder.layers.1.self_attn_layer_norm.bias" + input: "model.decoder.layers.1.fc1.weight" + input: "model.decoder.layers.1.fc1.bias" +
input: "model.decoder.layers.1.fc2.weight" + input: "model.decoder.layers.1.fc2.bias" + input: "model.decoder.layers.1.final_layer_norm.weight" + input: "model.decoder.layers.1.final_layer_norm.bias" + input: "model.decoder.layers.2.self_attn.q_proj.weight" + input: "model.decoder.layers.2.self_attn.q_proj.bias" + input: "model.decoder.layers.2.self_attn.k_proj.weight" + input: "model.decoder.layers.2.self_attn.k_proj.bias" + input: "model.decoder.layers.2.self_attn.v_proj.weight" + input: "model.decoder.layers.2.self_attn.v_proj.bias" + input: "model.decoder.layers.2.self_attn.out_proj.weight" + input: "model.decoder.layers.2.self_attn.out_proj.bias" + input: "model.decoder.layers.2.self_attn_layer_norm.weight" + input: "model.decoder.layers.2.self_attn_layer_norm.bias" + input: "model.decoder.layers.2.fc1.weight" + input: "model.decoder.layers.2.fc1.bias" + input: "model.decoder.layers.2.fc2.weight" + input: "model.decoder.layers.2.fc2.bias" + input: "model.decoder.layers.2.final_layer_norm.weight" + input: "model.decoder.layers.2.final_layer_norm.bias" + input: "model.decoder.layers.3.self_attn.q_proj.weight" + input: "model.decoder.layers.3.self_attn.q_proj.bias" + input: "model.decoder.layers.3.self_attn.k_proj.weight" + input: "model.decoder.layers.3.self_attn.k_proj.bias" + input: "model.decoder.layers.3.self_attn.v_proj.weight" + input: "model.decoder.layers.3.self_attn.v_proj.bias" + input: "model.decoder.layers.3.self_attn.out_proj.weight" + input: "model.decoder.layers.3.self_attn.out_proj.bias" + input: "model.decoder.layers.3.self_attn_layer_norm.weight" + input: "model.decoder.layers.3.self_attn_layer_norm.bias" + input: "model.decoder.layers.3.fc1.weight" + input: "model.decoder.layers.3.fc1.bias" + input: "model.decoder.layers.3.fc2.weight" + input: "model.decoder.layers.3.fc2.bias" + input: "model.decoder.layers.3.final_layer_norm.weight" + input: "model.decoder.layers.3.final_layer_norm.bias" + input: "model.decoder.layers.4.self_attn.q_proj.weight" + input: "model.decoder.layers.4.self_attn.q_proj.bias" + input: "model.decoder.layers.4.self_attn.k_proj.weight" + input: "model.decoder.layers.4.self_attn.k_proj.bias" + input: "model.decoder.layers.4.self_attn.v_proj.weight" + input: "model.decoder.layers.4.self_attn.v_proj.bias" + input: "model.decoder.layers.4.self_attn.out_proj.weight" + input: "model.decoder.layers.4.self_attn.out_proj.bias" + input: "model.decoder.layers.4.self_attn_layer_norm.weight" + input: "model.decoder.layers.4.self_attn_layer_norm.bias" + input: "model.decoder.layers.4.fc1.weight" + input: "model.decoder.layers.4.fc1.bias" + input: "model.decoder.layers.4.fc2.weight" + input: "model.decoder.layers.4.fc2.bias" + input: "model.decoder.layers.4.final_layer_norm.weight" + input: "model.decoder.layers.4.final_layer_norm.bias" + input: "model.decoder.layers.5.self_attn.q_proj.weight" + input: "model.decoder.layers.5.self_attn.q_proj.bias" + input: "model.decoder.layers.5.self_attn.k_proj.weight" + input: "model.decoder.layers.5.self_attn.k_proj.bias" + input: "model.decoder.layers.5.self_attn.v_proj.weight" + input: "model.decoder.layers.5.self_attn.v_proj.bias" + input: "model.decoder.layers.5.self_attn.out_proj.weight" + input: "model.decoder.layers.5.self_attn.out_proj.bias" + input: "model.decoder.layers.5.self_attn_layer_norm.weight" + input: "model.decoder.layers.5.self_attn_layer_norm.bias" + input: "model.decoder.layers.5.fc1.weight" + input: "model.decoder.layers.5.fc1.bias" + input: "model.decoder.layers.5.fc2.weight" + input: 
"model.decoder.layers.5.fc2.bias" + input: "model.decoder.layers.5.final_layer_norm.weight" + input: "model.decoder.layers.5.final_layer_norm.bias" + output: "model_decoder_1" + output: "model_decoder_1_1" + output: "model_decoder_1_2" + output: "model_decoder_1_3" + output: "model_decoder_1_4" + output: "model_decoder_1_5" + output: "model_decoder_1_6" + output: "model_decoder_1_7" + output: "model_decoder_1_8" + output: "model_decoder_1_9" + output: "model_decoder_1_10" + output: "model_decoder_1_11" + output: "model_decoder_1_12" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1_7" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_1_12" + input: "model.decoder.embed_tokens.weight" + output: "lm_head_1" + name: "torch_nn_modules_linear_Linear_lm_head_1_8" + op_type: "torch_nn_modules_linear_Linear_lm_head_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_114" + name: "Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\377\020\'\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "lm_head_1" + input: "_val_114" + output: "view_138" + name: "aten_view_10" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_116" + name: "Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\377" + } + type: TENSOR + } + } + node { + input: "l_labels_" + input: "_val_116" + output: "view_139" + name: "aten_view_12" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_138" + output: "_log_softmax" + name: "aten__log_softmax_13" + op_type: "aten__log_softmax" + attribute { + name: "dim" + i: 1 + type: INT + } + attribute { + name: "half_to_float" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_119" + name: "Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\234\377\377\377\377\377\377\377" + } + type: TENSOR + } + } + node { + input: "view_139" + input: "_val_119" + output: "ne_1" + name: "aten_ne_15" + op_type: "aten_ne" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_121" + name: "Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_121" + output: "scalar_tensor" + name: "aten_scalar_tensor_sym_number_17" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 7 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "ne_1" + input: "view_139" + input: "scalar_tensor" + output: "where" + name: "aten_where_18" + op_type: "aten_where" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "where" + output: "unsqueeze_4" + name: "aten_unsqueeze_19" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "_log_softmax" + input: "unsqueeze_4" + output: "gather" + name: "aten_gather_20" + op_type: "aten_gather" + attribute { + name: "dim" + i: 1 + type: INT + } + attribute { + name: "sparse_grad" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: 
"gather" + output: "squeeze" + name: "aten_squeeze_dim_21" + op_type: "aten_squeeze_dim" + attribute { + name: "dim" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "squeeze" + output: "neg" + name: "aten_neg_22" + op_type: "aten_neg" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_128" + name: "Constant_23" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\234\377\377\377\377\377\377\377" + } + type: TENSOR + } + } + node { + input: "view_139" + input: "_val_128" + output: "ne_2" + name: "aten_ne_24" + op_type: "aten_ne" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_130" + name: "Constant_25" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_130" + output: "scalar_tensor_1" + name: "aten_scalar_tensor_sym_number_26" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "ne_2" + input: "neg" + input: "scalar_tensor_1" + output: "where_1" + name: "aten_where_27" + op_type: "aten_where" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_133" + name: "Constant_28" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\234\377\377\377\377\377\377\377" + } + type: TENSOR + } + } + node { + input: "view_139" + input: "_val_133" + output: "ne_3" + name: "aten_ne_29" + op_type: "aten_ne" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "ne_3" + output: "convert_element_type_default_1" + name: "prims_convert_element_type_30" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 7 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_default_1" + output: "sum_1" + name: "_aten_sum_dim_none_31" + op_type: "_aten_sum_dim_none" + attribute { + name: "keepdim" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sum_1" + output: "convert_element_type_3" + name: "prims_convert_element_type_32" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "where_1" + output: "sum_2" + name: "_aten_sum_dim_none_33" + op_type: "_aten_sum_dim_none" + attribute { + name: "keepdim" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sum_2" + input: "convert_element_type_3" + output: "div" + name: "aten_div_34" + op_type: "aten_div" + domain: "pkg.onnxscript.torch_lib" + } + name: "main_graph" + initializer { + dims: 10000 + dims: 256 + data_type: 1 + name: "model.decoder.embed_tokens.weight" + raw_data: "" + } + initializer { + dims: 1026 + dims: 256 + data_type: 1 + name: "model.decoder.embed_positions.weights" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.self_attn.q_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.self_attn.q_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.self_attn.k_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.self_attn.k_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: 
"model.decoder.layers.0.self_attn.v_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.self_attn.v_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.self_attn.out_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.self_attn.out_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.self_attn_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.self_attn_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 2048 + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.fc1.weight" + raw_data: "" + } + initializer { + dims: 2048 + data_type: 1 + name: "model.decoder.layers.0.fc1.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 2048 + data_type: 1 + name: "model.decoder.layers.0.fc2.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.fc2.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.final_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.0.final_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.self_attn.q_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.self_attn.q_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.self_attn.k_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.self_attn.k_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.self_attn.v_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.self_attn.v_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.self_attn.out_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.self_attn.out_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.self_attn_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.self_attn_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 2048 + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.fc1.weight" + raw_data: "" + } + initializer { + dims: 2048 + data_type: 1 + name: "model.decoder.layers.1.fc1.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 2048 + data_type: 1 + name: "model.decoder.layers.1.fc2.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.fc2.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.final_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.1.final_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.self_attn.q_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.self_attn.q_proj.bias" + raw_data: "" + } + initializer { + 
dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.self_attn.k_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.self_attn.k_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.self_attn.v_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.self_attn.v_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.self_attn.out_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.self_attn.out_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.self_attn_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.self_attn_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 2048 + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.fc1.weight" + raw_data: "" + } + initializer { + dims: 2048 + data_type: 1 + name: "model.decoder.layers.2.fc1.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 2048 + data_type: 1 + name: "model.decoder.layers.2.fc2.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.fc2.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.final_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.2.final_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.self_attn.q_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.self_attn.q_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.self_attn.k_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.self_attn.k_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.self_attn.v_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.self_attn.v_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.self_attn.out_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.self_attn.out_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.self_attn_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.self_attn_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 2048 + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.fc1.weight" + raw_data: "" + } + initializer { + dims: 2048 + data_type: 1 + name: "model.decoder.layers.3.fc1.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 2048 + data_type: 1 + name: "model.decoder.layers.3.fc2.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.fc2.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.3.final_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: 
"model.decoder.layers.3.final_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.self_attn.q_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.self_attn.q_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.self_attn.k_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.self_attn.k_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.self_attn.v_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.self_attn.v_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.self_attn.out_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.self_attn.out_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.self_attn_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.self_attn_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 2048 + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.fc1.weight" + raw_data: "" + } + initializer { + dims: 2048 + data_type: 1 + name: "model.decoder.layers.4.fc1.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 2048 + data_type: 1 + name: "model.decoder.layers.4.fc2.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.fc2.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.final_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.4.final_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.self_attn.q_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.self_attn.q_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.self_attn.k_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.self_attn.k_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.self_attn.v_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.self_attn.v_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.self_attn.out_proj.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.self_attn.out_proj.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.self_attn_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.self_attn_layer_norm.bias" + raw_data: "" + } + initializer { + dims: 2048 + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.fc1.weight" + raw_data: "" + } + initializer { + dims: 2048 + data_type: 1 + name: "model.decoder.layers.5.fc1.bias" + raw_data: "" + } + initializer { + dims: 256 + dims: 2048 + data_type: 1 + name: "model.decoder.layers.5.fc2.weight" + 
raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.fc2.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.final_layer_norm.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "model.decoder.layers.5.final_layer_norm.bias" + raw_data: "" + } + input { + name: "l_input_ids_" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + input { + name: "l_labels_" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + output { + name: "div" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + output { + name: "lm_head_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 10000 + } + } + } + } + } + output { + name: "model_decoder_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + output { + name: "model_decoder_1_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: 
"model.decoder.embed_tokens.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 10000 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.embed_positions.weights" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1026 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.self_attn.q_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.self_attn.q_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.self_attn.k_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.self_attn.k_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.self_attn.v_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.self_attn.v_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.self_attn.out_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.self_attn.out_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.self_attn_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.self_attn_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.fc1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.fc1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.fc2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.fc2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.final_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.0.final_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.self_attn.q_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.self_attn.q_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"model.decoder.layers.1.self_attn.k_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.self_attn.k_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.self_attn.v_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.self_attn.v_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.self_attn.out_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.self_attn.out_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.self_attn_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.self_attn_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.fc1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.fc1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.fc2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.fc2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.final_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.1.final_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.self_attn.q_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.self_attn.q_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.self_attn.k_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.self_attn.k_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.self_attn.v_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.self_attn.v_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"model.decoder.layers.2.self_attn.out_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.self_attn.out_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.self_attn_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.self_attn_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.fc1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.fc1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.fc2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.fc2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.final_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.2.final_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.self_attn.q_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.self_attn.q_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.self_attn.k_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.self_attn.k_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.self_attn.v_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.self_attn.v_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.self_attn.out_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.self_attn.out_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.self_attn_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.self_attn_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.fc1.weight" + type { + 
tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.fc1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.fc2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.fc2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.final_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.3.final_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.self_attn.q_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.self_attn.q_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.self_attn.k_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.self_attn.k_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.self_attn.v_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.self_attn.v_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.self_attn.out_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.self_attn.out_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.self_attn_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.self_attn_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.fc1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.fc1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.fc2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.fc2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.4.final_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } 
+ } + } + value_info { + name: "model.decoder.layers.4.final_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.self_attn.q_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.self_attn.q_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.self_attn.k_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.self_attn.k_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.self_attn.v_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.self_attn.v_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.self_attn.out_proj.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.self_attn.out_proj.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.self_attn_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.self_attn_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.fc1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.fc1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.fc2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.fc2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.final_layer_norm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model.decoder.layers.5.final_layer_norm.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "model_decoder_1_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "_val_114" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 2 + } + } + } + } + } + value_info { + name: "view_138" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 10000 + } + } + } + } + } + value_info { + name: "_val_116" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 
+ } + } + } + } + } + value_info { + name: "view_139" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "_log_softmax" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 10000 + } + } + } + } + } + value_info { + name: "_val_119" + type { + tensor_type { + elem_type: 7 + shape { + } + } + } + } + value_info { + name: "ne_1" + type { + tensor_type { + elem_type: 9 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "_val_121" + type { + tensor_type { + elem_type: 7 + shape { + } + } + } + } + value_info { + name: "scalar_tensor" + type { + tensor_type { + elem_type: 7 + shape { + } + } + } + } + value_info { + name: "where" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "unsqueeze_4" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "gather" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "squeeze" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "neg" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "_val_128" + type { + tensor_type { + elem_type: 7 + shape { + } + } + } + } + value_info { + name: "ne_2" + type { + tensor_type { + elem_type: 9 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "_val_130" + type { + tensor_type { + elem_type: 7 + shape { + } + } + } + } + value_info { + name: "scalar_tensor_1" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "where_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "_val_133" + type { + tensor_type { + elem_type: 7 + shape { + } + } + } + } + value_info { + name: "ne_3" + type { + tensor_type { + elem_type: 9 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "convert_element_type_default_1" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "sum_1" + type { + tensor_type { + elem_type: 7 + shape { + } + } + } + } + value_info { + name: "convert_element_type_3" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "sum_2" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_sparse_Embedding_model_decoder_embed_tokens_1/view" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_sparse_Embedding_model_decoder_embed_tokens_1/embedding" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/view" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/ne" + type { + tensor_type { + elem_type: 9 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/convert_element_type" + type { + tensor_type { + elem_type: 6 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/convert_element_type_default" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/cumsum" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/convert_element_type_1" + type { + tensor_type { + elem_type: 6 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/scalar_tensor_default" + type { + tensor_type { + elem_type: 6 + shape { + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/add_1" + type { + tensor_type { + elem_type: 6 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/mul_1" + type { + tensor_type { + elem_type: 6 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/convert_element_type_2" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/add_2" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/view_2" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/index_select" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/view_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/detach" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1/detach_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_q_proj_1/clone" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_q_proj_1/view_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_q_proj_1/t" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_q_proj_1/addmm" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_q_proj_1/view_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_k_proj_1/clone" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_k_proj_1/view_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_k_proj_1/t_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_k_proj_1/addmm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_k_proj_1/view_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_v_proj_1/clone" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_v_proj_1/view_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_v_proj_1/t_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_v_proj_1/addmm_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_v_proj_1/view_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_out_proj_1/view_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_out_proj_1/view_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_out_proj_1/t_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_out_proj_1/addmm_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_out_proj_1/view_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/clone" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } 
+ } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/masked_fill" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/model_decoder_layers_0_self_attn_q_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/mul_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/model_decoder_layers_0_self_attn_k_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/view_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/transpose" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/clone_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/model_decoder_layers_0_self_attn_v_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/view_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/transpose_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + 
} + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/clone_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/view_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/transpose_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/clone_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/view_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/view_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/view_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/transpose_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/bmm" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/view_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/unsqueeze_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/unsqueeze_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/slice_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/slice_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/add_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/view_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/_softmax" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/clone_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/bmm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/view_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/transpose_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/clone_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/view_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1/model_decoder_layers_0_self_attn_out_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_self_attn_layer_norm_1/add_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_self_attn_layer_norm_1/native_layer_norm" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_self_attn_layer_norm_1/native_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_self_attn_layer_norm_1/native_layer_norm_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_fc1_1/getitem" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_fc1_1/view_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_fc1_1/t_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_fc1_1/addmm_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_fc1_1/view_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_0_activation_fn_1/view_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_0_activation_fn_1/relu" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_fc2_1/clone_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_fc2_1/view_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_fc2_1/t_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_fc2_1/addmm_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_0_fc2_1/view_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_final_layer_norm_1/add_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_final_layer_norm_1/native_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_final_layer_norm_1/native_layer_norm_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_final_layer_norm_1/native_layer_norm_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/clone" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/masked_fill" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/model_decoder_layers_0_self_attn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/model_decoder_layers_0_self_attn_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/model_decoder_layers_0_self_attn_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/model_decoder_layers_0_self_attn_1_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/clone_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/add_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/model_decoder_layers_0_self_attn_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/model_decoder_layers_0_fc1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/model_decoder_layers_0_activation_fn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/clone_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/model_decoder_layers_0_fc2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/clone_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/add_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1/model_decoder_layers_0_final_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_q_proj_1/getitem_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_q_proj_1/view_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_q_proj_1/t_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_q_proj_1/addmm_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_q_proj_1/view_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_k_proj_1/getitem_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_k_proj_1/view_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_k_proj_1/t_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_k_proj_1/addmm_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_k_proj_1/view_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_v_proj_1/getitem_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_v_proj_1/view_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_v_proj_1/t_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_v_proj_1/addmm_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_v_proj_1/view_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_out_proj_1/view_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_out_proj_1/view_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_out_proj_1/t_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_out_proj_1/addmm_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_out_proj_1/view_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/getitem_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/model_decoder_layers_1_self_attn_q_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/mul_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/model_decoder_layers_1_self_attn_k_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/view_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/transpose_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/clone_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + 
dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/model_decoder_layers_1_self_attn_v_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/view_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/transpose_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/clone_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/view_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/transpose_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/clone_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/view_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/view_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/view_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } 
+ dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/transpose_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/bmm_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/view_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/add_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/view_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/_softmax_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/clone_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/bmm_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/view_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/transpose_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/clone_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/view_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1/model_decoder_layers_1_self_attn_out_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_self_attn_layer_norm_1/add_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_self_attn_layer_norm_1/native_layer_norm_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_self_attn_layer_norm_1/native_layer_norm_2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_self_attn_layer_norm_1/native_layer_norm_2_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_fc1_1/getitem_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_fc1_1/view_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_fc1_1/t_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_fc1_1/addmm_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_fc1_1/view_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + 
} + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_1_activation_fn_1/view_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_1_activation_fn_1/relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_fc2_1/clone_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_fc2_1/view_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_fc2_1/t_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_fc2_1/addmm_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_1_fc2_1/view_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_final_layer_norm_1/add_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_final_layer_norm_1/native_layer_norm_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_final_layer_norm_1/native_layer_norm_3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_final_layer_norm_1/native_layer_norm_3_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/getitem_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/model_decoder_layers_1_self_attn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/model_decoder_layers_1_self_attn_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/model_decoder_layers_1_self_attn_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/clone_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/add_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/model_decoder_layers_1_self_attn_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/model_decoder_layers_1_fc1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/model_decoder_layers_1_activation_fn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/clone_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/model_decoder_layers_1_fc2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/clone_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/add_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1/model_decoder_layers_1_final_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_q_proj_1/getitem_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_q_proj_1/view_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_q_proj_1/t_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_q_proj_1/addmm_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_q_proj_1/view_49" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_k_proj_1/getitem_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_k_proj_1/view_50" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_k_proj_1/t_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_k_proj_1/addmm_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_k_proj_1/view_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_v_proj_1/getitem_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_v_proj_1/view_53" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_v_proj_1/t_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_v_proj_1/addmm_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_v_proj_1/view_54" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_out_proj_1/view_63" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_out_proj_1/view_64" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_out_proj_1/t_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_out_proj_1/addmm_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_out_proj_1/view_65" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/getitem_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 
256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/model_decoder_layers_2_self_attn_q_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/mul_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/model_decoder_layers_2_self_attn_k_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/view_52" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/transpose_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/clone_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/model_decoder_layers_2_self_attn_v_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/view_55" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/transpose_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 
4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/clone_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/view_56" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/transpose_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/clone_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/view_57" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/view_58" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/view_59" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/transpose_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/bmm_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/view_60" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + 
} + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/add_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/view_61" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/_softmax_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/clone_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/bmm_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/view_62" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/transpose_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/clone_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/view_63" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1/model_decoder_layers_2_self_attn_out_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_self_attn_layer_norm_1/add_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_self_attn_layer_norm_1/native_layer_norm_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_self_attn_layer_norm_1/native_layer_norm_4_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_self_attn_layer_norm_1/native_layer_norm_4_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_fc1_1/getitem_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_fc1_1/view_66" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_fc1_1/t_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_fc1_1/addmm_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_fc1_1/view_67" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_2_activation_fn_1/view_67" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_2_activation_fn_1/relu_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_fc2_1/clone_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_fc2_1/view_68" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 
128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_fc2_1/t_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_fc2_1/addmm_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_2_fc2_1/view_69" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_final_layer_norm_1/add_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_final_layer_norm_1/native_layer_norm_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_final_layer_norm_1/native_layer_norm_5_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_final_layer_norm_1/native_layer_norm_5_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/getitem_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/model_decoder_layers_2_self_attn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/model_decoder_layers_2_self_attn_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/model_decoder_layers_2_self_attn_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/clone_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/add_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/model_decoder_layers_2_self_attn_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/model_decoder_layers_2_fc1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/model_decoder_layers_2_activation_fn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/clone_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/model_decoder_layers_2_fc2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/clone_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/add_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1/model_decoder_layers_2_final_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_q_proj_1/getitem_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_q_proj_1/view_70" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_q_proj_1/t_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_q_proj_1/addmm_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_q_proj_1/view_71" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_k_proj_1/getitem_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_k_proj_1/view_72" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_k_proj_1/t_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_k_proj_1/addmm_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_k_proj_1/view_73" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_v_proj_1/getitem_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_v_proj_1/view_75" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + 
dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_v_proj_1/t_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_v_proj_1/addmm_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_v_proj_1/view_76" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_out_proj_1/view_85" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_out_proj_1/view_86" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_out_proj_1/t_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_out_proj_1/addmm_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_out_proj_1/view_87" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/getitem_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/model_decoder_layers_3_self_attn_q_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/mul_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + 
dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/model_decoder_layers_3_self_attn_k_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/view_74" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/transpose_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/clone_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/model_decoder_layers_3_self_attn_v_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/view_77" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/transpose_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/clone_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/view_78" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/transpose_17" + type { + tensor_type { + 
elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/clone_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/view_79" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/view_80" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/view_81" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/transpose_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/bmm_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/view_82" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/add_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/view_83" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/_softmax_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + 
} + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/clone_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/bmm_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/view_84" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/transpose_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/clone_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/view_85" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1/model_decoder_layers_3_self_attn_out_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_self_attn_layer_norm_1/add_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_self_attn_layer_norm_1/native_layer_norm_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_self_attn_layer_norm_1/native_layer_norm_6_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_self_attn_layer_norm_1/native_layer_norm_6_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_fc1_1/getitem_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_fc1_1/view_88" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_fc1_1/t_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_fc1_1/addmm_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_fc1_1/view_89" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_3_activation_fn_1/view_89" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_3_activation_fn_1/relu_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_fc2_1/clone_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_fc2_1/view_90" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_fc2_1/t_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_fc2_1/addmm_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_3_fc2_1/view_91" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_final_layer_norm_1/add_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_final_layer_norm_1/native_layer_norm_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_final_layer_norm_1/native_layer_norm_7_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_final_layer_norm_1/native_layer_norm_7_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/getitem_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/model_decoder_layers_3_self_attn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/model_decoder_layers_3_self_attn_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/model_decoder_layers_3_self_attn_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/clone_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/add_14" + type { + tensor_type { + elem_type: 1 + shape 
{ + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/model_decoder_layers_3_self_attn_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/model_decoder_layers_3_fc1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/model_decoder_layers_3_activation_fn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/clone_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/model_decoder_layers_3_fc2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/clone_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/add_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1/model_decoder_layers_3_final_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_q_proj_1/getitem_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_q_proj_1/view_92" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_q_proj_1/t_24" + 
type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_q_proj_1/addmm_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_q_proj_1/view_93" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_k_proj_1/getitem_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_k_proj_1/view_94" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_k_proj_1/t_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_k_proj_1/addmm_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_k_proj_1/view_95" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_v_proj_1/getitem_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_v_proj_1/view_97" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_v_proj_1/t_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_v_proj_1/addmm_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_v_proj_1/view_98" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_out_proj_1/view_107" + type { + tensor_type { + 
elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_out_proj_1/view_108" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_out_proj_1/t_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_out_proj_1/addmm_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_out_proj_1/view_109" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/getitem_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/model_decoder_layers_4_self_attn_q_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/mul_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/model_decoder_layers_4_self_attn_k_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/view_96" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/transpose_20" + type { + tensor_type { + elem_type: 1 
+ shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/clone_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/model_decoder_layers_4_self_attn_v_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/view_99" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/transpose_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/clone_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/view_100" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/transpose_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/clone_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/view_101" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/view_102" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/view_103" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/transpose_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/bmm_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/view_104" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/add_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/view_105" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/_softmax_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/clone_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/bmm_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/view_106" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/transpose_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/clone_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/view_107" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1/model_decoder_layers_4_self_attn_out_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_self_attn_layer_norm_1/add_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_self_attn_layer_norm_1/native_layer_norm_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_self_attn_layer_norm_1/native_layer_norm_8_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_self_attn_layer_norm_1/native_layer_norm_8_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_fc1_1/getitem_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_fc1_1/view_110" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + 
name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_fc1_1/t_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_fc1_1/addmm_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_fc1_1/view_111" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_4_activation_fn_1/view_111" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_4_activation_fn_1/relu_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_fc2_1/clone_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_fc2_1/view_112" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_fc2_1/t_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_fc2_1/addmm_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_4_fc2_1/view_113" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_final_layer_norm_1/add_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_final_layer_norm_1/native_layer_norm_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_final_layer_norm_1/native_layer_norm_9_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + 
name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_final_layer_norm_1/native_layer_norm_9_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/getitem_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/model_decoder_layers_4_self_attn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/model_decoder_layers_4_self_attn_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/model_decoder_layers_4_self_attn_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/clone_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/add_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/model_decoder_layers_4_self_attn_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/model_decoder_layers_4_fc1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/model_decoder_layers_4_activation_fn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/clone_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/model_decoder_layers_4_fc2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/clone_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/add_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1/model_decoder_layers_4_final_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_q_proj_1/getitem_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_q_proj_1/view_114" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_q_proj_1/t_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_q_proj_1/addmm_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_q_proj_1/view_115" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_k_proj_1/getitem_27" + type { + tensor_type { + 
elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_k_proj_1/view_116" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_k_proj_1/t_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_k_proj_1/addmm_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_k_proj_1/view_117" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_v_proj_1/getitem_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_v_proj_1/view_119" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_v_proj_1/t_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_v_proj_1/addmm_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_v_proj_1/view_120" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_out_proj_1/view_129" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_out_proj_1/view_130" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_out_proj_1/t_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_out_proj_1/addmm_33" + type { + tensor_type { + elem_type: 1 + shape { + dim 
{ + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_out_proj_1/view_131" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/getitem_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/model_decoder_layers_5_self_attn_q_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/mul_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/model_decoder_layers_5_self_attn_k_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/view_118" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/transpose_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/clone_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/model_decoder_layers_5_self_attn_v_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + 
dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/view_121" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/transpose_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/clone_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/view_122" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/transpose_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/clone_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/view_123" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/view_124" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/view_125" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/transpose_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + dim { + dim_value: 128 + } + } + } + } + } + 
value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/bmm_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/view_126" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/add_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/view_127" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/_softmax_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/clone_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/bmm_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/view_128" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/transpose_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/clone_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 4 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/view_129" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1/model_decoder_layers_5_self_attn_out_proj_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_self_attn_layer_norm_1/add_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_self_attn_layer_norm_1/native_layer_norm_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_self_attn_layer_norm_1/native_layer_norm_10_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_self_attn_layer_norm_1/native_layer_norm_10_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_fc1_1/getitem_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_fc1_1/view_132" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_fc1_1/t_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_fc1_1/addmm_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_fc1_1/view_133" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_5_activation_fn_1/view_133" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU_model_decoder_layers_5_activation_fn_1/relu_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_fc2_1/clone_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_fc2_1/view_134" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_fc2_1/t_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2048 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_fc2_1/addmm_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_model_decoder_layers_5_fc2_1/view_135" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_final_layer_norm_1/add_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_final_layer_norm_1/native_layer_norm_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_final_layer_norm_1/native_layer_norm_11_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_final_layer_norm_1/native_layer_norm_11_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/getitem_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/expand_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/model_decoder_layers_5_self_attn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/model_decoder_layers_5_self_attn_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/model_decoder_layers_5_self_attn_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/clone_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/add_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/model_decoder_layers_5_self_attn_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/model_decoder_layers_5_fc1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/model_decoder_layers_5_activation_fn_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/clone_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 2048 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/model_decoder_layers_5_fc2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: 
"pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/clone_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/add_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1/model_decoder_layers_5_final_layer_norm_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/l_input_ids_" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/view" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_embed_tokens_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/mul" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/full" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/arange" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/add" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/view_1" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/lt" + type { + tensor_type { + elem_type: 9 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + 
} + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/masked_fill" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_embed_positions_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/add_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/clone" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_0_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_0_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_0_1_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_1_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_1_1_2" 
+ type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_2_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_2_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_3_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_3_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_4_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_4_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_4_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_5_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + 
dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_5_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.transformers.4.34.0.dev0::transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1/model_decoder_layers_5_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_lm_head_1/getitem_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_lm_head_1/t_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 10000 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_lm_head_1/view_136" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_lm_head_1/mm" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 10000 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_lm_head_1/view_137" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 10000 + } + } + } + } + } +} +opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 +} +opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 +} +opset_import { + domain: "pkg.transformers.4.34.0.dev0" + version: 1 +} +opset_import { + domain: "" + version: 18 +} +opset_import { + domain: "pkg.onnxscript.torch_lib.common" + version: 1 +} +functions { + name: "aten_embedding" + input: "weight" + input: "indices" + output: "return_val" + node { + input: "weight" + input: "indices" + output: "return_val" + name: "n0" + op_type: "Gather" + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "padding_idx" + i: -1 + type: INT + } + attribute_proto { + name: "scale_grad_by_freq" + i: 0 + type: INT + } + attribute_proto { + name: "sparse" + i: 0 + type: INT + } +} +functions { + name: "torch_nn_modules_sparse_Embedding_model_decoder_embed_tokens_1" + input: "view" + input: "model.decoder.embed_tokens.weight" + output: "embedding" + node { + input: "model.decoder.embed_tokens.weight" + input: "view" + output: "embedding" + name: "aten_embedding_0" + op_type: "aten_embedding" + attribute { + name: "padding_idx" + i: 1 + type: INT + } + attribute { + name: "scale_grad_by_freq" + i: 0 + type: INT + } + attribute { + name: "sparse" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "aten_ne" + input: "self" + 
input: "other" + output: "return_val" + node { + input: "self" + input: "other" + output: "tmp" + name: "n0" + op_type: "Equal" + } + node { + input: "tmp" + output: "return_val" + name: "n1" + op_type: "Not" + } + doc_string: "ne.Tensor(Tensor self, Tensor other) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "prims_convert_element_type" + input: "a" + output: "return_val" + attribute: "dtype" + node { + input: "a" + output: "return_val" + name: "n0" + op_type: "Cast" + attribute { + name: "to" + type: INT + ref_attr_name: "dtype" + } + } + doc_string: "convert_element_type(Tensor a, ScalarType dtype) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "_aten_cumsum_onnx" + input: "self" + input: "dim" + output: "result_2" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0" + } + type: TENSOR + } + } + node { + input: "int64_0" + input: "tmp_0" + output: "int64_0_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_0" + input: "int64_0_cast" + output: "cond" + name: "n4" + op_type: "Equal" + } + node { + input: "cond" + output: "result_2" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "self" + output: "result" + name: "n0" + op_type: "Identity" + } + name: "thenGraph_5" + output { + name: "result" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + input: "dim" + output: "result_1" + name: "n0" + op_type: "CumSum" + } + name: "elseGraph_5" + output { + name: "result_1" + } + } + type: GRAPH + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_scalar_tensor_sym_number" + input: "s" + output: "return_val" + node { + input: "s" + output: "return_val" + name: "n0" + op_type: "Cast" + attribute { + name: "to" + type: INT + ref_attr_name: "dtype" + } + } + doc_string: "scalar_tensor(Scalar s, *, ScalarType? dtype=None, Layout? layout=None, Device? device=None, bool? 
pin_memory=None) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "dtype" + i: 1 + type: INT + } +} +functions { + name: "aten_add" + input: "self" + input: "other" + output: "return_val" + node { + output: "alpha" + name: "n0" + op_type: "Constant" + attribute { + name: "value_float" + type: FLOAT + ref_attr_name: "alpha" + } + } + node { + input: "alpha" + input: "other" + output: "alpha_0" + name: "n1" + op_type: "CastLike" + } + node { + input: "other" + input: "alpha_0" + output: "other_1" + name: "n2" + op_type: "Mul" + } + node { + input: "self" + input: "other_1" + output: "return_val" + name: "n3" + op_type: "Add" + } + doc_string: "add.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "alpha" + f: 1.0 + type: FLOAT + } +} +functions { + name: "aten_mul" + input: "self" + input: "other" + output: "return_val" + node { + input: "other" + input: "self" + output: "other_0" + name: "n0" + op_type: "CastLike" + } + node { + input: "self" + input: "other_0" + output: "return_val" + name: "n1" + op_type: "Mul" + } + doc_string: "mul.Tensor(Tensor self, Tensor other) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_view" + input: "self" + input: "size" + output: "return_val" + node { + input: "size" + output: "size_0" + name: "n0" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + input: "self" + input: "size_0" + output: "return_val" + name: "n1" + op_type: "Reshape" + } + doc_string: "view(Tensor(a) self, SymInt[] size) -> Tensor(a)" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_index_select" + input: "self" + input: "index" + output: "result_10" + attribute: "dim" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0" + } + type: TENSOR + } + } + node { + input: "int64_0" + input: "tmp_0" + output: "int64_0_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_0" + input: "int64_0_cast" + output: "self_is_scalar" + name: "n4" + op_type: "Equal" + } + node { + input: "self_is_scalar" + output: "self_4" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "tmp_1" + name: "n0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "self" + input: "tmp_1" + output: "self_2" + name: "n1" + op_type: "Reshape" + } + name: "thenGraph_6" + output { + name: "self_2" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "self_3" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_6" + output { + name: "self_3" + } + } + type: GRAPH + } + } + node { + output: "tmp_5" + name: "n6" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "index" + input: "tmp_5" + output: "index_6" + name: "n7" + op_type: "Reshape" + } + node { + input: "index_6" + output: "index_7" + name: "n8" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + input: "self_4" + 
input: "index_7" + output: "result" + name: "n9" + op_type: "Gather" + attribute { + name: "axis" + type: INT + ref_attr_name: "dim" + } + } + node { + input: "self_is_scalar" + output: "result_10" + name: "n10" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "result" + output: "result_8" + name: "n0" + op_type: "Squeeze" + } + name: "thenGraph_14" + output { + name: "result_8" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "result" + output: "result_9" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_14" + output { + name: "result_9" + } + } + type: GRAPH + } + } + doc_string: "index_select(Tensor self, int dim, Tensor index) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_detach" + input: "self" + output: "return_val" + node { + input: "self" + output: "return_val" + name: "n0" + op_type: "Identity" + } + doc_string: "detach(Tensor(a) self) -> Tensor(a)" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1" + input: "view" + input: "model.decoder.embed_positions.weights" + output: "detach_1" + node { + output: "_val_1" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "view" + input: "_val_1" + output: "ne" + name: "aten_ne_7" + op_type: "aten_ne" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "ne" + output: "convert_element_type" + name: "prims_convert_element_type_8" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 6 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type" + output: "convert_element_type_default" + name: "prims_convert_element_type_9" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 7 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "convert_element_type_default" + input: "_val_5" + output: "cumsum" + name: "_aten_cumsum_onnx_11" + op_type: "_aten_cumsum_onnx" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "cumsum" + output: "convert_element_type_1" + name: "prims_convert_element_type_12" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 6 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_8" + name: "Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_8" + output: "scalar_tensor_default" + name: "aten_scalar_tensor_sym_number_14" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 6 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_1" + input: "scalar_tensor_default" + output: "add_1" + name: "aten_add_15" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_1" + input: "convert_element_type" + output: "mul_1" + 
name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_1" + output: "convert_element_type_2" + name: "prims_convert_element_type_17" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 7 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_13" + name: "Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "convert_element_type_2" + input: "_val_13" + output: "add_2" + name: "aten_add_19" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_15" + name: "Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\377" + } + type: TENSOR + } + } + node { + input: "add_2" + input: "_val_15" + output: "view_2" + name: "aten_view_21" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.embed_positions.weights" + input: "view_2" + output: "index_select" + name: "aten_index_select_22" + op_type: "aten_index_select" + attribute { + name: "dim" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_19" + name: "Constant_23" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377" + } + type: TENSOR + } + } + node { + input: "index_select" + input: "_val_19" + output: "view_3" + name: "aten_view_24" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_3" + output: "detach" + name: "aten_detach_25" + op_type: "aten_detach" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "detach" + output: "detach_1" + name: "aten_detach_26" + op_type: "aten_detach" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "aten_t" + input: "self" + output: "result_1" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "rank" + name: "n1" + op_type: "Size" + } + node { + output: "int64_2" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 2 + name: "int64_2" + } + type: TENSOR + } + } + node { + input: "int64_2" + input: "rank" + output: "int64_2_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "rank" + input: "int64_2_cast" + output: "cond" + name: "n4" + op_type: "Equal" + } + node { + input: "cond" + output: "result_1" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "self" + output: "result" + name: "n0" + op_type: "Transpose" + attribute { + name: "perm" + ints: 1 + ints: 0 + type: INTS + } + } + name: "thenGraph_6" + output { + name: "result" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "result_0" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_6" + output { + name: "result_0" + } + } + type: GRAPH + } + } + doc_string: "t(Tensor(a) self) -> Tensor(a)" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_addmm" + 
input: "self" + input: "mat1" + input: "mat2" + output: "return_val" + node { + input: "mat1" + input: "mat2" + input: "self" + output: "return_val" + name: "n0" + op_type: "Gemm" + attribute { + name: "alpha" + type: FLOAT + ref_attr_name: "alpha" + } + attribute { + name: "beta" + type: FLOAT + ref_attr_name: "beta" + } + } + doc_string: "addmm(Tensor self, Tensor mat1, Tensor mat2, *, Scalar beta=1, Scalar alpha=1) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "beta" + f: 1.0 + type: FLOAT + } + attribute_proto { + name: "alpha" + f: 1.0 + type: FLOAT + } +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_q_proj_1" + input: "clone" + input: "model.decoder.layers.0.self_attn.q_proj.weight" + input: "model.decoder.layers.0.self_attn.q_proj.bias" + output: "view_5" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone" + input: "_val_1" + output: "view_4" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.self_attn.q_proj.weight" + output: "t" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.self_attn.q_proj.bias" + input: "view_4" + input: "t" + output: "addmm" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm" + input: "_val_7" + output: "view_5" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_k_proj_1" + input: "clone" + input: "model.decoder.layers.0.self_attn.k_proj.weight" + input: "model.decoder.layers.0.self_attn.k_proj.bias" + output: "view_7" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone" + input: "_val_1" + output: "view_6" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.self_attn.k_proj.weight" + output: "t_1" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.self_attn.k_proj.bias" + input: "view_6" + input: "t_1" + output: "addmm_1" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + 
t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_1" + input: "_val_7" + output: "view_7" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_v_proj_1" + input: "clone" + input: "model.decoder.layers.0.self_attn.v_proj.weight" + input: "model.decoder.layers.0.self_attn.v_proj.bias" + output: "view_10" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone" + input: "_val_1" + output: "view_9" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.self_attn.v_proj.weight" + output: "t_2" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.self_attn.v_proj.bias" + input: "view_9" + input: "t_2" + output: "addmm_2" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_2" + input: "_val_7" + output: "view_10" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_out_proj_1" + input: "view_19" + input: "model.decoder.layers.0.self_attn.out_proj.weight" + input: "model.decoder.layers.0.self_attn.out_proj.bias" + output: "view_21" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "view_19" + input: "_val_1" + output: "view_20" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.self_attn.out_proj.weight" + output: "t_3" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.self_attn.out_proj.bias" + input: "view_20" + input: "t_3" + output: "addmm_3" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: 
"\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_3" + input: "_val_7" + output: "view_21" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "aten_clone" + input: "self" + output: "return_val" + node { + input: "self" + output: "return_val" + name: "n0" + op_type: "Identity" + } + doc_string: "clone(Tensor self, *, MemoryFormat? memory_format=None) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "memory_format" + s: "" + type: STRING + } +} +functions { + name: "aten_bmm" + input: "self" + input: "mat2" + output: "return_val" + node { + input: "self" + input: "mat2" + output: "return_val" + name: "n0" + op_type: "MatMul" + } + doc_string: "bmm(Tensor self, Tensor mat2) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_unsqueeze" + input: "self" + output: "return_val" + attribute: "dim" + node { + output: "dim" + name: "n0" + op_type: "Constant" + attribute { + name: "value_int" + type: INT + ref_attr_name: "dim" + } + } + node { + input: "dim" + output: "dim_0" + name: "n1" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + input: "self" + input: "dim_0" + output: "return_val" + name: "n2" + op_type: "Unsqueeze" + } + doc_string: "unsqueeze(Tensor(a) self, int dim) -> Tensor(a)" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_expand" + input: "self" + input: "size" + output: "return_val" + node { + input: "size" + output: "size_0" + name: "n0" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + input: "size_0" + output: "size_1" + name: "n1" + op_type: "Abs" + } + node { + input: "self" + input: "size_1" + output: "return_val" + name: "n2" + op_type: "Expand" + } + doc_string: "expand(Tensor(a) self, SymInt[] size, *, bool implicit=False) -> Tensor(a)" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_softmax_no_dtype" + input: "self" + output: "result_7" + attribute: "dim" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0" + } + type: TENSOR + } + } + node { + input: "int64_0" + input: "tmp_0" + output: "int64_0_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_0" + input: "int64_0_cast" + output: "self_is_scalar" + name: "n4" + op_type: "Equal" + } + node { + input: "self_is_scalar" + output: "self_4" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "tmp_1" + name: "n0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + type: INTS + } + } + node { + input: "self" + input: "tmp_1" + output: "self_2" + name: "n1" + op_type: "Unsqueeze" + } + name: "thenGraph_6" + output { + name: "self_2" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "self_3" + name: "n0" 
+ op_type: "Identity" + } + name: "elseGraph_6" + output { + name: "self_3" + } + } + type: GRAPH + } + } + node { + input: "self_4" + output: "result" + name: "n6" + op_type: "Softmax" + attribute { + name: "axis" + type: INT + ref_attr_name: "dim" + } + } + node { + input: "self_is_scalar" + output: "result_7" + name: "n7" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "result" + output: "result_5" + name: "n0" + op_type: "Squeeze" + } + name: "thenGraph_9" + output { + name: "result_5" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "result" + output: "result_6" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_9" + output { + name: "result_6" + } + } + type: GRAPH + } + } + doc_string: "softmax(Tensor self, int dim, ScalarType? dtype=None) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1" + input: "clone" + input: "masked_fill" + input: "model.decoder.layers.0.self_attn.q_proj.weight" + input: "model.decoder.layers.0.self_attn.q_proj.bias" + input: "model.decoder.layers.0.self_attn.k_proj.weight" + input: "model.decoder.layers.0.self_attn.k_proj.bias" + input: "model.decoder.layers.0.self_attn.v_proj.weight" + input: "model.decoder.layers.0.self_attn.v_proj.bias" + input: "model.decoder.layers.0.self_attn.out_proj.weight" + input: "model.decoder.layers.0.self_attn.out_proj.bias" + output: "clone_1" + output: "clone_2" + output: "expand_1" + output: "model_decoder_layers_0_self_attn_out_proj_1" + node { + input: "clone" + input: "model.decoder.layers.0.self_attn.q_proj.weight" + input: "model.decoder.layers.0.self_attn.q_proj.bias" + output: "model_decoder_layers_0_self_attn_q_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_q_proj_1_20" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_q_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_5" + name: "Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000>" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_0_self_attn_q_proj_1" + input: "_val_5" + output: "mul_2" + name: "aten_mul_22" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone" + input: "model.decoder.layers.0.self_attn.k_proj.weight" + input: "model.decoder.layers.0.self_attn.k_proj.bias" + output: "model_decoder_layers_0_self_attn_k_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_k_proj_1_23" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_k_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_10" + name: "Constant_24" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_0_self_attn_k_proj_1" + input: "_val_10" + output: "view_8" + name: "aten_view_25" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_8" + output: "transpose" + name: "Transpose_26" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + 
input: "transpose" + output: "clone_1" + name: "aten_clone_27" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone" + input: "model.decoder.layers.0.self_attn.v_proj.weight" + input: "model.decoder.layers.0.self_attn.v_proj.bias" + output: "model_decoder_layers_0_self_attn_v_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_v_proj_1_28" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_v_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_17" + name: "Constant_29" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_0_self_attn_v_proj_1" + input: "_val_17" + output: "view_11" + name: "aten_view_30" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_11" + output: "transpose_1" + name: "Transpose_31" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_1" + output: "clone_2" + name: "aten_clone_32" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_21" + name: "Constant_33" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "mul_2" + input: "_val_21" + output: "view_12" + name: "aten_view_34" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_12" + output: "transpose_2" + name: "Transpose_35" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_2" + output: "clone_3" + name: "aten_clone_36" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_25" + name: "Constant_37" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_3" + input: "_val_25" + output: "view_13" + name: "aten_view_38" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_27" + name: "Constant_39" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_1" + input: "_val_27" + output: "view_14" + name: "aten_view_40" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_29" + name: "Constant_41" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_2" + input: "_val_29" + output: "view_15" + name: "aten_view_42" + op_type: 
"aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_14" + output: "transpose_3" + name: "Transpose_43" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + type: INTS + } + } + node { + input: "view_13" + input: "transpose_3" + output: "bmm" + name: "aten_bmm_44" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_33" + name: "Constant_45" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm" + input: "_val_33" + output: "view_16" + name: "aten_view_46" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "masked_fill" + output: "unsqueeze_2" + name: "aten_unsqueeze_47" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_2" + output: "unsqueeze_3" + name: "aten_unsqueeze_48" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_37" + name: "Constant_49" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_37" + output: "_val_38" + name: "Cast_50" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + output: "_val_39" + name: "Constant_51" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "_val_38" + input: "_val_39" + output: "_val_40" + name: "Reshape_52" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + output: "_val_41" + name: "Constant_53" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + input: "_val_41" + output: "_val_42" + name: "Cast_54" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + output: "_val_43" + name: "Constant_55" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "_val_42" + input: "_val_43" + output: "_val_44" + name: "Reshape_56" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + output: "_val_45" + name: "Constant_57" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_45" + output: "_val_46" + name: "Cast_58" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + output: "_val_47" + name: "Constant_59" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "_val_46" + input: "_val_47" + output: "_val_48" + name: "Reshape_60" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + output: "_val_49" + name: "Constant_61" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_49" + output: "_val_50" + name: "Cast_62" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + output: "_val_51" + 
name: "Constant_63" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "_val_50" + input: "_val_51" + output: "_val_52" + name: "Reshape_64" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "unsqueeze_3" + input: "_val_40" + input: "_val_44" + input: "_val_48" + input: "_val_52" + output: "slice_3" + name: "Slice_65" + op_type: "Slice" + } + node { + output: "_val_54" + name: "Constant_66" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_54" + output: "_val_55" + name: "Cast_67" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + output: "_val_56" + name: "Constant_68" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "_val_55" + input: "_val_56" + output: "_val_57" + name: "Reshape_69" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + output: "_val_58" + name: "Constant_70" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + input: "_val_58" + output: "_val_59" + name: "Cast_71" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + output: "_val_60" + name: "Constant_72" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "_val_59" + input: "_val_60" + output: "_val_61" + name: "Reshape_73" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + output: "_val_62" + name: "Constant_74" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_62" + output: "_val_63" + name: "Cast_75" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + output: "_val_64" + name: "Constant_76" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "_val_63" + input: "_val_64" + output: "_val_65" + name: "Reshape_77" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + output: "_val_66" + name: "Constant_78" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_66" + output: "_val_67" + name: "Cast_79" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + output: "_val_68" + name: "Constant_80" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "_val_67" + input: "_val_68" + output: "_val_69" + name: "Reshape_81" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "slice_3" + input: "_val_57" + input: "_val_61" + input: "_val_65" + input: "_val_69" + output: "slice_4" + name: "Slice_82" + op_type: "Slice" + } + node { + output: "_val_71" + name: "Constant_83" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "slice_4" + input: "_val_71" + 
output: "expand_1" + name: "aten_expand_84" + op_type: "aten_expand" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_16" + input: "expand_1" + output: "add_4" + name: "aten_add_85" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_74" + name: "Constant_86" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "add_4" + input: "_val_74" + output: "view_17" + name: "aten_view_87" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_17" + output: "_softmax" + name: "aten_softmax_no_dtype_88" + op_type: "aten_softmax_no_dtype" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "_softmax" + output: "clone_4" + name: "aten_clone_89" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_4" + input: "view_15" + output: "bmm_1" + name: "aten_bmm_90" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_79" + name: "Constant_91" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_1" + input: "_val_79" + output: "view_18" + name: "aten_view_92" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_18" + output: "transpose_4" + name: "Transpose_93" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_4" + output: "clone_5" + name: "aten_clone_94" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_83" + name: "Constant_95" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_5" + input: "_val_83" + output: "view_19" + name: "aten_view_96" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_19" + input: "model.decoder.layers.0.self_attn.out_proj.weight" + input: "model.decoder.layers.0.self_attn.out_proj.bias" + output: "model_decoder_layers_0_self_attn_out_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_out_proj_1_97" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_0_self_attn_out_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_self_attn_layer_norm_1" + input: "add_5" + input: "model.decoder.layers.0.self_attn_layer_norm.weight" + input: "model.decoder.layers.0.self_attn_layer_norm.bias" + output: "native_layer_norm" + node { + input: "add_5" + input: 
"model.decoder.layers.0.self_attn_layer_norm.weight" + input: "model.decoder.layers.0.self_attn_layer_norm.bias" + output: "native_layer_norm" + output: "native_layer_norm_1" + output: "native_layer_norm_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_fc1_1" + input: "getitem" + input: "model.decoder.layers.0.fc1.weight" + input: "model.decoder.layers.0.fc1.bias" + output: "view_23" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem" + input: "_val_1" + output: "view_22" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.fc1.weight" + output: "t_4" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.fc1.bias" + input: "view_22" + input: "t_4" + output: "addmm_4" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_4" + input: "_val_7" + output: "view_23" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "aten_relu" + input: "self" + output: "return_val" + node { + input: "self" + output: "return_val" + name: "n0" + op_type: "Relu" + } + doc_string: "relu(Tensor self) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_0_activation_fn_1" + input: "view_23" + output: "relu" + node { + input: "view_23" + output: "relu" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_fc2_1" + input: "clone_7" + input: "model.decoder.layers.0.fc2.weight" + input: "model.decoder.layers.0.fc2.bias" + output: "view_25" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_7" + input: "_val_1" + output: "view_24" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.fc2.weight" + 
output: "t_5" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.0.fc2.bias" + input: "view_24" + input: "t_5" + output: "addmm_5" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_5" + input: "_val_7" + output: "view_25" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_final_layer_norm_1" + input: "add_6" + input: "model.decoder.layers.0.final_layer_norm.weight" + input: "model.decoder.layers.0.final_layer_norm.bias" + output: "native_layer_norm_1" + node { + input: "add_6" + input: "model.decoder.layers.0.final_layer_norm.weight" + input: "model.decoder.layers.0.final_layer_norm.bias" + output: "native_layer_norm_1" + output: "native_layer_norm_1_1" + output: "native_layer_norm_1_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1" + input: "clone" + input: "masked_fill" + input: "model.decoder.layers.0.self_attn.q_proj.weight" + input: "model.decoder.layers.0.self_attn.q_proj.bias" + input: "model.decoder.layers.0.self_attn.k_proj.weight" + input: "model.decoder.layers.0.self_attn.k_proj.bias" + input: "model.decoder.layers.0.self_attn.v_proj.weight" + input: "model.decoder.layers.0.self_attn.v_proj.bias" + input: "model.decoder.layers.0.self_attn.out_proj.weight" + input: "model.decoder.layers.0.self_attn.out_proj.bias" + input: "model.decoder.layers.0.self_attn_layer_norm.weight" + input: "model.decoder.layers.0.self_attn_layer_norm.bias" + input: "model.decoder.layers.0.fc1.weight" + input: "model.decoder.layers.0.fc1.bias" + input: "model.decoder.layers.0.fc2.weight" + input: "model.decoder.layers.0.fc2.bias" + input: "model.decoder.layers.0.final_layer_norm.weight" + input: "model.decoder.layers.0.final_layer_norm.bias" + output: "model_decoder_layers_0_self_attn_1" + output: "model_decoder_layers_0_self_attn_1_1" + output: "model_decoder_layers_0_self_attn_1_2" + output: "model_decoder_layers_0_final_layer_norm_1" + node { + input: "clone" + input: "masked_fill" + input: "model.decoder.layers.0.self_attn.q_proj.weight" + input: "model.decoder.layers.0.self_attn.q_proj.bias" + input: "model.decoder.layers.0.self_attn.k_proj.weight" + input: "model.decoder.layers.0.self_attn.k_proj.bias" + input: "model.decoder.layers.0.self_attn.v_proj.weight" + input: "model.decoder.layers.0.self_attn.v_proj.bias" + input: "model.decoder.layers.0.self_attn.out_proj.weight" + input: 
"model.decoder.layers.0.self_attn.out_proj.bias" + output: "model_decoder_layers_0_self_attn_1" + output: "model_decoder_layers_0_self_attn_1_1" + output: "model_decoder_layers_0_self_attn_1_2" + output: "model_decoder_layers_0_self_attn_1_3" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1_0" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_0_self_attn_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_0_self_attn_1_3" + output: "clone_6" + name: "aten_clone_1" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone" + input: "clone_6" + output: "add_5" + name: "aten_add_2" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_5" + input: "model.decoder.layers.0.self_attn_layer_norm.weight" + input: "model.decoder.layers.0.self_attn_layer_norm.bias" + output: "model_decoder_layers_0_self_attn_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_self_attn_layer_norm_1_3" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_self_attn_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_0_self_attn_layer_norm_1" + input: "model.decoder.layers.0.fc1.weight" + input: "model.decoder.layers.0.fc1.bias" + output: "model_decoder_layers_0_fc1_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_fc1_1_4" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_0_fc1_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_0_fc1_1" + output: "model_decoder_layers_0_activation_fn_1" + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_0_activation_fn_1_5" + op_type: "torch_nn_modules_activation_ReLU_model_decoder_layers_0_activation_fn_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_0_activation_fn_1" + output: "clone_7" + name: "aten_clone_6" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_7" + input: "model.decoder.layers.0.fc2.weight" + input: "model.decoder.layers.0.fc2.bias" + output: "model_decoder_layers_0_fc2_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_0_fc2_1_7" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_0_fc2_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_0_fc2_1" + output: "clone_8" + name: "aten_clone_8" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model_decoder_layers_0_self_attn_layer_norm_1" + input: "clone_8" + output: "add_6" + name: "aten_add_9" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_6" + input: "model.decoder.layers.0.final_layer_norm.weight" + input: "model.decoder.layers.0.final_layer_norm.bias" + output: "model_decoder_layers_0_final_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_final_layer_norm_1_10" + op_type: 
"torch_nn_modules_normalization_LayerNorm_model_decoder_layers_0_final_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + opset_import { + domain: "pkg.transformers.4.34.0.dev0" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_q_proj_1" + input: "getitem_3" + input: "model.decoder.layers.1.self_attn.q_proj.weight" + input: "model.decoder.layers.1.self_attn.q_proj.bias" + output: "view_27" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_3" + input: "_val_1" + output: "view_26" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.self_attn.q_proj.weight" + output: "t_6" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.self_attn.q_proj.bias" + input: "view_26" + input: "t_6" + output: "addmm_6" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_6" + input: "_val_7" + output: "view_27" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_k_proj_1" + input: "getitem_3" + input: "model.decoder.layers.1.self_attn.k_proj.weight" + input: "model.decoder.layers.1.self_attn.k_proj.bias" + output: "view_29" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_3" + input: "_val_1" + output: "view_28" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.self_attn.k_proj.weight" + output: "t_7" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.self_attn.k_proj.bias" + input: "view_28" + input: "t_7" + output: "addmm_7" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + 
node { + input: "addmm_7" + input: "_val_7" + output: "view_29" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_v_proj_1" + input: "getitem_3" + input: "model.decoder.layers.1.self_attn.v_proj.weight" + input: "model.decoder.layers.1.self_attn.v_proj.bias" + output: "view_32" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_3" + input: "_val_1" + output: "view_31" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.self_attn.v_proj.weight" + output: "t_8" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.self_attn.v_proj.bias" + input: "view_31" + input: "t_8" + output: "addmm_8" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_8" + input: "_val_7" + output: "view_32" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_out_proj_1" + input: "view_41" + input: "model.decoder.layers.1.self_attn.out_proj.weight" + input: "model.decoder.layers.1.self_attn.out_proj.bias" + output: "view_43" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "view_41" + input: "_val_1" + output: "view_42" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.self_attn.out_proj.weight" + output: "t_9" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.self_attn.out_proj.bias" + input: "view_42" + input: "t_9" + output: "addmm_9" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_9" + input: "_val_7" + output: "view_43" + name: "aten_view_7" + op_type: "aten_view" + domain: 
"pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1" + input: "getitem_3" + input: "expand_1" + input: "model.decoder.layers.1.self_attn.q_proj.weight" + input: "model.decoder.layers.1.self_attn.q_proj.bias" + input: "model.decoder.layers.1.self_attn.k_proj.weight" + input: "model.decoder.layers.1.self_attn.k_proj.bias" + input: "model.decoder.layers.1.self_attn.v_proj.weight" + input: "model.decoder.layers.1.self_attn.v_proj.bias" + input: "model.decoder.layers.1.self_attn.out_proj.weight" + input: "model.decoder.layers.1.self_attn.out_proj.bias" + output: "clone_9" + output: "clone_10" + output: "model_decoder_layers_1_self_attn_out_proj_1" + node { + input: "getitem_3" + input: "model.decoder.layers.1.self_attn.q_proj.weight" + input: "model.decoder.layers.1.self_attn.q_proj.bias" + output: "model_decoder_layers_1_self_attn_q_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_q_proj_1_11" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_q_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_5" + name: "Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000>" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_1_self_attn_q_proj_1" + input: "_val_5" + output: "mul_3" + name: "aten_mul_13" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_3" + input: "model.decoder.layers.1.self_attn.k_proj.weight" + input: "model.decoder.layers.1.self_attn.k_proj.bias" + output: "model_decoder_layers_1_self_attn_k_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_k_proj_1_14" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_k_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_10" + name: "Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_1_self_attn_k_proj_1" + input: "_val_10" + output: "view_30" + name: "aten_view_16" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_30" + output: "transpose_5" + name: "Transpose_17" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_5" + output: "clone_9" + name: "aten_clone_18" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_3" + input: "model.decoder.layers.1.self_attn.v_proj.weight" + input: "model.decoder.layers.1.self_attn.v_proj.bias" + output: "model_decoder_layers_1_self_attn_v_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_v_proj_1_19" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_v_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_17" + name: "Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + 
data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_1_self_attn_v_proj_1" + input: "_val_17" + output: "view_33" + name: "aten_view_21" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_33" + output: "transpose_6" + name: "Transpose_22" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_6" + output: "clone_10" + name: "aten_clone_23" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_21" + name: "Constant_24" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "mul_3" + input: "_val_21" + output: "view_34" + name: "aten_view_25" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_34" + output: "transpose_7" + name: "Transpose_26" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_7" + output: "clone_11" + name: "aten_clone_27" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_25" + name: "Constant_28" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_11" + input: "_val_25" + output: "view_35" + name: "aten_view_29" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_27" + name: "Constant_30" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_9" + input: "_val_27" + output: "view_36" + name: "aten_view_31" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_29" + name: "Constant_32" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_10" + input: "_val_29" + output: "view_37" + name: "aten_view_33" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_36" + output: "transpose_8" + name: "Transpose_34" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + type: INTS + } + } + node { + input: "view_35" + input: "transpose_8" + output: "bmm_2" + name: "aten_bmm_35" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_33" + name: "Constant_36" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_2" + input: 
"_val_33" + output: "view_38" + name: "aten_view_37" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_38" + input: "expand_1" + output: "add_7" + name: "aten_add_38" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_36" + name: "Constant_39" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "add_7" + input: "_val_36" + output: "view_39" + name: "aten_view_40" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_39" + output: "_softmax_1" + name: "aten_softmax_no_dtype_41" + op_type: "aten_softmax_no_dtype" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "_softmax_1" + output: "clone_12" + name: "aten_clone_42" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_12" + input: "view_37" + output: "bmm_3" + name: "aten_bmm_43" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_41" + name: "Constant_44" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_3" + input: "_val_41" + output: "view_40" + name: "aten_view_45" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_40" + output: "transpose_9" + name: "Transpose_46" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_9" + output: "clone_13" + name: "aten_clone_47" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_45" + name: "Constant_48" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_13" + input: "_val_45" + output: "view_41" + name: "aten_view_49" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_41" + input: "model.decoder.layers.1.self_attn.out_proj.weight" + input: "model.decoder.layers.1.self_attn.out_proj.bias" + output: "model_decoder_layers_1_self_attn_out_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_out_proj_1_50" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_1_self_attn_out_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_self_attn_layer_norm_1" + input: "add_8" + input: "model.decoder.layers.1.self_attn_layer_norm.weight" + input: "model.decoder.layers.1.self_attn_layer_norm.bias" + output: "native_layer_norm_2" + node { + input: 
"add_8" + input: "model.decoder.layers.1.self_attn_layer_norm.weight" + input: "model.decoder.layers.1.self_attn_layer_norm.bias" + output: "native_layer_norm_2" + output: "native_layer_norm_2_1" + output: "native_layer_norm_2_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_fc1_1" + input: "getitem_6" + input: "model.decoder.layers.1.fc1.weight" + input: "model.decoder.layers.1.fc1.bias" + output: "view_45" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_6" + input: "_val_1" + output: "view_44" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.fc1.weight" + output: "t_10" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.fc1.bias" + input: "view_44" + input: "t_10" + output: "addmm_10" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_10" + input: "_val_7" + output: "view_45" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_1_activation_fn_1" + input: "view_45" + output: "relu_1" + node { + input: "view_45" + output: "relu_1" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_fc2_1" + input: "clone_15" + input: "model.decoder.layers.1.fc2.weight" + input: "model.decoder.layers.1.fc2.bias" + output: "view_47" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_15" + input: "_val_1" + output: "view_46" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.fc2.weight" + output: "t_11" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.1.fc2.bias" + input: "view_46" + input: "t_11" + output: "addmm_11" + name: "aten_addmm_5" + op_type: "aten_addmm" + 
attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_11" + input: "_val_7" + output: "view_47" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_final_layer_norm_1" + input: "add_9" + input: "model.decoder.layers.1.final_layer_norm.weight" + input: "model.decoder.layers.1.final_layer_norm.bias" + output: "native_layer_norm_3" + node { + input: "add_9" + input: "model.decoder.layers.1.final_layer_norm.weight" + input: "model.decoder.layers.1.final_layer_norm.bias" + output: "native_layer_norm_3" + output: "native_layer_norm_3_1" + output: "native_layer_norm_3_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1" + input: "getitem_3" + input: "expand_1" + input: "model.decoder.layers.1.self_attn.q_proj.weight" + input: "model.decoder.layers.1.self_attn.q_proj.bias" + input: "model.decoder.layers.1.self_attn.k_proj.weight" + input: "model.decoder.layers.1.self_attn.k_proj.bias" + input: "model.decoder.layers.1.self_attn.v_proj.weight" + input: "model.decoder.layers.1.self_attn.v_proj.bias" + input: "model.decoder.layers.1.self_attn.out_proj.weight" + input: "model.decoder.layers.1.self_attn.out_proj.bias" + input: "model.decoder.layers.1.self_attn_layer_norm.weight" + input: "model.decoder.layers.1.self_attn_layer_norm.bias" + input: "model.decoder.layers.1.fc1.weight" + input: "model.decoder.layers.1.fc1.bias" + input: "model.decoder.layers.1.fc2.weight" + input: "model.decoder.layers.1.fc2.bias" + input: "model.decoder.layers.1.final_layer_norm.weight" + input: "model.decoder.layers.1.final_layer_norm.bias" + output: "model_decoder_layers_1_self_attn_1" + output: "model_decoder_layers_1_self_attn_1_1" + output: "model_decoder_layers_1_final_layer_norm_1" + node { + input: "getitem_3" + input: "expand_1" + input: "model.decoder.layers.1.self_attn.q_proj.weight" + input: "model.decoder.layers.1.self_attn.q_proj.bias" + input: "model.decoder.layers.1.self_attn.k_proj.weight" + input: "model.decoder.layers.1.self_attn.k_proj.bias" + input: "model.decoder.layers.1.self_attn.v_proj.weight" + input: "model.decoder.layers.1.self_attn.v_proj.bias" + input: "model.decoder.layers.1.self_attn.out_proj.weight" + input: "model.decoder.layers.1.self_attn.out_proj.bias" + output: "model_decoder_layers_1_self_attn_1" + output: "model_decoder_layers_1_self_attn_1_1" + output: "model_decoder_layers_1_self_attn_1_2" + name: 
"transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1_0" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_1_self_attn_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_1_self_attn_1_2" + output: "clone_14" + name: "aten_clone_1" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_3" + input: "clone_14" + output: "add_8" + name: "aten_add_2" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_8" + input: "model.decoder.layers.1.self_attn_layer_norm.weight" + input: "model.decoder.layers.1.self_attn_layer_norm.bias" + output: "model_decoder_layers_1_self_attn_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_self_attn_layer_norm_1_3" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_self_attn_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_1_self_attn_layer_norm_1" + input: "model.decoder.layers.1.fc1.weight" + input: "model.decoder.layers.1.fc1.bias" + output: "model_decoder_layers_1_fc1_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_fc1_1_4" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_1_fc1_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_1_fc1_1" + output: "model_decoder_layers_1_activation_fn_1" + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_1_activation_fn_1_5" + op_type: "torch_nn_modules_activation_ReLU_model_decoder_layers_1_activation_fn_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_1_activation_fn_1" + output: "clone_15" + name: "aten_clone_6" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_15" + input: "model.decoder.layers.1.fc2.weight" + input: "model.decoder.layers.1.fc2.bias" + output: "model_decoder_layers_1_fc2_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_1_fc2_1_7" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_1_fc2_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_1_fc2_1" + output: "clone_16" + name: "aten_clone_8" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model_decoder_layers_1_self_attn_layer_norm_1" + input: "clone_16" + output: "add_9" + name: "aten_add_9" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_9" + input: "model.decoder.layers.1.final_layer_norm.weight" + input: "model.decoder.layers.1.final_layer_norm.bias" + output: "model_decoder_layers_1_final_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_final_layer_norm_1_10" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_1_final_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + 
version: 1 + } + opset_import { + domain: "pkg.transformers.4.34.0.dev0" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_q_proj_1" + input: "getitem_9" + input: "model.decoder.layers.2.self_attn.q_proj.weight" + input: "model.decoder.layers.2.self_attn.q_proj.bias" + output: "view_49" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_9" + input: "_val_1" + output: "view_48" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.self_attn.q_proj.weight" + output: "t_12" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.self_attn.q_proj.bias" + input: "view_48" + input: "t_12" + output: "addmm_12" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_12" + input: "_val_7" + output: "view_49" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_k_proj_1" + input: "getitem_9" + input: "model.decoder.layers.2.self_attn.k_proj.weight" + input: "model.decoder.layers.2.self_attn.k_proj.bias" + output: "view_51" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_9" + input: "_val_1" + output: "view_50" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.self_attn.k_proj.weight" + output: "t_13" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.self_attn.k_proj.bias" + input: "view_50" + input: "t_13" + output: "addmm_13" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_13" + input: "_val_7" + output: "view_51" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: 
"pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_v_proj_1" + input: "getitem_9" + input: "model.decoder.layers.2.self_attn.v_proj.weight" + input: "model.decoder.layers.2.self_attn.v_proj.bias" + output: "view_54" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_9" + input: "_val_1" + output: "view_53" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.self_attn.v_proj.weight" + output: "t_14" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.self_attn.v_proj.bias" + input: "view_53" + input: "t_14" + output: "addmm_14" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_14" + input: "_val_7" + output: "view_54" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_out_proj_1" + input: "view_63" + input: "model.decoder.layers.2.self_attn.out_proj.weight" + input: "model.decoder.layers.2.self_attn.out_proj.bias" + output: "view_65" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "view_63" + input: "_val_1" + output: "view_64" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.self_attn.out_proj.weight" + output: "t_15" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.self_attn.out_proj.bias" + input: "view_64" + input: "t_15" + output: "addmm_15" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_15" + input: "_val_7" + output: "view_65" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: 
"transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1" + input: "getitem_9" + input: "expand_1" + input: "model.decoder.layers.2.self_attn.q_proj.weight" + input: "model.decoder.layers.2.self_attn.q_proj.bias" + input: "model.decoder.layers.2.self_attn.k_proj.weight" + input: "model.decoder.layers.2.self_attn.k_proj.bias" + input: "model.decoder.layers.2.self_attn.v_proj.weight" + input: "model.decoder.layers.2.self_attn.v_proj.bias" + input: "model.decoder.layers.2.self_attn.out_proj.weight" + input: "model.decoder.layers.2.self_attn.out_proj.bias" + output: "clone_17" + output: "clone_18" + output: "model_decoder_layers_2_self_attn_out_proj_1" + node { + input: "getitem_9" + input: "model.decoder.layers.2.self_attn.q_proj.weight" + input: "model.decoder.layers.2.self_attn.q_proj.bias" + output: "model_decoder_layers_2_self_attn_q_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_q_proj_1_11" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_q_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_5" + name: "Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000>" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_2_self_attn_q_proj_1" + input: "_val_5" + output: "mul_4" + name: "aten_mul_13" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_9" + input: "model.decoder.layers.2.self_attn.k_proj.weight" + input: "model.decoder.layers.2.self_attn.k_proj.bias" + output: "model_decoder_layers_2_self_attn_k_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_k_proj_1_14" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_k_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_10" + name: "Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_2_self_attn_k_proj_1" + input: "_val_10" + output: "view_52" + name: "aten_view_16" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_52" + output: "transpose_10" + name: "Transpose_17" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_10" + output: "clone_17" + name: "aten_clone_18" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_9" + input: "model.decoder.layers.2.self_attn.v_proj.weight" + input: "model.decoder.layers.2.self_attn.v_proj.bias" + output: "model_decoder_layers_2_self_attn_v_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_v_proj_1_19" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_v_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_17" + name: "Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: 
"model_decoder_layers_2_self_attn_v_proj_1" + input: "_val_17" + output: "view_55" + name: "aten_view_21" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_55" + output: "transpose_11" + name: "Transpose_22" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_11" + output: "clone_18" + name: "aten_clone_23" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_21" + name: "Constant_24" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "mul_4" + input: "_val_21" + output: "view_56" + name: "aten_view_25" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_56" + output: "transpose_12" + name: "Transpose_26" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_12" + output: "clone_19" + name: "aten_clone_27" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_25" + name: "Constant_28" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_19" + input: "_val_25" + output: "view_57" + name: "aten_view_29" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_27" + name: "Constant_30" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_17" + input: "_val_27" + output: "view_58" + name: "aten_view_31" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_29" + name: "Constant_32" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_18" + input: "_val_29" + output: "view_59" + name: "aten_view_33" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_58" + output: "transpose_13" + name: "Transpose_34" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + type: INTS + } + } + node { + input: "view_57" + input: "transpose_13" + output: "bmm_4" + name: "aten_bmm_35" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_33" + name: "Constant_36" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_4" + input: "_val_33" + output: "view_60" + name: "aten_view_37" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_60" + input: "expand_1" + output: "add_10" + name: 
"aten_add_38" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_36" + name: "Constant_39" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "add_10" + input: "_val_36" + output: "view_61" + name: "aten_view_40" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_61" + output: "_softmax_2" + name: "aten_softmax_no_dtype_41" + op_type: "aten_softmax_no_dtype" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "_softmax_2" + output: "clone_20" + name: "aten_clone_42" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_20" + input: "view_59" + output: "bmm_5" + name: "aten_bmm_43" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_41" + name: "Constant_44" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_5" + input: "_val_41" + output: "view_62" + name: "aten_view_45" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_62" + output: "transpose_14" + name: "Transpose_46" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_14" + output: "clone_21" + name: "aten_clone_47" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_45" + name: "Constant_48" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_21" + input: "_val_45" + output: "view_63" + name: "aten_view_49" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_63" + input: "model.decoder.layers.2.self_attn.out_proj.weight" + input: "model.decoder.layers.2.self_attn.out_proj.bias" + output: "model_decoder_layers_2_self_attn_out_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_out_proj_1_50" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_2_self_attn_out_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_self_attn_layer_norm_1" + input: "add_11" + input: "model.decoder.layers.2.self_attn_layer_norm.weight" + input: "model.decoder.layers.2.self_attn_layer_norm.bias" + output: "native_layer_norm_4" + node { + input: "add_11" + input: "model.decoder.layers.2.self_attn_layer_norm.weight" + input: "model.decoder.layers.2.self_attn_layer_norm.bias" + output: "native_layer_norm_4" + output: 
"native_layer_norm_4_1" + output: "native_layer_norm_4_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_fc1_1" + input: "getitem_12" + input: "model.decoder.layers.2.fc1.weight" + input: "model.decoder.layers.2.fc1.bias" + output: "view_67" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_12" + input: "_val_1" + output: "view_66" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.fc1.weight" + output: "t_16" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.fc1.bias" + input: "view_66" + input: "t_16" + output: "addmm_16" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_16" + input: "_val_7" + output: "view_67" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_2_activation_fn_1" + input: "view_67" + output: "relu_2" + node { + input: "view_67" + output: "relu_2" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_fc2_1" + input: "clone_23" + input: "model.decoder.layers.2.fc2.weight" + input: "model.decoder.layers.2.fc2.bias" + output: "view_69" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_23" + input: "_val_1" + output: "view_68" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.fc2.weight" + output: "t_17" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.2.fc2.bias" + input: "view_68" + input: "t_17" + output: "addmm_17" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: 
"_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_17" + input: "_val_7" + output: "view_69" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_final_layer_norm_1" + input: "add_12" + input: "model.decoder.layers.2.final_layer_norm.weight" + input: "model.decoder.layers.2.final_layer_norm.bias" + output: "native_layer_norm_5" + node { + input: "add_12" + input: "model.decoder.layers.2.final_layer_norm.weight" + input: "model.decoder.layers.2.final_layer_norm.bias" + output: "native_layer_norm_5" + output: "native_layer_norm_5_1" + output: "native_layer_norm_5_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1" + input: "getitem_9" + input: "expand_1" + input: "model.decoder.layers.2.self_attn.q_proj.weight" + input: "model.decoder.layers.2.self_attn.q_proj.bias" + input: "model.decoder.layers.2.self_attn.k_proj.weight" + input: "model.decoder.layers.2.self_attn.k_proj.bias" + input: "model.decoder.layers.2.self_attn.v_proj.weight" + input: "model.decoder.layers.2.self_attn.v_proj.bias" + input: "model.decoder.layers.2.self_attn.out_proj.weight" + input: "model.decoder.layers.2.self_attn.out_proj.bias" + input: "model.decoder.layers.2.self_attn_layer_norm.weight" + input: "model.decoder.layers.2.self_attn_layer_norm.bias" + input: "model.decoder.layers.2.fc1.weight" + input: "model.decoder.layers.2.fc1.bias" + input: "model.decoder.layers.2.fc2.weight" + input: "model.decoder.layers.2.fc2.bias" + input: "model.decoder.layers.2.final_layer_norm.weight" + input: "model.decoder.layers.2.final_layer_norm.bias" + output: "model_decoder_layers_2_self_attn_1" + output: "model_decoder_layers_2_self_attn_1_1" + output: "model_decoder_layers_2_final_layer_norm_1" + node { + input: "getitem_9" + input: "expand_1" + input: "model.decoder.layers.2.self_attn.q_proj.weight" + input: "model.decoder.layers.2.self_attn.q_proj.bias" + input: "model.decoder.layers.2.self_attn.k_proj.weight" + input: "model.decoder.layers.2.self_attn.k_proj.bias" + input: "model.decoder.layers.2.self_attn.v_proj.weight" + input: "model.decoder.layers.2.self_attn.v_proj.bias" + input: "model.decoder.layers.2.self_attn.out_proj.weight" + input: "model.decoder.layers.2.self_attn.out_proj.bias" + output: "model_decoder_layers_2_self_attn_1" + output: "model_decoder_layers_2_self_attn_1_1" + output: "model_decoder_layers_2_self_attn_1_2" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1_0" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_2_self_attn_1" + domain: 
"pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_2_self_attn_1_2" + output: "clone_22" + name: "aten_clone_1" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_9" + input: "clone_22" + output: "add_11" + name: "aten_add_2" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_11" + input: "model.decoder.layers.2.self_attn_layer_norm.weight" + input: "model.decoder.layers.2.self_attn_layer_norm.bias" + output: "model_decoder_layers_2_self_attn_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_self_attn_layer_norm_1_3" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_self_attn_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_2_self_attn_layer_norm_1" + input: "model.decoder.layers.2.fc1.weight" + input: "model.decoder.layers.2.fc1.bias" + output: "model_decoder_layers_2_fc1_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_fc1_1_4" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_2_fc1_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_2_fc1_1" + output: "model_decoder_layers_2_activation_fn_1" + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_2_activation_fn_1_5" + op_type: "torch_nn_modules_activation_ReLU_model_decoder_layers_2_activation_fn_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_2_activation_fn_1" + output: "clone_23" + name: "aten_clone_6" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_23" + input: "model.decoder.layers.2.fc2.weight" + input: "model.decoder.layers.2.fc2.bias" + output: "model_decoder_layers_2_fc2_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_2_fc2_1_7" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_2_fc2_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_2_fc2_1" + output: "clone_24" + name: "aten_clone_8" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model_decoder_layers_2_self_attn_layer_norm_1" + input: "clone_24" + output: "add_12" + name: "aten_add_9" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_12" + input: "model.decoder.layers.2.final_layer_norm.weight" + input: "model.decoder.layers.2.final_layer_norm.bias" + output: "model_decoder_layers_2_final_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_final_layer_norm_1_10" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_2_final_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + opset_import { + domain: "pkg.transformers.4.34.0.dev0" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_q_proj_1" + input: "getitem_15" + input: 
"model.decoder.layers.3.self_attn.q_proj.weight" + input: "model.decoder.layers.3.self_attn.q_proj.bias" + output: "view_71" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_15" + input: "_val_1" + output: "view_70" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.self_attn.q_proj.weight" + output: "t_18" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.self_attn.q_proj.bias" + input: "view_70" + input: "t_18" + output: "addmm_18" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_18" + input: "_val_7" + output: "view_71" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_k_proj_1" + input: "getitem_15" + input: "model.decoder.layers.3.self_attn.k_proj.weight" + input: "model.decoder.layers.3.self_attn.k_proj.bias" + output: "view_73" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_15" + input: "_val_1" + output: "view_72" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.self_attn.k_proj.weight" + output: "t_19" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.self_attn.k_proj.bias" + input: "view_72" + input: "t_19" + output: "addmm_19" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_19" + input: "_val_7" + output: "view_73" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_v_proj_1" + input: "getitem_15" + input: "model.decoder.layers.3.self_attn.v_proj.weight" + input: "model.decoder.layers.3.self_attn.v_proj.bias" + output: 
"view_76" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_15" + input: "_val_1" + output: "view_75" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.self_attn.v_proj.weight" + output: "t_20" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.self_attn.v_proj.bias" + input: "view_75" + input: "t_20" + output: "addmm_20" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_20" + input: "_val_7" + output: "view_76" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_out_proj_1" + input: "view_85" + input: "model.decoder.layers.3.self_attn.out_proj.weight" + input: "model.decoder.layers.3.self_attn.out_proj.bias" + output: "view_87" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "view_85" + input: "_val_1" + output: "view_86" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.self_attn.out_proj.weight" + output: "t_21" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.self_attn.out_proj.bias" + input: "view_86" + input: "t_21" + output: "addmm_21" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_21" + input: "_val_7" + output: "view_87" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1" + input: "getitem_15" + input: "expand_1" + input: "model.decoder.layers.3.self_attn.q_proj.weight" + input: "model.decoder.layers.3.self_attn.q_proj.bias" + input: "model.decoder.layers.3.self_attn.k_proj.weight" + 
input: "model.decoder.layers.3.self_attn.k_proj.bias" + input: "model.decoder.layers.3.self_attn.v_proj.weight" + input: "model.decoder.layers.3.self_attn.v_proj.bias" + input: "model.decoder.layers.3.self_attn.out_proj.weight" + input: "model.decoder.layers.3.self_attn.out_proj.bias" + output: "clone_25" + output: "clone_26" + output: "model_decoder_layers_3_self_attn_out_proj_1" + node { + input: "getitem_15" + input: "model.decoder.layers.3.self_attn.q_proj.weight" + input: "model.decoder.layers.3.self_attn.q_proj.bias" + output: "model_decoder_layers_3_self_attn_q_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_q_proj_1_11" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_q_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_5" + name: "Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000>" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_3_self_attn_q_proj_1" + input: "_val_5" + output: "mul_5" + name: "aten_mul_13" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_15" + input: "model.decoder.layers.3.self_attn.k_proj.weight" + input: "model.decoder.layers.3.self_attn.k_proj.bias" + output: "model_decoder_layers_3_self_attn_k_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_k_proj_1_14" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_k_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_10" + name: "Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_3_self_attn_k_proj_1" + input: "_val_10" + output: "view_74" + name: "aten_view_16" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_74" + output: "transpose_15" + name: "Transpose_17" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_15" + output: "clone_25" + name: "aten_clone_18" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_15" + input: "model.decoder.layers.3.self_attn.v_proj.weight" + input: "model.decoder.layers.3.self_attn.v_proj.bias" + output: "model_decoder_layers_3_self_attn_v_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_v_proj_1_19" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_v_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_17" + name: "Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_3_self_attn_v_proj_1" + input: "_val_17" + output: "view_77" + name: "aten_view_21" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_77" + output: "transpose_16" + name: "Transpose_22" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + 
} + node { + input: "transpose_16" + output: "clone_26" + name: "aten_clone_23" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_21" + name: "Constant_24" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "mul_5" + input: "_val_21" + output: "view_78" + name: "aten_view_25" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_78" + output: "transpose_17" + name: "Transpose_26" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_17" + output: "clone_27" + name: "aten_clone_27" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_25" + name: "Constant_28" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_27" + input: "_val_25" + output: "view_79" + name: "aten_view_29" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_27" + name: "Constant_30" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_25" + input: "_val_27" + output: "view_80" + name: "aten_view_31" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_29" + name: "Constant_32" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_26" + input: "_val_29" + output: "view_81" + name: "aten_view_33" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_80" + output: "transpose_18" + name: "Transpose_34" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + type: INTS + } + } + node { + input: "view_79" + input: "transpose_18" + output: "bmm_6" + name: "aten_bmm_35" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_33" + name: "Constant_36" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_6" + input: "_val_33" + output: "view_82" + name: "aten_view_37" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_82" + input: "expand_1" + output: "add_13" + name: "aten_add_38" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_36" + name: "Constant_39" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: 
"\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "add_13" + input: "_val_36" + output: "view_83" + name: "aten_view_40" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_83" + output: "_softmax_3" + name: "aten_softmax_no_dtype_41" + op_type: "aten_softmax_no_dtype" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "_softmax_3" + output: "clone_28" + name: "aten_clone_42" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_28" + input: "view_81" + output: "bmm_7" + name: "aten_bmm_43" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_41" + name: "Constant_44" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_7" + input: "_val_41" + output: "view_84" + name: "aten_view_45" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_84" + output: "transpose_19" + name: "Transpose_46" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_19" + output: "clone_29" + name: "aten_clone_47" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_45" + name: "Constant_48" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_29" + input: "_val_45" + output: "view_85" + name: "aten_view_49" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_85" + input: "model.decoder.layers.3.self_attn.out_proj.weight" + input: "model.decoder.layers.3.self_attn.out_proj.bias" + output: "model_decoder_layers_3_self_attn_out_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_out_proj_1_50" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_3_self_attn_out_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_self_attn_layer_norm_1" + input: "add_14" + input: "model.decoder.layers.3.self_attn_layer_norm.weight" + input: "model.decoder.layers.3.self_attn_layer_norm.bias" + output: "native_layer_norm_6" + node { + input: "add_14" + input: "model.decoder.layers.3.self_attn_layer_norm.weight" + input: "model.decoder.layers.3.self_attn_layer_norm.bias" + output: "native_layer_norm_6" + output: "native_layer_norm_6_1" + output: "native_layer_norm_6_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } 
+ } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_fc1_1" + input: "getitem_18" + input: "model.decoder.layers.3.fc1.weight" + input: "model.decoder.layers.3.fc1.bias" + output: "view_89" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_18" + input: "_val_1" + output: "view_88" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.fc1.weight" + output: "t_22" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.fc1.bias" + input: "view_88" + input: "t_22" + output: "addmm_22" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_22" + input: "_val_7" + output: "view_89" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_3_activation_fn_1" + input: "view_89" + output: "relu_3" + node { + input: "view_89" + output: "relu_3" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_fc2_1" + input: "clone_31" + input: "model.decoder.layers.3.fc2.weight" + input: "model.decoder.layers.3.fc2.bias" + output: "view_91" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_31" + input: "_val_1" + output: "view_90" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.fc2.weight" + output: "t_23" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.3.fc2.bias" + input: "view_90" + input: "t_23" + output: "addmm_23" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_23" + input: 
"_val_7" + output: "view_91" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_final_layer_norm_1" + input: "add_15" + input: "model.decoder.layers.3.final_layer_norm.weight" + input: "model.decoder.layers.3.final_layer_norm.bias" + output: "native_layer_norm_7" + node { + input: "add_15" + input: "model.decoder.layers.3.final_layer_norm.weight" + input: "model.decoder.layers.3.final_layer_norm.bias" + output: "native_layer_norm_7" + output: "native_layer_norm_7_1" + output: "native_layer_norm_7_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1" + input: "getitem_15" + input: "expand_1" + input: "model.decoder.layers.3.self_attn.q_proj.weight" + input: "model.decoder.layers.3.self_attn.q_proj.bias" + input: "model.decoder.layers.3.self_attn.k_proj.weight" + input: "model.decoder.layers.3.self_attn.k_proj.bias" + input: "model.decoder.layers.3.self_attn.v_proj.weight" + input: "model.decoder.layers.3.self_attn.v_proj.bias" + input: "model.decoder.layers.3.self_attn.out_proj.weight" + input: "model.decoder.layers.3.self_attn.out_proj.bias" + input: "model.decoder.layers.3.self_attn_layer_norm.weight" + input: "model.decoder.layers.3.self_attn_layer_norm.bias" + input: "model.decoder.layers.3.fc1.weight" + input: "model.decoder.layers.3.fc1.bias" + input: "model.decoder.layers.3.fc2.weight" + input: "model.decoder.layers.3.fc2.bias" + input: "model.decoder.layers.3.final_layer_norm.weight" + input: "model.decoder.layers.3.final_layer_norm.bias" + output: "model_decoder_layers_3_self_attn_1" + output: "model_decoder_layers_3_self_attn_1_1" + output: "model_decoder_layers_3_final_layer_norm_1" + node { + input: "getitem_15" + input: "expand_1" + input: "model.decoder.layers.3.self_attn.q_proj.weight" + input: "model.decoder.layers.3.self_attn.q_proj.bias" + input: "model.decoder.layers.3.self_attn.k_proj.weight" + input: "model.decoder.layers.3.self_attn.k_proj.bias" + input: "model.decoder.layers.3.self_attn.v_proj.weight" + input: "model.decoder.layers.3.self_attn.v_proj.bias" + input: "model.decoder.layers.3.self_attn.out_proj.weight" + input: "model.decoder.layers.3.self_attn.out_proj.bias" + output: "model_decoder_layers_3_self_attn_1" + output: "model_decoder_layers_3_self_attn_1_1" + output: "model_decoder_layers_3_self_attn_1_2" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1_0" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_3_self_attn_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_3_self_attn_1_2" + output: "clone_30" + name: "aten_clone_1" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_15" + 
input: "clone_30" + output: "add_14" + name: "aten_add_2" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_14" + input: "model.decoder.layers.3.self_attn_layer_norm.weight" + input: "model.decoder.layers.3.self_attn_layer_norm.bias" + output: "model_decoder_layers_3_self_attn_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_self_attn_layer_norm_1_3" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_self_attn_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_3_self_attn_layer_norm_1" + input: "model.decoder.layers.3.fc1.weight" + input: "model.decoder.layers.3.fc1.bias" + output: "model_decoder_layers_3_fc1_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_fc1_1_4" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_3_fc1_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_3_fc1_1" + output: "model_decoder_layers_3_activation_fn_1" + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_3_activation_fn_1_5" + op_type: "torch_nn_modules_activation_ReLU_model_decoder_layers_3_activation_fn_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_3_activation_fn_1" + output: "clone_31" + name: "aten_clone_6" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_31" + input: "model.decoder.layers.3.fc2.weight" + input: "model.decoder.layers.3.fc2.bias" + output: "model_decoder_layers_3_fc2_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_3_fc2_1_7" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_3_fc2_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_3_fc2_1" + output: "clone_32" + name: "aten_clone_8" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model_decoder_layers_3_self_attn_layer_norm_1" + input: "clone_32" + output: "add_15" + name: "aten_add_9" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_15" + input: "model.decoder.layers.3.final_layer_norm.weight" + input: "model.decoder.layers.3.final_layer_norm.bias" + output: "model_decoder_layers_3_final_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_final_layer_norm_1_10" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_3_final_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + opset_import { + domain: "pkg.transformers.4.34.0.dev0" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_q_proj_1" + input: "getitem_21" + input: "model.decoder.layers.4.self_attn.q_proj.weight" + input: "model.decoder.layers.4.self_attn.q_proj.bias" + output: "view_93" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: 
"\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_21" + input: "_val_1" + output: "view_92" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.self_attn.q_proj.weight" + output: "t_24" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.self_attn.q_proj.bias" + input: "view_92" + input: "t_24" + output: "addmm_24" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_24" + input: "_val_7" + output: "view_93" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_k_proj_1" + input: "getitem_21" + input: "model.decoder.layers.4.self_attn.k_proj.weight" + input: "model.decoder.layers.4.self_attn.k_proj.bias" + output: "view_95" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_21" + input: "_val_1" + output: "view_94" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.self_attn.k_proj.weight" + output: "t_25" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.self_attn.k_proj.bias" + input: "view_94" + input: "t_25" + output: "addmm_25" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_25" + input: "_val_7" + output: "view_95" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_v_proj_1" + input: "getitem_21" + input: "model.decoder.layers.4.self_attn.v_proj.weight" + input: "model.decoder.layers.4.self_attn.v_proj.bias" + output: "view_98" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: 
"getitem_21" + input: "_val_1" + output: "view_97" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.self_attn.v_proj.weight" + output: "t_26" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.self_attn.v_proj.bias" + input: "view_97" + input: "t_26" + output: "addmm_26" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_26" + input: "_val_7" + output: "view_98" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_out_proj_1" + input: "view_107" + input: "model.decoder.layers.4.self_attn.out_proj.weight" + input: "model.decoder.layers.4.self_attn.out_proj.bias" + output: "view_109" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "view_107" + input: "_val_1" + output: "view_108" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.self_attn.out_proj.weight" + output: "t_27" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.self_attn.out_proj.bias" + input: "view_108" + input: "t_27" + output: "addmm_27" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_27" + input: "_val_7" + output: "view_109" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1" + input: "getitem_21" + input: "expand_1" + input: "model.decoder.layers.4.self_attn.q_proj.weight" + input: "model.decoder.layers.4.self_attn.q_proj.bias" + input: "model.decoder.layers.4.self_attn.k_proj.weight" + input: "model.decoder.layers.4.self_attn.k_proj.bias" + input: "model.decoder.layers.4.self_attn.v_proj.weight" + input: "model.decoder.layers.4.self_attn.v_proj.bias" + input: "model.decoder.layers.4.self_attn.out_proj.weight" + input: 
"model.decoder.layers.4.self_attn.out_proj.bias" + output: "clone_33" + output: "clone_34" + output: "model_decoder_layers_4_self_attn_out_proj_1" + node { + input: "getitem_21" + input: "model.decoder.layers.4.self_attn.q_proj.weight" + input: "model.decoder.layers.4.self_attn.q_proj.bias" + output: "model_decoder_layers_4_self_attn_q_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_q_proj_1_11" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_q_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_5" + name: "Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000>" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_4_self_attn_q_proj_1" + input: "_val_5" + output: "mul_6" + name: "aten_mul_13" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_21" + input: "model.decoder.layers.4.self_attn.k_proj.weight" + input: "model.decoder.layers.4.self_attn.k_proj.bias" + output: "model_decoder_layers_4_self_attn_k_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_k_proj_1_14" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_k_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_10" + name: "Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_4_self_attn_k_proj_1" + input: "_val_10" + output: "view_96" + name: "aten_view_16" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_96" + output: "transpose_20" + name: "Transpose_17" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_20" + output: "clone_33" + name: "aten_clone_18" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_21" + input: "model.decoder.layers.4.self_attn.v_proj.weight" + input: "model.decoder.layers.4.self_attn.v_proj.bias" + output: "model_decoder_layers_4_self_attn_v_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_v_proj_1_19" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_v_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_17" + name: "Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_4_self_attn_v_proj_1" + input: "_val_17" + output: "view_99" + name: "aten_view_21" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_99" + output: "transpose_21" + name: "Transpose_22" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_21" + output: "clone_34" + name: "aten_clone_23" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: 
"_val_21" + name: "Constant_24" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "mul_6" + input: "_val_21" + output: "view_100" + name: "aten_view_25" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_100" + output: "transpose_22" + name: "Transpose_26" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_22" + output: "clone_35" + name: "aten_clone_27" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_25" + name: "Constant_28" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_35" + input: "_val_25" + output: "view_101" + name: "aten_view_29" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_27" + name: "Constant_30" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_33" + input: "_val_27" + output: "view_102" + name: "aten_view_31" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_29" + name: "Constant_32" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_34" + input: "_val_29" + output: "view_103" + name: "aten_view_33" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_102" + output: "transpose_23" + name: "Transpose_34" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + type: INTS + } + } + node { + input: "view_101" + input: "transpose_23" + output: "bmm_8" + name: "aten_bmm_35" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_33" + name: "Constant_36" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_8" + input: "_val_33" + output: "view_104" + name: "aten_view_37" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_104" + input: "expand_1" + output: "add_16" + name: "aten_add_38" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_36" + name: "Constant_39" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "add_16" + input: "_val_36" + output: "view_105" + name: "aten_view_40" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: 
"view_105" + output: "_softmax_4" + name: "aten_softmax_no_dtype_41" + op_type: "aten_softmax_no_dtype" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "_softmax_4" + output: "clone_36" + name: "aten_clone_42" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_36" + input: "view_103" + output: "bmm_9" + name: "aten_bmm_43" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_41" + name: "Constant_44" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_9" + input: "_val_41" + output: "view_106" + name: "aten_view_45" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_106" + output: "transpose_24" + name: "Transpose_46" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_24" + output: "clone_37" + name: "aten_clone_47" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_45" + name: "Constant_48" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_37" + input: "_val_45" + output: "view_107" + name: "aten_view_49" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_107" + input: "model.decoder.layers.4.self_attn.out_proj.weight" + input: "model.decoder.layers.4.self_attn.out_proj.bias" + output: "model_decoder_layers_4_self_attn_out_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_out_proj_1_50" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_4_self_attn_out_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_self_attn_layer_norm_1" + input: "add_17" + input: "model.decoder.layers.4.self_attn_layer_norm.weight" + input: "model.decoder.layers.4.self_attn_layer_norm.bias" + output: "native_layer_norm_8" + node { + input: "add_17" + input: "model.decoder.layers.4.self_attn_layer_norm.weight" + input: "model.decoder.layers.4.self_attn_layer_norm.bias" + output: "native_layer_norm_8" + output: "native_layer_norm_8_1" + output: "native_layer_norm_8_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_fc1_1" + input: "getitem_24" + input: "model.decoder.layers.4.fc1.weight" + input: "model.decoder.layers.4.fc1.bias" + 
output: "view_111" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_24" + input: "_val_1" + output: "view_110" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.fc1.weight" + output: "t_28" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.fc1.bias" + input: "view_110" + input: "t_28" + output: "addmm_28" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_28" + input: "_val_7" + output: "view_111" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_4_activation_fn_1" + input: "view_111" + output: "relu_4" + node { + input: "view_111" + output: "relu_4" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_fc2_1" + input: "clone_39" + input: "model.decoder.layers.4.fc2.weight" + input: "model.decoder.layers.4.fc2.bias" + output: "view_113" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_39" + input: "_val_1" + output: "view_112" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.fc2.weight" + output: "t_29" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.4.fc2.bias" + input: "view_112" + input: "t_29" + output: "addmm_29" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_29" + input: "_val_7" + output: "view_113" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions 
{ + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_final_layer_norm_1" + input: "add_18" + input: "model.decoder.layers.4.final_layer_norm.weight" + input: "model.decoder.layers.4.final_layer_norm.bias" + output: "native_layer_norm_9" + node { + input: "add_18" + input: "model.decoder.layers.4.final_layer_norm.weight" + input: "model.decoder.layers.4.final_layer_norm.bias" + output: "native_layer_norm_9" + output: "native_layer_norm_9_1" + output: "native_layer_norm_9_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1" + input: "getitem_21" + input: "expand_1" + input: "model.decoder.layers.4.self_attn.q_proj.weight" + input: "model.decoder.layers.4.self_attn.q_proj.bias" + input: "model.decoder.layers.4.self_attn.k_proj.weight" + input: "model.decoder.layers.4.self_attn.k_proj.bias" + input: "model.decoder.layers.4.self_attn.v_proj.weight" + input: "model.decoder.layers.4.self_attn.v_proj.bias" + input: "model.decoder.layers.4.self_attn.out_proj.weight" + input: "model.decoder.layers.4.self_attn.out_proj.bias" + input: "model.decoder.layers.4.self_attn_layer_norm.weight" + input: "model.decoder.layers.4.self_attn_layer_norm.bias" + input: "model.decoder.layers.4.fc1.weight" + input: "model.decoder.layers.4.fc1.bias" + input: "model.decoder.layers.4.fc2.weight" + input: "model.decoder.layers.4.fc2.bias" + input: "model.decoder.layers.4.final_layer_norm.weight" + input: "model.decoder.layers.4.final_layer_norm.bias" + output: "model_decoder_layers_4_self_attn_1" + output: "model_decoder_layers_4_self_attn_1_1" + output: "model_decoder_layers_4_final_layer_norm_1" + node { + input: "getitem_21" + input: "expand_1" + input: "model.decoder.layers.4.self_attn.q_proj.weight" + input: "model.decoder.layers.4.self_attn.q_proj.bias" + input: "model.decoder.layers.4.self_attn.k_proj.weight" + input: "model.decoder.layers.4.self_attn.k_proj.bias" + input: "model.decoder.layers.4.self_attn.v_proj.weight" + input: "model.decoder.layers.4.self_attn.v_proj.bias" + input: "model.decoder.layers.4.self_attn.out_proj.weight" + input: "model.decoder.layers.4.self_attn.out_proj.bias" + output: "model_decoder_layers_4_self_attn_1" + output: "model_decoder_layers_4_self_attn_1_1" + output: "model_decoder_layers_4_self_attn_1_2" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1_0" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_4_self_attn_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_4_self_attn_1_2" + output: "clone_38" + name: "aten_clone_1" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_21" + input: "clone_38" + output: "add_17" + name: "aten_add_2" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_17" + input: "model.decoder.layers.4.self_attn_layer_norm.weight" + input: 
"model.decoder.layers.4.self_attn_layer_norm.bias" + output: "model_decoder_layers_4_self_attn_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_self_attn_layer_norm_1_3" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_self_attn_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_4_self_attn_layer_norm_1" + input: "model.decoder.layers.4.fc1.weight" + input: "model.decoder.layers.4.fc1.bias" + output: "model_decoder_layers_4_fc1_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_fc1_1_4" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_4_fc1_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_4_fc1_1" + output: "model_decoder_layers_4_activation_fn_1" + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_4_activation_fn_1_5" + op_type: "torch_nn_modules_activation_ReLU_model_decoder_layers_4_activation_fn_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_4_activation_fn_1" + output: "clone_39" + name: "aten_clone_6" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_39" + input: "model.decoder.layers.4.fc2.weight" + input: "model.decoder.layers.4.fc2.bias" + output: "model_decoder_layers_4_fc2_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_4_fc2_1_7" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_4_fc2_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_4_fc2_1" + output: "clone_40" + name: "aten_clone_8" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model_decoder_layers_4_self_attn_layer_norm_1" + input: "clone_40" + output: "add_18" + name: "aten_add_9" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_18" + input: "model.decoder.layers.4.final_layer_norm.weight" + input: "model.decoder.layers.4.final_layer_norm.bias" + output: "model_decoder_layers_4_final_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_final_layer_norm_1_10" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_4_final_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + opset_import { + domain: "pkg.transformers.4.34.0.dev0" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_q_proj_1" + input: "getitem_27" + input: "model.decoder.layers.5.self_attn.q_proj.weight" + input: "model.decoder.layers.5.self_attn.q_proj.bias" + output: "view_115" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_27" + input: "_val_1" + output: "view_114" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: 
"model.decoder.layers.5.self_attn.q_proj.weight" + output: "t_30" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.self_attn.q_proj.bias" + input: "view_114" + input: "t_30" + output: "addmm_30" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_30" + input: "_val_7" + output: "view_115" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_k_proj_1" + input: "getitem_27" + input: "model.decoder.layers.5.self_attn.k_proj.weight" + input: "model.decoder.layers.5.self_attn.k_proj.bias" + output: "view_117" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_27" + input: "_val_1" + output: "view_116" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.self_attn.k_proj.weight" + output: "t_31" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.self_attn.k_proj.bias" + input: "view_116" + input: "t_31" + output: "addmm_31" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_31" + input: "_val_7" + output: "view_117" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_v_proj_1" + input: "getitem_27" + input: "model.decoder.layers.5.self_attn.v_proj.weight" + input: "model.decoder.layers.5.self_attn.v_proj.bias" + output: "view_120" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_27" + input: "_val_1" + output: "view_119" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.self_attn.v_proj.weight" + output: "t_32" + name: "aten_t_4" + op_type: "aten_t" + 
domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.self_attn.v_proj.bias" + input: "view_119" + input: "t_32" + output: "addmm_32" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_32" + input: "_val_7" + output: "view_120" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_out_proj_1" + input: "view_129" + input: "model.decoder.layers.5.self_attn.out_proj.weight" + input: "model.decoder.layers.5.self_attn.out_proj.bias" + output: "view_131" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "view_129" + input: "_val_1" + output: "view_130" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.self_attn.out_proj.weight" + output: "t_33" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.self_attn.out_proj.bias" + input: "view_130" + input: "t_33" + output: "addmm_33" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_33" + input: "_val_7" + output: "view_131" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1" + input: "getitem_27" + input: "expand_1" + input: "model.decoder.layers.5.self_attn.q_proj.weight" + input: "model.decoder.layers.5.self_attn.q_proj.bias" + input: "model.decoder.layers.5.self_attn.k_proj.weight" + input: "model.decoder.layers.5.self_attn.k_proj.bias" + input: "model.decoder.layers.5.self_attn.v_proj.weight" + input: "model.decoder.layers.5.self_attn.v_proj.bias" + input: "model.decoder.layers.5.self_attn.out_proj.weight" + input: "model.decoder.layers.5.self_attn.out_proj.bias" + output: "clone_41" + output: "clone_42" + output: "model_decoder_layers_5_self_attn_out_proj_1" + node { + input: "getitem_27" + input: "model.decoder.layers.5.self_attn.q_proj.weight" + input: 
"model.decoder.layers.5.self_attn.q_proj.bias" + output: "model_decoder_layers_5_self_attn_q_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_q_proj_1_11" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_q_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_5" + name: "Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000>" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_5_self_attn_q_proj_1" + input: "_val_5" + output: "mul_7" + name: "aten_mul_13" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_27" + input: "model.decoder.layers.5.self_attn.k_proj.weight" + input: "model.decoder.layers.5.self_attn.k_proj.bias" + output: "model_decoder_layers_5_self_attn_k_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_k_proj_1_14" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_k_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_10" + name: "Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_5_self_attn_k_proj_1" + input: "_val_10" + output: "view_118" + name: "aten_view_16" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_118" + output: "transpose_25" + name: "Transpose_17" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_25" + output: "clone_41" + name: "aten_clone_18" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_27" + input: "model.decoder.layers.5.self_attn.v_proj.weight" + input: "model.decoder.layers.5.self_attn.v_proj.bias" + output: "model_decoder_layers_5_self_attn_v_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_v_proj_1_19" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_v_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_17" + name: "Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model_decoder_layers_5_self_attn_v_proj_1" + input: "_val_17" + output: "view_121" + name: "aten_view_21" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_121" + output: "transpose_26" + name: "Transpose_22" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_26" + output: "clone_42" + name: "aten_clone_23" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_21" + name: "Constant_24" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: 
"\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "mul_7" + input: "_val_21" + output: "view_122" + name: "aten_view_25" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_122" + output: "transpose_27" + name: "Transpose_26" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_27" + output: "clone_43" + name: "aten_clone_27" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_25" + name: "Constant_28" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_43" + input: "_val_25" + output: "view_123" + name: "aten_view_29" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_27" + name: "Constant_30" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_41" + input: "_val_27" + output: "view_124" + name: "aten_view_31" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_29" + name: "Constant_32" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\377\377\377\377\377\377\377\377@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_42" + input: "_val_29" + output: "view_125" + name: "aten_view_33" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_124" + output: "transpose_28" + name: "Transpose_34" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + type: INTS + } + } + node { + input: "view_123" + input: "transpose_28" + output: "bmm_10" + name: "aten_bmm_35" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_33" + name: "Constant_36" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_10" + input: "_val_33" + output: "view_126" + name: "aten_view_37" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_126" + input: "expand_1" + output: "add_19" + name: "aten_add_38" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_36" + name: "Constant_39" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "add_19" + input: "_val_36" + output: "view_127" + name: "aten_view_40" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_127" + output: "_softmax_5" + name: "aten_softmax_no_dtype_41" + op_type: "aten_softmax_no_dtype" + attribute { + 
name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "_softmax_5" + output: "clone_44" + name: "aten_clone_42" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_44" + input: "view_125" + output: "bmm_11" + name: "aten_bmm_43" + op_type: "aten_bmm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_41" + name: "Constant_44" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000@\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "bmm_11" + input: "_val_41" + output: "view_128" + name: "aten_view_45" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_128" + output: "transpose_29" + name: "Transpose_46" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "transpose_29" + output: "clone_45" + name: "aten_clone_47" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_45" + name: "Constant_48" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_45" + input: "_val_45" + output: "view_129" + name: "aten_view_49" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_129" + input: "model.decoder.layers.5.self_attn.out_proj.weight" + input: "model.decoder.layers.5.self_attn.out_proj.bias" + output: "model_decoder_layers_5_self_attn_out_proj_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_out_proj_1_50" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_5_self_attn_out_proj_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_self_attn_layer_norm_1" + input: "add_20" + input: "model.decoder.layers.5.self_attn_layer_norm.weight" + input: "model.decoder.layers.5.self_attn_layer_norm.bias" + output: "native_layer_norm_10" + node { + input: "add_20" + input: "model.decoder.layers.5.self_attn_layer_norm.weight" + input: "model.decoder.layers.5.self_attn_layer_norm.bias" + output: "native_layer_norm_10" + output: "native_layer_norm_10_1" + output: "native_layer_norm_10_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_fc1_1" + input: "getitem_30" + input: "model.decoder.layers.5.fc1.weight" + input: "model.decoder.layers.5.fc1.bias" + output: "view_133" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: 
"value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_30" + input: "_val_1" + output: "view_132" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.fc1.weight" + output: "t_34" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.fc1.bias" + input: "view_132" + input: "t_34" + output: "addmm_34" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_34" + input: "_val_7" + output: "view_133" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_5_activation_fn_1" + input: "view_133" + output: "relu_5" + node { + input: "view_133" + output: "relu_5" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_fc2_1" + input: "clone_47" + input: "model.decoder.layers.5.fc2.weight" + input: "model.decoder.layers.5.fc2.bias" + output: "view_135" + node { + output: "_val_1" + name: "Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "clone_47" + input: "_val_1" + output: "view_134" + name: "aten_view_3" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.fc2.weight" + output: "t_35" + name: "aten_t_4" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model.decoder.layers.5.fc2.bias" + input: "view_134" + input: "t_35" + output: "addmm_35" + name: "aten_addmm_5" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "addmm_35" + input: "_val_7" + output: "view_135" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_final_layer_norm_1" + input: 
"add_21" + input: "model.decoder.layers.5.final_layer_norm.weight" + input: "model.decoder.layers.5.final_layer_norm.bias" + output: "native_layer_norm_11" + node { + input: "add_21" + input: "model.decoder.layers.5.final_layer_norm.weight" + input: "model.decoder.layers.5.final_layer_norm.bias" + output: "native_layer_norm_11" + output: "native_layer_norm_11_1" + output: "native_layer_norm_11_2" + name: "LayerNormalization_0" + op_type: "LayerNormalization" + attribute { + name: "axis" + i: -1 + type: INT + } + attribute { + name: "epsilon" + f: 1e-05 + type: FLOAT + } + attribute { + name: "stash_type" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1" + input: "getitem_27" + input: "expand_1" + input: "model.decoder.layers.5.self_attn.q_proj.weight" + input: "model.decoder.layers.5.self_attn.q_proj.bias" + input: "model.decoder.layers.5.self_attn.k_proj.weight" + input: "model.decoder.layers.5.self_attn.k_proj.bias" + input: "model.decoder.layers.5.self_attn.v_proj.weight" + input: "model.decoder.layers.5.self_attn.v_proj.bias" + input: "model.decoder.layers.5.self_attn.out_proj.weight" + input: "model.decoder.layers.5.self_attn.out_proj.bias" + input: "model.decoder.layers.5.self_attn_layer_norm.weight" + input: "model.decoder.layers.5.self_attn_layer_norm.bias" + input: "model.decoder.layers.5.fc1.weight" + input: "model.decoder.layers.5.fc1.bias" + input: "model.decoder.layers.5.fc2.weight" + input: "model.decoder.layers.5.fc2.bias" + input: "model.decoder.layers.5.final_layer_norm.weight" + input: "model.decoder.layers.5.final_layer_norm.bias" + output: "model_decoder_layers_5_self_attn_1" + output: "model_decoder_layers_5_self_attn_1_1" + output: "model_decoder_layers_5_final_layer_norm_1" + node { + input: "getitem_27" + input: "expand_1" + input: "model.decoder.layers.5.self_attn.q_proj.weight" + input: "model.decoder.layers.5.self_attn.q_proj.bias" + input: "model.decoder.layers.5.self_attn.k_proj.weight" + input: "model.decoder.layers.5.self_attn.k_proj.bias" + input: "model.decoder.layers.5.self_attn.v_proj.weight" + input: "model.decoder.layers.5.self_attn.v_proj.bias" + input: "model.decoder.layers.5.self_attn.out_proj.weight" + input: "model.decoder.layers.5.self_attn.out_proj.bias" + output: "model_decoder_layers_5_self_attn_1" + output: "model_decoder_layers_5_self_attn_1_1" + output: "model_decoder_layers_5_self_attn_1_2" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1_0" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Attention_model_decoder_layers_5_self_attn_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_5_self_attn_1_2" + output: "clone_46" + name: "aten_clone_1" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_27" + input: "clone_46" + output: "add_20" + name: "aten_add_2" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_20" + input: "model.decoder.layers.5.self_attn_layer_norm.weight" + input: "model.decoder.layers.5.self_attn_layer_norm.bias" + output: "model_decoder_layers_5_self_attn_layer_norm_1" + name: 
"torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_self_attn_layer_norm_1_3" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_self_attn_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_5_self_attn_layer_norm_1" + input: "model.decoder.layers.5.fc1.weight" + input: "model.decoder.layers.5.fc1.bias" + output: "model_decoder_layers_5_fc1_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_fc1_1_4" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_5_fc1_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_5_fc1_1" + output: "model_decoder_layers_5_activation_fn_1" + name: "torch_nn_modules_activation_ReLU_model_decoder_layers_5_activation_fn_1_5" + op_type: "torch_nn_modules_activation_ReLU_model_decoder_layers_5_activation_fn_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_5_activation_fn_1" + output: "clone_47" + name: "aten_clone_6" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone_47" + input: "model.decoder.layers.5.fc2.weight" + input: "model.decoder.layers.5.fc2.bias" + output: "model_decoder_layers_5_fc2_1" + name: "torch_nn_modules_linear_Linear_model_decoder_layers_5_fc2_1_7" + op_type: "torch_nn_modules_linear_Linear_model_decoder_layers_5_fc2_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "model_decoder_layers_5_fc2_1" + output: "clone_48" + name: "aten_clone_8" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "model_decoder_layers_5_self_attn_layer_norm_1" + input: "clone_48" + output: "add_21" + name: "aten_add_9" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_21" + input: "model.decoder.layers.5.final_layer_norm.weight" + input: "model.decoder.layers.5.final_layer_norm.bias" + output: "model_decoder_layers_5_final_layer_norm_1" + name: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_final_layer_norm_1_10" + op_type: "torch_nn_modules_normalization_LayerNorm_model_decoder_layers_5_final_layer_norm_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + opset_import { + domain: "pkg.transformers.4.34.0.dev0" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "aten_full" + input: "size" + input: "fill_value" + output: "return_val" + node { + input: "size" + output: "size_0" + name: "n0" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + input: "fill_value" + output: "fill_value_1" + name: "n1" + op_type: "Cast" + attribute { + name: "to" + type: INT + ref_attr_name: "dtype" + } + } + node { + input: "fill_value_1" + input: "size_0" + output: "return_val" + name: "n2" + op_type: "Expand" + } + doc_string: "full(SymInt[] size, Scalar fill_value, *, ScalarType? dtype=None, Layout? layout=None, Device? device=None, bool? 
pin_memory=None) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "dtype" + i: 1 + type: INT + } +} +functions { + name: "aten_lt" + input: "self" + input: "other" + output: "return_val" + node { + input: "self" + input: "other" + output: "return_val" + name: "n0" + op_type: "Less" + } + doc_string: "lt.Tensor(Tensor self, Tensor other) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_masked_fill" + input: "self" + input: "mask" + input: "value" + output: "return_val" + node { + input: "value" + input: "self" + output: "value_cast" + name: "n0" + op_type: "CastLike" + } + node { + input: "mask" + input: "value_cast" + input: "self" + output: "return_val" + name: "n1" + op_type: "Where" + } + doc_string: "masked_fill.Tensor(Tensor self, Tensor mask, Tensor value) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2Decoder_model_decoder_1" + input: "l_input_ids_" + input: "model.decoder.embed_tokens.weight" + input: "model.decoder.embed_positions.weights" + input: "model.decoder.layers.0.self_attn.q_proj.weight" + input: "model.decoder.layers.0.self_attn.q_proj.bias" + input: "model.decoder.layers.0.self_attn.k_proj.weight" + input: "model.decoder.layers.0.self_attn.k_proj.bias" + input: "model.decoder.layers.0.self_attn.v_proj.weight" + input: "model.decoder.layers.0.self_attn.v_proj.bias" + input: "model.decoder.layers.0.self_attn.out_proj.weight" + input: "model.decoder.layers.0.self_attn.out_proj.bias" + input: "model.decoder.layers.0.self_attn_layer_norm.weight" + input: "model.decoder.layers.0.self_attn_layer_norm.bias" + input: "model.decoder.layers.0.fc1.weight" + input: "model.decoder.layers.0.fc1.bias" + input: "model.decoder.layers.0.fc2.weight" + input: "model.decoder.layers.0.fc2.bias" + input: "model.decoder.layers.0.final_layer_norm.weight" + input: "model.decoder.layers.0.final_layer_norm.bias" + input: "model.decoder.layers.1.self_attn.q_proj.weight" + input: "model.decoder.layers.1.self_attn.q_proj.bias" + input: "model.decoder.layers.1.self_attn.k_proj.weight" + input: "model.decoder.layers.1.self_attn.k_proj.bias" + input: "model.decoder.layers.1.self_attn.v_proj.weight" + input: "model.decoder.layers.1.self_attn.v_proj.bias" + input: "model.decoder.layers.1.self_attn.out_proj.weight" + input: "model.decoder.layers.1.self_attn.out_proj.bias" + input: "model.decoder.layers.1.self_attn_layer_norm.weight" + input: "model.decoder.layers.1.self_attn_layer_norm.bias" + input: "model.decoder.layers.1.fc1.weight" + input: "model.decoder.layers.1.fc1.bias" + input: "model.decoder.layers.1.fc2.weight" + input: "model.decoder.layers.1.fc2.bias" + input: "model.decoder.layers.1.final_layer_norm.weight" + input: "model.decoder.layers.1.final_layer_norm.bias" + input: "model.decoder.layers.2.self_attn.q_proj.weight" + input: "model.decoder.layers.2.self_attn.q_proj.bias" + input: "model.decoder.layers.2.self_attn.k_proj.weight" + input: "model.decoder.layers.2.self_attn.k_proj.bias" + input: "model.decoder.layers.2.self_attn.v_proj.weight" + input: "model.decoder.layers.2.self_attn.v_proj.bias" + input: "model.decoder.layers.2.self_attn.out_proj.weight" + input: "model.decoder.layers.2.self_attn.out_proj.bias" + input: "model.decoder.layers.2.self_attn_layer_norm.weight" + input: 
"model.decoder.layers.2.self_attn_layer_norm.bias" + input: "model.decoder.layers.2.fc1.weight" + input: "model.decoder.layers.2.fc1.bias" + input: "model.decoder.layers.2.fc2.weight" + input: "model.decoder.layers.2.fc2.bias" + input: "model.decoder.layers.2.final_layer_norm.weight" + input: "model.decoder.layers.2.final_layer_norm.bias" + input: "model.decoder.layers.3.self_attn.q_proj.weight" + input: "model.decoder.layers.3.self_attn.q_proj.bias" + input: "model.decoder.layers.3.self_attn.k_proj.weight" + input: "model.decoder.layers.3.self_attn.k_proj.bias" + input: "model.decoder.layers.3.self_attn.v_proj.weight" + input: "model.decoder.layers.3.self_attn.v_proj.bias" + input: "model.decoder.layers.3.self_attn.out_proj.weight" + input: "model.decoder.layers.3.self_attn.out_proj.bias" + input: "model.decoder.layers.3.self_attn_layer_norm.weight" + input: "model.decoder.layers.3.self_attn_layer_norm.bias" + input: "model.decoder.layers.3.fc1.weight" + input: "model.decoder.layers.3.fc1.bias" + input: "model.decoder.layers.3.fc2.weight" + input: "model.decoder.layers.3.fc2.bias" + input: "model.decoder.layers.3.final_layer_norm.weight" + input: "model.decoder.layers.3.final_layer_norm.bias" + input: "model.decoder.layers.4.self_attn.q_proj.weight" + input: "model.decoder.layers.4.self_attn.q_proj.bias" + input: "model.decoder.layers.4.self_attn.k_proj.weight" + input: "model.decoder.layers.4.self_attn.k_proj.bias" + input: "model.decoder.layers.4.self_attn.v_proj.weight" + input: "model.decoder.layers.4.self_attn.v_proj.bias" + input: "model.decoder.layers.4.self_attn.out_proj.weight" + input: "model.decoder.layers.4.self_attn.out_proj.bias" + input: "model.decoder.layers.4.self_attn_layer_norm.weight" + input: "model.decoder.layers.4.self_attn_layer_norm.bias" + input: "model.decoder.layers.4.fc1.weight" + input: "model.decoder.layers.4.fc1.bias" + input: "model.decoder.layers.4.fc2.weight" + input: "model.decoder.layers.4.fc2.bias" + input: "model.decoder.layers.4.final_layer_norm.weight" + input: "model.decoder.layers.4.final_layer_norm.bias" + input: "model.decoder.layers.5.self_attn.q_proj.weight" + input: "model.decoder.layers.5.self_attn.q_proj.bias" + input: "model.decoder.layers.5.self_attn.k_proj.weight" + input: "model.decoder.layers.5.self_attn.k_proj.bias" + input: "model.decoder.layers.5.self_attn.v_proj.weight" + input: "model.decoder.layers.5.self_attn.v_proj.bias" + input: "model.decoder.layers.5.self_attn.out_proj.weight" + input: "model.decoder.layers.5.self_attn.out_proj.bias" + input: "model.decoder.layers.5.self_attn_layer_norm.weight" + input: "model.decoder.layers.5.self_attn_layer_norm.bias" + input: "model.decoder.layers.5.fc1.weight" + input: "model.decoder.layers.5.fc1.bias" + input: "model.decoder.layers.5.fc2.weight" + input: "model.decoder.layers.5.fc2.bias" + input: "model.decoder.layers.5.final_layer_norm.weight" + input: "model.decoder.layers.5.final_layer_norm.bias" + output: "model_decoder_layers_0_1" + output: "model_decoder_layers_0_1_1" + output: "model_decoder_layers_1_1" + output: "model_decoder_layers_1_1_1" + output: "model_decoder_layers_2_1" + output: "model_decoder_layers_2_1_1" + output: "model_decoder_layers_3_1" + output: "model_decoder_layers_3_1_1" + output: "model_decoder_layers_4_1" + output: "model_decoder_layers_4_1_1" + output: "model_decoder_layers_5_1" + output: "model_decoder_layers_5_1_1" + output: "model_decoder_layers_5_1_2" + node { + output: "_val_1" + name: "Constant_12" + op_type: "Constant" + attribute { + name: "value" 
+ t { + dims: 2 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\377\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "l_input_ids_" + input: "_val_1" + output: "view" + name: "aten_view_13" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view" + input: "model.decoder.embed_tokens.weight" + output: "model_decoder_embed_tokens_1" + name: "torch_nn_modules_sparse_Embedding_model_decoder_embed_tokens_1_14" + op_type: "torch_nn_modules_sparse_Embedding_model_decoder_embed_tokens_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + output: "_val_5" + name: "Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200A" + } + type: TENSOR + } + } + node { + input: "model_decoder_embed_tokens_1" + input: "_val_5" + output: "mul" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_7" + name: "Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "_val_8" + name: "Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\377\377\177\377" + } + type: TENSOR + } + } + node { + input: "_val_7" + input: "_val_8" + output: "full" + name: "aten_full_19" + op_type: "aten_full" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_10" + name: "Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "_val_11" + name: "Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_10" + input: "_val_11" + output: "_val_12" + name: "CastLike_22" + op_type: "CastLike" + } + node { + output: "_val_13" + name: "Constant_23" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + output: "_val_14" + name: "Constant_24" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_13" + input: "_val_14" + output: "_val_15" + name: "CastLike_25" + op_type: "CastLike" + } + node { + output: "_val_16" + name: "Constant_26" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_12" + input: "_val_16" + input: "_val_15" + output: "arange" + name: "Range_27" + op_type: "Range" + } + node { + output: "_val_18" + name: "Constant_28" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "arange" + input: "_val_18" + output: "add" + name: "aten_add_29" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_20" + name: "Constant_30" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "add" + input: "_val_20" + output: "view_1" + name: "aten_view_31" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "arange" + input: "view_1" + output: "lt" + name: "aten_lt_32" + op_type: "aten_lt" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_23" + name: "Constant_33" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "full" + input: "lt" + input: "_val_23" + output: "masked_fill" + name: "aten_masked_fill_34" + op_type: "aten_masked_fill" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view" + input: "model.decoder.embed_positions.weights" + output: "model_decoder_embed_positions_1" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1_35" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2SinusoidalPositionalEmbedding_model_decoder_embed_positions_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "mul" + input: "model_decoder_embed_positions_1" + output: "add_3" + name: "aten_add_36" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_3" + output: "clone" + name: "aten_clone_37" + op_type: "aten_clone" + attribute { + name: "memory_format" + s: "" + type: STRING + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "clone" + input: "masked_fill" + input: "model.decoder.layers.0.self_attn.q_proj.weight" + input: "model.decoder.layers.0.self_attn.q_proj.bias" + input: "model.decoder.layers.0.self_attn.k_proj.weight" + input: "model.decoder.layers.0.self_attn.k_proj.bias" + input: "model.decoder.layers.0.self_attn.v_proj.weight" + input: "model.decoder.layers.0.self_attn.v_proj.bias" + input: "model.decoder.layers.0.self_attn.out_proj.weight" + input: "model.decoder.layers.0.self_attn.out_proj.bias" + input: "model.decoder.layers.0.self_attn_layer_norm.weight" + input: "model.decoder.layers.0.self_attn_layer_norm.bias" + input: "model.decoder.layers.0.fc1.weight" + input: 
"model.decoder.layers.0.fc1.bias" + input: "model.decoder.layers.0.fc2.weight" + input: "model.decoder.layers.0.fc2.bias" + input: "model.decoder.layers.0.final_layer_norm.weight" + input: "model.decoder.layers.0.final_layer_norm.bias" + output: "model_decoder_layers_0_1" + output: "model_decoder_layers_0_1_1" + output: "model_decoder_layers_0_1_2" + output: "model_decoder_layers_0_1_3" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1_38" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_0_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_0_1_3" + input: "model_decoder_layers_0_1_2" + input: "model.decoder.layers.1.self_attn.q_proj.weight" + input: "model.decoder.layers.1.self_attn.q_proj.bias" + input: "model.decoder.layers.1.self_attn.k_proj.weight" + input: "model.decoder.layers.1.self_attn.k_proj.bias" + input: "model.decoder.layers.1.self_attn.v_proj.weight" + input: "model.decoder.layers.1.self_attn.v_proj.bias" + input: "model.decoder.layers.1.self_attn.out_proj.weight" + input: "model.decoder.layers.1.self_attn.out_proj.bias" + input: "model.decoder.layers.1.self_attn_layer_norm.weight" + input: "model.decoder.layers.1.self_attn_layer_norm.bias" + input: "model.decoder.layers.1.fc1.weight" + input: "model.decoder.layers.1.fc1.bias" + input: "model.decoder.layers.1.fc2.weight" + input: "model.decoder.layers.1.fc2.bias" + input: "model.decoder.layers.1.final_layer_norm.weight" + input: "model.decoder.layers.1.final_layer_norm.bias" + output: "model_decoder_layers_1_1" + output: "model_decoder_layers_1_1_1" + output: "model_decoder_layers_1_1_2" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1_39" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_1_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_1_1_2" + input: "model_decoder_layers_0_1_2" + input: "model.decoder.layers.2.self_attn.q_proj.weight" + input: "model.decoder.layers.2.self_attn.q_proj.bias" + input: "model.decoder.layers.2.self_attn.k_proj.weight" + input: "model.decoder.layers.2.self_attn.k_proj.bias" + input: "model.decoder.layers.2.self_attn.v_proj.weight" + input: "model.decoder.layers.2.self_attn.v_proj.bias" + input: "model.decoder.layers.2.self_attn.out_proj.weight" + input: "model.decoder.layers.2.self_attn.out_proj.bias" + input: "model.decoder.layers.2.self_attn_layer_norm.weight" + input: "model.decoder.layers.2.self_attn_layer_norm.bias" + input: "model.decoder.layers.2.fc1.weight" + input: "model.decoder.layers.2.fc1.bias" + input: "model.decoder.layers.2.fc2.weight" + input: "model.decoder.layers.2.fc2.bias" + input: "model.decoder.layers.2.final_layer_norm.weight" + input: "model.decoder.layers.2.final_layer_norm.bias" + output: "model_decoder_layers_2_1" + output: "model_decoder_layers_2_1_1" + output: "model_decoder_layers_2_1_2" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1_40" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_2_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_2_1_2" + input: "model_decoder_layers_0_1_2" + input: 
"model.decoder.layers.3.self_attn.q_proj.weight" + input: "model.decoder.layers.3.self_attn.q_proj.bias" + input: "model.decoder.layers.3.self_attn.k_proj.weight" + input: "model.decoder.layers.3.self_attn.k_proj.bias" + input: "model.decoder.layers.3.self_attn.v_proj.weight" + input: "model.decoder.layers.3.self_attn.v_proj.bias" + input: "model.decoder.layers.3.self_attn.out_proj.weight" + input: "model.decoder.layers.3.self_attn.out_proj.bias" + input: "model.decoder.layers.3.self_attn_layer_norm.weight" + input: "model.decoder.layers.3.self_attn_layer_norm.bias" + input: "model.decoder.layers.3.fc1.weight" + input: "model.decoder.layers.3.fc1.bias" + input: "model.decoder.layers.3.fc2.weight" + input: "model.decoder.layers.3.fc2.bias" + input: "model.decoder.layers.3.final_layer_norm.weight" + input: "model.decoder.layers.3.final_layer_norm.bias" + output: "model_decoder_layers_3_1" + output: "model_decoder_layers_3_1_1" + output: "model_decoder_layers_3_1_2" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1_41" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_3_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_3_1_2" + input: "model_decoder_layers_0_1_2" + input: "model.decoder.layers.4.self_attn.q_proj.weight" + input: "model.decoder.layers.4.self_attn.q_proj.bias" + input: "model.decoder.layers.4.self_attn.k_proj.weight" + input: "model.decoder.layers.4.self_attn.k_proj.bias" + input: "model.decoder.layers.4.self_attn.v_proj.weight" + input: "model.decoder.layers.4.self_attn.v_proj.bias" + input: "model.decoder.layers.4.self_attn.out_proj.weight" + input: "model.decoder.layers.4.self_attn.out_proj.bias" + input: "model.decoder.layers.4.self_attn_layer_norm.weight" + input: "model.decoder.layers.4.self_attn_layer_norm.bias" + input: "model.decoder.layers.4.fc1.weight" + input: "model.decoder.layers.4.fc1.bias" + input: "model.decoder.layers.4.fc2.weight" + input: "model.decoder.layers.4.fc2.bias" + input: "model.decoder.layers.4.final_layer_norm.weight" + input: "model.decoder.layers.4.final_layer_norm.bias" + output: "model_decoder_layers_4_1" + output: "model_decoder_layers_4_1_1" + output: "model_decoder_layers_4_1_2" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1_42" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_4_1" + domain: "pkg.transformers.4.34.0.dev0" + } + node { + input: "model_decoder_layers_4_1_2" + input: "model_decoder_layers_0_1_2" + input: "model.decoder.layers.5.self_attn.q_proj.weight" + input: "model.decoder.layers.5.self_attn.q_proj.bias" + input: "model.decoder.layers.5.self_attn.k_proj.weight" + input: "model.decoder.layers.5.self_attn.k_proj.bias" + input: "model.decoder.layers.5.self_attn.v_proj.weight" + input: "model.decoder.layers.5.self_attn.v_proj.bias" + input: "model.decoder.layers.5.self_attn.out_proj.weight" + input: "model.decoder.layers.5.self_attn.out_proj.bias" + input: "model.decoder.layers.5.self_attn_layer_norm.weight" + input: "model.decoder.layers.5.self_attn_layer_norm.bias" + input: "model.decoder.layers.5.fc1.weight" + input: "model.decoder.layers.5.fc1.bias" + input: "model.decoder.layers.5.fc2.weight" + input: "model.decoder.layers.5.fc2.bias" + input: "model.decoder.layers.5.final_layer_norm.weight" + input: 
"model.decoder.layers.5.final_layer_norm.bias" + output: "model_decoder_layers_5_1" + output: "model_decoder_layers_5_1_1" + output: "model_decoder_layers_5_1_2" + name: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1_43" + op_type: "transformers_models_speech_to_text_2_modeling_speech_to_text_2_Speech2Text2DecoderLayer_model_decoder_layers_5_1" + domain: "pkg.transformers.4.34.0.dev0" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + opset_import { + domain: "pkg.transformers.4.34.0.dev0" + version: 1 + } + domain: "pkg.transformers.4.34.0.dev0" +} +functions { + name: "aten_mm" + input: "self" + input: "mat2" + output: "return_val" + node { + input: "self" + input: "mat2" + output: "return_val" + name: "n0" + op_type: "MatMul" + } + doc_string: "mm(Tensor self, Tensor mat2) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "torch_nn_modules_linear_Linear_lm_head_1" + input: "getitem_33" + input: "model.decoder.embed_tokens.weight" + output: "view_137" + node { + input: "model.decoder.embed_tokens.weight" + output: "t_36" + name: "aten_t_2" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\200\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "getitem_33" + input: "_val_3" + output: "view_136" + name: "aten_view_4" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view_136" + input: "t_36" + output: "mm" + name: "aten_mm_5" + op_type: "aten_mm" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_6" + name: "Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\020\'\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "mm" + input: "_val_6" + output: "view_137" + name: "aten_view_7" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "aten__log_softmax" + input: "self" + output: "result_7" + attribute: "dim" + attribute: "half_to_float" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0" + } + type: TENSOR + } + } + node { + input: "int64_0" + input: "tmp_0" + output: "int64_0_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_0" + input: "int64_0_cast" + output: "self_is_scalar" + name: "n4" + op_type: "Equal" + } + node { + input: "self_is_scalar" + output: "self_4" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "tmp_1" + name: "n0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + type: INTS + } + } + node { + input: "self" + input: "tmp_1" + output: "self_2" + name: "n1" + op_type: "Unsqueeze" + } + 
name: "thenGraph_8" + output { + name: "self_2" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "self_3" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_8" + output { + name: "self_3" + } + } + type: GRAPH + } + } + node { + input: "self_4" + output: "result" + name: "n6" + op_type: "LogSoftmax" + attribute { + name: "axis" + type: INT + ref_attr_name: "dim" + } + } + node { + input: "self_is_scalar" + output: "result_7" + name: "n7" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "result" + output: "result_5" + name: "n0" + op_type: "Squeeze" + } + name: "thenGraph_11" + output { + name: "result_5" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "result" + output: "result_6" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_11" + output { + name: "result_6" + } + } + type: GRAPH + } + } + doc_string: "_log_softmax(Tensor self, int dim, bool half_to_float) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_where" + input: "condition" + input: "self" + input: "other" + output: "return_val" + node { + input: "condition" + input: "self" + input: "other" + output: "return_val" + name: "n0" + op_type: "Where" + } + doc_string: "where.self(Tensor condition, Tensor self, Tensor other) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_gather" + input: "self" + input: "index" + output: "result_16" + attribute: "dim" + node { + input: "index" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0" + } + type: TENSOR + } + } + node { + input: "int64_0" + input: "tmp_0" + output: "int64_0_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_0" + input: "int64_0_cast" + output: "cond" + name: "n4" + op_type: "Equal" + } + node { + input: "cond" + output: "result_16" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "self" + output: "result" + name: "n0" + op_type: "Identity" + } + name: "thenGraph_10" + output { + name: "result" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "tmp_1" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp_1" + output: "tmp_2" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0_3" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0_3" + } + type: TENSOR + } + } + node { + input: "int64_0_3" + input: "tmp_2" + output: "int64_0_3_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_2" + input: "int64_0_3_cast" + output: "cond_4" + name: "n4" + op_type: "Equal" + } + node { + input: "cond_4" + output: "self_8" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "tmp_5" + name: "n0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "self" + input: "tmp_5" + output: "self_6" + name: "n1" + op_type: "Reshape" + } + name: "thenGraph_13" + output { + name: "self_6" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "self_7" + 
name: "n0" + op_type: "Identity" + } + name: "elseGraph_13" + output { + name: "self_7" + } + } + type: GRAPH + } + } + node { + input: "index" + output: "tmp_9" + name: "n6" + op_type: "Size" + } + node { + output: "int64_0_10" + name: "n7" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0_10" + } + type: TENSOR + } + } + node { + input: "int64_0_10" + input: "tmp_9" + output: "int64_0_10_cast" + name: "n8" + op_type: "CastLike" + } + node { + input: "tmp_9" + input: "int64_0_10_cast" + output: "cond_11" + name: "n9" + op_type: "Equal" + } + node { + input: "cond_11" + output: "result_15" + name: "n10" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "index" + input: "self_8" + output: "result_12" + name: "n0" + op_type: "CastLike" + } + name: "thenGraph_15" + output { + name: "result_12" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "index" + output: "index_13" + name: "n0" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + input: "self_8" + input: "index_13" + output: "result_14" + name: "n1" + op_type: "GatherElements" + attribute { + name: "axis" + type: INT + ref_attr_name: "dim" + } + } + name: "elseGraph_15" + output { + name: "result_14" + } + } + type: GRAPH + } + } + name: "elseGraph_10" + output { + name: "result_15" + } + } + type: GRAPH + } + } + doc_string: "gather(Tensor self, int dim, Tensor index, *, bool sparse_grad=False) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "sparse_grad" + i: 0 + type: INT + } +} +functions { + name: "aten_squeeze_dim" + input: "self" + output: "result_7" + attribute: "dim" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0" + } + type: TENSOR + } + } + node { + input: "int64_0" + input: "tmp_0" + output: "int64_0_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_0" + input: "int64_0_cast" + output: "cond" + name: "n4" + op_type: "Greater" + } + node { + input: "cond" + output: "result_7" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "self" + output: "shape" + name: "n0" + op_type: "Shape" + } + node { + output: "dim" + name: "n1" + op_type: "Constant" + attribute { + name: "value_int" + type: INT + ref_attr_name: "dim" + } + } + node { + input: "shape" + input: "dim" + output: "dim_size" + name: "n2" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "int64_1" + name: "n3" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 1 + name: "int64_1" + } + type: TENSOR + } + } + node { + input: "int64_1" + input: "dim_size" + output: "int64_1_cast" + name: "n4" + op_type: "CastLike" + } + node { + input: "dim_size" + input: "int64_1_cast" + output: "cond_1" + name: "n5" + op_type: "Equal" + } + node { + input: "cond_1" + output: "result_5" + name: "n6" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "dim_2" + name: "n0" + op_type: "Constant" + attribute { + name: "value_int" + type: INT + ref_attr_name: "dim" + } + } + node { + output: "tmp_3" + name: "n1" + op_type: "Constant" + attribute { + name: 
"value_ints" + ints: -1 + type: INTS + } + } + node { + input: "dim_2" + input: "tmp_3" + output: "dims" + name: "n2" + op_type: "Reshape" + } + node { + input: "self" + input: "dims" + output: "result" + name: "n3" + op_type: "Squeeze" + } + name: "thenGraph_8" + output { + name: "result" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "result_4" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_8" + output { + name: "result_4" + } + } + type: GRAPH + } + } + name: "thenGraph_4" + output { + name: "result_5" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "result_6" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_4" + output { + name: "result_6" + } + } + type: GRAPH + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_neg" + input: "self" + output: "return_val" + node { + input: "self" + output: "return_val" + name: "n0" + op_type: "Neg" + } + doc_string: "neg(Tensor self) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "_aten_sum_dim_none" + input: "self" + output: "result_7" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0" + } + type: TENSOR + } + } + node { + input: "int64_0" + input: "tmp_0" + output: "int64_0_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_0" + input: "int64_0_cast" + output: "self_is_scalar" + name: "n4" + op_type: "Equal" + } + node { + input: "self_is_scalar" + output: "self_4" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "tmp_1" + name: "n0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: -1 + type: INTS + } + } + node { + input: "self" + input: "tmp_1" + output: "self_2" + name: "n1" + op_type: "Reshape" + } + name: "thenGraph_4" + output { + name: "self_2" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "self_3" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_4" + output { + name: "self_3" + } + } + type: GRAPH + } + } + node { + input: "self_4" + output: "result" + name: "n6" + op_type: "ReduceSum" + attribute { + name: "keepdims" + type: INT + ref_attr_name: "keepdim" + } + } + node { + input: "self_is_scalar" + output: "result_7" + name: "n7" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "result" + output: "result_5" + name: "n0" + op_type: "Squeeze" + } + name: "thenGraph_9" + output { + name: "result_5" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "result" + output: "result_6" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_9" + output { + name: "result_6" + } + } + type: GRAPH + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "keepdim" + i: 0 + type: INT + } +} +functions { + name: "aten_div" + input: "self" + input: "other" + output: "return_val" + node { + input: "self" + input: "other" + output: "return_val" + name: "n0" + op_type: "Div" + } + doc_string: "div.Tensor(Tensor self, Tensor other) -> Tensor" + opset_import { + domain: "" + version: 
18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "Rank" + input: "input" + output: "return_val" + node { + input: "input" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "return_val" + name: "n1" + op_type: "Size" + } + doc_string: "Take the rank of the input tensor." + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib.common" +} +functions { + name: "IsScalar" + input: "input" + output: "return_val" + node { + input: "input" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "tmp_1" + name: "n2" + op_type: "Constant" + attribute { + name: "value_int" + i: 0 + type: INT + } + } + node { + input: "tmp_0" + input: "tmp_1" + output: "return_val" + name: "n3" + op_type: "Equal" + } + doc_string: "Return whether the input has rank 0, or is a scalar." + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib.common" +} diff --git a/testdata/e2e_models/mobilenetv2_100/mobilenetv2_100_dynamo.textproto b/testdata/e2e_models/mobilenetv2_100/mobilenetv2_100_dynamo.textproto new file mode 100644 index 00000000..157f1a34 --- /dev/null +++ b/testdata/e2e_models/mobilenetv2_100/mobilenetv2_100_dynamo.textproto @@ -0,0 +1,58635 @@ +ir_version: 8 +producer_name: "pytorch" +producer_version: "2.2.0" +graph { + node { + input: "l_x_" + input: "conv_stem.weight" + output: "conv_stem_1" + name: "torch_nn_modules_conv_Conv2d_conv_stem_1_0" + op_type: "torch_nn_modules_conv_Conv2d_conv_stem_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "conv_stem_1" + input: "bn1.running_mean" + input: "bn1.running_var" + input: "bn1.weight" + input: "bn1.bias" + output: "bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "bn1_1" + input: "blocks.0.0.conv_dw.weight" + input: "blocks.0.0.bn1.running_mean" + input: "blocks.0.0.bn1.running_var" + input: "blocks.0.0.bn1.weight" + input: "blocks.0.0.bn1.bias" + input: "blocks.0.0.conv_pw.weight" + input: "blocks.0.0.bn2.running_mean" + input: "blocks.0.0.bn2.running_var" + input: "blocks.0.0.bn2.weight" + input: "blocks.0.0.bn2.bias" + input: "blocks.1.0.conv_pw.weight" + input: "blocks.1.0.bn1.running_mean" + input: "blocks.1.0.bn1.running_var" + input: "blocks.1.0.bn1.weight" + input: "blocks.1.0.bn1.bias" + input: "blocks.1.0.conv_dw.weight" + input: "blocks.1.0.bn2.running_mean" + input: "blocks.1.0.bn2.running_var" + input: "blocks.1.0.bn2.weight" + input: "blocks.1.0.bn2.bias" + input: "blocks.1.0.conv_pwl.weight" + input: "blocks.1.0.bn3.running_mean" + input: "blocks.1.0.bn3.running_var" + input: "blocks.1.0.bn3.weight" + input: "blocks.1.0.bn3.bias" + input: "blocks.1.1.conv_pw.weight" + input: "blocks.1.1.bn1.running_mean" + input: "blocks.1.1.bn1.running_var" + input: "blocks.1.1.bn1.weight" + input: "blocks.1.1.bn1.bias" + input: "blocks.1.1.conv_dw.weight" + input: "blocks.1.1.bn2.running_mean" + input: "blocks.1.1.bn2.running_var" + input: "blocks.1.1.bn2.weight" + input: "blocks.1.1.bn2.bias" + input: "blocks.1.1.conv_pwl.weight" + input: "blocks.1.1.bn3.running_mean" + input: "blocks.1.1.bn3.running_var" + input: "blocks.1.1.bn3.weight" + input: "blocks.1.1.bn3.bias" + input: "blocks.2.0.conv_pw.weight" + input: "blocks.2.0.bn1.running_mean" + input: "blocks.2.0.bn1.running_var" + input: "blocks.2.0.bn1.weight" + input: 
"blocks.2.0.bn1.bias" + input: "blocks.2.0.conv_dw.weight" + input: "blocks.2.0.bn2.running_mean" + input: "blocks.2.0.bn2.running_var" + input: "blocks.2.0.bn2.weight" + input: "blocks.2.0.bn2.bias" + input: "blocks.2.0.conv_pwl.weight" + input: "blocks.2.0.bn3.running_mean" + input: "blocks.2.0.bn3.running_var" + input: "blocks.2.0.bn3.weight" + input: "blocks.2.0.bn3.bias" + input: "blocks.2.1.conv_pw.weight" + input: "blocks.2.1.bn1.running_mean" + input: "blocks.2.1.bn1.running_var" + input: "blocks.2.1.bn1.weight" + input: "blocks.2.1.bn1.bias" + input: "blocks.2.1.conv_dw.weight" + input: "blocks.2.1.bn2.running_mean" + input: "blocks.2.1.bn2.running_var" + input: "blocks.2.1.bn2.weight" + input: "blocks.2.1.bn2.bias" + input: "blocks.2.1.conv_pwl.weight" + input: "blocks.2.1.bn3.running_mean" + input: "blocks.2.1.bn3.running_var" + input: "blocks.2.1.bn3.weight" + input: "blocks.2.1.bn3.bias" + input: "blocks.2.2.conv_pw.weight" + input: "blocks.2.2.bn1.running_mean" + input: "blocks.2.2.bn1.running_var" + input: "blocks.2.2.bn1.weight" + input: "blocks.2.2.bn1.bias" + input: "blocks.2.2.conv_dw.weight" + input: "blocks.2.2.bn2.running_mean" + input: "blocks.2.2.bn2.running_var" + input: "blocks.2.2.bn2.weight" + input: "blocks.2.2.bn2.bias" + input: "blocks.2.2.conv_pwl.weight" + input: "blocks.2.2.bn3.running_mean" + input: "blocks.2.2.bn3.running_var" + input: "blocks.2.2.bn3.weight" + input: "blocks.2.2.bn3.bias" + input: "blocks.3.0.conv_pw.weight" + input: "blocks.3.0.bn1.running_mean" + input: "blocks.3.0.bn1.running_var" + input: "blocks.3.0.bn1.weight" + input: "blocks.3.0.bn1.bias" + input: "blocks.3.0.conv_dw.weight" + input: "blocks.3.0.bn2.running_mean" + input: "blocks.3.0.bn2.running_var" + input: "blocks.3.0.bn2.weight" + input: "blocks.3.0.bn2.bias" + input: "blocks.3.0.conv_pwl.weight" + input: "blocks.3.0.bn3.running_mean" + input: "blocks.3.0.bn3.running_var" + input: "blocks.3.0.bn3.weight" + input: "blocks.3.0.bn3.bias" + input: "blocks.3.1.conv_pw.weight" + input: "blocks.3.1.bn1.running_mean" + input: "blocks.3.1.bn1.running_var" + input: "blocks.3.1.bn1.weight" + input: "blocks.3.1.bn1.bias" + input: "blocks.3.1.conv_dw.weight" + input: "blocks.3.1.bn2.running_mean" + input: "blocks.3.1.bn2.running_var" + input: "blocks.3.1.bn2.weight" + input: "blocks.3.1.bn2.bias" + input: "blocks.3.1.conv_pwl.weight" + input: "blocks.3.1.bn3.running_mean" + input: "blocks.3.1.bn3.running_var" + input: "blocks.3.1.bn3.weight" + input: "blocks.3.1.bn3.bias" + input: "blocks.3.2.conv_pw.weight" + input: "blocks.3.2.bn1.running_mean" + input: "blocks.3.2.bn1.running_var" + input: "blocks.3.2.bn1.weight" + input: "blocks.3.2.bn1.bias" + input: "blocks.3.2.conv_dw.weight" + input: "blocks.3.2.bn2.running_mean" + input: "blocks.3.2.bn2.running_var" + input: "blocks.3.2.bn2.weight" + input: "blocks.3.2.bn2.bias" + input: "blocks.3.2.conv_pwl.weight" + input: "blocks.3.2.bn3.running_mean" + input: "blocks.3.2.bn3.running_var" + input: "blocks.3.2.bn3.weight" + input: "blocks.3.2.bn3.bias" + input: "blocks.3.3.conv_pw.weight" + input: "blocks.3.3.bn1.running_mean" + input: "blocks.3.3.bn1.running_var" + input: "blocks.3.3.bn1.weight" + input: "blocks.3.3.bn1.bias" + input: "blocks.3.3.conv_dw.weight" + input: "blocks.3.3.bn2.running_mean" + input: "blocks.3.3.bn2.running_var" + input: "blocks.3.3.bn2.weight" + input: "blocks.3.3.bn2.bias" + input: "blocks.3.3.conv_pwl.weight" + input: "blocks.3.3.bn3.running_mean" + input: "blocks.3.3.bn3.running_var" + input: 
"blocks.3.3.bn3.weight" + input: "blocks.3.3.bn3.bias" + input: "blocks.4.0.conv_pw.weight" + input: "blocks.4.0.bn1.running_mean" + input: "blocks.4.0.bn1.running_var" + input: "blocks.4.0.bn1.weight" + input: "blocks.4.0.bn1.bias" + input: "blocks.4.0.conv_dw.weight" + input: "blocks.4.0.bn2.running_mean" + input: "blocks.4.0.bn2.running_var" + input: "blocks.4.0.bn2.weight" + input: "blocks.4.0.bn2.bias" + input: "blocks.4.0.conv_pwl.weight" + input: "blocks.4.0.bn3.running_mean" + input: "blocks.4.0.bn3.running_var" + input: "blocks.4.0.bn3.weight" + input: "blocks.4.0.bn3.bias" + input: "blocks.4.1.conv_pw.weight" + input: "blocks.4.1.bn1.running_mean" + input: "blocks.4.1.bn1.running_var" + input: "blocks.4.1.bn1.weight" + input: "blocks.4.1.bn1.bias" + input: "blocks.4.1.conv_dw.weight" + input: "blocks.4.1.bn2.running_mean" + input: "blocks.4.1.bn2.running_var" + input: "blocks.4.1.bn2.weight" + input: "blocks.4.1.bn2.bias" + input: "blocks.4.1.conv_pwl.weight" + input: "blocks.4.1.bn3.running_mean" + input: "blocks.4.1.bn3.running_var" + input: "blocks.4.1.bn3.weight" + input: "blocks.4.1.bn3.bias" + input: "blocks.4.2.conv_pw.weight" + input: "blocks.4.2.bn1.running_mean" + input: "blocks.4.2.bn1.running_var" + input: "blocks.4.2.bn1.weight" + input: "blocks.4.2.bn1.bias" + input: "blocks.4.2.conv_dw.weight" + input: "blocks.4.2.bn2.running_mean" + input: "blocks.4.2.bn2.running_var" + input: "blocks.4.2.bn2.weight" + input: "blocks.4.2.bn2.bias" + input: "blocks.4.2.conv_pwl.weight" + input: "blocks.4.2.bn3.running_mean" + input: "blocks.4.2.bn3.running_var" + input: "blocks.4.2.bn3.weight" + input: "blocks.4.2.bn3.bias" + input: "blocks.5.0.conv_pw.weight" + input: "blocks.5.0.bn1.running_mean" + input: "blocks.5.0.bn1.running_var" + input: "blocks.5.0.bn1.weight" + input: "blocks.5.0.bn1.bias" + input: "blocks.5.0.conv_dw.weight" + input: "blocks.5.0.bn2.running_mean" + input: "blocks.5.0.bn2.running_var" + input: "blocks.5.0.bn2.weight" + input: "blocks.5.0.bn2.bias" + input: "blocks.5.0.conv_pwl.weight" + input: "blocks.5.0.bn3.running_mean" + input: "blocks.5.0.bn3.running_var" + input: "blocks.5.0.bn3.weight" + input: "blocks.5.0.bn3.bias" + input: "blocks.5.1.conv_pw.weight" + input: "blocks.5.1.bn1.running_mean" + input: "blocks.5.1.bn1.running_var" + input: "blocks.5.1.bn1.weight" + input: "blocks.5.1.bn1.bias" + input: "blocks.5.1.conv_dw.weight" + input: "blocks.5.1.bn2.running_mean" + input: "blocks.5.1.bn2.running_var" + input: "blocks.5.1.bn2.weight" + input: "blocks.5.1.bn2.bias" + input: "blocks.5.1.conv_pwl.weight" + input: "blocks.5.1.bn3.running_mean" + input: "blocks.5.1.bn3.running_var" + input: "blocks.5.1.bn3.weight" + input: "blocks.5.1.bn3.bias" + input: "blocks.5.2.conv_pw.weight" + input: "blocks.5.2.bn1.running_mean" + input: "blocks.5.2.bn1.running_var" + input: "blocks.5.2.bn1.weight" + input: "blocks.5.2.bn1.bias" + input: "blocks.5.2.conv_dw.weight" + input: "blocks.5.2.bn2.running_mean" + input: "blocks.5.2.bn2.running_var" + input: "blocks.5.2.bn2.weight" + input: "blocks.5.2.bn2.bias" + input: "blocks.5.2.conv_pwl.weight" + input: "blocks.5.2.bn3.running_mean" + input: "blocks.5.2.bn3.running_var" + input: "blocks.5.2.bn3.weight" + input: "blocks.5.2.bn3.bias" + input: "blocks.6.0.conv_pw.weight" + input: "blocks.6.0.bn1.running_mean" + input: "blocks.6.0.bn1.running_var" + input: "blocks.6.0.bn1.weight" + input: "blocks.6.0.bn1.bias" + input: "blocks.6.0.conv_dw.weight" + input: "blocks.6.0.bn2.running_mean" + input: 
"blocks.6.0.bn2.running_var" + input: "blocks.6.0.bn2.weight" + input: "blocks.6.0.bn2.bias" + input: "blocks.6.0.conv_pwl.weight" + input: "blocks.6.0.bn3.running_mean" + input: "blocks.6.0.bn3.running_var" + input: "blocks.6.0.bn3.weight" + input: "blocks.6.0.bn3.bias" + output: "blocks_1" + name: "torch_nn_modules_container_Sequential_blocks_1_2" + op_type: "torch_nn_modules_container_Sequential_blocks_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "blocks_1" + input: "conv_head.weight" + output: "conv_head_1" + name: "torch_nn_modules_conv_Conv2d_conv_head_1_3" + op_type: "torch_nn_modules_conv_Conv2d_conv_head_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "conv_head_1" + input: "bn2.running_mean" + input: "bn2.running_var" + input: "bn2.weight" + input: "bn2.bias" + output: "bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_bn2_1_4" + op_type: "timm_layers_norm_act_BatchNormAct2d_bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "bn2_1" + output: "global_pool_1" + name: "timm_layers_adaptive_avgmax_pool_SelectAdaptivePool2d_global_pool_1_5" + op_type: "timm_layers_adaptive_avgmax_pool_SelectAdaptivePool2d_global_pool_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "global_pool_1" + input: "classifier.weight" + input: "classifier.bias" + output: "classifier_1" + name: "torch_nn_modules_linear_Linear_classifier_1_6" + op_type: "torch_nn_modules_linear_Linear_classifier_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + name: "main_graph" + initializer { + dims: 32 + dims: 3 + dims: 3 + dims: 3 + data_type: 1 + name: "conv_stem.weight" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "bn1.running_mean" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "bn1.running_var" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "bn1.weight" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "bn1.bias" + raw_data: "" + } + initializer { + dims: 32 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.0.0.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.0.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.0.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.0.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.0.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 16 + dims: 32 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.0.0.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 16 + data_type: 1 + name: "blocks.0.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 16 + data_type: 1 + name: "blocks.0.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 16 + data_type: 1 + name: "blocks.0.0.bn2.weight" + raw_data: "" + } + initializer { + dims: 16 + data_type: 1 + name: "blocks.0.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 96 + dims: 16 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.1.0.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.1.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.1.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.1.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.1.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 96 + dims: 1 + dims: 3 + dims: 3 + 
data_type: 1 + name: "blocks.1.0.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.1.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.1.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.1.0.bn2.weight" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.1.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 24 + dims: 96 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.1.0.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 24 + data_type: 1 + name: "blocks.1.0.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 24 + data_type: 1 + name: "blocks.1.0.bn3.running_var" + raw_data: "" + } + initializer { + dims: 24 + data_type: 1 + name: "blocks.1.0.bn3.weight" + raw_data: "" + } + initializer { + dims: 24 + data_type: 1 + name: "blocks.1.0.bn3.bias" + raw_data: "" + } + initializer { + dims: 144 + dims: 24 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.1.1.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.1.1.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.1.1.bn1.running_var" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.1.1.bn1.weight" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.1.1.bn1.bias" + raw_data: "" + } + initializer { + dims: 144 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.1.1.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.1.1.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.1.1.bn2.running_var" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.1.1.bn2.weight" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.1.1.bn2.bias" + raw_data: "" + } + initializer { + dims: 24 + dims: 144 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.1.1.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 24 + data_type: 1 + name: "blocks.1.1.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 24 + data_type: 1 + name: "blocks.1.1.bn3.running_var" + raw_data: "" + } + initializer { + dims: 24 + data_type: 1 + name: "blocks.1.1.bn3.weight" + raw_data: "" + } + initializer { + dims: 24 + data_type: 1 + name: "blocks.1.1.bn3.bias" + raw_data: "" + } + initializer { + dims: 144 + dims: 24 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.2.0.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.2.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.2.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.2.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.2.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 144 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.2.0.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.2.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.2.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.2.0.bn2.weight" + raw_data: "" + } + initializer { + dims: 144 + data_type: 1 + name: "blocks.2.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 32 + 
dims: 144 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.2.0.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.0.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.0.bn3.running_var" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.0.bn3.weight" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.0.bn3.bias" + raw_data: "" + } + initializer { + dims: 192 + dims: 32 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.2.1.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.1.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.1.bn1.running_var" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.1.bn1.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.1.bn1.bias" + raw_data: "" + } + initializer { + dims: 192 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.2.1.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.1.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.1.bn2.running_var" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.1.bn2.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.1.bn2.bias" + raw_data: "" + } + initializer { + dims: 32 + dims: 192 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.2.1.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.1.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.1.bn3.running_var" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.1.bn3.weight" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.1.bn3.bias" + raw_data: "" + } + initializer { + dims: 192 + dims: 32 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.2.2.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.2.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.2.bn1.running_var" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.2.bn1.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.2.bn1.bias" + raw_data: "" + } + initializer { + dims: 192 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.2.2.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.2.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.2.bn2.running_var" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.2.bn2.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.2.2.bn2.bias" + raw_data: "" + } + initializer { + dims: 32 + dims: 192 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.2.2.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.2.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.2.bn3.running_var" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.2.bn3.weight" + raw_data: "" + } + initializer { + dims: 32 + data_type: 1 + name: "blocks.2.2.bn3.bias" + raw_data: "" 
+ } + initializer { + dims: 192 + dims: 32 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.3.0.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.3.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.3.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.3.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.3.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 192 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.3.0.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.3.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.3.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.3.0.bn2.weight" + raw_data: "" + } + initializer { + dims: 192 + data_type: 1 + name: "blocks.3.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 64 + dims: 192 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.3.0.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.0.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.0.bn3.running_var" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.0.bn3.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.0.bn3.bias" + raw_data: "" + } + initializer { + dims: 384 + dims: 64 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.3.1.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.1.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.1.bn1.running_var" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.1.bn1.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.1.bn1.bias" + raw_data: "" + } + initializer { + dims: 384 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.3.1.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.1.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.1.bn2.running_var" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.1.bn2.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.1.bn2.bias" + raw_data: "" + } + initializer { + dims: 64 + dims: 384 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.3.1.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.1.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.1.bn3.running_var" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.1.bn3.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.1.bn3.bias" + raw_data: "" + } + initializer { + dims: 384 + dims: 64 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.3.2.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.2.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.2.bn1.running_var" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.2.bn1.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: 
"blocks.3.2.bn1.bias" + raw_data: "" + } + initializer { + dims: 384 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.3.2.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.2.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.2.bn2.running_var" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.2.bn2.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.2.bn2.bias" + raw_data: "" + } + initializer { + dims: 64 + dims: 384 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.3.2.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.2.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.2.bn3.running_var" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.2.bn3.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.2.bn3.bias" + raw_data: "" + } + initializer { + dims: 384 + dims: 64 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.3.3.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.3.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.3.bn1.running_var" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.3.bn1.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.3.bn1.bias" + raw_data: "" + } + initializer { + dims: 384 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.3.3.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.3.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.3.bn2.running_var" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.3.bn2.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.3.3.bn2.bias" + raw_data: "" + } + initializer { + dims: 64 + dims: 384 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.3.3.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.3.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.3.bn3.running_var" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.3.bn3.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "blocks.3.3.bn3.bias" + raw_data: "" + } + initializer { + dims: 384 + dims: 64 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.4.0.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.4.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.4.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.4.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.4.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 384 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.4.0.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.4.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.4.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 384 + data_type: 1 + name: "blocks.4.0.bn2.weight" + raw_data: "" + } + initializer { 
+ dims: 384 + data_type: 1 + name: "blocks.4.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 96 + dims: 384 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.4.0.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.0.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.0.bn3.running_var" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.0.bn3.weight" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.0.bn3.bias" + raw_data: "" + } + initializer { + dims: 576 + dims: 96 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.4.1.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.1.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.1.bn1.running_var" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.1.bn1.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.1.bn1.bias" + raw_data: "" + } + initializer { + dims: 576 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.4.1.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.1.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.1.bn2.running_var" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.1.bn2.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.1.bn2.bias" + raw_data: "" + } + initializer { + dims: 96 + dims: 576 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.4.1.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.1.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.1.bn3.running_var" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.1.bn3.weight" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.1.bn3.bias" + raw_data: "" + } + initializer { + dims: 576 + dims: 96 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.4.2.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.2.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.2.bn1.running_var" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.2.bn1.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.2.bn1.bias" + raw_data: "" + } + initializer { + dims: 576 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.4.2.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.2.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.2.bn2.running_var" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.2.bn2.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.4.2.bn2.bias" + raw_data: "" + } + initializer { + dims: 96 + dims: 576 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.4.2.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.2.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.2.bn3.running_var" + raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.2.bn3.weight" + 
raw_data: "" + } + initializer { + dims: 96 + data_type: 1 + name: "blocks.4.2.bn3.bias" + raw_data: "" + } + initializer { + dims: 576 + dims: 96 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.5.0.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.5.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.5.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.5.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.5.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 576 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.5.0.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.5.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.5.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.5.0.bn2.weight" + raw_data: "" + } + initializer { + dims: 576 + data_type: 1 + name: "blocks.5.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 160 + dims: 576 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.5.0.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.0.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.0.bn3.running_var" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.0.bn3.weight" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.0.bn3.bias" + raw_data: "" + } + initializer { + dims: 960 + dims: 160 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.5.1.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.1.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.1.bn1.running_var" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.1.bn1.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.1.bn1.bias" + raw_data: "" + } + initializer { + dims: 960 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.5.1.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.1.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.1.bn2.running_var" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.1.bn2.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.1.bn2.bias" + raw_data: "" + } + initializer { + dims: 160 + dims: 960 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.5.1.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.1.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.1.bn3.running_var" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.1.bn3.weight" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.1.bn3.bias" + raw_data: "" + } + initializer { + dims: 960 + dims: 160 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.5.2.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.2.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.2.bn1.running_var" + raw_data: "" + } + initializer { + dims: 960 + 
data_type: 1 + name: "blocks.5.2.bn1.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.2.bn1.bias" + raw_data: "" + } + initializer { + dims: 960 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.5.2.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.2.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.2.bn2.running_var" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.2.bn2.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.5.2.bn2.bias" + raw_data: "" + } + initializer { + dims: 160 + dims: 960 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.5.2.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.2.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.2.bn3.running_var" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.2.bn3.weight" + raw_data: "" + } + initializer { + dims: 160 + data_type: 1 + name: "blocks.5.2.bn3.bias" + raw_data: "" + } + initializer { + dims: 960 + dims: 160 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.6.0.conv_pw.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.6.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.6.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.6.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.6.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 960 + dims: 1 + dims: 3 + dims: 3 + data_type: 1 + name: "blocks.6.0.conv_dw.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.6.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.6.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.6.0.bn2.weight" + raw_data: "" + } + initializer { + dims: 960 + data_type: 1 + name: "blocks.6.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 320 + dims: 960 + dims: 1 + dims: 1 + data_type: 1 + name: "blocks.6.0.conv_pwl.weight" + raw_data: "" + } + initializer { + dims: 320 + data_type: 1 + name: "blocks.6.0.bn3.running_mean" + raw_data: "" + } + initializer { + dims: 320 + data_type: 1 + name: "blocks.6.0.bn3.running_var" + raw_data: "" + } + initializer { + dims: 320 + data_type: 1 + name: "blocks.6.0.bn3.weight" + raw_data: "" + } + initializer { + dims: 320 + data_type: 1 + name: "blocks.6.0.bn3.bias" + raw_data: "" + } + initializer { + dims: 1280 + dims: 320 + dims: 1 + dims: 1 + data_type: 1 + name: "conv_head.weight" + raw_data: "" + } + initializer { + dims: 1280 + data_type: 1 + name: "bn2.running_mean" + raw_data: "" + } + initializer { + dims: 1280 + data_type: 1 + name: "bn2.running_var" + raw_data: "" + } + initializer { + dims: 1280 + data_type: 1 + name: "bn2.weight" + raw_data: "" + } + initializer { + dims: 1280 + data_type: 1 + name: "bn2.bias" + raw_data: "" + } + initializer { + dims: 1000 + dims: 1280 + data_type: 1 + name: "classifier.weight" + raw_data: "" + } + initializer { + dims: 1000 + data_type: 1 + name: "classifier.bias" + raw_data: "" + } + input { + name: "l_x_" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 224 + } + dim { 
+ dim_value: 224 + } + } + } + } + } + output { + name: "classifier_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1000 + } + } + } + } + } + value_info { + name: "conv_stem.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.0.0.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.0.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.0.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.0.0.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.0.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.0.0.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.0.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "blocks.0.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "blocks.0.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "blocks.0.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "blocks.1.0.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 16 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.1.0.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + 
} + } + } + value_info { + name: "blocks.1.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.1.0.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "blocks.1.0.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "blocks.1.1.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.1.1.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.1.1.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: 
"blocks.1.1.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "blocks.1.1.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "blocks.2.0.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.2.0.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "blocks.2.0.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.2.0.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.2.1.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.1.conv_dw.weight" + type { + tensor_type { + 
elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.1.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.2.1.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.2.2.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.2.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.2.2.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: 
"blocks.2.2.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.2.2.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "blocks.3.0.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.3.0.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "blocks.3.0.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.0.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.1.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn1.bias" + type { + tensor_type { + 
elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.1.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.1.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.1.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.2.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.2.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.2.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"blocks.3.2.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.2.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.3.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.3.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.3.3.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.3.3.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "blocks.4.0.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn1.weight" + type { + tensor_type 
{ + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.4.0.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "blocks.4.0.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.0.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.1.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.1.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.1.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + 
} + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.1.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.2.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.2.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.4.2.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.4.2.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "blocks.5.0.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + 
shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.5.0.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "blocks.5.0.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.0.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.1.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.1.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + 
} + value_info { + name: "blocks.5.1.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.1.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.2.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.2.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.5.2.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.5.2.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "blocks.6.0.conv_pw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + 
shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.6.0.conv_dw.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "blocks.6.0.conv_pwl.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn3.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn3.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn3.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + } + } + } + } + value_info { + name: "blocks.6.0.bn3.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + } + } + } + } + value_info { + name: "conv_head.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + dim { + dim_value: 320 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "classifier.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1000 + } + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "classifier.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1000 + } + } + } + } + } + value_info { + name: "conv_stem_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + 
dim_value: 112 + } + } + } + } + } + value_info { + name: "blocks_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "conv_head_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "global_pool_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_conv_stem_1/l_x_" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 224 + } + dim { + dim_value: 224 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_conv_stem_1/convolution" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_bn1_act_1/add_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_bn1_act_1/hardtanh" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_bn1_act_1/copy" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/convolution" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/convert_element_type" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/convert_element_type_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/add" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/sqrt" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/reciprocal" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/scalar_tensor_default" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/mul" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/unsqueeze" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/unsqueeze_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/unsqueeze_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/unsqueeze_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/sub" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/mul_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/unsqueeze_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/unsqueeze_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/mul_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/unsqueeze_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/unsqueeze_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/add_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn1_1/bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___0_____0___conv_dw_1/copy" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___0_____0___conv_dw_1/convolution_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___0_____0___bn1_act_1/add_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___0_____0___bn1_act_1/hardtanh_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___0_____0___bn1_act_1/copy_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/convolution_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/convert_element_type_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/convert_element_type_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/add_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/sqrt_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/reciprocal_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/scalar_tensor_default_1" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/mul_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 
+ } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/unsqueeze_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/unsqueeze_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/unsqueeze_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/unsqueeze_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/sub_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/mul_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/unsqueeze_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/unsqueeze_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/mul_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/unsqueeze_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/unsqueeze_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/add_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1/getattr_getattr_l__self___blocks___0_____0___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___0_____0___conv_pw_1/copy_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___0_____0___conv_pw_1/convolution_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/convolution_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/convert_element_type_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/convert_element_type_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/add_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/sqrt_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/reciprocal_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/scalar_tensor_default_2" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/mul_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/unsqueeze_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/unsqueeze_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/unsqueeze_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/unsqueeze_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/sub_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/mul_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/unsqueeze_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/unsqueeze_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/mul_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/unsqueeze_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/unsqueeze_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1/add_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_DepthwiseSeparableConv_blocks_0_0_1/copy" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_DepthwiseSeparableConv_blocks_0_0_1/getattr_getattr_l__self___blocks___0_____0___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_models__efficientnet_blocks_DepthwiseSeparableConv_blocks_0_0_1/getattr_getattr_l__self___blocks___0_____0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_DepthwiseSeparableConv_blocks_0_0_1/getattr_getattr_l__self___blocks___0_____0___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_DepthwiseSeparableConv_blocks_0_0_1/getattr_getattr_l__self___blocks___0_____0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_0_1/copy" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_0_1/blocks_0_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_pw_1/add_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_pw_1/convolution_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn1_act_1/add_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn1_act_1/hardtanh_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn1_act_1/copy_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/convolution_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } 
+ value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/convert_element_type_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/convert_element_type_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/add_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/sqrt_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/reciprocal_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/scalar_tensor_default_3" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/mul_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/unsqueeze_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/unsqueeze_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/unsqueeze_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/unsqueeze_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/sub_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/mul_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/unsqueeze_28" + type { + tensor_type { + elem_type: 1 + shape { + dim 
{ + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/unsqueeze_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/mul_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/unsqueeze_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/unsqueeze_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/add_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1/getattr_getattr_l__self___blocks___1_____0___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_dw_1/copy_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_dw_1/convolution_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn2_act_1/add_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn2_act_1/hardtanh_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn2_act_1/copy_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + 
name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/convolution_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/convert_element_type_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/convert_element_type_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/add_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/sqrt_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/reciprocal_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/scalar_tensor_default_4" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/mul_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/unsqueeze_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/unsqueeze_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/unsqueeze_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/unsqueeze_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/sub_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/mul_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + 
} + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/unsqueeze_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/unsqueeze_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/mul_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/unsqueeze_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/unsqueeze_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/add_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1/getattr_getattr_l__self___blocks___1_____0___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_pwl_1/copy_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_pwl_1/convolution_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/convolution_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/convert_element_type_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/convert_element_type_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/add_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/sqrt_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/reciprocal_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/scalar_tensor_default_5" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/mul_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/unsqueeze_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/unsqueeze_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/unsqueeze_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/unsqueeze_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/sub_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/mul_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/unsqueeze_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/unsqueeze_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + 
dim_value: 24 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/mul_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/unsqueeze_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/unsqueeze_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1/add_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_0_1/add_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_0_1/getattr_getattr_l__self___blocks___1_____0___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_0_1/getattr_getattr_l__self___blocks___1_____0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_0_1/getattr_getattr_l__self___blocks___1_____0___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_0_1/getattr_getattr_l__self___blocks___1_____0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_0_1/getattr_getattr_l__self___blocks___1_____0___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_0_1/getattr_getattr_l__self___blocks___1_____0___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + 
} + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_pw_1/add_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_pw_1/convolution_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn1_act_1/add_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn1_act_1/hardtanh_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn1_act_1/copy_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/convolution_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/convert_element_type_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/convert_element_type_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/add_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/sqrt_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/reciprocal_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/scalar_tensor_default_6" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/mul_18" + type { + tensor_type { + elem_type: 1 
+ shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/unsqueeze_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/unsqueeze_49" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/unsqueeze_50" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/unsqueeze_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/sub_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/mul_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/unsqueeze_52" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/unsqueeze_53" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/mul_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/unsqueeze_54" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/unsqueeze_55" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/add_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1/getattr_getattr_l__self___blocks___1_____1___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_dw_1/copy_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_dw_1/convolution_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn2_act_1/add_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn2_act_1/hardtanh_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn2_act_1/copy_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/convolution_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/convert_element_type_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/convert_element_type_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/add_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/sqrt_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/reciprocal_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/scalar_tensor_default_7" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/mul_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/unsqueeze_56" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/unsqueeze_57" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/unsqueeze_58" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/unsqueeze_59" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/sub_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/mul_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/unsqueeze_60" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/unsqueeze_61" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/mul_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/unsqueeze_62" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/unsqueeze_63" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 
+ } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/add_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1/getattr_getattr_l__self___blocks___1_____1___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_pwl_1/copy_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_pwl_1/convolution_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/convolution_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/convert_element_type_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/convert_element_type_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/add_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/sqrt_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/reciprocal_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/scalar_tensor_default_8" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/mul_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/unsqueeze_64" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + } + } 
+ } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/unsqueeze_65" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/unsqueeze_66" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/unsqueeze_67" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/sub_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/mul_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/unsqueeze_68" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/unsqueeze_69" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/mul_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/unsqueeze_70" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/unsqueeze_71" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 24 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1/add_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1/add_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1/getattr_getattr_l__self___blocks___1_____1___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1/getattr_getattr_l__self___blocks___1_____1___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1/getattr_getattr_l__self___blocks___1_____1___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1/getattr_getattr_l__self___blocks___1_____1___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1/getattr_getattr_l__self___blocks___1_____1___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1/getattr_getattr_l__self___blocks___1_____1___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1/add_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1_1/add_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1_1/blocks_1_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1_1/blocks_1_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_pw_1/add_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_pw_1/convolution_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn1_act_1/add_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn1_act_1/hardtanh_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn1_act_1/copy_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/convolution_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/convert_element_type_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/convert_element_type_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/add_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/sqrt_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/reciprocal_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/scalar_tensor_default_9" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/mul_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/unsqueeze_72" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/unsqueeze_73" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/unsqueeze_74" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/unsqueeze_75" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/sub_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/mul_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/unsqueeze_76" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/unsqueeze_77" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/mul_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/unsqueeze_78" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/unsqueeze_79" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/add_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1/getattr_getattr_l__self___blocks___2_____0___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + 
name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_dw_1/copy_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_dw_1/convolution_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn2_act_1/add_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn2_act_1/hardtanh_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn2_act_1/copy_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/convolution_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/convert_element_type_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/convert_element_type_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/add_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/sqrt_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/reciprocal_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/scalar_tensor_default_10" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/mul_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { 
+ dim_value: 144 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/unsqueeze_80" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/unsqueeze_81" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/unsqueeze_82" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/unsqueeze_83" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/sub_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/mul_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/unsqueeze_84" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/unsqueeze_85" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/mul_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/unsqueeze_86" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/unsqueeze_87" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 144 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/add_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1/getattr_getattr_l__self___blocks___2_____0___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_pwl_1/copy_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_pwl_1/convolution_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/convolution_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/convert_element_type_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/convert_element_type_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/add_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/sqrt_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/reciprocal_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/scalar_tensor_default_11" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/mul_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/unsqueeze_88" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/unsqueeze_89" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/unsqueeze_90" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/unsqueeze_91" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/sub_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/mul_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/unsqueeze_92" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/unsqueeze_93" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/mul_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/unsqueeze_94" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/unsqueeze_95" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1/add_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_0_1/add_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_0_1/getattr_getattr_l__self___blocks___2_____0___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_0_1/getattr_getattr_l__self___blocks___2_____0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_0_1/getattr_getattr_l__self___blocks___2_____0___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_0_1/getattr_getattr_l__self___blocks___2_____0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 144 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_0_1/getattr_getattr_l__self___blocks___2_____0___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_0_1/getattr_getattr_l__self___blocks___2_____0___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_pw_1/add_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_pw_1/convolution_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn1_act_1/add_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn1_act_1/hardtanh_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn1_act_1/copy_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/convolution_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + 
dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/convert_element_type_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/convert_element_type_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/add_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/sqrt_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/reciprocal_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/scalar_tensor_default_12" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/mul_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/unsqueeze_96" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/unsqueeze_97" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/unsqueeze_98" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/unsqueeze_99" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/sub_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/mul_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/unsqueeze_100" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/unsqueeze_101" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/mul_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/unsqueeze_102" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/unsqueeze_103" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/add_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1/getattr_getattr_l__self___blocks___2_____1___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_dw_1/copy_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_dw_1/convolution_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn2_act_1/add_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn2_act_1/hardtanh_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn2_act_1/copy_9" + type { + 
tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/convolution_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/convert_element_type_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/convert_element_type_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/add_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/sqrt_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/reciprocal_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/scalar_tensor_default_13" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/mul_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/unsqueeze_104" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/unsqueeze_105" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/unsqueeze_106" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/unsqueeze_107" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/sub_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + 
value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/mul_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/unsqueeze_108" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/unsqueeze_109" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/mul_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/unsqueeze_110" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/unsqueeze_111" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/add_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1/getattr_getattr_l__self___blocks___2_____1___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_pwl_1/copy_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_pwl_1/convolution_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/convolution_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/convert_element_type_28" + type { + 
tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/convert_element_type_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/add_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/sqrt_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/reciprocal_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/scalar_tensor_default_14" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/mul_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/unsqueeze_112" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/unsqueeze_113" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/unsqueeze_114" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/unsqueeze_115" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/sub_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/mul_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/unsqueeze_116" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/unsqueeze_117" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/mul_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/unsqueeze_118" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/unsqueeze_119" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1/add_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1/add_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1/getattr_getattr_l__self___blocks___2_____1___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1/getattr_getattr_l__self___blocks___2_____1___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1/getattr_getattr_l__self___blocks___2_____1___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1/getattr_getattr_l__self___blocks___2_____1___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1/getattr_getattr_l__self___blocks___2_____1___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1/getattr_getattr_l__self___blocks___2_____1___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1/add_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_pw_1/add_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_pw_1/convolution_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn1_act_1/add_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn1_act_1/hardtanh_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn1_act_1/copy_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/convolution_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/convert_element_type_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/convert_element_type_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/add_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/sqrt_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/reciprocal_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/scalar_tensor_default_15" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/mul_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/unsqueeze_120" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/unsqueeze_121" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/unsqueeze_122" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/unsqueeze_123" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/sub_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/mul_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/unsqueeze_124" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/unsqueeze_125" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/mul_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/unsqueeze_126" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/unsqueeze_127" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/add_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1/getattr_getattr_l__self___blocks___2_____2___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_dw_1/copy_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_dw_1/convolution_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn2_act_1/add_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn2_act_1/hardtanh_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn2_act_1/copy_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/convolution_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/convert_element_type_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/convert_element_type_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/add_34" + type { + 
tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/sqrt_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/reciprocal_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/scalar_tensor_default_16" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/mul_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/unsqueeze_128" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/unsqueeze_129" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/unsqueeze_130" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/unsqueeze_131" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/sub_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/mul_49" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/unsqueeze_132" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/unsqueeze_133" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/mul_50" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 
28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/unsqueeze_134" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/unsqueeze_135" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/add_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1/getattr_getattr_l__self___blocks___2_____2___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_pwl_1/copy_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_pwl_1/convolution_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/convolution_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/convert_element_type_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/convert_element_type_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/add_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/sqrt_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/reciprocal_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/scalar_tensor_default_17" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/mul_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/unsqueeze_136" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/unsqueeze_137" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/unsqueeze_138" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/unsqueeze_139" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/sub_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/mul_52" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/unsqueeze_140" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/unsqueeze_141" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/mul_53" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/unsqueeze_142" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/unsqueeze_143" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 32 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + 
} + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1/add_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1/add_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1/getattr_getattr_l__self___blocks___2_____2___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1/getattr_getattr_l__self___blocks___2_____2___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1/getattr_getattr_l__self___blocks___2_____2___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1/getattr_getattr_l__self___blocks___2_____2___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1/getattr_getattr_l__self___blocks___2_____2___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1/getattr_getattr_l__self___blocks___2_____2___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1/add_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_2_1/add_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_2_1/blocks_2_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_2_1/blocks_2_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_2_1/blocks_2_2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_pw_1/add_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_pw_1/convolution_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn1_act_1/add_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn1_act_1/hardtanh_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn1_act_1/copy_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/convolution_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/convert_element_type_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/convert_element_type_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/add_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/sqrt_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/reciprocal_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/scalar_tensor_default_18" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/mul_54" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/unsqueeze_144" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/unsqueeze_145" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/unsqueeze_146" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/unsqueeze_147" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/sub_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/mul_55" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/unsqueeze_148" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/unsqueeze_149" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/mul_56" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/unsqueeze_150" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/unsqueeze_151" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/add_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1/getattr_getattr_l__self___blocks___3_____0___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_dw_1/copy_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_dw_1/convolution_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn2_act_1/add_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn2_act_1/hardtanh_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn2_act_1/copy_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/convolution_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/convert_element_type_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/convert_element_type_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/add_41" + type { + 
tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/sqrt_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/reciprocal_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/scalar_tensor_default_19" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/mul_57" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/unsqueeze_152" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/unsqueeze_153" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/unsqueeze_154" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/unsqueeze_155" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/sub_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/mul_58" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/unsqueeze_156" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/unsqueeze_157" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/mul_59" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 
14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/unsqueeze_158" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/unsqueeze_159" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 192 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/add_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1/getattr_getattr_l__self___blocks___3_____0___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_pwl_1/copy_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_pwl_1/convolution_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/convolution_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/convert_element_type_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/convert_element_type_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/add_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/sqrt_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/reciprocal_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/scalar_tensor_default_20" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/mul_60" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/unsqueeze_160" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/unsqueeze_161" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/unsqueeze_162" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/unsqueeze_163" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/sub_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/mul_61" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/unsqueeze_164" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/unsqueeze_165" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/mul_62" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/unsqueeze_166" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/unsqueeze_167" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + 
} + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1/add_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_0_1/add_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_0_1/getattr_getattr_l__self___blocks___3_____0___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_0_1/getattr_getattr_l__self___blocks___3_____0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_0_1/getattr_getattr_l__self___blocks___3_____0___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_0_1/getattr_getattr_l__self___blocks___3_____0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 192 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_0_1/getattr_getattr_l__self___blocks___3_____0___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_0_1/getattr_getattr_l__self___blocks___3_____0___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_pw_1/add_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_pw_1/convolution_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn1_act_1/add_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + 
dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn1_act_1/hardtanh_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn1_act_1/copy_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/convolution_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/convert_element_type_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/convert_element_type_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/add_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/sqrt_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/reciprocal_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/scalar_tensor_default_21" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/mul_63" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/unsqueeze_168" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/unsqueeze_169" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/unsqueeze_170" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/unsqueeze_171" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/sub_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/mul_64" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/unsqueeze_172" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/unsqueeze_173" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/mul_65" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/unsqueeze_174" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/unsqueeze_175" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/add_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1/getattr_getattr_l__self___blocks___3_____1___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_dw_1/copy_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_dw_1/convolution_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { 
+ dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn2_act_1/add_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn2_act_1/hardtanh_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn2_act_1/copy_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/convolution_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/convert_element_type_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/convert_element_type_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/add_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/sqrt_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/reciprocal_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/scalar_tensor_default_22" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/mul_66" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/unsqueeze_176" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/unsqueeze_177" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 
1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/unsqueeze_178" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/unsqueeze_179" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/sub_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/mul_67" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/unsqueeze_180" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/unsqueeze_181" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/mul_68" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/unsqueeze_182" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/unsqueeze_183" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/add_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1/getattr_getattr_l__self___blocks___3_____1___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_pwl_1/copy_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + 
dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_pwl_1/convolution_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/convolution_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/convert_element_type_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/convert_element_type_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/add_49" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/sqrt_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/reciprocal_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/scalar_tensor_default_23" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/mul_69" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/unsqueeze_184" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/unsqueeze_185" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/unsqueeze_186" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/unsqueeze_187" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/sub_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/mul_70" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/unsqueeze_188" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/unsqueeze_189" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/mul_71" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/unsqueeze_190" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/unsqueeze_191" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1/add_50" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1/add_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1/getattr_getattr_l__self___blocks___3_____1___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1/getattr_getattr_l__self___blocks___3_____1___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1/getattr_getattr_l__self___blocks___3_____1___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } 
+ dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1/getattr_getattr_l__self___blocks___3_____1___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1/getattr_getattr_l__self___blocks___3_____1___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1/getattr_getattr_l__self___blocks___3_____1___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1/add_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_pw_1/add_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_pw_1/convolution_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn1_act_1/add_53" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn1_act_1/hardtanh_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn1_act_1/copy_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/convolution_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/convert_element_type_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 
384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/convert_element_type_49" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/add_52" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/sqrt_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/reciprocal_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/scalar_tensor_default_24" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/mul_72" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/unsqueeze_192" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/unsqueeze_193" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/unsqueeze_194" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/unsqueeze_195" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/sub_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/mul_73" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/unsqueeze_196" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/unsqueeze_197" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/mul_74" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/unsqueeze_198" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/unsqueeze_199" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/add_53" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1/getattr_getattr_l__self___blocks___3_____2___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_dw_1/copy_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_dw_1/convolution_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn2_act_1/add_55" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn2_act_1/hardtanh_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn2_act_1/copy_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/convolution_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/convert_element_type_50" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/convert_element_type_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/add_54" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/sqrt_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/reciprocal_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/scalar_tensor_default_25" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/mul_75" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/unsqueeze_200" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/unsqueeze_201" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/unsqueeze_202" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/unsqueeze_203" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/sub_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/mul_76" + type { + tensor_type { + elem_type: 1 + shape { + dim { 
+ dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/unsqueeze_204" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/unsqueeze_205" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/mul_77" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/unsqueeze_206" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/unsqueeze_207" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/add_55" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1/getattr_getattr_l__self___blocks___3_____2___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_pwl_1/copy_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_pwl_1/convolution_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/convolution_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/convert_element_type_52" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/convert_element_type_53" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/add_56" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/sqrt_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/reciprocal_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/scalar_tensor_default_26" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/mul_78" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/unsqueeze_208" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/unsqueeze_209" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/unsqueeze_210" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/unsqueeze_211" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/sub_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/mul_79" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/unsqueeze_212" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/unsqueeze_213" + type { + tensor_type { + elem_type: 1 + shape { + 
dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/mul_80" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/unsqueeze_214" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/unsqueeze_215" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1/add_57" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1/add_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1/getattr_getattr_l__self___blocks___3_____2___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1/getattr_getattr_l__self___blocks___3_____2___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1/getattr_getattr_l__self___blocks___3_____2___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1/getattr_getattr_l__self___blocks___3_____2___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1/getattr_getattr_l__self___blocks___3_____2___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1/getattr_getattr_l__self___blocks___3_____2___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } 
+ } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1/add_58" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_pw_1/add_58" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_pw_1/convolution_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn1_act_1/add_60" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn1_act_1/hardtanh_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn1_act_1/copy_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/convolution_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/convert_element_type_54" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/convert_element_type_55" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/add_59" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/sqrt_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/reciprocal_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/scalar_tensor_default_27" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/mul_81" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/unsqueeze_216" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/unsqueeze_217" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/unsqueeze_218" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/unsqueeze_219" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/sub_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/mul_82" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/unsqueeze_220" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/unsqueeze_221" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/mul_83" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/unsqueeze_222" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/unsqueeze_223" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + 
dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/add_60" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1/getattr_getattr_l__self___blocks___3_____3___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_dw_1/copy_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_dw_1/convolution_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn2_act_1/add_62" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn2_act_1/hardtanh_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn2_act_1/copy_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/convolution_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/convert_element_type_56" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/convert_element_type_57" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/add_61" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/sqrt_28" + type { + 
tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/reciprocal_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/scalar_tensor_default_28" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/mul_84" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/unsqueeze_224" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/unsqueeze_225" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/unsqueeze_226" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/unsqueeze_227" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/sub_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/mul_85" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/unsqueeze_228" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/unsqueeze_229" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/mul_86" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/unsqueeze_230" + type { + tensor_type { + elem_type: 
1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/unsqueeze_231" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/add_62" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1/getattr_getattr_l__self___blocks___3_____3___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_pwl_1/copy_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_pwl_1/convolution_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/convolution_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/convert_element_type_58" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/convert_element_type_59" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/add_63" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/sqrt_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/reciprocal_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/scalar_tensor_default_29" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/mul_87" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/unsqueeze_232" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/unsqueeze_233" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/unsqueeze_234" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/unsqueeze_235" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/sub_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/mul_88" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/unsqueeze_236" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/unsqueeze_237" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/mul_89" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/unsqueeze_238" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/unsqueeze_239" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1/add_64" + type { + tensor_type { + elem_type: 1 + shape { + dim { + 
dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1/add_58" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1/getattr_getattr_l__self___blocks___3_____3___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1/getattr_getattr_l__self___blocks___3_____3___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1/getattr_getattr_l__self___blocks___3_____3___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1/getattr_getattr_l__self___blocks___3_____3___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1/getattr_getattr_l__self___blocks___3_____3___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1/getattr_getattr_l__self___blocks___3_____3___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1/add_65" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_3_1/add_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_3_1/blocks_3_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_3_1/blocks_3_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 
14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_3_1/blocks_3_2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_3_1/blocks_3_3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_pw_1/add_65" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_pw_1/convolution_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn1_act_1/add_67" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn1_act_1/hardtanh_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn1_act_1/copy_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/convolution_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/convert_element_type_60" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/convert_element_type_61" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/add_66" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/sqrt_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + 
name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/reciprocal_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/scalar_tensor_default_30" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/mul_90" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/unsqueeze_240" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/unsqueeze_241" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/unsqueeze_242" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/unsqueeze_243" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/sub_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/mul_91" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/unsqueeze_244" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/unsqueeze_245" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/mul_92" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/unsqueeze_246" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + 
name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/unsqueeze_247" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/add_67" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1/getattr_getattr_l__self___blocks___4_____0___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_dw_1/copy_20" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_dw_1/convolution_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn2_act_1/add_69" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn2_act_1/hardtanh_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn2_act_1/copy_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/convolution_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/convert_element_type_62" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/convert_element_type_63" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/add_68" + type 
{ + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/sqrt_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/reciprocal_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/scalar_tensor_default_31" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/mul_93" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/unsqueeze_248" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/unsqueeze_249" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/unsqueeze_250" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/unsqueeze_251" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/sub_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/mul_94" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/unsqueeze_252" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/unsqueeze_253" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/mul_95" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + 
dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/unsqueeze_254" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/unsqueeze_255" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 384 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/add_69" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1/getattr_getattr_l__self___blocks___4_____0___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_pwl_1/copy_21" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_pwl_1/convolution_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/convolution_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/convert_element_type_64" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/convert_element_type_65" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/add_70" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/sqrt_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/reciprocal_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/scalar_tensor_default_32" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/mul_96" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/unsqueeze_256" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/unsqueeze_257" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/unsqueeze_258" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/unsqueeze_259" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/sub_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/mul_97" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/unsqueeze_260" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/unsqueeze_261" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/mul_98" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/unsqueeze_262" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/unsqueeze_263" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + 
} + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1/add_71" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_0_1/add_65" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_0_1/getattr_getattr_l__self___blocks___4_____0___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_0_1/getattr_getattr_l__self___blocks___4_____0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_0_1/getattr_getattr_l__self___blocks___4_____0___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_0_1/getattr_getattr_l__self___blocks___4_____0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 384 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_0_1/getattr_getattr_l__self___blocks___4_____0___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_0_1/getattr_getattr_l__self___blocks___4_____0___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_pw_1/add_71" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_pw_1/convolution_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn1_act_1/add_73" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + 
dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn1_act_1/hardtanh_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn1_act_1/copy_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/convolution_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/convert_element_type_66" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/convert_element_type_67" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/add_72" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/sqrt_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/reciprocal_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/scalar_tensor_default_33" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/mul_99" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/unsqueeze_264" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/unsqueeze_265" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/unsqueeze_266" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/unsqueeze_267" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/sub_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/mul_100" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/unsqueeze_268" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/unsqueeze_269" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/mul_101" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/unsqueeze_270" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/unsqueeze_271" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/add_73" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1/getattr_getattr_l__self___blocks___4_____1___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_dw_1/copy_22" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_dw_1/convolution_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim 
{ + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn2_act_1/add_75" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn2_act_1/hardtanh_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn2_act_1/copy_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/convolution_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/convert_element_type_68" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/convert_element_type_69" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/add_74" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/sqrt_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/reciprocal_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/scalar_tensor_default_34" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/mul_102" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/unsqueeze_272" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/unsqueeze_273" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + 
dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/unsqueeze_274" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/unsqueeze_275" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/sub_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/mul_103" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/unsqueeze_276" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/unsqueeze_277" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/mul_104" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/unsqueeze_278" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/unsqueeze_279" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/add_75" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1/getattr_getattr_l__self___blocks___4_____1___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_pwl_1/copy_23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + 
dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_pwl_1/convolution_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/convolution_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/convert_element_type_70" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/convert_element_type_71" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/add_76" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/sqrt_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/reciprocal_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/scalar_tensor_default_35" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/mul_105" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/unsqueeze_280" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/unsqueeze_281" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/unsqueeze_282" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/unsqueeze_283" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/sub_35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/mul_106" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/unsqueeze_284" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/unsqueeze_285" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/mul_107" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/unsqueeze_286" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/unsqueeze_287" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1/add_77" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1/add_71" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1/getattr_getattr_l__self___blocks___4_____1___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1/getattr_getattr_l__self___blocks___4_____1___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1/getattr_getattr_l__self___blocks___4_____1___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + 
} + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1/getattr_getattr_l__self___blocks___4_____1___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1/getattr_getattr_l__self___blocks___4_____1___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1/getattr_getattr_l__self___blocks___4_____1___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1/add_78" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_pw_1/add_78" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_pw_1/convolution_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn1_act_1/add_80" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn1_act_1/hardtanh_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn1_act_1/copy_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/convolution_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/convert_element_type_72" + type { + tensor_type { + elem_type: 1 + shape { + dim { + 
dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/convert_element_type_73" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/add_79" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/sqrt_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/reciprocal_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/scalar_tensor_default_36" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/mul_108" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/unsqueeze_288" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/unsqueeze_289" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/unsqueeze_290" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/unsqueeze_291" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/sub_36" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/mul_109" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/unsqueeze_292" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/unsqueeze_293" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/mul_110" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/unsqueeze_294" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/unsqueeze_295" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/add_80" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1/getattr_getattr_l__self___blocks___4_____2___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_dw_1/copy_24" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_dw_1/convolution_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn2_act_1/add_82" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn2_act_1/hardtanh_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn2_act_1/copy_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/convolution_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/convert_element_type_74" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/convert_element_type_75" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/add_81" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/sqrt_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/reciprocal_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/scalar_tensor_default_37" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/mul_111" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/unsqueeze_296" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/unsqueeze_297" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/unsqueeze_298" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/unsqueeze_299" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/sub_37" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/mul_112" + type { + tensor_type { + elem_type: 1 + shape { + dim 
{ + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/unsqueeze_300" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/unsqueeze_301" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/mul_113" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/unsqueeze_302" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/unsqueeze_303" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/add_82" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1/getattr_getattr_l__self___blocks___4_____2___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_pwl_1/copy_25" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_pwl_1/convolution_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/convolution_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/convert_element_type_76" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/convert_element_type_77" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/add_83" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/sqrt_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/reciprocal_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/scalar_tensor_default_38" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/mul_114" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/unsqueeze_304" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/unsqueeze_305" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/unsqueeze_306" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/unsqueeze_307" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/sub_38" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/mul_115" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/unsqueeze_308" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/unsqueeze_309" + type { + tensor_type { + elem_type: 1 + shape { + 
dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/mul_116" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/unsqueeze_310" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/unsqueeze_311" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 96 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1/add_84" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1/add_78" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1/getattr_getattr_l__self___blocks___4_____2___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1/getattr_getattr_l__self___blocks___4_____2___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1/getattr_getattr_l__self___blocks___4_____2___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1/getattr_getattr_l__self___blocks___4_____2___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1/getattr_getattr_l__self___blocks___4_____2___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1/getattr_getattr_l__self___blocks___4_____2___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } 
+ } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1/add_85" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_4_1/add_65" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_4_1/blocks_4_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_4_1/blocks_4_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_4_1/blocks_4_2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_pw_1/add_85" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_pw_1/convolution_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn1_act_1/add_87" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn1_act_1/hardtanh_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn1_act_1/copy_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/convolution_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/convert_element_type_78" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/convert_element_type_79" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/add_86" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/sqrt_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/reciprocal_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/scalar_tensor_default_39" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/mul_117" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/unsqueeze_312" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/unsqueeze_313" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/unsqueeze_314" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/unsqueeze_315" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/sub_39" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/mul_118" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/unsqueeze_316" + type { + tensor_type { + elem_type: 1 + shape { + dim 
{ + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/unsqueeze_317" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/mul_119" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/unsqueeze_318" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/unsqueeze_319" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/add_87" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1/getattr_getattr_l__self___blocks___5_____0___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_dw_1/copy_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_dw_1/convolution_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn2_act_1/add_89" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn2_act_1/hardtanh_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn2_act_1/copy_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info 
{ + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/convolution_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/convert_element_type_80" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/convert_element_type_81" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/add_88" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/sqrt_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/reciprocal_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/scalar_tensor_default_40" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/mul_120" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/unsqueeze_320" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/unsqueeze_321" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/unsqueeze_322" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/unsqueeze_323" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/sub_40" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/mul_121" + type { + tensor_type { + elem_type: 1 + shape { 
+ dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/unsqueeze_324" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/unsqueeze_325" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/mul_122" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/unsqueeze_326" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/unsqueeze_327" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 576 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/add_89" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1/getattr_getattr_l__self___blocks___5_____0___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_pwl_1/copy_27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_pwl_1/convolution_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/convolution_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/convert_element_type_82" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/convert_element_type_83" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/add_90" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/sqrt_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/reciprocal_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/scalar_tensor_default_41" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/mul_123" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/unsqueeze_328" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/unsqueeze_329" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/unsqueeze_330" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/unsqueeze_331" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/sub_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/mul_124" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/unsqueeze_332" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/unsqueeze_333" + type { + tensor_type { + elem_type: 1 + 
shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/mul_125" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/unsqueeze_334" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/unsqueeze_335" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1/add_91" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_0_1/add_85" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_0_1/getattr_getattr_l__self___blocks___5_____0___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_0_1/getattr_getattr_l__self___blocks___5_____0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_0_1/getattr_getattr_l__self___blocks___5_____0___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_0_1/getattr_getattr_l__self___blocks___5_____0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 576 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_0_1/getattr_getattr_l__self___blocks___5_____0___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_0_1/getattr_getattr_l__self___blocks___5_____0___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 
7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_pw_1/add_91" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_pw_1/convolution_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn1_act_1/add_93" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn1_act_1/hardtanh_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn1_act_1/copy_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/convolution_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/convert_element_type_84" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/convert_element_type_85" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/add_92" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/sqrt_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/reciprocal_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/scalar_tensor_default_42" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/mul_126" + type { + tensor_type { + 
elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/unsqueeze_336" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/unsqueeze_337" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/unsqueeze_338" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/unsqueeze_339" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/sub_42" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/mul_127" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/unsqueeze_340" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/unsqueeze_341" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/mul_128" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/unsqueeze_342" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/unsqueeze_343" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/add_93" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + 
name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1/getattr_getattr_l__self___blocks___5_____1___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_dw_1/copy_28" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_dw_1/convolution_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn2_act_1/add_95" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn2_act_1/hardtanh_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn2_act_1/copy_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/convolution_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/convert_element_type_86" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/convert_element_type_87" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/add_94" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/sqrt_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/reciprocal_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/scalar_tensor_default_43" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/mul_129" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/unsqueeze_344" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/unsqueeze_345" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/unsqueeze_346" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/unsqueeze_347" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/sub_43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/mul_130" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/unsqueeze_348" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/unsqueeze_349" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/mul_131" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/unsqueeze_350" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/unsqueeze_351" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + 
dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/add_95" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1/getattr_getattr_l__self___blocks___5_____1___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_pwl_1/copy_29" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_pwl_1/convolution_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/convolution_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/convert_element_type_88" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/convert_element_type_89" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/add_96" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/sqrt_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/reciprocal_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/scalar_tensor_default_44" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/mul_132" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/unsqueeze_352" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + 
dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/unsqueeze_353" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/unsqueeze_354" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/unsqueeze_355" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/sub_44" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/mul_133" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/unsqueeze_356" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/unsqueeze_357" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/mul_134" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/unsqueeze_358" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/unsqueeze_359" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1/add_97" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1/add_91" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1/getattr_getattr_l__self___blocks___5_____1___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1/getattr_getattr_l__self___blocks___5_____1___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1/getattr_getattr_l__self___blocks___5_____1___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1/getattr_getattr_l__self___blocks___5_____1___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1/getattr_getattr_l__self___blocks___5_____1___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1/getattr_getattr_l__self___blocks___5_____1___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1/add_98" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_pw_1/add_98" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_pw_1/convolution_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn1_act_1/add_100" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn1_act_1/hardtanh_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 
+ } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn1_act_1/copy_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/convolution_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/convert_element_type_90" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/convert_element_type_91" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/add_99" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/sqrt_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/reciprocal_45" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/scalar_tensor_default_45" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/mul_135" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/unsqueeze_360" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/unsqueeze_361" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/unsqueeze_362" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/unsqueeze_363" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/sub_45" 
+ type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/mul_136" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/unsqueeze_364" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/unsqueeze_365" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/mul_137" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/unsqueeze_366" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/unsqueeze_367" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/add_100" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1/getattr_getattr_l__self___blocks___5_____2___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_dw_1/copy_30" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_dw_1/convolution_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn2_act_1/add_102" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } 
+ } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn2_act_1/hardtanh_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn2_act_1/copy_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/convolution_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/convert_element_type_92" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/convert_element_type_93" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/add_101" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/sqrt_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/reciprocal_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/scalar_tensor_default_46" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/mul_138" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/unsqueeze_368" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/unsqueeze_369" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/unsqueeze_370" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/unsqueeze_371" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/sub_46" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/mul_139" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/unsqueeze_372" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/unsqueeze_373" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/mul_140" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/unsqueeze_374" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/unsqueeze_375" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/add_102" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1/getattr_getattr_l__self___blocks___5_____2___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_pwl_1/copy_31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_pwl_1/convolution_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + 
dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/convolution_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/convert_element_type_94" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/convert_element_type_95" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/add_103" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/sqrt_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/reciprocal_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/scalar_tensor_default_47" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/mul_141" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/unsqueeze_376" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/unsqueeze_377" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/unsqueeze_378" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/unsqueeze_379" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/sub_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/mul_142" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/unsqueeze_380" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/unsqueeze_381" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/mul_143" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/unsqueeze_382" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/unsqueeze_383" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 160 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1/add_104" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1/add_98" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1/getattr_getattr_l__self___blocks___5_____2___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1/getattr_getattr_l__self___blocks___5_____2___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1/getattr_getattr_l__self___blocks___5_____2___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1/getattr_getattr_l__self___blocks___5_____2___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + 
dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1/getattr_getattr_l__self___blocks___5_____2___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1/getattr_getattr_l__self___blocks___5_____2___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1/add_105" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_5_1/add_85" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_5_1/blocks_5_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_5_1/blocks_5_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_5_1/blocks_5_2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_pw_1/add_105" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_pw_1/convolution_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn1_act_1/add_107" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn1_act_1/hardtanh_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn1_act_1/copy_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/convolution_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/convert_element_type_96" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/convert_element_type_97" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/add_106" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/sqrt_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/reciprocal_48" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/scalar_tensor_default_48" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/mul_144" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/unsqueeze_384" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/unsqueeze_385" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/unsqueeze_386" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/unsqueeze_387" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/sub_48" + type { + tensor_type { + elem_type: 1 + 
shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/mul_145" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/unsqueeze_388" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/unsqueeze_389" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/mul_146" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/unsqueeze_390" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/unsqueeze_391" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/add_107" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1/getattr_getattr_l__self___blocks___6_____0___bn1_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_dw_1/copy_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_dw_1/convolution_49" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn2_act_1/add_109" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn2_act_1/hardtanh_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn2_act_1/copy_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/convolution_49" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/convert_element_type_98" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/convert_element_type_99" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/add_108" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/sqrt_49" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/reciprocal_49" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/scalar_tensor_default_49" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/mul_147" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/unsqueeze_392" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/unsqueeze_393" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/unsqueeze_394" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/unsqueeze_395" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/sub_49" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/mul_148" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/unsqueeze_396" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/unsqueeze_397" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/mul_149" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/unsqueeze_398" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/unsqueeze_399" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 960 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/add_109" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1/getattr_getattr_l__self___blocks___6_____0___bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_pwl_1/copy_33" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_pwl_1/convolution_50" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + 
dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/convolution_50" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/convert_element_type_100" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/convert_element_type_101" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/add_110" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/sqrt_50" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/reciprocal_50" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/scalar_tensor_default_50" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/mul_150" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/unsqueeze_400" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/unsqueeze_401" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/unsqueeze_402" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/unsqueeze_403" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/sub_50" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: 
"pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/mul_151" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/unsqueeze_404" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/unsqueeze_405" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/mul_152" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/unsqueeze_406" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/unsqueeze_407" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 320 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1/add_111" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_6_0_1/add_105" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_6_0_1/getattr_getattr_l__self___blocks___6_____0___conv_pw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_6_0_1/getattr_getattr_l__self___blocks___6_____0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_6_0_1/getattr_getattr_l__self___blocks___6_____0___conv_dw_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_6_0_1/getattr_getattr_l__self___blocks___6_____0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + 
dim_value: 960 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_6_0_1/getattr_getattr_l__self___blocks___6_____0___conv_pwl_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_models__efficientnet_blocks_InvertedResidual_blocks_6_0_1/getattr_getattr_l__self___blocks___6_____0___bn3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_6_1/add_105" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_6_1/blocks_6_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1/copy" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1/blocks_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 16 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1/blocks_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 24 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1/blocks_2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 32 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1/blocks_3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1/blocks_4_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 96 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1/blocks_5_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 160 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_container_Sequential_blocks_1/blocks_6_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } 
+ dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_conv_head_1/add_111" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 320 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_conv_Conv2d_conv_head_1/convolution_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_bn2_act_1/add_113" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_bn2_act_1/hardtanh_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_activation_ReLU6_bn2_act_1/copy_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/convolution_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/convert_element_type_102" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/convert_element_type_103" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/add_112" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/sqrt_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/reciprocal_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/scalar_tensor_default_51" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/mul_153" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/unsqueeze_408" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/unsqueeze_409" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + dim { + dim_value: 1 
+ } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/unsqueeze_410" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/unsqueeze_411" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/sub_51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/mul_154" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/unsqueeze_412" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/unsqueeze_413" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/mul_155" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/unsqueeze_414" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/unsqueeze_415" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/add_113" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_norm_act_BatchNormAct2d_bn2_1/bn2_act_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_pooling_AdaptiveAvgPool2d_global_pool_pool_1/copy_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_pooling_AdaptiveAvgPool2d_global_pool_pool_1/mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_flatten_Flatten_global_pool_flatten_1/mean" + type { + tensor_type { + elem_type: 1 + shape { 
+ dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_flatten_Flatten_global_pool_flatten_1/view" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_adaptive_avgmax_pool_SelectAdaptivePool2d_global_pool_1/copy_34" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_adaptive_avgmax_pool_SelectAdaptivePool2d_global_pool_1/global_pool_pool_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.timm.0.9.7::timm_layers_adaptive_avgmax_pool_SelectAdaptivePool2d_global_pool_1/global_pool_flatten_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_classifier_1/view" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1280 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_classifier_1/t" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1280 + } + dim { + dim_value: 1000 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git63d65dd::torch_nn_modules_linear_Linear_classifier_1/addmm" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1000 + } + } + } + } + } +} +opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 +} +opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 +} +opset_import { + domain: "pkg.timm.0.9.7" + version: 1 +} +opset_import { + domain: "" + version: 18 +} +opset_import { + domain: "pkg.onnxscript.torch_lib.common" + version: 1 +} +functions { + name: "_aten_convolution_onnx" + input: "input" + input: "weight" + input: "bias" + input: "transposed" + output: "result_12" + attribute: "strides" + attribute: "pads" + attribute: "dilations" + node { + input: "weight" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "weight_size" + name: "n1" + op_type: "Size" + } + node { + input: "input" + output: "tmp_0" + name: "n2" + op_type: "Shape" + } + node { + input: "tmp_0" + output: "tmp_1" + name: "n3" + op_type: "Size" + } + node { + input: "tmp_1" + input: "weight_size" + output: "tmp_2" + name: "n4" + op_type: "Equal" + } + node { + input: "tmp_2" + output: "no_batch" + name: "n5" + op_type: "Not" + } + node { + input: "no_batch" + output: "input_6" + name: "n6" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "tmp_3" + name: "n0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + type: INTS + } + } + node { + input: "input" + input: "tmp_3" + output: "input_4" + name: "n1" + op_type: "Unsqueeze" + } + name: "thenGraph_23" + output { + name: "input_4" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "input" + output: "input_5" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_23" + output { + name: "input_5" + } + 
} + type: GRAPH + } + } + node { + input: "transposed" + output: "result_8" + name: "n7" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "input_6" + input: "weight" + input: "bias" + output: "result" + name: "n0" + op_type: "ConvTranspose" + attribute { + name: "dilations" + type: INTS + ref_attr_name: "dilations" + } + attribute { + name: "group" + type: INT + ref_attr_name: "groups" + } + attribute { + name: "output_padding" + type: INTS + ref_attr_name: "output_padding" + } + attribute { + name: "pads" + type: INTS + ref_attr_name: "pads" + } + attribute { + name: "strides" + type: INTS + ref_attr_name: "strides" + } + } + name: "thenGraph_26" + output { + name: "result" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "input_6" + input: "weight" + input: "bias" + output: "result_7" + name: "n0" + op_type: "Conv" + attribute { + name: "dilations" + type: INTS + ref_attr_name: "dilations" + } + attribute { + name: "group" + type: INT + ref_attr_name: "groups" + } + attribute { + name: "pads" + type: INTS + ref_attr_name: "pads" + } + attribute { + name: "strides" + type: INTS + ref_attr_name: "strides" + } + } + name: "elseGraph_26" + output { + name: "result_7" + } + } + type: GRAPH + } + } + node { + input: "no_batch" + output: "result_12" + name: "n8" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "tmp_9" + name: "n0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + type: INTS + } + } + node { + input: "result_8" + input: "tmp_9" + output: "result_10" + name: "n1" + op_type: "Squeeze" + } + name: "thenGraph_48" + output { + name: "result_10" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "result_8" + output: "result_11" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_48" + output { + name: "result_11" + } + } + type: GRAPH + } + } + doc_string: "ConvXd with attributes pre-computed to fit the ONNX spec." 
+ opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "output_padding" + ints: 0 + type: INTS + } + attribute_proto { + name: "groups" + i: 1 + type: INT + } +} +functions { + name: "torch_nn_modules_conv_Conv2d_conv_stem_1" + input: "l_x_" + input: "conv_stem.weight" + output: "convolution" + node { + input: "conv_stem.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "l_x_" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "l_x_" + input: "conv_stem.weight" + input: "_val_7" + input: "_val_8" + output: "convolution" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "aten_hardtanh" + input: "self" + output: "return_val" + node { + output: "min_val" + name: "n0" + op_type: "Constant" + attribute { + name: "value_float" + type: FLOAT + ref_attr_name: "min_val" + } + } + node { + output: "max_val" + name: "n1" + op_type: "Constant" + attribute { + name: "value_float" + type: FLOAT + ref_attr_name: "max_val" + } + } + node { + input: "min_val" + input: "self" + output: "min_val_cast" + name: "n2" + op_type: "CastLike" + } + node { + input: "max_val" + input: "self" + output: "max_val_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "self" + input: "min_val_cast" + input: "max_val_cast" + output: "return_val" + name: "n4" + op_type: "Clip" + } + doc_string: "hardtanh(Tensor self, Scalar min_val=-1, Scalar max_val=1) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "min_val" + f: -1.0 + type: FLOAT + } + attribute_proto { + name: "max_val" + f: 1.0 + type: FLOAT + } +} +functions { + name: "aten_copy" + input: "self" + input: "src" + output: "self_0" + node { + input: "src" + output: "self_0" + name: "n0" + op_type: "Identity" + } + doc_string: "copy(Tensor self, Tensor src, bool non_blocking=False) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "non_blocking" + i: 0 + type: INT + } +} +functions { + name: 
"torch_nn_modules_activation_ReLU6_bn1_act_1" + input: "add_1" + output: "copy" + node { + input: "add_1" + output: "hardtanh" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_1" + input: "hardtanh" + output: "copy" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "prims_convert_element_type" + input: "a" + output: "return_val" + attribute: "dtype" + node { + input: "a" + output: "return_val" + name: "n0" + op_type: "Cast" + attribute { + name: "to" + type: INT + ref_attr_name: "dtype" + } + } + doc_string: "convert_element_type(Tensor a, ScalarType dtype) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_add" + input: "self" + input: "other" + output: "return_val" + node { + output: "alpha" + name: "n0" + op_type: "Constant" + attribute { + name: "value_float" + type: FLOAT + ref_attr_name: "alpha" + } + } + node { + input: "alpha" + input: "other" + output: "alpha_0" + name: "n1" + op_type: "CastLike" + } + node { + input: "other" + input: "alpha_0" + output: "other_1" + name: "n2" + op_type: "Mul" + } + node { + input: "self" + input: "other_1" + output: "return_val" + name: "n3" + op_type: "Add" + } + doc_string: "add.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "alpha" + f: 1.0 + type: FLOAT + } +} +functions { + name: "aten_sqrt" + input: "self" + output: "return_val" + node { + input: "self" + output: "return_val" + name: "n0" + op_type: "Sqrt" + } + doc_string: "sqrt(Tensor self) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_reciprocal" + input: "self" + output: "return_val" + node { + input: "self" + output: "return_val" + name: "n0" + op_type: "Reciprocal" + } + doc_string: "reciprocal(Tensor self) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_scalar_tensor_sym_number" + input: "s" + output: "return_val" + node { + input: "s" + output: "return_val" + name: "n0" + op_type: "Cast" + attribute { + name: "to" + type: INT + ref_attr_name: "dtype" + } + } + doc_string: "scalar_tensor(Scalar s, *, ScalarType? dtype=None, Layout? layout=None, Device? device=None, bool? 
pin_memory=None) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "dtype" + i: 1 + type: INT + } +} +functions { + name: "aten_mul" + input: "self" + input: "other" + output: "return_val" + node { + input: "other" + input: "self" + output: "other_0" + name: "n0" + op_type: "CastLike" + } + node { + input: "self" + input: "other_0" + output: "return_val" + name: "n1" + op_type: "Mul" + } + doc_string: "mul.Tensor(Tensor self, Tensor other) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_unsqueeze" + input: "self" + output: "return_val" + attribute: "dim" + node { + output: "dim" + name: "n0" + op_type: "Constant" + attribute { + name: "value_int" + type: INT + ref_attr_name: "dim" + } + } + node { + input: "dim" + output: "dim_0" + name: "n1" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + input: "self" + input: "dim_0" + output: "return_val" + name: "n2" + op_type: "Unsqueeze" + } + doc_string: "unsqueeze(Tensor(a) self, int dim) -> Tensor(a)" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_sub" + input: "self" + input: "other" + output: "return_val" + node { + output: "alpha" + name: "n0" + op_type: "Constant" + attribute { + name: "value_float" + type: FLOAT + ref_attr_name: "alpha" + } + } + node { + input: "alpha" + input: "other" + output: "alpha_0" + name: "n1" + op_type: "CastLike" + } + node { + input: "other" + input: "alpha_0" + output: "other_1" + name: "n2" + op_type: "Mul" + } + node { + input: "self" + input: "other_1" + output: "return_val" + name: "n3" + op_type: "Sub" + } + doc_string: "sub.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "alpha" + f: 1.0 + type: FLOAT + } +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_bn1_1" + input: "convolution" + input: "bn1.running_mean" + input: "bn1.running_var" + input: "bn1.weight" + input: "bn1.bias" + output: "bn1_act_1" + node { + input: "bn1.running_mean" + output: "convert_element_type" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "bn1.running_var" + output: "convert_element_type_1" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_1" + input: "_val_5" + output: "add" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add" + output: "sqrt" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt" + output: "reciprocal" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } 
+ } + node { + input: "_val_9" + output: "scalar_tensor_default" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal" + input: "scalar_tensor_default" + output: "mul" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type" + output: "unsqueeze" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze" + output: "unsqueeze_1" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul" + output: "unsqueeze_2" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_2" + output: "unsqueeze_3" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution" + input: "unsqueeze_1" + output: "sub" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub" + input: "unsqueeze_3" + output: "mul_1" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "bn1.weight" + output: "unsqueeze_4" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_4" + output: "unsqueeze_5" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_1" + input: "unsqueeze_5" + output: "mul_2" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "bn1.bias" + output: "unsqueeze_6" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_6" + output: "unsqueeze_7" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_2" + input: "unsqueeze_7" + output: "add_1" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_1" + output: "bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___0_____0___conv_dw_1" + input: "copy" + input: "blocks.0.0.conv_dw.weight" + output: "convolution_1" + node { + input: "blocks.0.0.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + 
name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy" + input: "blocks.0.0.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_1" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 32 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___0_____0___bn1_act_1" + input: "add_3" + output: "copy_1" + node { + input: "add_3" + output: "hardtanh_1" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_3" + input: "hardtanh_1" + output: "copy_1" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1" + input: "convolution_1" + input: "blocks.0.0.bn1.running_mean" + input: "blocks.0.0.bn1.running_var" + input: "blocks.0.0.bn1.weight" + input: "blocks.0.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___0_____0___bn1_act_1" + node { + input: "blocks.0.0.bn1.running_mean" + output: "convert_element_type_2" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.0.0.bn1.running_var" + output: "convert_element_type_3" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_3" + input: "_val_5" + output: "add_2" + name: "aten_add_5" + op_type: "aten_add" + attribute { 
+ name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_2" + output: "sqrt_1" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_1" + output: "reciprocal_1" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_1" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_1" + input: "scalar_tensor_default_1" + output: "mul_3" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_2" + output: "unsqueeze_8" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_8" + output: "unsqueeze_9" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_3" + output: "unsqueeze_10" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_10" + output: "unsqueeze_11" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_1" + input: "unsqueeze_9" + output: "sub_1" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_1" + input: "unsqueeze_11" + output: "mul_4" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.0.0.bn1.weight" + output: "unsqueeze_12" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_12" + output: "unsqueeze_13" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_4" + input: "unsqueeze_13" + output: "mul_5" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.0.0.bn1.bias" + output: "unsqueeze_14" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_14" + output: "unsqueeze_15" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_5" + input: "unsqueeze_15" + output: "add_3" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_3" + output: "getattr_getattr_l__self___blocks___0_____0___bn1_act_1" + name: 
"torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___0_____0___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___0_____0___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___0_____0___conv_pw_1" + input: "copy_1" + input: "blocks.0.0.conv_pw.weight" + output: "convolution_2" + node { + input: "blocks.0.0.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_1" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_1" + input: "blocks.0.0.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_2" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1" + input: "convolution_2" + input: "blocks.0.0.bn2.running_mean" + input: "blocks.0.0.bn2.running_var" + input: "blocks.0.0.bn2.weight" + input: "blocks.0.0.bn2.bias" + output: "add_5" + node { + input: "blocks.0.0.bn2.running_mean" + output: "convert_element_type_4" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.0.0.bn2.running_var" + output: "convert_element_type_5" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_5" + input: "_val_5" + output: "add_4" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: 
"alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_4" + output: "sqrt_2" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_2" + output: "reciprocal_2" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_2" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_2" + input: "scalar_tensor_default_2" + output: "mul_6" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_4" + output: "unsqueeze_16" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_16" + output: "unsqueeze_17" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_6" + output: "unsqueeze_18" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_18" + output: "unsqueeze_19" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_2" + input: "unsqueeze_17" + output: "sub_2" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_2" + input: "unsqueeze_19" + output: "mul_7" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.0.0.bn2.weight" + output: "unsqueeze_20" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_20" + output: "unsqueeze_21" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_7" + input: "unsqueeze_21" + output: "mul_8" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.0.0.bn2.bias" + output: "unsqueeze_22" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_22" + output: "unsqueeze_23" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_8" + input: "unsqueeze_23" + output: "add_5" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: 
"timm_models__efficientnet_blocks_DepthwiseSeparableConv_blocks_0_0_1" + input: "copy" + input: "blocks.0.0.conv_dw.weight" + input: "blocks.0.0.bn1.running_mean" + input: "blocks.0.0.bn1.running_var" + input: "blocks.0.0.bn1.weight" + input: "blocks.0.0.bn1.bias" + input: "blocks.0.0.conv_pw.weight" + input: "blocks.0.0.bn2.running_mean" + input: "blocks.0.0.bn2.running_var" + input: "blocks.0.0.bn2.weight" + input: "blocks.0.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___0_____0___bn2_1" + node { + input: "copy" + input: "blocks.0.0.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___0_____0___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___0_____0___conv_dw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___0_____0___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___0_____0___conv_dw_1" + input: "blocks.0.0.bn1.running_mean" + input: "blocks.0.0.bn1.running_var" + input: "blocks.0.0.bn1.weight" + input: "blocks.0.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___0_____0___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___0_____0___bn1_1" + input: "blocks.0.0.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___0_____0___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___0_____0___conv_pw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___0_____0___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___0_____0___conv_pw_1" + input: "blocks.0.0.bn2.running_mean" + input: "blocks.0.0.bn2.running_var" + input: "blocks.0.0.bn2.weight" + input: "blocks.0.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___0_____0___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___0_____0___bn2_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_container_Sequential_blocks_0_1" + input: "copy" + input: "blocks.0.0.conv_dw.weight" + input: "blocks.0.0.bn1.running_mean" + input: "blocks.0.0.bn1.running_var" + input: "blocks.0.0.bn1.weight" + input: "blocks.0.0.bn1.bias" + input: "blocks.0.0.conv_pw.weight" + input: "blocks.0.0.bn2.running_mean" + input: "blocks.0.0.bn2.running_var" + input: "blocks.0.0.bn2.weight" + input: "blocks.0.0.bn2.bias" + output: "blocks_0_0_1" + node { + input: "copy" + input: "blocks.0.0.conv_dw.weight" + input: "blocks.0.0.bn1.running_mean" + input: "blocks.0.0.bn1.running_var" + input: "blocks.0.0.bn1.weight" + input: "blocks.0.0.bn1.bias" + input: "blocks.0.0.conv_pw.weight" + input: "blocks.0.0.bn2.running_mean" + input: "blocks.0.0.bn2.running_var" + input: "blocks.0.0.bn2.weight" + input: "blocks.0.0.bn2.bias" + output: "blocks_0_0_1" + name: "timm_models__efficientnet_blocks_DepthwiseSeparableConv_blocks_0_0_1_0" + op_type: 
"timm_models__efficientnet_blocks_DepthwiseSeparableConv_blocks_0_0_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_pw_1" + input: "add_5" + input: "blocks.1.0.conv_pw.weight" + output: "convolution_3" + node { + input: "blocks.1.0.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_5" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_5" + input: "blocks.1.0.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_3" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn1_act_1" + input: "add_7" + output: "copy_2" + node { + input: "add_7" + output: "hardtanh_2" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_7" + input: "hardtanh_2" + output: "copy_2" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1" + input: "convolution_3" + input: "blocks.1.0.bn1.running_mean" + input: "blocks.1.0.bn1.running_var" + input: "blocks.1.0.bn1.weight" + input: "blocks.1.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___1_____0___bn1_act_1" + node { + input: "blocks.1.0.bn1.running_mean" + output: "convert_element_type_6" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: 
INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.0.bn1.running_var" + output: "convert_element_type_7" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_7" + input: "_val_5" + output: "add_6" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_6" + output: "sqrt_3" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_3" + output: "reciprocal_3" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_3" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_3" + input: "scalar_tensor_default_3" + output: "mul_9" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_6" + output: "unsqueeze_24" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_24" + output: "unsqueeze_25" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_9" + output: "unsqueeze_26" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_26" + output: "unsqueeze_27" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_3" + input: "unsqueeze_25" + output: "sub_3" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_3" + input: "unsqueeze_27" + output: "mul_10" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.0.bn1.weight" + output: "unsqueeze_28" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_28" + output: "unsqueeze_29" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_10" + input: "unsqueeze_29" + output: "mul_11" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.0.bn1.bias" + output: "unsqueeze_30" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: 
"pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_30" + output: "unsqueeze_31" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_11" + input: "unsqueeze_31" + output: "add_7" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_7" + output: "getattr_getattr_l__self___blocks___1_____0___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_dw_1" + input: "copy_2" + input: "blocks.1.0.conv_dw.weight" + output: "convolution_4" + node { + input: "blocks.1.0.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_2" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_2" + input: "blocks.1.0.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_4" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 96 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn2_act_1" + input: "add_9" + output: "copy_3" + node { + input: "add_9" + output: "hardtanh_3" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_9" + input: "hardtanh_3" + output: "copy_3" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: 
INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1" + input: "convolution_4" + input: "blocks.1.0.bn2.running_mean" + input: "blocks.1.0.bn2.running_var" + input: "blocks.1.0.bn2.weight" + input: "blocks.1.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___1_____0___bn2_act_1" + node { + input: "blocks.1.0.bn2.running_mean" + output: "convert_element_type_8" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.0.bn2.running_var" + output: "convert_element_type_9" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_9" + input: "_val_5" + output: "add_8" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_8" + output: "sqrt_4" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_4" + output: "reciprocal_4" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_4" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_4" + input: "scalar_tensor_default_4" + output: "mul_12" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_8" + output: "unsqueeze_32" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_32" + output: "unsqueeze_33" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_12" + output: "unsqueeze_34" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_34" + output: "unsqueeze_35" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_4" + input: "unsqueeze_33" + output: "sub_4" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_4" + input: "unsqueeze_35" + output: "mul_13" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + 
} + node { + input: "blocks.1.0.bn2.weight" + output: "unsqueeze_36" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_36" + output: "unsqueeze_37" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_13" + input: "unsqueeze_37" + output: "mul_14" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.0.bn2.bias" + output: "unsqueeze_38" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_38" + output: "unsqueeze_39" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_14" + input: "unsqueeze_39" + output: "add_9" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_9" + output: "getattr_getattr_l__self___blocks___1_____0___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____0___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_pwl_1" + input: "copy_3" + input: "blocks.1.0.conv_pwl.weight" + output: "convolution_5" + node { + input: "blocks.1.0.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_3" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_3" + input: "blocks.1.0.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_5" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: 
"pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1" + input: "convolution_5" + input: "blocks.1.0.bn3.running_mean" + input: "blocks.1.0.bn3.running_var" + input: "blocks.1.0.bn3.weight" + input: "blocks.1.0.bn3.bias" + output: "add_11" + node { + input: "blocks.1.0.bn3.running_mean" + output: "convert_element_type_10" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.0.bn3.running_var" + output: "convert_element_type_11" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_11" + input: "_val_5" + output: "add_10" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_10" + output: "sqrt_5" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_5" + output: "reciprocal_5" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_5" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_5" + input: "scalar_tensor_default_5" + output: "mul_15" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_10" + output: "unsqueeze_40" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_40" + output: "unsqueeze_41" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_15" + output: "unsqueeze_42" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_42" + output: "unsqueeze_43" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_5" + input: "unsqueeze_41" + output: "sub_5" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_5" + input: "unsqueeze_43" + output: "mul_16" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.0.bn3.weight" + output: 
"unsqueeze_44" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_44" + output: "unsqueeze_45" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_16" + input: "unsqueeze_45" + output: "mul_17" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.0.bn3.bias" + output: "unsqueeze_46" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_46" + output: "unsqueeze_47" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_17" + input: "unsqueeze_47" + output: "add_11" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_1_0_1" + input: "add_5" + input: "blocks.1.0.conv_pw.weight" + input: "blocks.1.0.bn1.running_mean" + input: "blocks.1.0.bn1.running_var" + input: "blocks.1.0.bn1.weight" + input: "blocks.1.0.bn1.bias" + input: "blocks.1.0.conv_dw.weight" + input: "blocks.1.0.bn2.running_mean" + input: "blocks.1.0.bn2.running_var" + input: "blocks.1.0.bn2.weight" + input: "blocks.1.0.bn2.bias" + input: "blocks.1.0.conv_pwl.weight" + input: "blocks.1.0.bn3.running_mean" + input: "blocks.1.0.bn3.running_var" + input: "blocks.1.0.bn3.weight" + input: "blocks.1.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___1_____0___bn3_1" + node { + input: "add_5" + input: "blocks.1.0.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___1_____0___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___1_____0___conv_pw_1" + input: "blocks.1.0.bn1.running_mean" + input: "blocks.1.0.bn1.running_var" + input: "blocks.1.0.bn1.weight" + input: "blocks.1.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___1_____0___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___1_____0___bn1_1" + input: "blocks.1.0.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___1_____0___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___1_____0___conv_dw_1" + input: "blocks.1.0.bn2.running_mean" + input: "blocks.1.0.bn2.running_var" + input: "blocks.1.0.bn2.weight" + input: "blocks.1.0.bn2.bias" + output: 
"getattr_getattr_l__self___blocks___1_____0___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___1_____0___bn2_1" + input: "blocks.1.0.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___1_____0___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____0___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___1_____0___conv_pwl_1" + input: "blocks.1.0.bn3.running_mean" + input: "blocks.1.0.bn3.running_var" + input: "blocks.1.0.bn3.weight" + input: "blocks.1.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___1_____0___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____0___bn3_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_pw_1" + input: "add_11" + input: "blocks.1.1.conv_pw.weight" + output: "convolution_6" + node { + input: "blocks.1.1.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_11" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_11" + input: "blocks.1.1.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_6" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn1_act_1" + input: "add_13" + output: "copy_4" + node { + 
input: "add_13" + output: "hardtanh_4" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_13" + input: "hardtanh_4" + output: "copy_4" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1" + input: "convolution_6" + input: "blocks.1.1.bn1.running_mean" + input: "blocks.1.1.bn1.running_var" + input: "blocks.1.1.bn1.weight" + input: "blocks.1.1.bn1.bias" + output: "getattr_getattr_l__self___blocks___1_____1___bn1_act_1" + node { + input: "blocks.1.1.bn1.running_mean" + output: "convert_element_type_12" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.1.bn1.running_var" + output: "convert_element_type_13" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_13" + input: "_val_5" + output: "add_12" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_12" + output: "sqrt_6" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_6" + output: "reciprocal_6" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_6" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_6" + input: "scalar_tensor_default_6" + output: "mul_18" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_12" + output: "unsqueeze_48" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_48" + output: "unsqueeze_49" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_18" + output: "unsqueeze_50" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_50" + output: "unsqueeze_51" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + 
type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_6" + input: "unsqueeze_49" + output: "sub_6" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_6" + input: "unsqueeze_51" + output: "mul_19" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.1.bn1.weight" + output: "unsqueeze_52" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_52" + output: "unsqueeze_53" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_19" + input: "unsqueeze_53" + output: "mul_20" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.1.bn1.bias" + output: "unsqueeze_54" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_54" + output: "unsqueeze_55" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_20" + input: "unsqueeze_55" + output: "add_13" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_13" + output: "getattr_getattr_l__self___blocks___1_____1___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_dw_1" + input: "copy_4" + input: "blocks.1.1.conv_dw.weight" + output: "convolution_7" + node { + input: "blocks.1.1.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_4" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_4" + input: "blocks.1.1.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_7" + name: 
"_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 144 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn2_act_1" + input: "add_15" + output: "copy_5" + node { + input: "add_15" + output: "hardtanh_5" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_15" + input: "hardtanh_5" + output: "copy_5" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1" + input: "convolution_7" + input: "blocks.1.1.bn2.running_mean" + input: "blocks.1.1.bn2.running_var" + input: "blocks.1.1.bn2.weight" + input: "blocks.1.1.bn2.bias" + output: "getattr_getattr_l__self___blocks___1_____1___bn2_act_1" + node { + input: "blocks.1.1.bn2.running_mean" + output: "convert_element_type_14" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.1.bn2.running_var" + output: "convert_element_type_15" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_15" + input: "_val_5" + output: "add_14" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_14" + output: "sqrt_7" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_7" + output: "reciprocal_7" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_7" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_7" + input: "scalar_tensor_default_7" + output: "mul_21" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" 
+ } + node { + input: "convert_element_type_14" + output: "unsqueeze_56" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_56" + output: "unsqueeze_57" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_21" + output: "unsqueeze_58" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_58" + output: "unsqueeze_59" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_7" + input: "unsqueeze_57" + output: "sub_7" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_7" + input: "unsqueeze_59" + output: "mul_22" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.1.bn2.weight" + output: "unsqueeze_60" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_60" + output: "unsqueeze_61" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_22" + input: "unsqueeze_61" + output: "mul_23" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.1.bn2.bias" + output: "unsqueeze_62" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_62" + output: "unsqueeze_63" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_23" + input: "unsqueeze_63" + output: "add_15" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_15" + output: "getattr_getattr_l__self___blocks___1_____1___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___1_____1___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_pwl_1" + input: "copy_5" + input: "blocks.1.1.conv_pwl.weight" + output: "convolution_8" + node { + input: "blocks.1.1.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + 
input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_5" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_5" + input: "blocks.1.1.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_8" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1" + input: "convolution_8" + input: "blocks.1.1.bn3.running_mean" + input: "blocks.1.1.bn3.running_var" + input: "blocks.1.1.bn3.weight" + input: "blocks.1.1.bn3.bias" + output: "add_17" + node { + input: "blocks.1.1.bn3.running_mean" + output: "convert_element_type_16" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.1.bn3.running_var" + output: "convert_element_type_17" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_17" + input: "_val_5" + output: "add_16" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_16" + output: "sqrt_8" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_8" + output: "reciprocal_8" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_8" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_8" + input: "scalar_tensor_default_8" + output: "mul_24" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_16" + output: 
"unsqueeze_64" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_64" + output: "unsqueeze_65" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_24" + output: "unsqueeze_66" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_66" + output: "unsqueeze_67" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_8" + input: "unsqueeze_65" + output: "sub_8" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_8" + input: "unsqueeze_67" + output: "mul_25" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.1.bn3.weight" + output: "unsqueeze_68" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_68" + output: "unsqueeze_69" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_25" + input: "unsqueeze_69" + output: "mul_26" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.1.1.bn3.bias" + output: "unsqueeze_70" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_70" + output: "unsqueeze_71" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_26" + input: "unsqueeze_71" + output: "add_17" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1" + input: "add_11" + input: "blocks.1.1.conv_pw.weight" + input: "blocks.1.1.bn1.running_mean" + input: "blocks.1.1.bn1.running_var" + input: "blocks.1.1.bn1.weight" + input: "blocks.1.1.bn1.bias" + input: "blocks.1.1.conv_dw.weight" + input: "blocks.1.1.bn2.running_mean" + input: "blocks.1.1.bn2.running_var" + input: "blocks.1.1.bn2.weight" + input: "blocks.1.1.bn2.bias" + input: "blocks.1.1.conv_pwl.weight" + input: "blocks.1.1.bn3.running_mean" + input: "blocks.1.1.bn3.running_var" + input: "blocks.1.1.bn3.weight" + input: "blocks.1.1.bn3.bias" + output: "add_18" + node { + input: "add_11" + input: "blocks.1.1.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___1_____1___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + 
input: "getattr_getattr_l__self___blocks___1_____1___conv_pw_1" + input: "blocks.1.1.bn1.running_mean" + input: "blocks.1.1.bn1.running_var" + input: "blocks.1.1.bn1.weight" + input: "blocks.1.1.bn1.bias" + output: "getattr_getattr_l__self___blocks___1_____1___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___1_____1___bn1_1" + input: "blocks.1.1.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___1_____1___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___1_____1___conv_dw_1" + input: "blocks.1.1.bn2.running_mean" + input: "blocks.1.1.bn2.running_var" + input: "blocks.1.1.bn2.weight" + input: "blocks.1.1.bn2.bias" + output: "getattr_getattr_l__self___blocks___1_____1___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___1_____1___bn2_1" + input: "blocks.1.1.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___1_____1___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___1_____1___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___1_____1___conv_pwl_1" + input: "blocks.1.1.bn3.running_mean" + input: "blocks.1.1.bn3.running_var" + input: "blocks.1.1.bn3.weight" + input: "blocks.1.1.bn3.bias" + output: "getattr_getattr_l__self___blocks___1_____1___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___1_____1___bn3_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___1_____1___bn3_1" + input: "add_11" + output: "add_18" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_container_Sequential_blocks_1_1" + input: "add_5" + input: "blocks.1.0.conv_pw.weight" + input: "blocks.1.0.bn1.running_mean" + input: "blocks.1.0.bn1.running_var" + input: "blocks.1.0.bn1.weight" + input: "blocks.1.0.bn1.bias" + input: "blocks.1.0.conv_dw.weight" + input: "blocks.1.0.bn2.running_mean" + input: "blocks.1.0.bn2.running_var" + input: "blocks.1.0.bn2.weight" + input: "blocks.1.0.bn2.bias" + input: "blocks.1.0.conv_pwl.weight" + input: "blocks.1.0.bn3.running_mean" + input: "blocks.1.0.bn3.running_var" + input: "blocks.1.0.bn3.weight" + input: "blocks.1.0.bn3.bias" + input: "blocks.1.1.conv_pw.weight" + input: 
"blocks.1.1.bn1.running_mean" + input: "blocks.1.1.bn1.running_var" + input: "blocks.1.1.bn1.weight" + input: "blocks.1.1.bn1.bias" + input: "blocks.1.1.conv_dw.weight" + input: "blocks.1.1.bn2.running_mean" + input: "blocks.1.1.bn2.running_var" + input: "blocks.1.1.bn2.weight" + input: "blocks.1.1.bn2.bias" + input: "blocks.1.1.conv_pwl.weight" + input: "blocks.1.1.bn3.running_mean" + input: "blocks.1.1.bn3.running_var" + input: "blocks.1.1.bn3.weight" + input: "blocks.1.1.bn3.bias" + output: "blocks_1_1_1" + node { + input: "add_5" + input: "blocks.1.0.conv_pw.weight" + input: "blocks.1.0.bn1.running_mean" + input: "blocks.1.0.bn1.running_var" + input: "blocks.1.0.bn1.weight" + input: "blocks.1.0.bn1.bias" + input: "blocks.1.0.conv_dw.weight" + input: "blocks.1.0.bn2.running_mean" + input: "blocks.1.0.bn2.running_var" + input: "blocks.1.0.bn2.weight" + input: "blocks.1.0.bn2.bias" + input: "blocks.1.0.conv_pwl.weight" + input: "blocks.1.0.bn3.running_mean" + input: "blocks.1.0.bn3.running_var" + input: "blocks.1.0.bn3.weight" + input: "blocks.1.0.bn3.bias" + output: "blocks_1_0_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_1_0_1_0" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_1_0_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "blocks_1_0_1" + input: "blocks.1.1.conv_pw.weight" + input: "blocks.1.1.bn1.running_mean" + input: "blocks.1.1.bn1.running_var" + input: "blocks.1.1.bn1.weight" + input: "blocks.1.1.bn1.bias" + input: "blocks.1.1.conv_dw.weight" + input: "blocks.1.1.bn2.running_mean" + input: "blocks.1.1.bn2.running_var" + input: "blocks.1.1.bn2.weight" + input: "blocks.1.1.bn2.bias" + input: "blocks.1.1.conv_pwl.weight" + input: "blocks.1.1.bn3.running_mean" + input: "blocks.1.1.bn3.running_var" + input: "blocks.1.1.bn3.weight" + input: "blocks.1.1.bn3.bias" + output: "blocks_1_1_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1_1" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_1_1_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_pw_1" + input: "add_18" + input: "blocks.2.0.conv_pw.weight" + output: "convolution_9" + node { + input: "blocks.2.0.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_18" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_18" + input: "blocks.2.0.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_9" + name: 
"_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn1_act_1" + input: "add_20" + output: "copy_6" + node { + input: "add_20" + output: "hardtanh_6" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_20" + input: "hardtanh_6" + output: "copy_6" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1" + input: "convolution_9" + input: "blocks.2.0.bn1.running_mean" + input: "blocks.2.0.bn1.running_var" + input: "blocks.2.0.bn1.weight" + input: "blocks.2.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___2_____0___bn1_act_1" + node { + input: "blocks.2.0.bn1.running_mean" + output: "convert_element_type_18" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.0.bn1.running_var" + output: "convert_element_type_19" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_19" + input: "_val_5" + output: "add_19" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_19" + output: "sqrt_9" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_9" + output: "reciprocal_9" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_9" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_9" + input: "scalar_tensor_default_9" + output: "mul_27" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + 
} + node { + input: "convert_element_type_18" + output: "unsqueeze_72" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_72" + output: "unsqueeze_73" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_27" + output: "unsqueeze_74" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_74" + output: "unsqueeze_75" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_9" + input: "unsqueeze_73" + output: "sub_9" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_9" + input: "unsqueeze_75" + output: "mul_28" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.0.bn1.weight" + output: "unsqueeze_76" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_76" + output: "unsqueeze_77" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_28" + input: "unsqueeze_77" + output: "mul_29" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.0.bn1.bias" + output: "unsqueeze_78" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_78" + output: "unsqueeze_79" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_29" + input: "unsqueeze_79" + output: "add_20" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_20" + output: "getattr_getattr_l__self___blocks___2_____0___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_dw_1" + input: "copy_6" + input: "blocks.2.0.conv_dw.weight" + output: "convolution_10" + node { + input: "blocks.2.0.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + 
input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_6" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_6" + input: "blocks.2.0.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_10" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 144 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn2_act_1" + input: "add_22" + output: "copy_7" + node { + input: "add_22" + output: "hardtanh_7" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_22" + input: "hardtanh_7" + output: "copy_7" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1" + input: "convolution_10" + input: "blocks.2.0.bn2.running_mean" + input: "blocks.2.0.bn2.running_var" + input: "blocks.2.0.bn2.weight" + input: "blocks.2.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___2_____0___bn2_act_1" + node { + input: "blocks.2.0.bn2.running_mean" + output: "convert_element_type_20" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.0.bn2.running_var" + output: "convert_element_type_21" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_21" + input: "_val_5" + output: "add_21" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_21" + output: "sqrt_10" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: 
"pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_10" + output: "reciprocal_10" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_10" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_10" + input: "scalar_tensor_default_10" + output: "mul_30" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_20" + output: "unsqueeze_80" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_80" + output: "unsqueeze_81" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_30" + output: "unsqueeze_82" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_82" + output: "unsqueeze_83" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_10" + input: "unsqueeze_81" + output: "sub_10" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_10" + input: "unsqueeze_83" + output: "mul_31" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.0.bn2.weight" + output: "unsqueeze_84" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_84" + output: "unsqueeze_85" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_31" + input: "unsqueeze_85" + output: "mul_32" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.0.bn2.bias" + output: "unsqueeze_86" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_86" + output: "unsqueeze_87" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_32" + input: "unsqueeze_87" + output: "add_22" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_22" + output: "getattr_getattr_l__self___blocks___2_____0___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____0___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" 
+ } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_pwl_1" + input: "copy_7" + input: "blocks.2.0.conv_pwl.weight" + output: "convolution_11" + node { + input: "blocks.2.0.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_7" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_7" + input: "blocks.2.0.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_11" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1" + input: "convolution_11" + input: "blocks.2.0.bn3.running_mean" + input: "blocks.2.0.bn3.running_var" + input: "blocks.2.0.bn3.weight" + input: "blocks.2.0.bn3.bias" + output: "add_24" + node { + input: "blocks.2.0.bn3.running_mean" + output: "convert_element_type_22" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.0.bn3.running_var" + output: "convert_element_type_23" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_23" + input: "_val_5" + output: "add_23" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_23" + output: "sqrt_11" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + 
input: "sqrt_11" + output: "reciprocal_11" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_11" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_11" + input: "scalar_tensor_default_11" + output: "mul_33" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_22" + output: "unsqueeze_88" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_88" + output: "unsqueeze_89" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_33" + output: "unsqueeze_90" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_90" + output: "unsqueeze_91" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_11" + input: "unsqueeze_89" + output: "sub_11" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_11" + input: "unsqueeze_91" + output: "mul_34" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.0.bn3.weight" + output: "unsqueeze_92" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_92" + output: "unsqueeze_93" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_34" + input: "unsqueeze_93" + output: "mul_35" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.0.bn3.bias" + output: "unsqueeze_94" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_94" + output: "unsqueeze_95" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_35" + input: "unsqueeze_95" + output: "add_24" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_2_0_1" + input: "add_18" + input: "blocks.2.0.conv_pw.weight" + input: "blocks.2.0.bn1.running_mean" + input: "blocks.2.0.bn1.running_var" + input: 
"blocks.2.0.bn1.weight" + input: "blocks.2.0.bn1.bias" + input: "blocks.2.0.conv_dw.weight" + input: "blocks.2.0.bn2.running_mean" + input: "blocks.2.0.bn2.running_var" + input: "blocks.2.0.bn2.weight" + input: "blocks.2.0.bn2.bias" + input: "blocks.2.0.conv_pwl.weight" + input: "blocks.2.0.bn3.running_mean" + input: "blocks.2.0.bn3.running_var" + input: "blocks.2.0.bn3.weight" + input: "blocks.2.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___2_____0___bn3_1" + node { + input: "add_18" + input: "blocks.2.0.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___2_____0___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___2_____0___conv_pw_1" + input: "blocks.2.0.bn1.running_mean" + input: "blocks.2.0.bn1.running_var" + input: "blocks.2.0.bn1.weight" + input: "blocks.2.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___2_____0___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___2_____0___bn1_1" + input: "blocks.2.0.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___2_____0___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___2_____0___conv_dw_1" + input: "blocks.2.0.bn2.running_mean" + input: "blocks.2.0.bn2.running_var" + input: "blocks.2.0.bn2.weight" + input: "blocks.2.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___2_____0___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___2_____0___bn2_1" + input: "blocks.2.0.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___2_____0___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____0___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___2_____0___conv_pwl_1" + input: "blocks.2.0.bn3.running_mean" + input: "blocks.2.0.bn3.running_var" + input: "blocks.2.0.bn3.weight" + input: "blocks.2.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___2_____0___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____0___bn3_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_pw_1" + input: "add_24" + input: 
"blocks.2.1.conv_pw.weight" + output: "convolution_12" + node { + input: "blocks.2.1.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_24" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_24" + input: "blocks.2.1.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_12" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn1_act_1" + input: "add_26" + output: "copy_8" + node { + input: "add_26" + output: "hardtanh_8" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_26" + input: "hardtanh_8" + output: "copy_8" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1" + input: "convolution_12" + input: "blocks.2.1.bn1.running_mean" + input: "blocks.2.1.bn1.running_var" + input: "blocks.2.1.bn1.weight" + input: "blocks.2.1.bn1.bias" + output: "getattr_getattr_l__self___blocks___2_____1___bn1_act_1" + node { + input: "blocks.2.1.bn1.running_mean" + output: "convert_element_type_24" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.1.bn1.running_var" + output: "convert_element_type_25" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + 
attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_25" + input: "_val_5" + output: "add_25" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_25" + output: "sqrt_12" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_12" + output: "reciprocal_12" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_12" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_12" + input: "scalar_tensor_default_12" + output: "mul_36" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_24" + output: "unsqueeze_96" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_96" + output: "unsqueeze_97" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_36" + output: "unsqueeze_98" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_98" + output: "unsqueeze_99" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_12" + input: "unsqueeze_97" + output: "sub_12" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_12" + input: "unsqueeze_99" + output: "mul_37" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.1.bn1.weight" + output: "unsqueeze_100" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_100" + output: "unsqueeze_101" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_37" + input: "unsqueeze_101" + output: "mul_38" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.1.bn1.bias" + output: "unsqueeze_102" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_102" + output: "unsqueeze_103" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_38" + input: "unsqueeze_103" + output: "add_26" + name: "aten_add_22" + op_type: "aten_add" + 
attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_26" + output: "getattr_getattr_l__self___blocks___2_____1___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_dw_1" + input: "copy_8" + input: "blocks.2.1.conv_dw.weight" + output: "convolution_13" + node { + input: "blocks.2.1.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_8" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_8" + input: "blocks.2.1.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_13" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 192 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn2_act_1" + input: "add_28" + output: "copy_9" + node { + input: "add_28" + output: "hardtanh_9" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_28" + input: "hardtanh_9" + output: "copy_9" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1" + input: 
"convolution_13" + input: "blocks.2.1.bn2.running_mean" + input: "blocks.2.1.bn2.running_var" + input: "blocks.2.1.bn2.weight" + input: "blocks.2.1.bn2.bias" + output: "getattr_getattr_l__self___blocks___2_____1___bn2_act_1" + node { + input: "blocks.2.1.bn2.running_mean" + output: "convert_element_type_26" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.1.bn2.running_var" + output: "convert_element_type_27" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_27" + input: "_val_5" + output: "add_27" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_27" + output: "sqrt_13" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_13" + output: "reciprocal_13" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_13" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_13" + input: "scalar_tensor_default_13" + output: "mul_39" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_26" + output: "unsqueeze_104" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_104" + output: "unsqueeze_105" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_39" + output: "unsqueeze_106" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_106" + output: "unsqueeze_107" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_13" + input: "unsqueeze_105" + output: "sub_13" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_13" + input: "unsqueeze_107" + output: "mul_40" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.1.bn2.weight" + output: "unsqueeze_108" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_108" + output: "unsqueeze_109" + name: "aten_unsqueeze_18" 
+ op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_40" + input: "unsqueeze_109" + output: "mul_41" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.1.bn2.bias" + output: "unsqueeze_110" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_110" + output: "unsqueeze_111" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_41" + input: "unsqueeze_111" + output: "add_28" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_28" + output: "getattr_getattr_l__self___blocks___2_____1___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____1___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_pwl_1" + input: "copy_9" + input: "blocks.2.1.conv_pwl.weight" + output: "convolution_14" + node { + input: "blocks.2.1.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_9" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_9" + input: "blocks.2.1.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_14" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: 
"timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1" + input: "convolution_14" + input: "blocks.2.1.bn3.running_mean" + input: "blocks.2.1.bn3.running_var" + input: "blocks.2.1.bn3.weight" + input: "blocks.2.1.bn3.bias" + output: "add_30" + node { + input: "blocks.2.1.bn3.running_mean" + output: "convert_element_type_28" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.1.bn3.running_var" + output: "convert_element_type_29" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_29" + input: "_val_5" + output: "add_29" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_29" + output: "sqrt_14" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_14" + output: "reciprocal_14" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_14" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_14" + input: "scalar_tensor_default_14" + output: "mul_42" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_28" + output: "unsqueeze_112" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_112" + output: "unsqueeze_113" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_42" + output: "unsqueeze_114" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_114" + output: "unsqueeze_115" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_14" + input: "unsqueeze_113" + output: "sub_14" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_14" + input: "unsqueeze_115" + output: "mul_43" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.1.bn3.weight" + output: "unsqueeze_116" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_116" + 
output: "unsqueeze_117" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_43" + input: "unsqueeze_117" + output: "mul_44" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.1.bn3.bias" + output: "unsqueeze_118" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_118" + output: "unsqueeze_119" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_44" + input: "unsqueeze_119" + output: "add_30" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1" + input: "add_24" + input: "blocks.2.1.conv_pw.weight" + input: "blocks.2.1.bn1.running_mean" + input: "blocks.2.1.bn1.running_var" + input: "blocks.2.1.bn1.weight" + input: "blocks.2.1.bn1.bias" + input: "blocks.2.1.conv_dw.weight" + input: "blocks.2.1.bn2.running_mean" + input: "blocks.2.1.bn2.running_var" + input: "blocks.2.1.bn2.weight" + input: "blocks.2.1.bn2.bias" + input: "blocks.2.1.conv_pwl.weight" + input: "blocks.2.1.bn3.running_mean" + input: "blocks.2.1.bn3.running_var" + input: "blocks.2.1.bn3.weight" + input: "blocks.2.1.bn3.bias" + output: "add_31" + node { + input: "add_24" + input: "blocks.2.1.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___2_____1___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___2_____1___conv_pw_1" + input: "blocks.2.1.bn1.running_mean" + input: "blocks.2.1.bn1.running_var" + input: "blocks.2.1.bn1.weight" + input: "blocks.2.1.bn1.bias" + output: "getattr_getattr_l__self___blocks___2_____1___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___2_____1___bn1_1" + input: "blocks.2.1.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___2_____1___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___2_____1___conv_dw_1" + input: "blocks.2.1.bn2.running_mean" + input: "blocks.2.1.bn2.running_var" + input: "blocks.2.1.bn2.weight" + input: "blocks.2.1.bn2.bias" + output: "getattr_getattr_l__self___blocks___2_____1___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn2_1" + domain: 
"pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___2_____1___bn2_1" + input: "blocks.2.1.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___2_____1___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____1___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___2_____1___conv_pwl_1" + input: "blocks.2.1.bn3.running_mean" + input: "blocks.2.1.bn3.running_var" + input: "blocks.2.1.bn3.weight" + input: "blocks.2.1.bn3.bias" + output: "getattr_getattr_l__self___blocks___2_____1___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____1___bn3_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___2_____1___bn3_1" + input: "add_24" + output: "add_31" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_pw_1" + input: "add_31" + input: "blocks.2.2.conv_pw.weight" + output: "convolution_15" + node { + input: "blocks.2.2.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_31" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_31" + input: "blocks.2.2.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_15" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: 
"torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn1_act_1" + input: "add_33" + output: "copy_10" + node { + input: "add_33" + output: "hardtanh_10" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_33" + input: "hardtanh_10" + output: "copy_10" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1" + input: "convolution_15" + input: "blocks.2.2.bn1.running_mean" + input: "blocks.2.2.bn1.running_var" + input: "blocks.2.2.bn1.weight" + input: "blocks.2.2.bn1.bias" + output: "getattr_getattr_l__self___blocks___2_____2___bn1_act_1" + node { + input: "blocks.2.2.bn1.running_mean" + output: "convert_element_type_30" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.2.bn1.running_var" + output: "convert_element_type_31" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_31" + input: "_val_5" + output: "add_32" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_32" + output: "sqrt_15" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_15" + output: "reciprocal_15" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_15" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_15" + input: "scalar_tensor_default_15" + output: "mul_45" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_30" + output: "unsqueeze_120" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_120" + output: "unsqueeze_121" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_45" + output: "unsqueeze_122" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + 
node { + input: "unsqueeze_122" + output: "unsqueeze_123" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_15" + input: "unsqueeze_121" + output: "sub_15" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_15" + input: "unsqueeze_123" + output: "mul_46" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.2.bn1.weight" + output: "unsqueeze_124" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_124" + output: "unsqueeze_125" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_46" + input: "unsqueeze_125" + output: "mul_47" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.2.bn1.bias" + output: "unsqueeze_126" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_126" + output: "unsqueeze_127" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_47" + input: "unsqueeze_127" + output: "add_33" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_33" + output: "getattr_getattr_l__self___blocks___2_____2___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_dw_1" + input: "copy_10" + input: "blocks.2.2.conv_dw.weight" + output: "convolution_16" + node { + input: "blocks.2.2.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_10" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" 
+ } + type: TENSOR + } + } + node { + input: "copy_10" + input: "blocks.2.2.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_16" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 192 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn2_act_1" + input: "add_35" + output: "copy_11" + node { + input: "add_35" + output: "hardtanh_11" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_35" + input: "hardtanh_11" + output: "copy_11" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1" + input: "convolution_16" + input: "blocks.2.2.bn2.running_mean" + input: "blocks.2.2.bn2.running_var" + input: "blocks.2.2.bn2.weight" + input: "blocks.2.2.bn2.bias" + output: "getattr_getattr_l__self___blocks___2_____2___bn2_act_1" + node { + input: "blocks.2.2.bn2.running_mean" + output: "convert_element_type_32" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.2.bn2.running_var" + output: "convert_element_type_33" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_33" + input: "_val_5" + output: "add_34" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_34" + output: "sqrt_16" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_16" + output: "reciprocal_16" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_16" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" 
+ } + node { + input: "reciprocal_16" + input: "scalar_tensor_default_16" + output: "mul_48" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_32" + output: "unsqueeze_128" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_128" + output: "unsqueeze_129" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_48" + output: "unsqueeze_130" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_130" + output: "unsqueeze_131" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_16" + input: "unsqueeze_129" + output: "sub_16" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_16" + input: "unsqueeze_131" + output: "mul_49" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.2.bn2.weight" + output: "unsqueeze_132" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_132" + output: "unsqueeze_133" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_49" + input: "unsqueeze_133" + output: "mul_50" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.2.bn2.bias" + output: "unsqueeze_134" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_134" + output: "unsqueeze_135" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_50" + input: "unsqueeze_135" + output: "add_35" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_35" + output: "getattr_getattr_l__self___blocks___2_____2___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___2_____2___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_pwl_1" + input: "copy_11" + input: "blocks.2.2.conv_pwl.weight" + output: "convolution_17" + node { + input: "blocks.2.2.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: 
"start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_11" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_11" + input: "blocks.2.2.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_17" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1" + input: "convolution_17" + input: "blocks.2.2.bn3.running_mean" + input: "blocks.2.2.bn3.running_var" + input: "blocks.2.2.bn3.weight" + input: "blocks.2.2.bn3.bias" + output: "add_37" + node { + input: "blocks.2.2.bn3.running_mean" + output: "convert_element_type_34" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.2.bn3.running_var" + output: "convert_element_type_35" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_35" + input: "_val_5" + output: "add_36" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_36" + output: "sqrt_17" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_17" + output: "reciprocal_17" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_17" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: 
"reciprocal_17" + input: "scalar_tensor_default_17" + output: "mul_51" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_34" + output: "unsqueeze_136" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_136" + output: "unsqueeze_137" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_51" + output: "unsqueeze_138" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_138" + output: "unsqueeze_139" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_17" + input: "unsqueeze_137" + output: "sub_17" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_17" + input: "unsqueeze_139" + output: "mul_52" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.2.bn3.weight" + output: "unsqueeze_140" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_140" + output: "unsqueeze_141" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_52" + input: "unsqueeze_141" + output: "mul_53" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.2.2.bn3.bias" + output: "unsqueeze_142" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_142" + output: "unsqueeze_143" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_53" + input: "unsqueeze_143" + output: "add_37" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1" + input: "add_31" + input: "blocks.2.2.conv_pw.weight" + input: "blocks.2.2.bn1.running_mean" + input: "blocks.2.2.bn1.running_var" + input: "blocks.2.2.bn1.weight" + input: "blocks.2.2.bn1.bias" + input: "blocks.2.2.conv_dw.weight" + input: "blocks.2.2.bn2.running_mean" + input: "blocks.2.2.bn2.running_var" + input: "blocks.2.2.bn2.weight" + input: "blocks.2.2.bn2.bias" + input: "blocks.2.2.conv_pwl.weight" + input: "blocks.2.2.bn3.running_mean" + input: "blocks.2.2.bn3.running_var" + input: "blocks.2.2.bn3.weight" + input: "blocks.2.2.bn3.bias" + output: "add_38" + node { + input: "add_31" + input: "blocks.2.2.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___2_____2___conv_pw_1" + name: 
"torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___2_____2___conv_pw_1" + input: "blocks.2.2.bn1.running_mean" + input: "blocks.2.2.bn1.running_var" + input: "blocks.2.2.bn1.weight" + input: "blocks.2.2.bn1.bias" + output: "getattr_getattr_l__self___blocks___2_____2___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___2_____2___bn1_1" + input: "blocks.2.2.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___2_____2___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___2_____2___conv_dw_1" + input: "blocks.2.2.bn2.running_mean" + input: "blocks.2.2.bn2.running_var" + input: "blocks.2.2.bn2.weight" + input: "blocks.2.2.bn2.bias" + output: "getattr_getattr_l__self___blocks___2_____2___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___2_____2___bn2_1" + input: "blocks.2.2.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___2_____2___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___2_____2___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___2_____2___conv_pwl_1" + input: "blocks.2.2.bn3.running_mean" + input: "blocks.2.2.bn3.running_var" + input: "blocks.2.2.bn3.weight" + input: "blocks.2.2.bn3.bias" + output: "getattr_getattr_l__self___blocks___2_____2___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___2_____2___bn3_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___2_____2___bn3_1" + input: "add_31" + output: "add_38" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_container_Sequential_blocks_2_1" + input: "add_18" + input: "blocks.2.0.conv_pw.weight" + input: "blocks.2.0.bn1.running_mean" + input: "blocks.2.0.bn1.running_var" + input: "blocks.2.0.bn1.weight" + input: "blocks.2.0.bn1.bias" + input: "blocks.2.0.conv_dw.weight" + input: "blocks.2.0.bn2.running_mean" + input: "blocks.2.0.bn2.running_var" + input: "blocks.2.0.bn2.weight" + input: 
"blocks.2.0.bn2.bias" + input: "blocks.2.0.conv_pwl.weight" + input: "blocks.2.0.bn3.running_mean" + input: "blocks.2.0.bn3.running_var" + input: "blocks.2.0.bn3.weight" + input: "blocks.2.0.bn3.bias" + input: "blocks.2.1.conv_pw.weight" + input: "blocks.2.1.bn1.running_mean" + input: "blocks.2.1.bn1.running_var" + input: "blocks.2.1.bn1.weight" + input: "blocks.2.1.bn1.bias" + input: "blocks.2.1.conv_dw.weight" + input: "blocks.2.1.bn2.running_mean" + input: "blocks.2.1.bn2.running_var" + input: "blocks.2.1.bn2.weight" + input: "blocks.2.1.bn2.bias" + input: "blocks.2.1.conv_pwl.weight" + input: "blocks.2.1.bn3.running_mean" + input: "blocks.2.1.bn3.running_var" + input: "blocks.2.1.bn3.weight" + input: "blocks.2.1.bn3.bias" + input: "blocks.2.2.conv_pw.weight" + input: "blocks.2.2.bn1.running_mean" + input: "blocks.2.2.bn1.running_var" + input: "blocks.2.2.bn1.weight" + input: "blocks.2.2.bn1.bias" + input: "blocks.2.2.conv_dw.weight" + input: "blocks.2.2.bn2.running_mean" + input: "blocks.2.2.bn2.running_var" + input: "blocks.2.2.bn2.weight" + input: "blocks.2.2.bn2.bias" + input: "blocks.2.2.conv_pwl.weight" + input: "blocks.2.2.bn3.running_mean" + input: "blocks.2.2.bn3.running_var" + input: "blocks.2.2.bn3.weight" + input: "blocks.2.2.bn3.bias" + output: "blocks_2_2_1" + node { + input: "add_18" + input: "blocks.2.0.conv_pw.weight" + input: "blocks.2.0.bn1.running_mean" + input: "blocks.2.0.bn1.running_var" + input: "blocks.2.0.bn1.weight" + input: "blocks.2.0.bn1.bias" + input: "blocks.2.0.conv_dw.weight" + input: "blocks.2.0.bn2.running_mean" + input: "blocks.2.0.bn2.running_var" + input: "blocks.2.0.bn2.weight" + input: "blocks.2.0.bn2.bias" + input: "blocks.2.0.conv_pwl.weight" + input: "blocks.2.0.bn3.running_mean" + input: "blocks.2.0.bn3.running_var" + input: "blocks.2.0.bn3.weight" + input: "blocks.2.0.bn3.bias" + output: "blocks_2_0_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_2_0_1_0" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_2_0_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "blocks_2_0_1" + input: "blocks.2.1.conv_pw.weight" + input: "blocks.2.1.bn1.running_mean" + input: "blocks.2.1.bn1.running_var" + input: "blocks.2.1.bn1.weight" + input: "blocks.2.1.bn1.bias" + input: "blocks.2.1.conv_dw.weight" + input: "blocks.2.1.bn2.running_mean" + input: "blocks.2.1.bn2.running_var" + input: "blocks.2.1.bn2.weight" + input: "blocks.2.1.bn2.bias" + input: "blocks.2.1.conv_pwl.weight" + input: "blocks.2.1.bn3.running_mean" + input: "blocks.2.1.bn3.running_var" + input: "blocks.2.1.bn3.weight" + input: "blocks.2.1.bn3.bias" + output: "blocks_2_1_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1_1" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_2_1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "blocks_2_1_1" + input: "blocks.2.2.conv_pw.weight" + input: "blocks.2.2.bn1.running_mean" + input: "blocks.2.2.bn1.running_var" + input: "blocks.2.2.bn1.weight" + input: "blocks.2.2.bn1.bias" + input: "blocks.2.2.conv_dw.weight" + input: "blocks.2.2.bn2.running_mean" + input: "blocks.2.2.bn2.running_var" + input: "blocks.2.2.bn2.weight" + input: "blocks.2.2.bn2.bias" + input: "blocks.2.2.conv_pwl.weight" + input: "blocks.2.2.bn3.running_mean" + input: "blocks.2.2.bn3.running_var" + input: "blocks.2.2.bn3.weight" + input: "blocks.2.2.bn3.bias" + output: "blocks_2_2_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1_2" + op_type: 
"timm_models__efficientnet_blocks_InvertedResidual_blocks_2_2_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_pw_1" + input: "add_38" + input: "blocks.3.0.conv_pw.weight" + output: "convolution_18" + node { + input: "blocks.3.0.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_38" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_38" + input: "blocks.3.0.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_18" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn1_act_1" + input: "add_40" + output: "copy_12" + node { + input: "add_40" + output: "hardtanh_12" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_40" + input: "hardtanh_12" + output: "copy_12" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1" + input: "convolution_18" + input: "blocks.3.0.bn1.running_mean" + input: "blocks.3.0.bn1.running_var" + input: "blocks.3.0.bn1.weight" + input: "blocks.3.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___3_____0___bn1_act_1" + node { + input: "blocks.3.0.bn1.running_mean" + output: "convert_element_type_36" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + 
type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.0.bn1.running_var" + output: "convert_element_type_37" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_37" + input: "_val_5" + output: "add_39" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_39" + output: "sqrt_18" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_18" + output: "reciprocal_18" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_18" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_18" + input: "scalar_tensor_default_18" + output: "mul_54" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_36" + output: "unsqueeze_144" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_144" + output: "unsqueeze_145" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_54" + output: "unsqueeze_146" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_146" + output: "unsqueeze_147" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_18" + input: "unsqueeze_145" + output: "sub_18" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_18" + input: "unsqueeze_147" + output: "mul_55" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.0.bn1.weight" + output: "unsqueeze_148" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_148" + output: "unsqueeze_149" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_55" + input: "unsqueeze_149" + output: "mul_56" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.0.bn1.bias" + output: "unsqueeze_150" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" 
+ i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_150" + output: "unsqueeze_151" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_56" + input: "unsqueeze_151" + output: "add_40" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_40" + output: "getattr_getattr_l__self___blocks___3_____0___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_dw_1" + input: "copy_12" + input: "blocks.3.0.conv_dw.weight" + output: "convolution_19" + node { + input: "blocks.3.0.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_12" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_12" + input: "blocks.3.0.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_19" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 192 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn2_act_1" + input: "add_42" + output: "copy_13" + node { + input: "add_42" + output: "hardtanh_13" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_42" + input: "hardtanh_13" + output: "copy_13" + name: "aten_copy_1" + op_type: "aten_copy" 
+ attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1" + input: "convolution_19" + input: "blocks.3.0.bn2.running_mean" + input: "blocks.3.0.bn2.running_var" + input: "blocks.3.0.bn2.weight" + input: "blocks.3.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___3_____0___bn2_act_1" + node { + input: "blocks.3.0.bn2.running_mean" + output: "convert_element_type_38" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.0.bn2.running_var" + output: "convert_element_type_39" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_39" + input: "_val_5" + output: "add_41" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_41" + output: "sqrt_19" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_19" + output: "reciprocal_19" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_19" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_19" + input: "scalar_tensor_default_19" + output: "mul_57" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_38" + output: "unsqueeze_152" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_152" + output: "unsqueeze_153" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_57" + output: "unsqueeze_154" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_154" + output: "unsqueeze_155" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_19" + input: "unsqueeze_153" + output: "sub_19" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_19" + input: "unsqueeze_155" + output: "mul_58" + name: 
"aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.0.bn2.weight" + output: "unsqueeze_156" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_156" + output: "unsqueeze_157" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_58" + input: "unsqueeze_157" + output: "mul_59" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.0.bn2.bias" + output: "unsqueeze_158" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_158" + output: "unsqueeze_159" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_59" + input: "unsqueeze_159" + output: "add_42" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_42" + output: "getattr_getattr_l__self___blocks___3_____0___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____0___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_pwl_1" + input: "copy_13" + input: "blocks.3.0.conv_pwl.weight" + output: "convolution_20" + node { + input: "blocks.3.0.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_13" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_13" + input: "blocks.3.0.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_20" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: 
INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1" + input: "convolution_20" + input: "blocks.3.0.bn3.running_mean" + input: "blocks.3.0.bn3.running_var" + input: "blocks.3.0.bn3.weight" + input: "blocks.3.0.bn3.bias" + output: "add_44" + node { + input: "blocks.3.0.bn3.running_mean" + output: "convert_element_type_40" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.0.bn3.running_var" + output: "convert_element_type_41" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_41" + input: "_val_5" + output: "add_43" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_43" + output: "sqrt_20" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_20" + output: "reciprocal_20" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_20" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_20" + input: "scalar_tensor_default_20" + output: "mul_60" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_40" + output: "unsqueeze_160" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_160" + output: "unsqueeze_161" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_60" + output: "unsqueeze_162" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_162" + output: "unsqueeze_163" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_20" + input: "unsqueeze_161" + output: "sub_20" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_20" + input: "unsqueeze_163" + output: "mul_61" + name: "aten_mul_16" + op_type: 
"aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.0.bn3.weight" + output: "unsqueeze_164" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_164" + output: "unsqueeze_165" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_61" + input: "unsqueeze_165" + output: "mul_62" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.0.bn3.bias" + output: "unsqueeze_166" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_166" + output: "unsqueeze_167" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_62" + input: "unsqueeze_167" + output: "add_44" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_0_1" + input: "add_38" + input: "blocks.3.0.conv_pw.weight" + input: "blocks.3.0.bn1.running_mean" + input: "blocks.3.0.bn1.running_var" + input: "blocks.3.0.bn1.weight" + input: "blocks.3.0.bn1.bias" + input: "blocks.3.0.conv_dw.weight" + input: "blocks.3.0.bn2.running_mean" + input: "blocks.3.0.bn2.running_var" + input: "blocks.3.0.bn2.weight" + input: "blocks.3.0.bn2.bias" + input: "blocks.3.0.conv_pwl.weight" + input: "blocks.3.0.bn3.running_mean" + input: "blocks.3.0.bn3.running_var" + input: "blocks.3.0.bn3.weight" + input: "blocks.3.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___3_____0___bn3_1" + node { + input: "add_38" + input: "blocks.3.0.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___3_____0___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____0___conv_pw_1" + input: "blocks.3.0.bn1.running_mean" + input: "blocks.3.0.bn1.running_var" + input: "blocks.3.0.bn1.weight" + input: "blocks.3.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___3_____0___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____0___bn1_1" + input: "blocks.3.0.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___3_____0___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____0___conv_dw_1" + input: "blocks.3.0.bn2.running_mean" + input: 
"blocks.3.0.bn2.running_var" + input: "blocks.3.0.bn2.weight" + input: "blocks.3.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___3_____0___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____0___bn2_1" + input: "blocks.3.0.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___3_____0___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____0___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____0___conv_pwl_1" + input: "blocks.3.0.bn3.running_mean" + input: "blocks.3.0.bn3.running_var" + input: "blocks.3.0.bn3.weight" + input: "blocks.3.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___3_____0___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____0___bn3_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_pw_1" + input: "add_44" + input: "blocks.3.1.conv_pw.weight" + output: "convolution_21" + node { + input: "blocks.3.1.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_44" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_44" + input: "blocks.3.1.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_21" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: 
"torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn1_act_1" + input: "add_46" + output: "copy_14" + node { + input: "add_46" + output: "hardtanh_14" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_46" + input: "hardtanh_14" + output: "copy_14" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1" + input: "convolution_21" + input: "blocks.3.1.bn1.running_mean" + input: "blocks.3.1.bn1.running_var" + input: "blocks.3.1.bn1.weight" + input: "blocks.3.1.bn1.bias" + output: "getattr_getattr_l__self___blocks___3_____1___bn1_act_1" + node { + input: "blocks.3.1.bn1.running_mean" + output: "convert_element_type_42" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.1.bn1.running_var" + output: "convert_element_type_43" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_43" + input: "_val_5" + output: "add_45" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_45" + output: "sqrt_21" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_21" + output: "reciprocal_21" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_21" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_21" + input: "scalar_tensor_default_21" + output: "mul_63" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_42" + output: "unsqueeze_168" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_168" + output: "unsqueeze_169" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_63" + output: "unsqueeze_170" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + 
node { + input: "unsqueeze_170" + output: "unsqueeze_171" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_21" + input: "unsqueeze_169" + output: "sub_21" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_21" + input: "unsqueeze_171" + output: "mul_64" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.1.bn1.weight" + output: "unsqueeze_172" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_172" + output: "unsqueeze_173" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_64" + input: "unsqueeze_173" + output: "mul_65" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.1.bn1.bias" + output: "unsqueeze_174" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_174" + output: "unsqueeze_175" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_65" + input: "unsqueeze_175" + output: "add_46" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_46" + output: "getattr_getattr_l__self___blocks___3_____1___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_dw_1" + input: "copy_14" + input: "blocks.3.1.conv_dw.weight" + output: "convolution_22" + node { + input: "blocks.3.1.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_14" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" 
+ } + type: TENSOR + } + } + node { + input: "copy_14" + input: "blocks.3.1.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_22" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 384 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn2_act_1" + input: "add_48" + output: "copy_15" + node { + input: "add_48" + output: "hardtanh_15" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_48" + input: "hardtanh_15" + output: "copy_15" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1" + input: "convolution_22" + input: "blocks.3.1.bn2.running_mean" + input: "blocks.3.1.bn2.running_var" + input: "blocks.3.1.bn2.weight" + input: "blocks.3.1.bn2.bias" + output: "getattr_getattr_l__self___blocks___3_____1___bn2_act_1" + node { + input: "blocks.3.1.bn2.running_mean" + output: "convert_element_type_44" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.1.bn2.running_var" + output: "convert_element_type_45" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_45" + input: "_val_5" + output: "add_47" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_47" + output: "sqrt_22" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_22" + output: "reciprocal_22" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_22" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" 
+ } + node { + input: "reciprocal_22" + input: "scalar_tensor_default_22" + output: "mul_66" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_44" + output: "unsqueeze_176" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_176" + output: "unsqueeze_177" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_66" + output: "unsqueeze_178" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_178" + output: "unsqueeze_179" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_22" + input: "unsqueeze_177" + output: "sub_22" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_22" + input: "unsqueeze_179" + output: "mul_67" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.1.bn2.weight" + output: "unsqueeze_180" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_180" + output: "unsqueeze_181" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_67" + input: "unsqueeze_181" + output: "mul_68" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.1.bn2.bias" + output: "unsqueeze_182" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_182" + output: "unsqueeze_183" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_68" + input: "unsqueeze_183" + output: "add_48" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_48" + output: "getattr_getattr_l__self___blocks___3_____1___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____1___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_pwl_1" + input: "copy_15" + input: "blocks.3.1.conv_pwl.weight" + output: "convolution_23" + node { + input: "blocks.3.1.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: 
"start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_15" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_15" + input: "blocks.3.1.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_23" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1" + input: "convolution_23" + input: "blocks.3.1.bn3.running_mean" + input: "blocks.3.1.bn3.running_var" + input: "blocks.3.1.bn3.weight" + input: "blocks.3.1.bn3.bias" + output: "add_50" + node { + input: "blocks.3.1.bn3.running_mean" + output: "convert_element_type_46" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.1.bn3.running_var" + output: "convert_element_type_47" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_47" + input: "_val_5" + output: "add_49" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_49" + output: "sqrt_23" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_23" + output: "reciprocal_23" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_23" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: 
"reciprocal_23" + input: "scalar_tensor_default_23" + output: "mul_69" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_46" + output: "unsqueeze_184" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_184" + output: "unsqueeze_185" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_69" + output: "unsqueeze_186" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_186" + output: "unsqueeze_187" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_23" + input: "unsqueeze_185" + output: "sub_23" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_23" + input: "unsqueeze_187" + output: "mul_70" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.1.bn3.weight" + output: "unsqueeze_188" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_188" + output: "unsqueeze_189" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_70" + input: "unsqueeze_189" + output: "mul_71" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.1.bn3.bias" + output: "unsqueeze_190" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_190" + output: "unsqueeze_191" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_71" + input: "unsqueeze_191" + output: "add_50" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1" + input: "add_44" + input: "blocks.3.1.conv_pw.weight" + input: "blocks.3.1.bn1.running_mean" + input: "blocks.3.1.bn1.running_var" + input: "blocks.3.1.bn1.weight" + input: "blocks.3.1.bn1.bias" + input: "blocks.3.1.conv_dw.weight" + input: "blocks.3.1.bn2.running_mean" + input: "blocks.3.1.bn2.running_var" + input: "blocks.3.1.bn2.weight" + input: "blocks.3.1.bn2.bias" + input: "blocks.3.1.conv_pwl.weight" + input: "blocks.3.1.bn3.running_mean" + input: "blocks.3.1.bn3.running_var" + input: "blocks.3.1.bn3.weight" + input: "blocks.3.1.bn3.bias" + output: "add_51" + node { + input: "add_44" + input: "blocks.3.1.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___3_____1___conv_pw_1" + name: 
"torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____1___conv_pw_1" + input: "blocks.3.1.bn1.running_mean" + input: "blocks.3.1.bn1.running_var" + input: "blocks.3.1.bn1.weight" + input: "blocks.3.1.bn1.bias" + output: "getattr_getattr_l__self___blocks___3_____1___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____1___bn1_1" + input: "blocks.3.1.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___3_____1___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____1___conv_dw_1" + input: "blocks.3.1.bn2.running_mean" + input: "blocks.3.1.bn2.running_var" + input: "blocks.3.1.bn2.weight" + input: "blocks.3.1.bn2.bias" + output: "getattr_getattr_l__self___blocks___3_____1___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____1___bn2_1" + input: "blocks.3.1.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___3_____1___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____1___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____1___conv_pwl_1" + input: "blocks.3.1.bn3.running_mean" + input: "blocks.3.1.bn3.running_var" + input: "blocks.3.1.bn3.weight" + input: "blocks.3.1.bn3.bias" + output: "getattr_getattr_l__self___blocks___3_____1___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____1___bn3_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____1___bn3_1" + input: "add_44" + output: "add_51" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_pw_1" + input: "add_51" + input: "blocks.3.2.conv_pw.weight" + output: "convolution_24" + node { + input: "blocks.3.2.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + 
name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_51" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_51" + input: "blocks.3.2.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_24" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn1_act_1" + input: "add_53" + output: "copy_16" + node { + input: "add_53" + output: "hardtanh_16" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_53" + input: "hardtanh_16" + output: "copy_16" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1" + input: "convolution_24" + input: "blocks.3.2.bn1.running_mean" + input: "blocks.3.2.bn1.running_var" + input: "blocks.3.2.bn1.weight" + input: "blocks.3.2.bn1.bias" + output: "getattr_getattr_l__self___blocks___3_____2___bn1_act_1" + node { + input: "blocks.3.2.bn1.running_mean" + output: "convert_element_type_48" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.2.bn1.running_var" + output: "convert_element_type_49" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_49" + input: "_val_5" + output: "add_52" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: 
"pkg.onnxscript.torch_lib" + } + node { + input: "add_52" + output: "sqrt_24" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_24" + output: "reciprocal_24" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_24" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_24" + input: "scalar_tensor_default_24" + output: "mul_72" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_48" + output: "unsqueeze_192" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_192" + output: "unsqueeze_193" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_72" + output: "unsqueeze_194" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_194" + output: "unsqueeze_195" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_24" + input: "unsqueeze_193" + output: "sub_24" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_24" + input: "unsqueeze_195" + output: "mul_73" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.2.bn1.weight" + output: "unsqueeze_196" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_196" + output: "unsqueeze_197" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_73" + input: "unsqueeze_197" + output: "mul_74" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.2.bn1.bias" + output: "unsqueeze_198" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_198" + output: "unsqueeze_199" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_74" + input: "unsqueeze_199" + output: "add_53" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_53" + output: "getattr_getattr_l__self___blocks___3_____2___bn1_act_1" + name: 
"torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_dw_1" + input: "copy_16" + input: "blocks.3.2.conv_dw.weight" + output: "convolution_25" + node { + input: "blocks.3.2.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_16" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_16" + input: "blocks.3.2.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_25" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 384 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn2_act_1" + input: "add_55" + output: "copy_17" + node { + input: "add_55" + output: "hardtanh_17" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_55" + input: "hardtanh_17" + output: "copy_17" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1" + input: "convolution_25" + input: "blocks.3.2.bn2.running_mean" + input: "blocks.3.2.bn2.running_var" + input: "blocks.3.2.bn2.weight" + input: "blocks.3.2.bn2.bias" + output: 
"getattr_getattr_l__self___blocks___3_____2___bn2_act_1" + node { + input: "blocks.3.2.bn2.running_mean" + output: "convert_element_type_50" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.2.bn2.running_var" + output: "convert_element_type_51" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_51" + input: "_val_5" + output: "add_54" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_54" + output: "sqrt_25" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_25" + output: "reciprocal_25" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_25" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_25" + input: "scalar_tensor_default_25" + output: "mul_75" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_50" + output: "unsqueeze_200" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_200" + output: "unsqueeze_201" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_75" + output: "unsqueeze_202" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_202" + output: "unsqueeze_203" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_25" + input: "unsqueeze_201" + output: "sub_25" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_25" + input: "unsqueeze_203" + output: "mul_76" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.2.bn2.weight" + output: "unsqueeze_204" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_204" + output: "unsqueeze_205" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_76" + input: 
"unsqueeze_205" + output: "mul_77" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.2.bn2.bias" + output: "unsqueeze_206" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_206" + output: "unsqueeze_207" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_77" + input: "unsqueeze_207" + output: "add_55" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_55" + output: "getattr_getattr_l__self___blocks___3_____2___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____2___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_pwl_1" + input: "copy_17" + input: "blocks.3.2.conv_pwl.weight" + output: "convolution_26" + node { + input: "blocks.3.2.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_17" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_17" + input: "blocks.3.2.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_26" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1" + input: "convolution_26" + input: "blocks.3.2.bn3.running_mean" + input: "blocks.3.2.bn3.running_var" + input: "blocks.3.2.bn3.weight" + input: 
"blocks.3.2.bn3.bias" + output: "add_57" + node { + input: "blocks.3.2.bn3.running_mean" + output: "convert_element_type_52" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.2.bn3.running_var" + output: "convert_element_type_53" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_53" + input: "_val_5" + output: "add_56" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_56" + output: "sqrt_26" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_26" + output: "reciprocal_26" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_26" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_26" + input: "scalar_tensor_default_26" + output: "mul_78" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_52" + output: "unsqueeze_208" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_208" + output: "unsqueeze_209" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_78" + output: "unsqueeze_210" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_210" + output: "unsqueeze_211" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_26" + input: "unsqueeze_209" + output: "sub_26" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_26" + input: "unsqueeze_211" + output: "mul_79" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.2.bn3.weight" + output: "unsqueeze_212" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_212" + output: "unsqueeze_213" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_79" + input: "unsqueeze_213" + output: 
"mul_80" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.2.bn3.bias" + output: "unsqueeze_214" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_214" + output: "unsqueeze_215" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_80" + input: "unsqueeze_215" + output: "add_57" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1" + input: "add_51" + input: "blocks.3.2.conv_pw.weight" + input: "blocks.3.2.bn1.running_mean" + input: "blocks.3.2.bn1.running_var" + input: "blocks.3.2.bn1.weight" + input: "blocks.3.2.bn1.bias" + input: "blocks.3.2.conv_dw.weight" + input: "blocks.3.2.bn2.running_mean" + input: "blocks.3.2.bn2.running_var" + input: "blocks.3.2.bn2.weight" + input: "blocks.3.2.bn2.bias" + input: "blocks.3.2.conv_pwl.weight" + input: "blocks.3.2.bn3.running_mean" + input: "blocks.3.2.bn3.running_var" + input: "blocks.3.2.bn3.weight" + input: "blocks.3.2.bn3.bias" + output: "add_58" + node { + input: "add_51" + input: "blocks.3.2.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___3_____2___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____2___conv_pw_1" + input: "blocks.3.2.bn1.running_mean" + input: "blocks.3.2.bn1.running_var" + input: "blocks.3.2.bn1.weight" + input: "blocks.3.2.bn1.bias" + output: "getattr_getattr_l__self___blocks___3_____2___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____2___bn1_1" + input: "blocks.3.2.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___3_____2___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____2___conv_dw_1" + input: "blocks.3.2.bn2.running_mean" + input: "blocks.3.2.bn2.running_var" + input: "blocks.3.2.bn2.weight" + input: "blocks.3.2.bn2.bias" + output: "getattr_getattr_l__self___blocks___3_____2___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____2___bn2_1" + input: "blocks.3.2.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___3_____2___conv_pwl_1" + name: 
"torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____2___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____2___conv_pwl_1" + input: "blocks.3.2.bn3.running_mean" + input: "blocks.3.2.bn3.running_var" + input: "blocks.3.2.bn3.weight" + input: "blocks.3.2.bn3.bias" + output: "getattr_getattr_l__self___blocks___3_____2___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____2___bn3_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____2___bn3_1" + input: "add_51" + output: "add_58" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_pw_1" + input: "add_58" + input: "blocks.3.3.conv_pw.weight" + output: "convolution_27" + node { + input: "blocks.3.3.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_58" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_58" + input: "blocks.3.3.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_27" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn1_act_1" + input: "add_60" + output: "copy_18" + node { + input: "add_60" + output: "hardtanh_18" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + 
} + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_60" + input: "hardtanh_18" + output: "copy_18" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1" + input: "convolution_27" + input: "blocks.3.3.bn1.running_mean" + input: "blocks.3.3.bn1.running_var" + input: "blocks.3.3.bn1.weight" + input: "blocks.3.3.bn1.bias" + output: "getattr_getattr_l__self___blocks___3_____3___bn1_act_1" + node { + input: "blocks.3.3.bn1.running_mean" + output: "convert_element_type_54" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.3.bn1.running_var" + output: "convert_element_type_55" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_55" + input: "_val_5" + output: "add_59" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_59" + output: "sqrt_27" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_27" + output: "reciprocal_27" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_27" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_27" + input: "scalar_tensor_default_27" + output: "mul_81" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_54" + output: "unsqueeze_216" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_216" + output: "unsqueeze_217" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_81" + output: "unsqueeze_218" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_218" + output: "unsqueeze_219" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_27" + input: "unsqueeze_217" + output: "sub_27" + 
name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_27" + input: "unsqueeze_219" + output: "mul_82" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.3.bn1.weight" + output: "unsqueeze_220" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_220" + output: "unsqueeze_221" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_82" + input: "unsqueeze_221" + output: "mul_83" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.3.bn1.bias" + output: "unsqueeze_222" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_222" + output: "unsqueeze_223" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_83" + input: "unsqueeze_223" + output: "add_60" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_60" + output: "getattr_getattr_l__self___blocks___3_____3___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_dw_1" + input: "copy_18" + input: "blocks.3.3.conv_dw.weight" + output: "convolution_28" + node { + input: "blocks.3.3.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_18" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_18" + input: "blocks.3.3.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_28" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: 
INTS + } + attribute { + name: "groups" + i: 384 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn2_act_1" + input: "add_62" + output: "copy_19" + node { + input: "add_62" + output: "hardtanh_19" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_62" + input: "hardtanh_19" + output: "copy_19" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1" + input: "convolution_28" + input: "blocks.3.3.bn2.running_mean" + input: "blocks.3.3.bn2.running_var" + input: "blocks.3.3.bn2.weight" + input: "blocks.3.3.bn2.bias" + output: "getattr_getattr_l__self___blocks___3_____3___bn2_act_1" + node { + input: "blocks.3.3.bn2.running_mean" + output: "convert_element_type_56" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.3.bn2.running_var" + output: "convert_element_type_57" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_57" + input: "_val_5" + output: "add_61" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_61" + output: "sqrt_28" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_28" + output: "reciprocal_28" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_28" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_28" + input: "scalar_tensor_default_28" + output: "mul_84" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_56" + output: "unsqueeze_224" + name: "aten_unsqueeze_11" + op_type: 
"aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_224" + output: "unsqueeze_225" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_84" + output: "unsqueeze_226" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_226" + output: "unsqueeze_227" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_28" + input: "unsqueeze_225" + output: "sub_28" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_28" + input: "unsqueeze_227" + output: "mul_85" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.3.bn2.weight" + output: "unsqueeze_228" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_228" + output: "unsqueeze_229" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_85" + input: "unsqueeze_229" + output: "mul_86" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.3.bn2.bias" + output: "unsqueeze_230" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_230" + output: "unsqueeze_231" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_86" + input: "unsqueeze_231" + output: "add_62" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_62" + output: "getattr_getattr_l__self___blocks___3_____3___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___3_____3___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_pwl_1" + input: "copy_19" + input: "blocks.3.3.conv_pwl.weight" + output: "convolution_29" + node { + input: "blocks.3.3.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + 
output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_19" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_19" + input: "blocks.3.3.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_29" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1" + input: "convolution_29" + input: "blocks.3.3.bn3.running_mean" + input: "blocks.3.3.bn3.running_var" + input: "blocks.3.3.bn3.weight" + input: "blocks.3.3.bn3.bias" + output: "add_64" + node { + input: "blocks.3.3.bn3.running_mean" + output: "convert_element_type_58" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.3.bn3.running_var" + output: "convert_element_type_59" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_59" + input: "_val_5" + output: "add_63" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_63" + output: "sqrt_29" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_29" + output: "reciprocal_29" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_29" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_29" + input: "scalar_tensor_default_29" + output: "mul_87" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_58" + output: "unsqueeze_232" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + 
attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_232" + output: "unsqueeze_233" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_87" + output: "unsqueeze_234" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_234" + output: "unsqueeze_235" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_29" + input: "unsqueeze_233" + output: "sub_29" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_29" + input: "unsqueeze_235" + output: "mul_88" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.3.bn3.weight" + output: "unsqueeze_236" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_236" + output: "unsqueeze_237" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_88" + input: "unsqueeze_237" + output: "mul_89" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.3.3.bn3.bias" + output: "unsqueeze_238" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_238" + output: "unsqueeze_239" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_89" + input: "unsqueeze_239" + output: "add_64" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1" + input: "add_58" + input: "blocks.3.3.conv_pw.weight" + input: "blocks.3.3.bn1.running_mean" + input: "blocks.3.3.bn1.running_var" + input: "blocks.3.3.bn1.weight" + input: "blocks.3.3.bn1.bias" + input: "blocks.3.3.conv_dw.weight" + input: "blocks.3.3.bn2.running_mean" + input: "blocks.3.3.bn2.running_var" + input: "blocks.3.3.bn2.weight" + input: "blocks.3.3.bn2.bias" + input: "blocks.3.3.conv_pwl.weight" + input: "blocks.3.3.bn3.running_mean" + input: "blocks.3.3.bn3.running_var" + input: "blocks.3.3.bn3.weight" + input: "blocks.3.3.bn3.bias" + output: "add_65" + node { + input: "add_58" + input: "blocks.3.3.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___3_____3___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: 
"getattr_getattr_l__self___blocks___3_____3___conv_pw_1" + input: "blocks.3.3.bn1.running_mean" + input: "blocks.3.3.bn1.running_var" + input: "blocks.3.3.bn1.weight" + input: "blocks.3.3.bn1.bias" + output: "getattr_getattr_l__self___blocks___3_____3___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____3___bn1_1" + input: "blocks.3.3.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___3_____3___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____3___conv_dw_1" + input: "blocks.3.3.bn2.running_mean" + input: "blocks.3.3.bn2.running_var" + input: "blocks.3.3.bn2.weight" + input: "blocks.3.3.bn2.bias" + output: "getattr_getattr_l__self___blocks___3_____3___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____3___bn2_1" + input: "blocks.3.3.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___3_____3___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___3_____3___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___3_____3___conv_pwl_1" + input: "blocks.3.3.bn3.running_mean" + input: "blocks.3.3.bn3.running_var" + input: "blocks.3.3.bn3.weight" + input: "blocks.3.3.bn3.bias" + output: "getattr_getattr_l__self___blocks___3_____3___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___3_____3___bn3_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___3_____3___bn3_1" + input: "add_58" + output: "add_65" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_container_Sequential_blocks_3_1" + input: "add_38" + input: "blocks.3.0.conv_pw.weight" + input: "blocks.3.0.bn1.running_mean" + input: "blocks.3.0.bn1.running_var" + input: "blocks.3.0.bn1.weight" + input: "blocks.3.0.bn1.bias" + input: "blocks.3.0.conv_dw.weight" + input: "blocks.3.0.bn2.running_mean" + input: "blocks.3.0.bn2.running_var" + input: "blocks.3.0.bn2.weight" + input: "blocks.3.0.bn2.bias" + input: "blocks.3.0.conv_pwl.weight" + input: "blocks.3.0.bn3.running_mean" + input: "blocks.3.0.bn3.running_var" + input: "blocks.3.0.bn3.weight" + input: "blocks.3.0.bn3.bias" + input: "blocks.3.1.conv_pw.weight" + input: 
"blocks.3.1.bn1.running_mean" + input: "blocks.3.1.bn1.running_var" + input: "blocks.3.1.bn1.weight" + input: "blocks.3.1.bn1.bias" + input: "blocks.3.1.conv_dw.weight" + input: "blocks.3.1.bn2.running_mean" + input: "blocks.3.1.bn2.running_var" + input: "blocks.3.1.bn2.weight" + input: "blocks.3.1.bn2.bias" + input: "blocks.3.1.conv_pwl.weight" + input: "blocks.3.1.bn3.running_mean" + input: "blocks.3.1.bn3.running_var" + input: "blocks.3.1.bn3.weight" + input: "blocks.3.1.bn3.bias" + input: "blocks.3.2.conv_pw.weight" + input: "blocks.3.2.bn1.running_mean" + input: "blocks.3.2.bn1.running_var" + input: "blocks.3.2.bn1.weight" + input: "blocks.3.2.bn1.bias" + input: "blocks.3.2.conv_dw.weight" + input: "blocks.3.2.bn2.running_mean" + input: "blocks.3.2.bn2.running_var" + input: "blocks.3.2.bn2.weight" + input: "blocks.3.2.bn2.bias" + input: "blocks.3.2.conv_pwl.weight" + input: "blocks.3.2.bn3.running_mean" + input: "blocks.3.2.bn3.running_var" + input: "blocks.3.2.bn3.weight" + input: "blocks.3.2.bn3.bias" + input: "blocks.3.3.conv_pw.weight" + input: "blocks.3.3.bn1.running_mean" + input: "blocks.3.3.bn1.running_var" + input: "blocks.3.3.bn1.weight" + input: "blocks.3.3.bn1.bias" + input: "blocks.3.3.conv_dw.weight" + input: "blocks.3.3.bn2.running_mean" + input: "blocks.3.3.bn2.running_var" + input: "blocks.3.3.bn2.weight" + input: "blocks.3.3.bn2.bias" + input: "blocks.3.3.conv_pwl.weight" + input: "blocks.3.3.bn3.running_mean" + input: "blocks.3.3.bn3.running_var" + input: "blocks.3.3.bn3.weight" + input: "blocks.3.3.bn3.bias" + output: "blocks_3_3_1" + node { + input: "add_38" + input: "blocks.3.0.conv_pw.weight" + input: "blocks.3.0.bn1.running_mean" + input: "blocks.3.0.bn1.running_var" + input: "blocks.3.0.bn1.weight" + input: "blocks.3.0.bn1.bias" + input: "blocks.3.0.conv_dw.weight" + input: "blocks.3.0.bn2.running_mean" + input: "blocks.3.0.bn2.running_var" + input: "blocks.3.0.bn2.weight" + input: "blocks.3.0.bn2.bias" + input: "blocks.3.0.conv_pwl.weight" + input: "blocks.3.0.bn3.running_mean" + input: "blocks.3.0.bn3.running_var" + input: "blocks.3.0.bn3.weight" + input: "blocks.3.0.bn3.bias" + output: "blocks_3_0_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_0_1_0" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_0_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "blocks_3_0_1" + input: "blocks.3.1.conv_pw.weight" + input: "blocks.3.1.bn1.running_mean" + input: "blocks.3.1.bn1.running_var" + input: "blocks.3.1.bn1.weight" + input: "blocks.3.1.bn1.bias" + input: "blocks.3.1.conv_dw.weight" + input: "blocks.3.1.bn2.running_mean" + input: "blocks.3.1.bn2.running_var" + input: "blocks.3.1.bn2.weight" + input: "blocks.3.1.bn2.bias" + input: "blocks.3.1.conv_pwl.weight" + input: "blocks.3.1.bn3.running_mean" + input: "blocks.3.1.bn3.running_var" + input: "blocks.3.1.bn3.weight" + input: "blocks.3.1.bn3.bias" + output: "blocks_3_1_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1_1" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "blocks_3_1_1" + input: "blocks.3.2.conv_pw.weight" + input: "blocks.3.2.bn1.running_mean" + input: "blocks.3.2.bn1.running_var" + input: "blocks.3.2.bn1.weight" + input: "blocks.3.2.bn1.bias" + input: "blocks.3.2.conv_dw.weight" + input: "blocks.3.2.bn2.running_mean" + input: "blocks.3.2.bn2.running_var" + input: "blocks.3.2.bn2.weight" + input: "blocks.3.2.bn2.bias" + input: "blocks.3.2.conv_pwl.weight" 
+ input: "blocks.3.2.bn3.running_mean" + input: "blocks.3.2.bn3.running_var" + input: "blocks.3.2.bn3.weight" + input: "blocks.3.2.bn3.bias" + output: "blocks_3_2_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1_2" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "blocks_3_2_1" + input: "blocks.3.3.conv_pw.weight" + input: "blocks.3.3.bn1.running_mean" + input: "blocks.3.3.bn1.running_var" + input: "blocks.3.3.bn1.weight" + input: "blocks.3.3.bn1.bias" + input: "blocks.3.3.conv_dw.weight" + input: "blocks.3.3.bn2.running_mean" + input: "blocks.3.3.bn2.running_var" + input: "blocks.3.3.bn2.weight" + input: "blocks.3.3.bn2.bias" + input: "blocks.3.3.conv_pwl.weight" + input: "blocks.3.3.bn3.running_mean" + input: "blocks.3.3.bn3.running_var" + input: "blocks.3.3.bn3.weight" + input: "blocks.3.3.bn3.bias" + output: "blocks_3_3_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1_3" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_3_3_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_pw_1" + input: "add_65" + input: "blocks.4.0.conv_pw.weight" + output: "convolution_30" + node { + input: "blocks.4.0.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_65" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_65" + input: "blocks.4.0.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_30" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn1_act_1" + input: "add_67" + output: "copy_20" + node { + input: "add_67" + output: "hardtanh_20" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + 
} + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_67" + input: "hardtanh_20" + output: "copy_20" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1" + input: "convolution_30" + input: "blocks.4.0.bn1.running_mean" + input: "blocks.4.0.bn1.running_var" + input: "blocks.4.0.bn1.weight" + input: "blocks.4.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___4_____0___bn1_act_1" + node { + input: "blocks.4.0.bn1.running_mean" + output: "convert_element_type_60" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.0.bn1.running_var" + output: "convert_element_type_61" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_61" + input: "_val_5" + output: "add_66" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_66" + output: "sqrt_30" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_30" + output: "reciprocal_30" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_30" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_30" + input: "scalar_tensor_default_30" + output: "mul_90" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_60" + output: "unsqueeze_240" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_240" + output: "unsqueeze_241" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_90" + output: "unsqueeze_242" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_242" + output: "unsqueeze_243" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_30" + input: "unsqueeze_241" + output: "sub_30" + 
name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_30" + input: "unsqueeze_243" + output: "mul_91" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.0.bn1.weight" + output: "unsqueeze_244" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_244" + output: "unsqueeze_245" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_91" + input: "unsqueeze_245" + output: "mul_92" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.0.bn1.bias" + output: "unsqueeze_246" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_246" + output: "unsqueeze_247" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_92" + input: "unsqueeze_247" + output: "add_67" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_67" + output: "getattr_getattr_l__self___blocks___4_____0___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_dw_1" + input: "copy_20" + input: "blocks.4.0.conv_dw.weight" + output: "convolution_31" + node { + input: "blocks.4.0.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_20" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_20" + input: "blocks.4.0.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_31" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: 
INTS + } + attribute { + name: "groups" + i: 384 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn2_act_1" + input: "add_69" + output: "copy_21" + node { + input: "add_69" + output: "hardtanh_21" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_69" + input: "hardtanh_21" + output: "copy_21" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1" + input: "convolution_31" + input: "blocks.4.0.bn2.running_mean" + input: "blocks.4.0.bn2.running_var" + input: "blocks.4.0.bn2.weight" + input: "blocks.4.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___4_____0___bn2_act_1" + node { + input: "blocks.4.0.bn2.running_mean" + output: "convert_element_type_62" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.0.bn2.running_var" + output: "convert_element_type_63" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_63" + input: "_val_5" + output: "add_68" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_68" + output: "sqrt_31" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_31" + output: "reciprocal_31" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_31" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_31" + input: "scalar_tensor_default_31" + output: "mul_93" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_62" + output: "unsqueeze_248" + name: "aten_unsqueeze_11" + op_type: 
"aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_248" + output: "unsqueeze_249" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_93" + output: "unsqueeze_250" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_250" + output: "unsqueeze_251" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_31" + input: "unsqueeze_249" + output: "sub_31" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_31" + input: "unsqueeze_251" + output: "mul_94" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.0.bn2.weight" + output: "unsqueeze_252" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_252" + output: "unsqueeze_253" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_94" + input: "unsqueeze_253" + output: "mul_95" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.0.bn2.bias" + output: "unsqueeze_254" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_254" + output: "unsqueeze_255" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_95" + input: "unsqueeze_255" + output: "add_69" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_69" + output: "getattr_getattr_l__self___blocks___4_____0___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____0___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_pwl_1" + input: "copy_21" + input: "blocks.4.0.conv_pwl.weight" + output: "convolution_32" + node { + input: "blocks.4.0.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + 
output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_21" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_21" + input: "blocks.4.0.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_32" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1" + input: "convolution_32" + input: "blocks.4.0.bn3.running_mean" + input: "blocks.4.0.bn3.running_var" + input: "blocks.4.0.bn3.weight" + input: "blocks.4.0.bn3.bias" + output: "add_71" + node { + input: "blocks.4.0.bn3.running_mean" + output: "convert_element_type_64" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.0.bn3.running_var" + output: "convert_element_type_65" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_65" + input: "_val_5" + output: "add_70" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_70" + output: "sqrt_32" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_32" + output: "reciprocal_32" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_32" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_32" + input: "scalar_tensor_default_32" + output: "mul_96" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_64" + output: "unsqueeze_256" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + 
attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_256" + output: "unsqueeze_257" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_96" + output: "unsqueeze_258" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_258" + output: "unsqueeze_259" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_32" + input: "unsqueeze_257" + output: "sub_32" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_32" + input: "unsqueeze_259" + output: "mul_97" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.0.bn3.weight" + output: "unsqueeze_260" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_260" + output: "unsqueeze_261" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_97" + input: "unsqueeze_261" + output: "mul_98" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.0.bn3.bias" + output: "unsqueeze_262" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_262" + output: "unsqueeze_263" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_98" + input: "unsqueeze_263" + output: "add_71" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_4_0_1" + input: "add_65" + input: "blocks.4.0.conv_pw.weight" + input: "blocks.4.0.bn1.running_mean" + input: "blocks.4.0.bn1.running_var" + input: "blocks.4.0.bn1.weight" + input: "blocks.4.0.bn1.bias" + input: "blocks.4.0.conv_dw.weight" + input: "blocks.4.0.bn2.running_mean" + input: "blocks.4.0.bn2.running_var" + input: "blocks.4.0.bn2.weight" + input: "blocks.4.0.bn2.bias" + input: "blocks.4.0.conv_pwl.weight" + input: "blocks.4.0.bn3.running_mean" + input: "blocks.4.0.bn3.running_var" + input: "blocks.4.0.bn3.weight" + input: "blocks.4.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___4_____0___bn3_1" + node { + input: "add_65" + input: "blocks.4.0.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___4_____0___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: 
"getattr_getattr_l__self___blocks___4_____0___conv_pw_1" + input: "blocks.4.0.bn1.running_mean" + input: "blocks.4.0.bn1.running_var" + input: "blocks.4.0.bn1.weight" + input: "blocks.4.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___4_____0___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___4_____0___bn1_1" + input: "blocks.4.0.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___4_____0___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___4_____0___conv_dw_1" + input: "blocks.4.0.bn2.running_mean" + input: "blocks.4.0.bn2.running_var" + input: "blocks.4.0.bn2.weight" + input: "blocks.4.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___4_____0___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___4_____0___bn2_1" + input: "blocks.4.0.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___4_____0___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____0___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___4_____0___conv_pwl_1" + input: "blocks.4.0.bn3.running_mean" + input: "blocks.4.0.bn3.running_var" + input: "blocks.4.0.bn3.weight" + input: "blocks.4.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___4_____0___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____0___bn3_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_pw_1" + input: "add_71" + input: "blocks.4.1.conv_pw.weight" + output: "convolution_33" + node { + input: "blocks.4.1.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_71" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + 
name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_71" + input: "blocks.4.1.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_33" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn1_act_1" + input: "add_73" + output: "copy_22" + node { + input: "add_73" + output: "hardtanh_22" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_73" + input: "hardtanh_22" + output: "copy_22" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1" + input: "convolution_33" + input: "blocks.4.1.bn1.running_mean" + input: "blocks.4.1.bn1.running_var" + input: "blocks.4.1.bn1.weight" + input: "blocks.4.1.bn1.bias" + output: "getattr_getattr_l__self___blocks___4_____1___bn1_act_1" + node { + input: "blocks.4.1.bn1.running_mean" + output: "convert_element_type_66" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.1.bn1.running_var" + output: "convert_element_type_67" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_67" + input: "_val_5" + output: "add_72" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_72" + output: "sqrt_33" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_33" + output: "reciprocal_33" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: 
"scalar_tensor_default_33" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_33" + input: "scalar_tensor_default_33" + output: "mul_99" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_66" + output: "unsqueeze_264" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_264" + output: "unsqueeze_265" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_99" + output: "unsqueeze_266" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_266" + output: "unsqueeze_267" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_33" + input: "unsqueeze_265" + output: "sub_33" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_33" + input: "unsqueeze_267" + output: "mul_100" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.1.bn1.weight" + output: "unsqueeze_268" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_268" + output: "unsqueeze_269" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_100" + input: "unsqueeze_269" + output: "mul_101" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.1.bn1.bias" + output: "unsqueeze_270" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_270" + output: "unsqueeze_271" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_101" + input: "unsqueeze_271" + output: "add_73" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_73" + output: "getattr_getattr_l__self___blocks___4_____1___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_dw_1" + input: "copy_22" + input: "blocks.4.1.conv_dw.weight" + 
output: "convolution_34" + node { + input: "blocks.4.1.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_22" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_22" + input: "blocks.4.1.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_34" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 576 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn2_act_1" + input: "add_75" + output: "copy_23" + node { + input: "add_75" + output: "hardtanh_23" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_75" + input: "hardtanh_23" + output: "copy_23" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1" + input: "convolution_34" + input: "blocks.4.1.bn2.running_mean" + input: "blocks.4.1.bn2.running_var" + input: "blocks.4.1.bn2.weight" + input: "blocks.4.1.bn2.bias" + output: "getattr_getattr_l__self___blocks___4_____1___bn2_act_1" + node { + input: "blocks.4.1.bn2.running_mean" + output: "convert_element_type_68" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.1.bn2.running_var" + output: "convert_element_type_69" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: 
"value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_69" + input: "_val_5" + output: "add_74" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_74" + output: "sqrt_34" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_34" + output: "reciprocal_34" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_34" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_34" + input: "scalar_tensor_default_34" + output: "mul_102" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_68" + output: "unsqueeze_272" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_272" + output: "unsqueeze_273" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_102" + output: "unsqueeze_274" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_274" + output: "unsqueeze_275" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_34" + input: "unsqueeze_273" + output: "sub_34" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_34" + input: "unsqueeze_275" + output: "mul_103" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.1.bn2.weight" + output: "unsqueeze_276" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_276" + output: "unsqueeze_277" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_103" + input: "unsqueeze_277" + output: "mul_104" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.1.bn2.bias" + output: "unsqueeze_278" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_278" + output: "unsqueeze_279" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_104" + input: "unsqueeze_279" + output: "add_75" + name: "aten_add_22" + op_type: "aten_add" + 
attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_75" + output: "getattr_getattr_l__self___blocks___4_____1___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____1___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_pwl_1" + input: "copy_23" + input: "blocks.4.1.conv_pwl.weight" + output: "convolution_35" + node { + input: "blocks.4.1.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_23" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_23" + input: "blocks.4.1.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_35" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1" + input: "convolution_35" + input: "blocks.4.1.bn3.running_mean" + input: "blocks.4.1.bn3.running_var" + input: "blocks.4.1.bn3.weight" + input: "blocks.4.1.bn3.bias" + output: "add_77" + node { + input: "blocks.4.1.bn3.running_mean" + output: "convert_element_type_70" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.1.bn3.running_var" + output: "convert_element_type_71" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + 
data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_71" + input: "_val_5" + output: "add_76" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_76" + output: "sqrt_35" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_35" + output: "reciprocal_35" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_35" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_35" + input: "scalar_tensor_default_35" + output: "mul_105" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_70" + output: "unsqueeze_280" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_280" + output: "unsqueeze_281" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_105" + output: "unsqueeze_282" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_282" + output: "unsqueeze_283" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_35" + input: "unsqueeze_281" + output: "sub_35" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_35" + input: "unsqueeze_283" + output: "mul_106" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.1.bn3.weight" + output: "unsqueeze_284" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_284" + output: "unsqueeze_285" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_106" + input: "unsqueeze_285" + output: "mul_107" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.1.bn3.bias" + output: "unsqueeze_286" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_286" + output: "unsqueeze_287" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_107" + input: "unsqueeze_287" + output: "add_77" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: 
"alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1" + input: "add_71" + input: "blocks.4.1.conv_pw.weight" + input: "blocks.4.1.bn1.running_mean" + input: "blocks.4.1.bn1.running_var" + input: "blocks.4.1.bn1.weight" + input: "blocks.4.1.bn1.bias" + input: "blocks.4.1.conv_dw.weight" + input: "blocks.4.1.bn2.running_mean" + input: "blocks.4.1.bn2.running_var" + input: "blocks.4.1.bn2.weight" + input: "blocks.4.1.bn2.bias" + input: "blocks.4.1.conv_pwl.weight" + input: "blocks.4.1.bn3.running_mean" + input: "blocks.4.1.bn3.running_var" + input: "blocks.4.1.bn3.weight" + input: "blocks.4.1.bn3.bias" + output: "add_78" + node { + input: "add_71" + input: "blocks.4.1.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___4_____1___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___4_____1___conv_pw_1" + input: "blocks.4.1.bn1.running_mean" + input: "blocks.4.1.bn1.running_var" + input: "blocks.4.1.bn1.weight" + input: "blocks.4.1.bn1.bias" + output: "getattr_getattr_l__self___blocks___4_____1___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___4_____1___bn1_1" + input: "blocks.4.1.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___4_____1___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___4_____1___conv_dw_1" + input: "blocks.4.1.bn2.running_mean" + input: "blocks.4.1.bn2.running_var" + input: "blocks.4.1.bn2.weight" + input: "blocks.4.1.bn2.bias" + output: "getattr_getattr_l__self___blocks___4_____1___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___4_____1___bn2_1" + input: "blocks.4.1.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___4_____1___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____1___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___4_____1___conv_pwl_1" + input: "blocks.4.1.bn3.running_mean" + input: "blocks.4.1.bn3.running_var" + input: "blocks.4.1.bn3.weight" + input: "blocks.4.1.bn3.bias" + output: "getattr_getattr_l__self___blocks___4_____1___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1_5" + op_type: 
"timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____1___bn3_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___4_____1___bn3_1" + input: "add_71" + output: "add_78" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_pw_1" + input: "add_78" + input: "blocks.4.2.conv_pw.weight" + output: "convolution_36" + node { + input: "blocks.4.2.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_78" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_78" + input: "blocks.4.2.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_36" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn1_act_1" + input: "add_80" + output: "copy_24" + node { + input: "add_80" + output: "hardtanh_24" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_80" + input: "hardtanh_24" + output: "copy_24" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1" + input: "convolution_36" + 
input: "blocks.4.2.bn1.running_mean" + input: "blocks.4.2.bn1.running_var" + input: "blocks.4.2.bn1.weight" + input: "blocks.4.2.bn1.bias" + output: "getattr_getattr_l__self___blocks___4_____2___bn1_act_1" + node { + input: "blocks.4.2.bn1.running_mean" + output: "convert_element_type_72" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.2.bn1.running_var" + output: "convert_element_type_73" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_73" + input: "_val_5" + output: "add_79" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_79" + output: "sqrt_36" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_36" + output: "reciprocal_36" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_36" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_36" + input: "scalar_tensor_default_36" + output: "mul_108" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_72" + output: "unsqueeze_288" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_288" + output: "unsqueeze_289" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_108" + output: "unsqueeze_290" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_290" + output: "unsqueeze_291" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_36" + input: "unsqueeze_289" + output: "sub_36" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_36" + input: "unsqueeze_291" + output: "mul_109" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.2.bn1.weight" + output: "unsqueeze_292" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_292" + output: "unsqueeze_293" + name: "aten_unsqueeze_18" + op_type: 
"aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_109" + input: "unsqueeze_293" + output: "mul_110" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.2.bn1.bias" + output: "unsqueeze_294" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_294" + output: "unsqueeze_295" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_110" + input: "unsqueeze_295" + output: "add_80" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_80" + output: "getattr_getattr_l__self___blocks___4_____2___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_dw_1" + input: "copy_24" + input: "blocks.4.2.conv_dw.weight" + output: "convolution_37" + node { + input: "blocks.4.2.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_24" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_24" + input: "blocks.4.2.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_37" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 576 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: 
"torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn2_act_1" + input: "add_82" + output: "copy_25" + node { + input: "add_82" + output: "hardtanh_25" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_82" + input: "hardtanh_25" + output: "copy_25" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1" + input: "convolution_37" + input: "blocks.4.2.bn2.running_mean" + input: "blocks.4.2.bn2.running_var" + input: "blocks.4.2.bn2.weight" + input: "blocks.4.2.bn2.bias" + output: "getattr_getattr_l__self___blocks___4_____2___bn2_act_1" + node { + input: "blocks.4.2.bn2.running_mean" + output: "convert_element_type_74" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.2.bn2.running_var" + output: "convert_element_type_75" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_75" + input: "_val_5" + output: "add_81" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_81" + output: "sqrt_37" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_37" + output: "reciprocal_37" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_37" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_37" + input: "scalar_tensor_default_37" + output: "mul_111" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_74" + output: "unsqueeze_296" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_296" + output: "unsqueeze_297" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_111" + output: "unsqueeze_298" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } 
+ node { + input: "unsqueeze_298" + output: "unsqueeze_299" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_37" + input: "unsqueeze_297" + output: "sub_37" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_37" + input: "unsqueeze_299" + output: "mul_112" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.2.bn2.weight" + output: "unsqueeze_300" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_300" + output: "unsqueeze_301" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_112" + input: "unsqueeze_301" + output: "mul_113" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.2.bn2.bias" + output: "unsqueeze_302" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_302" + output: "unsqueeze_303" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_113" + input: "unsqueeze_303" + output: "add_82" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_82" + output: "getattr_getattr_l__self___blocks___4_____2___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___4_____2___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_pwl_1" + input: "copy_25" + input: "blocks.4.2.conv_pwl.weight" + output: "convolution_38" + node { + input: "blocks.4.2.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_25" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + 
raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_25" + input: "blocks.4.2.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_38" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1" + input: "convolution_38" + input: "blocks.4.2.bn3.running_mean" + input: "blocks.4.2.bn3.running_var" + input: "blocks.4.2.bn3.weight" + input: "blocks.4.2.bn3.bias" + output: "add_84" + node { + input: "blocks.4.2.bn3.running_mean" + output: "convert_element_type_76" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.2.bn3.running_var" + output: "convert_element_type_77" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_77" + input: "_val_5" + output: "add_83" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_83" + output: "sqrt_38" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_38" + output: "reciprocal_38" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_38" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_38" + input: "scalar_tensor_default_38" + output: "mul_114" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_76" + output: "unsqueeze_304" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_304" + output: "unsqueeze_305" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_114" + output: "unsqueeze_306" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + 
input: "unsqueeze_306" + output: "unsqueeze_307" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_38" + input: "unsqueeze_305" + output: "sub_38" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_38" + input: "unsqueeze_307" + output: "mul_115" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.2.bn3.weight" + output: "unsqueeze_308" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_308" + output: "unsqueeze_309" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_115" + input: "unsqueeze_309" + output: "mul_116" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.4.2.bn3.bias" + output: "unsqueeze_310" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_310" + output: "unsqueeze_311" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_116" + input: "unsqueeze_311" + output: "add_84" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1" + input: "add_78" + input: "blocks.4.2.conv_pw.weight" + input: "blocks.4.2.bn1.running_mean" + input: "blocks.4.2.bn1.running_var" + input: "blocks.4.2.bn1.weight" + input: "blocks.4.2.bn1.bias" + input: "blocks.4.2.conv_dw.weight" + input: "blocks.4.2.bn2.running_mean" + input: "blocks.4.2.bn2.running_var" + input: "blocks.4.2.bn2.weight" + input: "blocks.4.2.bn2.bias" + input: "blocks.4.2.conv_pwl.weight" + input: "blocks.4.2.bn3.running_mean" + input: "blocks.4.2.bn3.running_var" + input: "blocks.4.2.bn3.weight" + input: "blocks.4.2.bn3.bias" + output: "add_85" + node { + input: "add_78" + input: "blocks.4.2.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___4_____2___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___4_____2___conv_pw_1" + input: "blocks.4.2.bn1.running_mean" + input: "blocks.4.2.bn1.running_var" + input: "blocks.4.2.bn1.weight" + input: "blocks.4.2.bn1.bias" + output: "getattr_getattr_l__self___blocks___4_____2___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___4_____2___bn1_1" 
+ input: "blocks.4.2.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___4_____2___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___4_____2___conv_dw_1" + input: "blocks.4.2.bn2.running_mean" + input: "blocks.4.2.bn2.running_var" + input: "blocks.4.2.bn2.weight" + input: "blocks.4.2.bn2.bias" + output: "getattr_getattr_l__self___blocks___4_____2___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___4_____2___bn2_1" + input: "blocks.4.2.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___4_____2___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___4_____2___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___4_____2___conv_pwl_1" + input: "blocks.4.2.bn3.running_mean" + input: "blocks.4.2.bn3.running_var" + input: "blocks.4.2.bn3.weight" + input: "blocks.4.2.bn3.bias" + output: "getattr_getattr_l__self___blocks___4_____2___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___4_____2___bn3_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___4_____2___bn3_1" + input: "add_78" + output: "add_85" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_container_Sequential_blocks_4_1" + input: "add_65" + input: "blocks.4.0.conv_pw.weight" + input: "blocks.4.0.bn1.running_mean" + input: "blocks.4.0.bn1.running_var" + input: "blocks.4.0.bn1.weight" + input: "blocks.4.0.bn1.bias" + input: "blocks.4.0.conv_dw.weight" + input: "blocks.4.0.bn2.running_mean" + input: "blocks.4.0.bn2.running_var" + input: "blocks.4.0.bn2.weight" + input: "blocks.4.0.bn2.bias" + input: "blocks.4.0.conv_pwl.weight" + input: "blocks.4.0.bn3.running_mean" + input: "blocks.4.0.bn3.running_var" + input: "blocks.4.0.bn3.weight" + input: "blocks.4.0.bn3.bias" + input: "blocks.4.1.conv_pw.weight" + input: "blocks.4.1.bn1.running_mean" + input: "blocks.4.1.bn1.running_var" + input: "blocks.4.1.bn1.weight" + input: "blocks.4.1.bn1.bias" + input: "blocks.4.1.conv_dw.weight" + input: "blocks.4.1.bn2.running_mean" + input: "blocks.4.1.bn2.running_var" + input: "blocks.4.1.bn2.weight" + input: "blocks.4.1.bn2.bias" + input: "blocks.4.1.conv_pwl.weight" + input: "blocks.4.1.bn3.running_mean" + input: "blocks.4.1.bn3.running_var" + input: "blocks.4.1.bn3.weight" + input: "blocks.4.1.bn3.bias" + input: "blocks.4.2.conv_pw.weight" + input: "blocks.4.2.bn1.running_mean" + 
input: "blocks.4.2.bn1.running_var" + input: "blocks.4.2.bn1.weight" + input: "blocks.4.2.bn1.bias" + input: "blocks.4.2.conv_dw.weight" + input: "blocks.4.2.bn2.running_mean" + input: "blocks.4.2.bn2.running_var" + input: "blocks.4.2.bn2.weight" + input: "blocks.4.2.bn2.bias" + input: "blocks.4.2.conv_pwl.weight" + input: "blocks.4.2.bn3.running_mean" + input: "blocks.4.2.bn3.running_var" + input: "blocks.4.2.bn3.weight" + input: "blocks.4.2.bn3.bias" + output: "blocks_4_2_1" + node { + input: "add_65" + input: "blocks.4.0.conv_pw.weight" + input: "blocks.4.0.bn1.running_mean" + input: "blocks.4.0.bn1.running_var" + input: "blocks.4.0.bn1.weight" + input: "blocks.4.0.bn1.bias" + input: "blocks.4.0.conv_dw.weight" + input: "blocks.4.0.bn2.running_mean" + input: "blocks.4.0.bn2.running_var" + input: "blocks.4.0.bn2.weight" + input: "blocks.4.0.bn2.bias" + input: "blocks.4.0.conv_pwl.weight" + input: "blocks.4.0.bn3.running_mean" + input: "blocks.4.0.bn3.running_var" + input: "blocks.4.0.bn3.weight" + input: "blocks.4.0.bn3.bias" + output: "blocks_4_0_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_4_0_1_0" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_4_0_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "blocks_4_0_1" + input: "blocks.4.1.conv_pw.weight" + input: "blocks.4.1.bn1.running_mean" + input: "blocks.4.1.bn1.running_var" + input: "blocks.4.1.bn1.weight" + input: "blocks.4.1.bn1.bias" + input: "blocks.4.1.conv_dw.weight" + input: "blocks.4.1.bn2.running_mean" + input: "blocks.4.1.bn2.running_var" + input: "blocks.4.1.bn2.weight" + input: "blocks.4.1.bn2.bias" + input: "blocks.4.1.conv_pwl.weight" + input: "blocks.4.1.bn3.running_mean" + input: "blocks.4.1.bn3.running_var" + input: "blocks.4.1.bn3.weight" + input: "blocks.4.1.bn3.bias" + output: "blocks_4_1_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1_1" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_4_1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "blocks_4_1_1" + input: "blocks.4.2.conv_pw.weight" + input: "blocks.4.2.bn1.running_mean" + input: "blocks.4.2.bn1.running_var" + input: "blocks.4.2.bn1.weight" + input: "blocks.4.2.bn1.bias" + input: "blocks.4.2.conv_dw.weight" + input: "blocks.4.2.bn2.running_mean" + input: "blocks.4.2.bn2.running_var" + input: "blocks.4.2.bn2.weight" + input: "blocks.4.2.bn2.bias" + input: "blocks.4.2.conv_pwl.weight" + input: "blocks.4.2.bn3.running_mean" + input: "blocks.4.2.bn3.running_var" + input: "blocks.4.2.bn3.weight" + input: "blocks.4.2.bn3.bias" + output: "blocks_4_2_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1_2" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_4_2_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_pw_1" + input: "add_85" + input: "blocks.5.0.conv_pw.weight" + output: "convolution_39" + node { + input: "blocks.5.0.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" 
+ name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_85" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_85" + input: "blocks.5.0.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_39" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn1_act_1" + input: "add_87" + output: "copy_26" + node { + input: "add_87" + output: "hardtanh_26" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_87" + input: "hardtanh_26" + output: "copy_26" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1" + input: "convolution_39" + input: "blocks.5.0.bn1.running_mean" + input: "blocks.5.0.bn1.running_var" + input: "blocks.5.0.bn1.weight" + input: "blocks.5.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___5_____0___bn1_act_1" + node { + input: "blocks.5.0.bn1.running_mean" + output: "convert_element_type_78" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.0.bn1.running_var" + output: "convert_element_type_79" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_79" + input: "_val_5" + output: "add_86" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_86" + output: "sqrt_39" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { 
+ input: "sqrt_39" + output: "reciprocal_39" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_39" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_39" + input: "scalar_tensor_default_39" + output: "mul_117" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_78" + output: "unsqueeze_312" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_312" + output: "unsqueeze_313" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_117" + output: "unsqueeze_314" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_314" + output: "unsqueeze_315" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_39" + input: "unsqueeze_313" + output: "sub_39" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_39" + input: "unsqueeze_315" + output: "mul_118" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.0.bn1.weight" + output: "unsqueeze_316" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_316" + output: "unsqueeze_317" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_118" + input: "unsqueeze_317" + output: "mul_119" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.0.bn1.bias" + output: "unsqueeze_318" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_318" + output: "unsqueeze_319" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_119" + input: "unsqueeze_319" + output: "add_87" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_87" + output: "getattr_getattr_l__self___blocks___5_____0___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import 
{ + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_dw_1" + input: "copy_26" + input: "blocks.5.0.conv_dw.weight" + output: "convolution_40" + node { + input: "blocks.5.0.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_26" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_26" + input: "blocks.5.0.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_40" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 576 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn2_act_1" + input: "add_89" + output: "copy_27" + node { + input: "add_89" + output: "hardtanh_27" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_89" + input: "hardtanh_27" + output: "copy_27" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1" + input: "convolution_40" + input: "blocks.5.0.bn2.running_mean" + input: "blocks.5.0.bn2.running_var" + input: "blocks.5.0.bn2.weight" + input: "blocks.5.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___5_____0___bn2_act_1" + node { + input: "blocks.5.0.bn2.running_mean" + output: "convert_element_type_80" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: 
"pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.0.bn2.running_var" + output: "convert_element_type_81" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_81" + input: "_val_5" + output: "add_88" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_88" + output: "sqrt_40" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_40" + output: "reciprocal_40" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_40" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_40" + input: "scalar_tensor_default_40" + output: "mul_120" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_80" + output: "unsqueeze_320" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_320" + output: "unsqueeze_321" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_120" + output: "unsqueeze_322" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_322" + output: "unsqueeze_323" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_40" + input: "unsqueeze_321" + output: "sub_40" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_40" + input: "unsqueeze_323" + output: "mul_121" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.0.bn2.weight" + output: "unsqueeze_324" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_324" + output: "unsqueeze_325" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_121" + input: "unsqueeze_325" + output: "mul_122" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.0.bn2.bias" + output: "unsqueeze_326" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT 
+ } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_326" + output: "unsqueeze_327" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_122" + input: "unsqueeze_327" + output: "add_89" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_89" + output: "getattr_getattr_l__self___blocks___5_____0___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____0___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_pwl_1" + input: "copy_27" + input: "blocks.5.0.conv_pwl.weight" + output: "convolution_41" + node { + input: "blocks.5.0.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_27" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_27" + input: "blocks.5.0.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_41" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1" + input: "convolution_41" + input: "blocks.5.0.bn3.running_mean" + input: "blocks.5.0.bn3.running_var" + input: "blocks.5.0.bn3.weight" + input: "blocks.5.0.bn3.bias" + output: "add_91" + node { + input: "blocks.5.0.bn3.running_mean" + output: "convert_element_type_82" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: 
"pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.0.bn3.running_var" + output: "convert_element_type_83" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_83" + input: "_val_5" + output: "add_90" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_90" + output: "sqrt_41" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_41" + output: "reciprocal_41" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_41" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_41" + input: "scalar_tensor_default_41" + output: "mul_123" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_82" + output: "unsqueeze_328" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_328" + output: "unsqueeze_329" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_123" + output: "unsqueeze_330" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_330" + output: "unsqueeze_331" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_41" + input: "unsqueeze_329" + output: "sub_41" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_41" + input: "unsqueeze_331" + output: "mul_124" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.0.bn3.weight" + output: "unsqueeze_332" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_332" + output: "unsqueeze_333" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_124" + input: "unsqueeze_333" + output: "mul_125" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.0.bn3.bias" + output: "unsqueeze_334" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT 
+ } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_334" + output: "unsqueeze_335" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_125" + input: "unsqueeze_335" + output: "add_91" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_5_0_1" + input: "add_85" + input: "blocks.5.0.conv_pw.weight" + input: "blocks.5.0.bn1.running_mean" + input: "blocks.5.0.bn1.running_var" + input: "blocks.5.0.bn1.weight" + input: "blocks.5.0.bn1.bias" + input: "blocks.5.0.conv_dw.weight" + input: "blocks.5.0.bn2.running_mean" + input: "blocks.5.0.bn2.running_var" + input: "blocks.5.0.bn2.weight" + input: "blocks.5.0.bn2.bias" + input: "blocks.5.0.conv_pwl.weight" + input: "blocks.5.0.bn3.running_mean" + input: "blocks.5.0.bn3.running_var" + input: "blocks.5.0.bn3.weight" + input: "blocks.5.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___5_____0___bn3_1" + node { + input: "add_85" + input: "blocks.5.0.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___5_____0___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___5_____0___conv_pw_1" + input: "blocks.5.0.bn1.running_mean" + input: "blocks.5.0.bn1.running_var" + input: "blocks.5.0.bn1.weight" + input: "blocks.5.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___5_____0___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___5_____0___bn1_1" + input: "blocks.5.0.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___5_____0___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___5_____0___conv_dw_1" + input: "blocks.5.0.bn2.running_mean" + input: "blocks.5.0.bn2.running_var" + input: "blocks.5.0.bn2.weight" + input: "blocks.5.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___5_____0___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___5_____0___bn2_1" + input: "blocks.5.0.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___5_____0___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____0___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: 
"getattr_getattr_l__self___blocks___5_____0___conv_pwl_1" + input: "blocks.5.0.bn3.running_mean" + input: "blocks.5.0.bn3.running_var" + input: "blocks.5.0.bn3.weight" + input: "blocks.5.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___5_____0___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____0___bn3_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_pw_1" + input: "add_91" + input: "blocks.5.1.conv_pw.weight" + output: "convolution_42" + node { + input: "blocks.5.1.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_91" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_91" + input: "blocks.5.1.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_42" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn1_act_1" + input: "add_93" + output: "copy_28" + node { + input: "add_93" + output: "hardtanh_28" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_93" + input: "hardtanh_28" + output: "copy_28" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: 
"timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1" + input: "convolution_42" + input: "blocks.5.1.bn1.running_mean" + input: "blocks.5.1.bn1.running_var" + input: "blocks.5.1.bn1.weight" + input: "blocks.5.1.bn1.bias" + output: "getattr_getattr_l__self___blocks___5_____1___bn1_act_1" + node { + input: "blocks.5.1.bn1.running_mean" + output: "convert_element_type_84" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.1.bn1.running_var" + output: "convert_element_type_85" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_85" + input: "_val_5" + output: "add_92" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_92" + output: "sqrt_42" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_42" + output: "reciprocal_42" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_42" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_42" + input: "scalar_tensor_default_42" + output: "mul_126" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_84" + output: "unsqueeze_336" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_336" + output: "unsqueeze_337" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_126" + output: "unsqueeze_338" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_338" + output: "unsqueeze_339" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_42" + input: "unsqueeze_337" + output: "sub_42" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_42" + input: "unsqueeze_339" + output: "mul_127" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.1.bn1.weight" + output: "unsqueeze_340" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: 
"pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_340" + output: "unsqueeze_341" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_127" + input: "unsqueeze_341" + output: "mul_128" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.1.bn1.bias" + output: "unsqueeze_342" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_342" + output: "unsqueeze_343" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_128" + input: "unsqueeze_343" + output: "add_93" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_93" + output: "getattr_getattr_l__self___blocks___5_____1___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_dw_1" + input: "copy_28" + input: "blocks.5.1.conv_dw.weight" + output: "convolution_43" + node { + input: "blocks.5.1.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_28" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_28" + input: "blocks.5.1.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_43" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 960 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: 
"pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn2_act_1" + input: "add_95" + output: "copy_29" + node { + input: "add_95" + output: "hardtanh_29" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_95" + input: "hardtanh_29" + output: "copy_29" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1" + input: "convolution_43" + input: "blocks.5.1.bn2.running_mean" + input: "blocks.5.1.bn2.running_var" + input: "blocks.5.1.bn2.weight" + input: "blocks.5.1.bn2.bias" + output: "getattr_getattr_l__self___blocks___5_____1___bn2_act_1" + node { + input: "blocks.5.1.bn2.running_mean" + output: "convert_element_type_86" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.1.bn2.running_var" + output: "convert_element_type_87" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_87" + input: "_val_5" + output: "add_94" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_94" + output: "sqrt_43" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_43" + output: "reciprocal_43" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_43" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_43" + input: "scalar_tensor_default_43" + output: "mul_129" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_86" + output: "unsqueeze_344" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_344" + output: "unsqueeze_345" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_129" + output: "unsqueeze_346" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + 
type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_346" + output: "unsqueeze_347" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_43" + input: "unsqueeze_345" + output: "sub_43" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_43" + input: "unsqueeze_347" + output: "mul_130" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.1.bn2.weight" + output: "unsqueeze_348" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_348" + output: "unsqueeze_349" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_130" + input: "unsqueeze_349" + output: "mul_131" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.1.bn2.bias" + output: "unsqueeze_350" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_350" + output: "unsqueeze_351" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_131" + input: "unsqueeze_351" + output: "add_95" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_95" + output: "getattr_getattr_l__self___blocks___5_____1___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____1___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_pwl_1" + input: "copy_29" + input: "blocks.5.1.conv_pwl.weight" + output: "convolution_44" + node { + input: "blocks.5.1.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_29" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + 
attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_29" + input: "blocks.5.1.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_44" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1" + input: "convolution_44" + input: "blocks.5.1.bn3.running_mean" + input: "blocks.5.1.bn3.running_var" + input: "blocks.5.1.bn3.weight" + input: "blocks.5.1.bn3.bias" + output: "add_97" + node { + input: "blocks.5.1.bn3.running_mean" + output: "convert_element_type_88" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.1.bn3.running_var" + output: "convert_element_type_89" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_89" + input: "_val_5" + output: "add_96" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_96" + output: "sqrt_44" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_44" + output: "reciprocal_44" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_44" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_44" + input: "scalar_tensor_default_44" + output: "mul_132" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_88" + output: "unsqueeze_352" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_352" + output: "unsqueeze_353" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_132" + output: "unsqueeze_354" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + 
domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_354" + output: "unsqueeze_355" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_44" + input: "unsqueeze_353" + output: "sub_44" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_44" + input: "unsqueeze_355" + output: "mul_133" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.1.bn3.weight" + output: "unsqueeze_356" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_356" + output: "unsqueeze_357" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_133" + input: "unsqueeze_357" + output: "mul_134" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.1.bn3.bias" + output: "unsqueeze_358" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_358" + output: "unsqueeze_359" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_134" + input: "unsqueeze_359" + output: "add_97" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1" + input: "add_91" + input: "blocks.5.1.conv_pw.weight" + input: "blocks.5.1.bn1.running_mean" + input: "blocks.5.1.bn1.running_var" + input: "blocks.5.1.bn1.weight" + input: "blocks.5.1.bn1.bias" + input: "blocks.5.1.conv_dw.weight" + input: "blocks.5.1.bn2.running_mean" + input: "blocks.5.1.bn2.running_var" + input: "blocks.5.1.bn2.weight" + input: "blocks.5.1.bn2.bias" + input: "blocks.5.1.conv_pwl.weight" + input: "blocks.5.1.bn3.running_mean" + input: "blocks.5.1.bn3.running_var" + input: "blocks.5.1.bn3.weight" + input: "blocks.5.1.bn3.bias" + output: "add_98" + node { + input: "add_91" + input: "blocks.5.1.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___5_____1___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___5_____1___conv_pw_1" + input: "blocks.5.1.bn1.running_mean" + input: "blocks.5.1.bn1.running_var" + input: "blocks.5.1.bn1.weight" + input: "blocks.5.1.bn1.bias" + output: "getattr_getattr_l__self___blocks___5_____1___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: 
"getattr_getattr_l__self___blocks___5_____1___bn1_1" + input: "blocks.5.1.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___5_____1___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___5_____1___conv_dw_1" + input: "blocks.5.1.bn2.running_mean" + input: "blocks.5.1.bn2.running_var" + input: "blocks.5.1.bn2.weight" + input: "blocks.5.1.bn2.bias" + output: "getattr_getattr_l__self___blocks___5_____1___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___5_____1___bn2_1" + input: "blocks.5.1.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___5_____1___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____1___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___5_____1___conv_pwl_1" + input: "blocks.5.1.bn3.running_mean" + input: "blocks.5.1.bn3.running_var" + input: "blocks.5.1.bn3.weight" + input: "blocks.5.1.bn3.bias" + output: "getattr_getattr_l__self___blocks___5_____1___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____1___bn3_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___5_____1___bn3_1" + input: "add_91" + output: "add_98" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_pw_1" + input: "add_98" + input: "blocks.5.2.conv_pw.weight" + output: "convolution_45" + node { + input: "blocks.5.2.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_98" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + 
type: TENSOR + } + } + node { + input: "add_98" + input: "blocks.5.2.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_45" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn1_act_1" + input: "add_100" + output: "copy_30" + node { + input: "add_100" + output: "hardtanh_30" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_100" + input: "hardtanh_30" + output: "copy_30" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1" + input: "convolution_45" + input: "blocks.5.2.bn1.running_mean" + input: "blocks.5.2.bn1.running_var" + input: "blocks.5.2.bn1.weight" + input: "blocks.5.2.bn1.bias" + output: "getattr_getattr_l__self___blocks___5_____2___bn1_act_1" + node { + input: "blocks.5.2.bn1.running_mean" + output: "convert_element_type_90" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.2.bn1.running_var" + output: "convert_element_type_91" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_91" + input: "_val_5" + output: "add_99" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_99" + output: "sqrt_45" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_45" + output: "reciprocal_45" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_45" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + 
node { + input: "reciprocal_45" + input: "scalar_tensor_default_45" + output: "mul_135" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_90" + output: "unsqueeze_360" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_360" + output: "unsqueeze_361" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_135" + output: "unsqueeze_362" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_362" + output: "unsqueeze_363" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_45" + input: "unsqueeze_361" + output: "sub_45" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_45" + input: "unsqueeze_363" + output: "mul_136" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.2.bn1.weight" + output: "unsqueeze_364" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_364" + output: "unsqueeze_365" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_136" + input: "unsqueeze_365" + output: "mul_137" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.2.bn1.bias" + output: "unsqueeze_366" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_366" + output: "unsqueeze_367" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_137" + input: "unsqueeze_367" + output: "add_100" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_100" + output: "getattr_getattr_l__self___blocks___5_____2___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_dw_1" + input: "copy_30" + input: "blocks.5.2.conv_dw.weight" + output: "convolution_46" + node { + input: "blocks.5.2.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" 
+ i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_30" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_30" + input: "blocks.5.2.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_46" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 960 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn2_act_1" + input: "add_102" + output: "copy_31" + node { + input: "add_102" + output: "hardtanh_31" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_102" + input: "hardtanh_31" + output: "copy_31" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1" + input: "convolution_46" + input: "blocks.5.2.bn2.running_mean" + input: "blocks.5.2.bn2.running_var" + input: "blocks.5.2.bn2.weight" + input: "blocks.5.2.bn2.bias" + output: "getattr_getattr_l__self___blocks___5_____2___bn2_act_1" + node { + input: "blocks.5.2.bn2.running_mean" + output: "convert_element_type_92" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.2.bn2.running_var" + output: "convert_element_type_93" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_93" + input: "_val_5" + output: "add_101" + name: "aten_add_5" + op_type: "aten_add" + 
attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_101" + output: "sqrt_46" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_46" + output: "reciprocal_46" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_46" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_46" + input: "scalar_tensor_default_46" + output: "mul_138" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_92" + output: "unsqueeze_368" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_368" + output: "unsqueeze_369" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_138" + output: "unsqueeze_370" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_370" + output: "unsqueeze_371" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_46" + input: "unsqueeze_369" + output: "sub_46" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_46" + input: "unsqueeze_371" + output: "mul_139" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.2.bn2.weight" + output: "unsqueeze_372" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_372" + output: "unsqueeze_373" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_139" + input: "unsqueeze_373" + output: "mul_140" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.2.bn2.bias" + output: "unsqueeze_374" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_374" + output: "unsqueeze_375" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_140" + input: "unsqueeze_375" + output: "add_102" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_102" + output: "getattr_getattr_l__self___blocks___5_____2___bn2_act_1" + name: 
"torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___5_____2___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_pwl_1" + input: "copy_31" + input: "blocks.5.2.conv_pwl.weight" + output: "convolution_47" + node { + input: "blocks.5.2.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_31" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_31" + input: "blocks.5.2.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_47" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1" + input: "convolution_47" + input: "blocks.5.2.bn3.running_mean" + input: "blocks.5.2.bn3.running_var" + input: "blocks.5.2.bn3.weight" + input: "blocks.5.2.bn3.bias" + output: "add_104" + node { + input: "blocks.5.2.bn3.running_mean" + output: "convert_element_type_94" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.2.bn3.running_var" + output: "convert_element_type_95" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_95" + input: "_val_5" + output: "add_103" + name: "aten_add_5" + op_type: "aten_add" + 
attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_103" + output: "sqrt_47" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_47" + output: "reciprocal_47" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_47" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_47" + input: "scalar_tensor_default_47" + output: "mul_141" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_94" + output: "unsqueeze_376" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_376" + output: "unsqueeze_377" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_141" + output: "unsqueeze_378" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_378" + output: "unsqueeze_379" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_47" + input: "unsqueeze_377" + output: "sub_47" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_47" + input: "unsqueeze_379" + output: "mul_142" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.2.bn3.weight" + output: "unsqueeze_380" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_380" + output: "unsqueeze_381" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_142" + input: "unsqueeze_381" + output: "mul_143" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.5.2.bn3.bias" + output: "unsqueeze_382" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_382" + output: "unsqueeze_383" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_143" + input: "unsqueeze_383" + output: "add_104" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + 
} + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1" + input: "add_98" + input: "blocks.5.2.conv_pw.weight" + input: "blocks.5.2.bn1.running_mean" + input: "blocks.5.2.bn1.running_var" + input: "blocks.5.2.bn1.weight" + input: "blocks.5.2.bn1.bias" + input: "blocks.5.2.conv_dw.weight" + input: "blocks.5.2.bn2.running_mean" + input: "blocks.5.2.bn2.running_var" + input: "blocks.5.2.bn2.weight" + input: "blocks.5.2.bn2.bias" + input: "blocks.5.2.conv_pwl.weight" + input: "blocks.5.2.bn3.running_mean" + input: "blocks.5.2.bn3.running_var" + input: "blocks.5.2.bn3.weight" + input: "blocks.5.2.bn3.bias" + output: "add_105" + node { + input: "add_98" + input: "blocks.5.2.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___5_____2___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___5_____2___conv_pw_1" + input: "blocks.5.2.bn1.running_mean" + input: "blocks.5.2.bn1.running_var" + input: "blocks.5.2.bn1.weight" + input: "blocks.5.2.bn1.bias" + output: "getattr_getattr_l__self___blocks___5_____2___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___5_____2___bn1_1" + input: "blocks.5.2.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___5_____2___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___5_____2___conv_dw_1" + input: "blocks.5.2.bn2.running_mean" + input: "blocks.5.2.bn2.running_var" + input: "blocks.5.2.bn2.weight" + input: "blocks.5.2.bn2.bias" + output: "getattr_getattr_l__self___blocks___5_____2___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___5_____2___bn2_1" + input: "blocks.5.2.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___5_____2___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___5_____2___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___5_____2___conv_pwl_1" + input: "blocks.5.2.bn3.running_mean" + input: "blocks.5.2.bn3.running_var" + input: "blocks.5.2.bn3.weight" + input: "blocks.5.2.bn3.bias" + output: "getattr_getattr_l__self___blocks___5_____2___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___5_____2___bn3_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___5_____2___bn3_1" + input: "add_98" + output: "add_105" + name: "aten_add_6" + 
op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_container_Sequential_blocks_5_1" + input: "add_85" + input: "blocks.5.0.conv_pw.weight" + input: "blocks.5.0.bn1.running_mean" + input: "blocks.5.0.bn1.running_var" + input: "blocks.5.0.bn1.weight" + input: "blocks.5.0.bn1.bias" + input: "blocks.5.0.conv_dw.weight" + input: "blocks.5.0.bn2.running_mean" + input: "blocks.5.0.bn2.running_var" + input: "blocks.5.0.bn2.weight" + input: "blocks.5.0.bn2.bias" + input: "blocks.5.0.conv_pwl.weight" + input: "blocks.5.0.bn3.running_mean" + input: "blocks.5.0.bn3.running_var" + input: "blocks.5.0.bn3.weight" + input: "blocks.5.0.bn3.bias" + input: "blocks.5.1.conv_pw.weight" + input: "blocks.5.1.bn1.running_mean" + input: "blocks.5.1.bn1.running_var" + input: "blocks.5.1.bn1.weight" + input: "blocks.5.1.bn1.bias" + input: "blocks.5.1.conv_dw.weight" + input: "blocks.5.1.bn2.running_mean" + input: "blocks.5.1.bn2.running_var" + input: "blocks.5.1.bn2.weight" + input: "blocks.5.1.bn2.bias" + input: "blocks.5.1.conv_pwl.weight" + input: "blocks.5.1.bn3.running_mean" + input: "blocks.5.1.bn3.running_var" + input: "blocks.5.1.bn3.weight" + input: "blocks.5.1.bn3.bias" + input: "blocks.5.2.conv_pw.weight" + input: "blocks.5.2.bn1.running_mean" + input: "blocks.5.2.bn1.running_var" + input: "blocks.5.2.bn1.weight" + input: "blocks.5.2.bn1.bias" + input: "blocks.5.2.conv_dw.weight" + input: "blocks.5.2.bn2.running_mean" + input: "blocks.5.2.bn2.running_var" + input: "blocks.5.2.bn2.weight" + input: "blocks.5.2.bn2.bias" + input: "blocks.5.2.conv_pwl.weight" + input: "blocks.5.2.bn3.running_mean" + input: "blocks.5.2.bn3.running_var" + input: "blocks.5.2.bn3.weight" + input: "blocks.5.2.bn3.bias" + output: "blocks_5_2_1" + node { + input: "add_85" + input: "blocks.5.0.conv_pw.weight" + input: "blocks.5.0.bn1.running_mean" + input: "blocks.5.0.bn1.running_var" + input: "blocks.5.0.bn1.weight" + input: "blocks.5.0.bn1.bias" + input: "blocks.5.0.conv_dw.weight" + input: "blocks.5.0.bn2.running_mean" + input: "blocks.5.0.bn2.running_var" + input: "blocks.5.0.bn2.weight" + input: "blocks.5.0.bn2.bias" + input: "blocks.5.0.conv_pwl.weight" + input: "blocks.5.0.bn3.running_mean" + input: "blocks.5.0.bn3.running_var" + input: "blocks.5.0.bn3.weight" + input: "blocks.5.0.bn3.bias" + output: "blocks_5_0_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_5_0_1_0" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_5_0_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "blocks_5_0_1" + input: "blocks.5.1.conv_pw.weight" + input: "blocks.5.1.bn1.running_mean" + input: "blocks.5.1.bn1.running_var" + input: "blocks.5.1.bn1.weight" + input: "blocks.5.1.bn1.bias" + input: "blocks.5.1.conv_dw.weight" + input: "blocks.5.1.bn2.running_mean" + input: "blocks.5.1.bn2.running_var" + input: "blocks.5.1.bn2.weight" + input: "blocks.5.1.bn2.bias" + input: "blocks.5.1.conv_pwl.weight" + input: "blocks.5.1.bn3.running_mean" + input: "blocks.5.1.bn3.running_var" + input: "blocks.5.1.bn3.weight" + input: "blocks.5.1.bn3.bias" + output: "blocks_5_1_1" + name: 
"timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1_1" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_5_1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "blocks_5_1_1" + input: "blocks.5.2.conv_pw.weight" + input: "blocks.5.2.bn1.running_mean" + input: "blocks.5.2.bn1.running_var" + input: "blocks.5.2.bn1.weight" + input: "blocks.5.2.bn1.bias" + input: "blocks.5.2.conv_dw.weight" + input: "blocks.5.2.bn2.running_mean" + input: "blocks.5.2.bn2.running_var" + input: "blocks.5.2.bn2.weight" + input: "blocks.5.2.bn2.bias" + input: "blocks.5.2.conv_pwl.weight" + input: "blocks.5.2.bn3.running_mean" + input: "blocks.5.2.bn3.running_var" + input: "blocks.5.2.bn3.weight" + input: "blocks.5.2.bn3.bias" + output: "blocks_5_2_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1_2" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_5_2_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_pw_1" + input: "add_105" + input: "blocks.6.0.conv_pw.weight" + output: "convolution_48" + node { + input: "blocks.6.0.conv_pw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_105" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "add_105" + input: "blocks.6.0.conv_pw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_48" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn1_act_1" + input: "add_107" + output: "copy_32" + node { + input: "add_107" + output: "hardtanh_32" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_107" + input: "hardtanh_32" + output: 
"copy_32" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1" + input: "convolution_48" + input: "blocks.6.0.bn1.running_mean" + input: "blocks.6.0.bn1.running_var" + input: "blocks.6.0.bn1.weight" + input: "blocks.6.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___6_____0___bn1_act_1" + node { + input: "blocks.6.0.bn1.running_mean" + output: "convert_element_type_96" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.6.0.bn1.running_var" + output: "convert_element_type_97" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_97" + input: "_val_5" + output: "add_106" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_106" + output: "sqrt_48" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_48" + output: "reciprocal_48" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_48" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_48" + input: "scalar_tensor_default_48" + output: "mul_144" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_96" + output: "unsqueeze_384" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_384" + output: "unsqueeze_385" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_144" + output: "unsqueeze_386" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_386" + output: "unsqueeze_387" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_48" + input: "unsqueeze_385" + output: "sub_48" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: 
"sub_48" + input: "unsqueeze_387" + output: "mul_145" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.6.0.bn1.weight" + output: "unsqueeze_388" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_388" + output: "unsqueeze_389" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_145" + input: "unsqueeze_389" + output: "mul_146" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.6.0.bn1.bias" + output: "unsqueeze_390" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_390" + output: "unsqueeze_391" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_146" + input: "unsqueeze_391" + output: "add_107" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_107" + output: "getattr_getattr_l__self___blocks___6_____0___bn1_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn1_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn1_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_dw_1" + input: "copy_32" + input: "blocks.6.0.conv_dw.weight" + output: "convolution_49" + node { + input: "blocks.6.0.conv_dw.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_32" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_32" + input: "blocks.6.0.conv_dw.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_49" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 960 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute 
{ + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn2_act_1" + input: "add_109" + output: "copy_33" + node { + input: "add_109" + output: "hardtanh_33" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_109" + input: "hardtanh_33" + output: "copy_33" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1" + input: "convolution_49" + input: "blocks.6.0.bn2.running_mean" + input: "blocks.6.0.bn2.running_var" + input: "blocks.6.0.bn2.weight" + input: "blocks.6.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___6_____0___bn2_act_1" + node { + input: "blocks.6.0.bn2.running_mean" + output: "convert_element_type_98" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.6.0.bn2.running_var" + output: "convert_element_type_99" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_99" + input: "_val_5" + output: "add_108" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_108" + output: "sqrt_49" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_49" + output: "reciprocal_49" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_49" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_49" + input: "scalar_tensor_default_49" + output: "mul_147" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_98" + output: "unsqueeze_392" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_392" + 
output: "unsqueeze_393" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_147" + output: "unsqueeze_394" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_394" + output: "unsqueeze_395" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_49" + input: "unsqueeze_393" + output: "sub_49" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_49" + input: "unsqueeze_395" + output: "mul_148" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.6.0.bn2.weight" + output: "unsqueeze_396" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_396" + output: "unsqueeze_397" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_148" + input: "unsqueeze_397" + output: "mul_149" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.6.0.bn2.bias" + output: "unsqueeze_398" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_398" + output: "unsqueeze_399" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_149" + input: "unsqueeze_399" + output: "add_109" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_109" + output: "getattr_getattr_l__self___blocks___6_____0___bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_getattr_getattr_L__self___blocks___6_____0___bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_pwl_1" + input: "copy_33" + input: "blocks.6.0.conv_pwl.weight" + output: "convolution_50" + node { + input: "blocks.6.0.conv_pwl.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" 
+ } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_33" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_33" + input: "blocks.6.0.conv_pwl.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_50" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1" + input: "convolution_50" + input: "blocks.6.0.bn3.running_mean" + input: "blocks.6.0.bn3.running_var" + input: "blocks.6.0.bn3.weight" + input: "blocks.6.0.bn3.bias" + output: "add_111" + node { + input: "blocks.6.0.bn3.running_mean" + output: "convert_element_type_100" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.6.0.bn3.running_var" + output: "convert_element_type_101" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_101" + input: "_val_5" + output: "add_110" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_110" + output: "sqrt_50" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_50" + output: "reciprocal_50" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_50" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_50" + input: "scalar_tensor_default_50" + output: "mul_150" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_100" + output: "unsqueeze_400" + name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_400" + output: 
"unsqueeze_401" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_150" + output: "unsqueeze_402" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_402" + output: "unsqueeze_403" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_50" + input: "unsqueeze_401" + output: "sub_50" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_50" + input: "unsqueeze_403" + output: "mul_151" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.6.0.bn3.weight" + output: "unsqueeze_404" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_404" + output: "unsqueeze_405" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_151" + input: "unsqueeze_405" + output: "mul_152" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "blocks.6.0.bn3.bias" + output: "unsqueeze_406" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_406" + output: "unsqueeze_407" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_152" + input: "unsqueeze_407" + output: "add_111" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_6_0_1" + input: "add_105" + input: "blocks.6.0.conv_pw.weight" + input: "blocks.6.0.bn1.running_mean" + input: "blocks.6.0.bn1.running_var" + input: "blocks.6.0.bn1.weight" + input: "blocks.6.0.bn1.bias" + input: "blocks.6.0.conv_dw.weight" + input: "blocks.6.0.bn2.running_mean" + input: "blocks.6.0.bn2.running_var" + input: "blocks.6.0.bn2.weight" + input: "blocks.6.0.bn2.bias" + input: "blocks.6.0.conv_pwl.weight" + input: "blocks.6.0.bn3.running_mean" + input: "blocks.6.0.bn3.running_var" + input: "blocks.6.0.bn3.weight" + input: "blocks.6.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___6_____0___bn3_1" + node { + input: "add_105" + input: "blocks.6.0.conv_pw.weight" + output: "getattr_getattr_l__self___blocks___6_____0___conv_pw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_pw_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_pw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___6_____0___conv_pw_1" + input: "blocks.6.0.bn1.running_mean" + input: "blocks.6.0.bn1.running_var" + 
input: "blocks.6.0.bn1.weight" + input: "blocks.6.0.bn1.bias" + output: "getattr_getattr_l__self___blocks___6_____0___bn1_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1_1" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn1_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___6_____0___bn1_1" + input: "blocks.6.0.conv_dw.weight" + output: "getattr_getattr_l__self___blocks___6_____0___conv_dw_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_dw_1_2" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_dw_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___6_____0___conv_dw_1" + input: "blocks.6.0.bn2.running_mean" + input: "blocks.6.0.bn2.running_var" + input: "blocks.6.0.bn2.weight" + input: "blocks.6.0.bn2.bias" + output: "getattr_getattr_l__self___blocks___6_____0___bn2_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1_3" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn2_1" + domain: "pkg.timm.0.9.7" + } + node { + input: "getattr_getattr_l__self___blocks___6_____0___bn2_1" + input: "blocks.6.0.conv_pwl.weight" + output: "getattr_getattr_l__self___blocks___6_____0___conv_pwl_1" + name: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_pwl_1_4" + op_type: "torch_nn_modules_conv_Conv2d_getattr_getattr_L__self___blocks___6_____0___conv_pwl_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "getattr_getattr_l__self___blocks___6_____0___conv_pwl_1" + input: "blocks.6.0.bn3.running_mean" + input: "blocks.6.0.bn3.running_var" + input: "blocks.6.0.bn3.weight" + input: "blocks.6.0.bn3.bias" + output: "getattr_getattr_l__self___blocks___6_____0___bn3_1" + name: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1_5" + op_type: "timm_layers_norm_act_BatchNormAct2d_getattr_getattr_L__self___blocks___6_____0___bn3_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "torch_nn_modules_container_Sequential_blocks_6_1" + input: "add_105" + input: "blocks.6.0.conv_pw.weight" + input: "blocks.6.0.bn1.running_mean" + input: "blocks.6.0.bn1.running_var" + input: "blocks.6.0.bn1.weight" + input: "blocks.6.0.bn1.bias" + input: "blocks.6.0.conv_dw.weight" + input: "blocks.6.0.bn2.running_mean" + input: "blocks.6.0.bn2.running_var" + input: "blocks.6.0.bn2.weight" + input: "blocks.6.0.bn2.bias" + input: "blocks.6.0.conv_pwl.weight" + input: "blocks.6.0.bn3.running_mean" + input: "blocks.6.0.bn3.running_var" + input: "blocks.6.0.bn3.weight" + input: "blocks.6.0.bn3.bias" + output: "blocks_6_0_1" + node { + input: "add_105" + input: "blocks.6.0.conv_pw.weight" + input: "blocks.6.0.bn1.running_mean" + input: "blocks.6.0.bn1.running_var" + input: "blocks.6.0.bn1.weight" + input: "blocks.6.0.bn1.bias" + input: "blocks.6.0.conv_dw.weight" + input: "blocks.6.0.bn2.running_mean" + input: "blocks.6.0.bn2.running_var" + input: "blocks.6.0.bn2.weight" + input: "blocks.6.0.bn2.bias" + input: "blocks.6.0.conv_pwl.weight" + input: "blocks.6.0.bn3.running_mean" + input: 
"blocks.6.0.bn3.running_var" + input: "blocks.6.0.bn3.weight" + input: "blocks.6.0.bn3.bias" + output: "blocks_6_0_1" + name: "timm_models__efficientnet_blocks_InvertedResidual_blocks_6_0_1_0" + op_type: "timm_models__efficientnet_blocks_InvertedResidual_blocks_6_0_1" + domain: "pkg.timm.0.9.7" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.timm.0.9.7" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_container_Sequential_blocks_1" + input: "copy" + input: "blocks.0.0.conv_dw.weight" + input: "blocks.0.0.bn1.running_mean" + input: "blocks.0.0.bn1.running_var" + input: "blocks.0.0.bn1.weight" + input: "blocks.0.0.bn1.bias" + input: "blocks.0.0.conv_pw.weight" + input: "blocks.0.0.bn2.running_mean" + input: "blocks.0.0.bn2.running_var" + input: "blocks.0.0.bn2.weight" + input: "blocks.0.0.bn2.bias" + input: "blocks.1.0.conv_pw.weight" + input: "blocks.1.0.bn1.running_mean" + input: "blocks.1.0.bn1.running_var" + input: "blocks.1.0.bn1.weight" + input: "blocks.1.0.bn1.bias" + input: "blocks.1.0.conv_dw.weight" + input: "blocks.1.0.bn2.running_mean" + input: "blocks.1.0.bn2.running_var" + input: "blocks.1.0.bn2.weight" + input: "blocks.1.0.bn2.bias" + input: "blocks.1.0.conv_pwl.weight" + input: "blocks.1.0.bn3.running_mean" + input: "blocks.1.0.bn3.running_var" + input: "blocks.1.0.bn3.weight" + input: "blocks.1.0.bn3.bias" + input: "blocks.1.1.conv_pw.weight" + input: "blocks.1.1.bn1.running_mean" + input: "blocks.1.1.bn1.running_var" + input: "blocks.1.1.bn1.weight" + input: "blocks.1.1.bn1.bias" + input: "blocks.1.1.conv_dw.weight" + input: "blocks.1.1.bn2.running_mean" + input: "blocks.1.1.bn2.running_var" + input: "blocks.1.1.bn2.weight" + input: "blocks.1.1.bn2.bias" + input: "blocks.1.1.conv_pwl.weight" + input: "blocks.1.1.bn3.running_mean" + input: "blocks.1.1.bn3.running_var" + input: "blocks.1.1.bn3.weight" + input: "blocks.1.1.bn3.bias" + input: "blocks.2.0.conv_pw.weight" + input: "blocks.2.0.bn1.running_mean" + input: "blocks.2.0.bn1.running_var" + input: "blocks.2.0.bn1.weight" + input: "blocks.2.0.bn1.bias" + input: "blocks.2.0.conv_dw.weight" + input: "blocks.2.0.bn2.running_mean" + input: "blocks.2.0.bn2.running_var" + input: "blocks.2.0.bn2.weight" + input: "blocks.2.0.bn2.bias" + input: "blocks.2.0.conv_pwl.weight" + input: "blocks.2.0.bn3.running_mean" + input: "blocks.2.0.bn3.running_var" + input: "blocks.2.0.bn3.weight" + input: "blocks.2.0.bn3.bias" + input: "blocks.2.1.conv_pw.weight" + input: "blocks.2.1.bn1.running_mean" + input: "blocks.2.1.bn1.running_var" + input: "blocks.2.1.bn1.weight" + input: "blocks.2.1.bn1.bias" + input: "blocks.2.1.conv_dw.weight" + input: "blocks.2.1.bn2.running_mean" + input: "blocks.2.1.bn2.running_var" + input: "blocks.2.1.bn2.weight" + input: "blocks.2.1.bn2.bias" + input: "blocks.2.1.conv_pwl.weight" + input: "blocks.2.1.bn3.running_mean" + input: "blocks.2.1.bn3.running_var" + input: "blocks.2.1.bn3.weight" + input: "blocks.2.1.bn3.bias" + input: "blocks.2.2.conv_pw.weight" + input: "blocks.2.2.bn1.running_mean" + input: "blocks.2.2.bn1.running_var" + input: "blocks.2.2.bn1.weight" + input: "blocks.2.2.bn1.bias" + input: "blocks.2.2.conv_dw.weight" + input: "blocks.2.2.bn2.running_mean" + input: "blocks.2.2.bn2.running_var" + input: "blocks.2.2.bn2.weight" + input: "blocks.2.2.bn2.bias" + input: "blocks.2.2.conv_pwl.weight" + input: "blocks.2.2.bn3.running_mean" + input: "blocks.2.2.bn3.running_var" + input: "blocks.2.2.bn3.weight" + input: 
"blocks.2.2.bn3.bias" + input: "blocks.3.0.conv_pw.weight" + input: "blocks.3.0.bn1.running_mean" + input: "blocks.3.0.bn1.running_var" + input: "blocks.3.0.bn1.weight" + input: "blocks.3.0.bn1.bias" + input: "blocks.3.0.conv_dw.weight" + input: "blocks.3.0.bn2.running_mean" + input: "blocks.3.0.bn2.running_var" + input: "blocks.3.0.bn2.weight" + input: "blocks.3.0.bn2.bias" + input: "blocks.3.0.conv_pwl.weight" + input: "blocks.3.0.bn3.running_mean" + input: "blocks.3.0.bn3.running_var" + input: "blocks.3.0.bn3.weight" + input: "blocks.3.0.bn3.bias" + input: "blocks.3.1.conv_pw.weight" + input: "blocks.3.1.bn1.running_mean" + input: "blocks.3.1.bn1.running_var" + input: "blocks.3.1.bn1.weight" + input: "blocks.3.1.bn1.bias" + input: "blocks.3.1.conv_dw.weight" + input: "blocks.3.1.bn2.running_mean" + input: "blocks.3.1.bn2.running_var" + input: "blocks.3.1.bn2.weight" + input: "blocks.3.1.bn2.bias" + input: "blocks.3.1.conv_pwl.weight" + input: "blocks.3.1.bn3.running_mean" + input: "blocks.3.1.bn3.running_var" + input: "blocks.3.1.bn3.weight" + input: "blocks.3.1.bn3.bias" + input: "blocks.3.2.conv_pw.weight" + input: "blocks.3.2.bn1.running_mean" + input: "blocks.3.2.bn1.running_var" + input: "blocks.3.2.bn1.weight" + input: "blocks.3.2.bn1.bias" + input: "blocks.3.2.conv_dw.weight" + input: "blocks.3.2.bn2.running_mean" + input: "blocks.3.2.bn2.running_var" + input: "blocks.3.2.bn2.weight" + input: "blocks.3.2.bn2.bias" + input: "blocks.3.2.conv_pwl.weight" + input: "blocks.3.2.bn3.running_mean" + input: "blocks.3.2.bn3.running_var" + input: "blocks.3.2.bn3.weight" + input: "blocks.3.2.bn3.bias" + input: "blocks.3.3.conv_pw.weight" + input: "blocks.3.3.bn1.running_mean" + input: "blocks.3.3.bn1.running_var" + input: "blocks.3.3.bn1.weight" + input: "blocks.3.3.bn1.bias" + input: "blocks.3.3.conv_dw.weight" + input: "blocks.3.3.bn2.running_mean" + input: "blocks.3.3.bn2.running_var" + input: "blocks.3.3.bn2.weight" + input: "blocks.3.3.bn2.bias" + input: "blocks.3.3.conv_pwl.weight" + input: "blocks.3.3.bn3.running_mean" + input: "blocks.3.3.bn3.running_var" + input: "blocks.3.3.bn3.weight" + input: "blocks.3.3.bn3.bias" + input: "blocks.4.0.conv_pw.weight" + input: "blocks.4.0.bn1.running_mean" + input: "blocks.4.0.bn1.running_var" + input: "blocks.4.0.bn1.weight" + input: "blocks.4.0.bn1.bias" + input: "blocks.4.0.conv_dw.weight" + input: "blocks.4.0.bn2.running_mean" + input: "blocks.4.0.bn2.running_var" + input: "blocks.4.0.bn2.weight" + input: "blocks.4.0.bn2.bias" + input: "blocks.4.0.conv_pwl.weight" + input: "blocks.4.0.bn3.running_mean" + input: "blocks.4.0.bn3.running_var" + input: "blocks.4.0.bn3.weight" + input: "blocks.4.0.bn3.bias" + input: "blocks.4.1.conv_pw.weight" + input: "blocks.4.1.bn1.running_mean" + input: "blocks.4.1.bn1.running_var" + input: "blocks.4.1.bn1.weight" + input: "blocks.4.1.bn1.bias" + input: "blocks.4.1.conv_dw.weight" + input: "blocks.4.1.bn2.running_mean" + input: "blocks.4.1.bn2.running_var" + input: "blocks.4.1.bn2.weight" + input: "blocks.4.1.bn2.bias" + input: "blocks.4.1.conv_pwl.weight" + input: "blocks.4.1.bn3.running_mean" + input: "blocks.4.1.bn3.running_var" + input: "blocks.4.1.bn3.weight" + input: "blocks.4.1.bn3.bias" + input: "blocks.4.2.conv_pw.weight" + input: "blocks.4.2.bn1.running_mean" + input: "blocks.4.2.bn1.running_var" + input: "blocks.4.2.bn1.weight" + input: "blocks.4.2.bn1.bias" + input: "blocks.4.2.conv_dw.weight" + input: "blocks.4.2.bn2.running_mean" + input: "blocks.4.2.bn2.running_var" + input: 
"blocks.4.2.bn2.weight" + input: "blocks.4.2.bn2.bias" + input: "blocks.4.2.conv_pwl.weight" + input: "blocks.4.2.bn3.running_mean" + input: "blocks.4.2.bn3.running_var" + input: "blocks.4.2.bn3.weight" + input: "blocks.4.2.bn3.bias" + input: "blocks.5.0.conv_pw.weight" + input: "blocks.5.0.bn1.running_mean" + input: "blocks.5.0.bn1.running_var" + input: "blocks.5.0.bn1.weight" + input: "blocks.5.0.bn1.bias" + input: "blocks.5.0.conv_dw.weight" + input: "blocks.5.0.bn2.running_mean" + input: "blocks.5.0.bn2.running_var" + input: "blocks.5.0.bn2.weight" + input: "blocks.5.0.bn2.bias" + input: "blocks.5.0.conv_pwl.weight" + input: "blocks.5.0.bn3.running_mean" + input: "blocks.5.0.bn3.running_var" + input: "blocks.5.0.bn3.weight" + input: "blocks.5.0.bn3.bias" + input: "blocks.5.1.conv_pw.weight" + input: "blocks.5.1.bn1.running_mean" + input: "blocks.5.1.bn1.running_var" + input: "blocks.5.1.bn1.weight" + input: "blocks.5.1.bn1.bias" + input: "blocks.5.1.conv_dw.weight" + input: "blocks.5.1.bn2.running_mean" + input: "blocks.5.1.bn2.running_var" + input: "blocks.5.1.bn2.weight" + input: "blocks.5.1.bn2.bias" + input: "blocks.5.1.conv_pwl.weight" + input: "blocks.5.1.bn3.running_mean" + input: "blocks.5.1.bn3.running_var" + input: "blocks.5.1.bn3.weight" + input: "blocks.5.1.bn3.bias" + input: "blocks.5.2.conv_pw.weight" + input: "blocks.5.2.bn1.running_mean" + input: "blocks.5.2.bn1.running_var" + input: "blocks.5.2.bn1.weight" + input: "blocks.5.2.bn1.bias" + input: "blocks.5.2.conv_dw.weight" + input: "blocks.5.2.bn2.running_mean" + input: "blocks.5.2.bn2.running_var" + input: "blocks.5.2.bn2.weight" + input: "blocks.5.2.bn2.bias" + input: "blocks.5.2.conv_pwl.weight" + input: "blocks.5.2.bn3.running_mean" + input: "blocks.5.2.bn3.running_var" + input: "blocks.5.2.bn3.weight" + input: "blocks.5.2.bn3.bias" + input: "blocks.6.0.conv_pw.weight" + input: "blocks.6.0.bn1.running_mean" + input: "blocks.6.0.bn1.running_var" + input: "blocks.6.0.bn1.weight" + input: "blocks.6.0.bn1.bias" + input: "blocks.6.0.conv_dw.weight" + input: "blocks.6.0.bn2.running_mean" + input: "blocks.6.0.bn2.running_var" + input: "blocks.6.0.bn2.weight" + input: "blocks.6.0.bn2.bias" + input: "blocks.6.0.conv_pwl.weight" + input: "blocks.6.0.bn3.running_mean" + input: "blocks.6.0.bn3.running_var" + input: "blocks.6.0.bn3.weight" + input: "blocks.6.0.bn3.bias" + output: "blocks_6_1" + node { + input: "copy" + input: "blocks.0.0.conv_dw.weight" + input: "blocks.0.0.bn1.running_mean" + input: "blocks.0.0.bn1.running_var" + input: "blocks.0.0.bn1.weight" + input: "blocks.0.0.bn1.bias" + input: "blocks.0.0.conv_pw.weight" + input: "blocks.0.0.bn2.running_mean" + input: "blocks.0.0.bn2.running_var" + input: "blocks.0.0.bn2.weight" + input: "blocks.0.0.bn2.bias" + output: "blocks_0_1" + name: "torch_nn_modules_container_Sequential_blocks_0_1_0" + op_type: "torch_nn_modules_container_Sequential_blocks_0_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "blocks_0_1" + input: "blocks.1.0.conv_pw.weight" + input: "blocks.1.0.bn1.running_mean" + input: "blocks.1.0.bn1.running_var" + input: "blocks.1.0.bn1.weight" + input: "blocks.1.0.bn1.bias" + input: "blocks.1.0.conv_dw.weight" + input: "blocks.1.0.bn2.running_mean" + input: "blocks.1.0.bn2.running_var" + input: "blocks.1.0.bn2.weight" + input: "blocks.1.0.bn2.bias" + input: "blocks.1.0.conv_pwl.weight" + input: "blocks.1.0.bn3.running_mean" + input: "blocks.1.0.bn3.running_var" + input: "blocks.1.0.bn3.weight" + input: "blocks.1.0.bn3.bias" + input: 
"blocks.1.1.conv_pw.weight" + input: "blocks.1.1.bn1.running_mean" + input: "blocks.1.1.bn1.running_var" + input: "blocks.1.1.bn1.weight" + input: "blocks.1.1.bn1.bias" + input: "blocks.1.1.conv_dw.weight" + input: "blocks.1.1.bn2.running_mean" + input: "blocks.1.1.bn2.running_var" + input: "blocks.1.1.bn2.weight" + input: "blocks.1.1.bn2.bias" + input: "blocks.1.1.conv_pwl.weight" + input: "blocks.1.1.bn3.running_mean" + input: "blocks.1.1.bn3.running_var" + input: "blocks.1.1.bn3.weight" + input: "blocks.1.1.bn3.bias" + output: "blocks_1_1" + name: "torch_nn_modules_container_Sequential_blocks_1_1_1" + op_type: "torch_nn_modules_container_Sequential_blocks_1_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "blocks_1_1" + input: "blocks.2.0.conv_pw.weight" + input: "blocks.2.0.bn1.running_mean" + input: "blocks.2.0.bn1.running_var" + input: "blocks.2.0.bn1.weight" + input: "blocks.2.0.bn1.bias" + input: "blocks.2.0.conv_dw.weight" + input: "blocks.2.0.bn2.running_mean" + input: "blocks.2.0.bn2.running_var" + input: "blocks.2.0.bn2.weight" + input: "blocks.2.0.bn2.bias" + input: "blocks.2.0.conv_pwl.weight" + input: "blocks.2.0.bn3.running_mean" + input: "blocks.2.0.bn3.running_var" + input: "blocks.2.0.bn3.weight" + input: "blocks.2.0.bn3.bias" + input: "blocks.2.1.conv_pw.weight" + input: "blocks.2.1.bn1.running_mean" + input: "blocks.2.1.bn1.running_var" + input: "blocks.2.1.bn1.weight" + input: "blocks.2.1.bn1.bias" + input: "blocks.2.1.conv_dw.weight" + input: "blocks.2.1.bn2.running_mean" + input: "blocks.2.1.bn2.running_var" + input: "blocks.2.1.bn2.weight" + input: "blocks.2.1.bn2.bias" + input: "blocks.2.1.conv_pwl.weight" + input: "blocks.2.1.bn3.running_mean" + input: "blocks.2.1.bn3.running_var" + input: "blocks.2.1.bn3.weight" + input: "blocks.2.1.bn3.bias" + input: "blocks.2.2.conv_pw.weight" + input: "blocks.2.2.bn1.running_mean" + input: "blocks.2.2.bn1.running_var" + input: "blocks.2.2.bn1.weight" + input: "blocks.2.2.bn1.bias" + input: "blocks.2.2.conv_dw.weight" + input: "blocks.2.2.bn2.running_mean" + input: "blocks.2.2.bn2.running_var" + input: "blocks.2.2.bn2.weight" + input: "blocks.2.2.bn2.bias" + input: "blocks.2.2.conv_pwl.weight" + input: "blocks.2.2.bn3.running_mean" + input: "blocks.2.2.bn3.running_var" + input: "blocks.2.2.bn3.weight" + input: "blocks.2.2.bn3.bias" + output: "blocks_2_1" + name: "torch_nn_modules_container_Sequential_blocks_2_1_2" + op_type: "torch_nn_modules_container_Sequential_blocks_2_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "blocks_2_1" + input: "blocks.3.0.conv_pw.weight" + input: "blocks.3.0.bn1.running_mean" + input: "blocks.3.0.bn1.running_var" + input: "blocks.3.0.bn1.weight" + input: "blocks.3.0.bn1.bias" + input: "blocks.3.0.conv_dw.weight" + input: "blocks.3.0.bn2.running_mean" + input: "blocks.3.0.bn2.running_var" + input: "blocks.3.0.bn2.weight" + input: "blocks.3.0.bn2.bias" + input: "blocks.3.0.conv_pwl.weight" + input: "blocks.3.0.bn3.running_mean" + input: "blocks.3.0.bn3.running_var" + input: "blocks.3.0.bn3.weight" + input: "blocks.3.0.bn3.bias" + input: "blocks.3.1.conv_pw.weight" + input: "blocks.3.1.bn1.running_mean" + input: "blocks.3.1.bn1.running_var" + input: "blocks.3.1.bn1.weight" + input: "blocks.3.1.bn1.bias" + input: "blocks.3.1.conv_dw.weight" + input: "blocks.3.1.bn2.running_mean" + input: "blocks.3.1.bn2.running_var" + input: "blocks.3.1.bn2.weight" + input: "blocks.3.1.bn2.bias" + input: "blocks.3.1.conv_pwl.weight" + input: "blocks.3.1.bn3.running_mean" + input: 
"blocks.3.1.bn3.running_var" + input: "blocks.3.1.bn3.weight" + input: "blocks.3.1.bn3.bias" + input: "blocks.3.2.conv_pw.weight" + input: "blocks.3.2.bn1.running_mean" + input: "blocks.3.2.bn1.running_var" + input: "blocks.3.2.bn1.weight" + input: "blocks.3.2.bn1.bias" + input: "blocks.3.2.conv_dw.weight" + input: "blocks.3.2.bn2.running_mean" + input: "blocks.3.2.bn2.running_var" + input: "blocks.3.2.bn2.weight" + input: "blocks.3.2.bn2.bias" + input: "blocks.3.2.conv_pwl.weight" + input: "blocks.3.2.bn3.running_mean" + input: "blocks.3.2.bn3.running_var" + input: "blocks.3.2.bn3.weight" + input: "blocks.3.2.bn3.bias" + input: "blocks.3.3.conv_pw.weight" + input: "blocks.3.3.bn1.running_mean" + input: "blocks.3.3.bn1.running_var" + input: "blocks.3.3.bn1.weight" + input: "blocks.3.3.bn1.bias" + input: "blocks.3.3.conv_dw.weight" + input: "blocks.3.3.bn2.running_mean" + input: "blocks.3.3.bn2.running_var" + input: "blocks.3.3.bn2.weight" + input: "blocks.3.3.bn2.bias" + input: "blocks.3.3.conv_pwl.weight" + input: "blocks.3.3.bn3.running_mean" + input: "blocks.3.3.bn3.running_var" + input: "blocks.3.3.bn3.weight" + input: "blocks.3.3.bn3.bias" + output: "blocks_3_1" + name: "torch_nn_modules_container_Sequential_blocks_3_1_3" + op_type: "torch_nn_modules_container_Sequential_blocks_3_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "blocks_3_1" + input: "blocks.4.0.conv_pw.weight" + input: "blocks.4.0.bn1.running_mean" + input: "blocks.4.0.bn1.running_var" + input: "blocks.4.0.bn1.weight" + input: "blocks.4.0.bn1.bias" + input: "blocks.4.0.conv_dw.weight" + input: "blocks.4.0.bn2.running_mean" + input: "blocks.4.0.bn2.running_var" + input: "blocks.4.0.bn2.weight" + input: "blocks.4.0.bn2.bias" + input: "blocks.4.0.conv_pwl.weight" + input: "blocks.4.0.bn3.running_mean" + input: "blocks.4.0.bn3.running_var" + input: "blocks.4.0.bn3.weight" + input: "blocks.4.0.bn3.bias" + input: "blocks.4.1.conv_pw.weight" + input: "blocks.4.1.bn1.running_mean" + input: "blocks.4.1.bn1.running_var" + input: "blocks.4.1.bn1.weight" + input: "blocks.4.1.bn1.bias" + input: "blocks.4.1.conv_dw.weight" + input: "blocks.4.1.bn2.running_mean" + input: "blocks.4.1.bn2.running_var" + input: "blocks.4.1.bn2.weight" + input: "blocks.4.1.bn2.bias" + input: "blocks.4.1.conv_pwl.weight" + input: "blocks.4.1.bn3.running_mean" + input: "blocks.4.1.bn3.running_var" + input: "blocks.4.1.bn3.weight" + input: "blocks.4.1.bn3.bias" + input: "blocks.4.2.conv_pw.weight" + input: "blocks.4.2.bn1.running_mean" + input: "blocks.4.2.bn1.running_var" + input: "blocks.4.2.bn1.weight" + input: "blocks.4.2.bn1.bias" + input: "blocks.4.2.conv_dw.weight" + input: "blocks.4.2.bn2.running_mean" + input: "blocks.4.2.bn2.running_var" + input: "blocks.4.2.bn2.weight" + input: "blocks.4.2.bn2.bias" + input: "blocks.4.2.conv_pwl.weight" + input: "blocks.4.2.bn3.running_mean" + input: "blocks.4.2.bn3.running_var" + input: "blocks.4.2.bn3.weight" + input: "blocks.4.2.bn3.bias" + output: "blocks_4_1" + name: "torch_nn_modules_container_Sequential_blocks_4_1_4" + op_type: "torch_nn_modules_container_Sequential_blocks_4_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "blocks_4_1" + input: "blocks.5.0.conv_pw.weight" + input: "blocks.5.0.bn1.running_mean" + input: "blocks.5.0.bn1.running_var" + input: "blocks.5.0.bn1.weight" + input: "blocks.5.0.bn1.bias" + input: "blocks.5.0.conv_dw.weight" + input: "blocks.5.0.bn2.running_mean" + input: "blocks.5.0.bn2.running_var" + input: "blocks.5.0.bn2.weight" + input: 
"blocks.5.0.bn2.bias" + input: "blocks.5.0.conv_pwl.weight" + input: "blocks.5.0.bn3.running_mean" + input: "blocks.5.0.bn3.running_var" + input: "blocks.5.0.bn3.weight" + input: "blocks.5.0.bn3.bias" + input: "blocks.5.1.conv_pw.weight" + input: "blocks.5.1.bn1.running_mean" + input: "blocks.5.1.bn1.running_var" + input: "blocks.5.1.bn1.weight" + input: "blocks.5.1.bn1.bias" + input: "blocks.5.1.conv_dw.weight" + input: "blocks.5.1.bn2.running_mean" + input: "blocks.5.1.bn2.running_var" + input: "blocks.5.1.bn2.weight" + input: "blocks.5.1.bn2.bias" + input: "blocks.5.1.conv_pwl.weight" + input: "blocks.5.1.bn3.running_mean" + input: "blocks.5.1.bn3.running_var" + input: "blocks.5.1.bn3.weight" + input: "blocks.5.1.bn3.bias" + input: "blocks.5.2.conv_pw.weight" + input: "blocks.5.2.bn1.running_mean" + input: "blocks.5.2.bn1.running_var" + input: "blocks.5.2.bn1.weight" + input: "blocks.5.2.bn1.bias" + input: "blocks.5.2.conv_dw.weight" + input: "blocks.5.2.bn2.running_mean" + input: "blocks.5.2.bn2.running_var" + input: "blocks.5.2.bn2.weight" + input: "blocks.5.2.bn2.bias" + input: "blocks.5.2.conv_pwl.weight" + input: "blocks.5.2.bn3.running_mean" + input: "blocks.5.2.bn3.running_var" + input: "blocks.5.2.bn3.weight" + input: "blocks.5.2.bn3.bias" + output: "blocks_5_1" + name: "torch_nn_modules_container_Sequential_blocks_5_1_5" + op_type: "torch_nn_modules_container_Sequential_blocks_5_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "blocks_5_1" + input: "blocks.6.0.conv_pw.weight" + input: "blocks.6.0.bn1.running_mean" + input: "blocks.6.0.bn1.running_var" + input: "blocks.6.0.bn1.weight" + input: "blocks.6.0.bn1.bias" + input: "blocks.6.0.conv_dw.weight" + input: "blocks.6.0.bn2.running_mean" + input: "blocks.6.0.bn2.running_var" + input: "blocks.6.0.bn2.weight" + input: "blocks.6.0.bn2.bias" + input: "blocks.6.0.conv_pwl.weight" + input: "blocks.6.0.bn3.running_mean" + input: "blocks.6.0.bn3.running_var" + input: "blocks.6.0.bn3.weight" + input: "blocks.6.0.bn3.bias" + output: "blocks_6_1" + name: "torch_nn_modules_container_Sequential_blocks_6_1_6" + op_type: "torch_nn_modules_container_Sequential_blocks_6_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_conv_Conv2d_conv_head_1" + input: "add_111" + input: "conv_head.weight" + output: "convolution_51" + node { + input: "conv_head.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "add_111" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: 
"add_111" + input: "conv_head.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_51" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "torch_nn_modules_activation_ReLU6_bn2_act_1" + input: "add_113" + output: "copy_34" + node { + input: "add_113" + output: "hardtanh_34" + name: "aten_hardtanh_0" + op_type: "aten_hardtanh" + attribute { + name: "max_val" + f: 6.0 + type: FLOAT + } + attribute { + name: "min_val" + f: 0.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_113" + input: "hardtanh_34" + output: "copy_34" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_norm_act_BatchNormAct2d_bn2_1" + input: "convolution_51" + input: "bn2.running_mean" + input: "bn2.running_var" + input: "bn2.weight" + input: "bn2.bias" + output: "bn2_act_1" + node { + input: "bn2.running_mean" + output: "convert_element_type_102" + name: "prims_convert_element_type_2" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "bn2.running_var" + output: "convert_element_type_103" + name: "prims_convert_element_type_3" + op_type: "prims_convert_element_type" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_5" + name: "Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\254\305\'7" + } + type: TENSOR + } + } + node { + input: "convert_element_type_103" + input: "_val_5" + output: "add_112" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_112" + output: "sqrt_51" + name: "aten_sqrt_6" + op_type: "aten_sqrt" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sqrt_51" + output: "reciprocal_51" + name: "aten_reciprocal_7" + op_type: "aten_reciprocal" + domain: "pkg.onnxscript.torch_lib" + } + node { + output: "_val_9" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_9" + output: "scalar_tensor_default_51" + name: "aten_scalar_tensor_sym_number_9" + op_type: "aten_scalar_tensor_sym_number" + attribute { + name: "dtype" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "reciprocal_51" + input: "scalar_tensor_default_51" + output: "mul_153" + name: "aten_mul_10" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convert_element_type_102" + output: "unsqueeze_408" 
+ name: "aten_unsqueeze_11" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_408" + output: "unsqueeze_409" + name: "aten_unsqueeze_12" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_153" + output: "unsqueeze_410" + name: "aten_unsqueeze_13" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_410" + output: "unsqueeze_411" + name: "aten_unsqueeze_14" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "convolution_51" + input: "unsqueeze_409" + output: "sub_51" + name: "aten_sub_15" + op_type: "aten_sub" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "sub_51" + input: "unsqueeze_411" + output: "mul_154" + name: "aten_mul_16" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "bn2.weight" + output: "unsqueeze_412" + name: "aten_unsqueeze_17" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_412" + output: "unsqueeze_413" + name: "aten_unsqueeze_18" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_154" + input: "unsqueeze_413" + output: "mul_155" + name: "aten_mul_19" + op_type: "aten_mul" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "bn2.bias" + output: "unsqueeze_414" + name: "aten_unsqueeze_20" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "unsqueeze_414" + output: "unsqueeze_415" + name: "aten_unsqueeze_21" + op_type: "aten_unsqueeze" + attribute { + name: "dim" + i: -1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "mul_155" + input: "unsqueeze_415" + output: "add_113" + name: "aten_add_22" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_113" + output: "bn2_act_1" + name: "torch_nn_modules_activation_ReLU6_bn2_act_1_23" + op_type: "torch_nn_modules_activation_ReLU6_bn2_act_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "aten_mean_dim" + input: "self" + input: "dim" + output: "result_10" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0" + } + type: TENSOR + } + } + node { + input: "int64_0" + input: "tmp_0" + output: "int64_0_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_0" + input: "int64_0_cast" + output: "cond" + name: "n4" + op_type: "Equal" + } + node { + input: "cond" + output: "result_10" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: 
"self" + output: "result" + name: "n0" + op_type: "Identity" + } + name: "thenGraph_5" + output { + name: "result" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "dim" + output: "tmp_1" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp_1" + output: "tmp_2" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0_3" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0_3" + } + type: TENSOR + } + } + node { + input: "int64_0_3" + input: "tmp_2" + output: "int64_0_3_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_2" + input: "int64_0_3_cast" + output: "cond_4" + name: "n4" + op_type: "Equal" + } + node { + input: "cond_4" + output: "dim_8" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "int64_0_5" + name: "n0" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0_5" + } + type: TENSOR + } + } + node { + input: "dim" + input: "int64_0_5" + output: "dim_6" + name: "n1" + op_type: "Unsqueeze" + } + name: "thenGraph_8" + output { + name: "dim_6" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "dim" + output: "dim_7" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_8" + output { + name: "dim_7" + } + } + type: GRAPH + } + } + node { + input: "self" + input: "dim_8" + output: "result_9" + name: "n6" + op_type: "ReduceMean" + attribute { + name: "keepdims" + type: INT + ref_attr_name: "keepdim" + } + } + name: "elseGraph_5" + output { + name: "result_9" + } + } + type: GRAPH + } + } + doc_string: "mean.dim(Tensor self, int[1]? dim, bool keepdim=False, *, ScalarType? dtype=None) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "keepdim" + i: 0 + type: INT + } +} +functions { + name: "torch_nn_modules_pooling_AdaptiveAvgPool2d_global_pool_pool_1" + input: "copy_34" + output: "mean" + node { + output: "_val_1" + name: "Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\377\376\377\377\377\377\377\377\377" + } + type: TENSOR + } + } + node { + input: "copy_34" + input: "_val_1" + output: "mean" + name: "aten_mean_dim_2" + op_type: "aten_mean_dim" + attribute { + name: "keepdim" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "aten_view" + input: "self" + input: "size" + output: "return_val" + node { + input: "size" + output: "size_0" + name: "n0" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + input: "self" + input: "size_0" + output: "return_val" + name: "n1" + op_type: "Reshape" + } + doc_string: "view(Tensor(a) self, SymInt[] size) -> Tensor(a)" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "torch_nn_modules_flatten_Flatten_global_pool_flatten_1" + input: "mean" + output: "view" + node { + output: "_val_1" + name: "Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\000\005\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "mean" + input: "_val_1" + output: "view" + 
name: "aten_view_2" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "timm_layers_adaptive_avgmax_pool_SelectAdaptivePool2d_global_pool_1" + input: "copy_34" + output: "global_pool_flatten_1" + node { + input: "copy_34" + output: "global_pool_pool_1" + name: "torch_nn_modules_pooling_AdaptiveAvgPool2d_global_pool_pool_1_0" + op_type: "torch_nn_modules_pooling_AdaptiveAvgPool2d_global_pool_pool_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + node { + input: "global_pool_pool_1" + output: "global_pool_flatten_1" + name: "torch_nn_modules_flatten_Flatten_global_pool_flatten_1_1" + op_type: "torch_nn_modules_flatten_Flatten_global_pool_flatten_1" + domain: "pkg.torch.2.2.0a0+git63d65dd" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git63d65dd" + version: 1 + } + domain: "pkg.timm.0.9.7" +} +functions { + name: "aten_t" + input: "self" + output: "result_1" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "rank" + name: "n1" + op_type: "Size" + } + node { + output: "int64_2" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 2 + name: "int64_2" + } + type: TENSOR + } + } + node { + input: "int64_2" + input: "rank" + output: "int64_2_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "rank" + input: "int64_2_cast" + output: "cond" + name: "n4" + op_type: "Equal" + } + node { + input: "cond" + output: "result_1" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "self" + output: "result" + name: "n0" + op_type: "Transpose" + attribute { + name: "perm" + ints: 1 + ints: 0 + type: INTS + } + } + name: "thenGraph_6" + output { + name: "result" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "result_0" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_6" + output { + name: "result_0" + } + } + type: GRAPH + } + } + doc_string: "t(Tensor(a) self) -> Tensor(a)" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_addmm" + input: "self" + input: "mat1" + input: "mat2" + output: "return_val" + node { + input: "mat1" + input: "mat2" + input: "self" + output: "return_val" + name: "n0" + op_type: "Gemm" + attribute { + name: "alpha" + type: FLOAT + ref_attr_name: "alpha" + } + attribute { + name: "beta" + type: FLOAT + ref_attr_name: "beta" + } + } + doc_string: "addmm(Tensor self, Tensor mat1, Tensor mat2, *, Scalar beta=1, Scalar alpha=1) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "beta" + f: 1.0 + type: FLOAT + } + attribute_proto { + name: "alpha" + f: 1.0 + type: FLOAT + } +} +functions { + name: "torch_nn_modules_linear_Linear_classifier_1" + input: "view" + input: "classifier.weight" + input: "classifier.bias" + output: "addmm" + node { + input: "classifier.weight" + output: "t" + name: "aten_t_0" + op_type: "aten_t" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "classifier.bias" + input: "view" + input: "t" + output: "addmm" + name: "aten_addmm_1" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + 
} + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git63d65dd" +} +functions { + name: "Rank" + input: "input" + output: "return_val" + node { + input: "input" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "return_val" + name: "n1" + op_type: "Size" + } + doc_string: "Take the rank of the input tensor." + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib.common" +} +functions { + name: "IsScalar" + input: "input" + output: "return_val" + node { + input: "input" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "tmp_1" + name: "n2" + op_type: "Constant" + attribute { + name: "value_int" + i: 0 + type: INT + } + } + node { + input: "tmp_0" + input: "tmp_1" + output: "return_val" + name: "n3" + op_type: "Equal" + } + doc_string: "Return whether the input has rank 0, or is a scalar." + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib.common" +} diff --git a/testdata/e2e_models/resnet18/resnet18_dynamo.textproto b/testdata/e2e_models/resnet18/resnet18_dynamo.textproto new file mode 100644 index 00000000..d158910d --- /dev/null +++ b/testdata/e2e_models/resnet18/resnet18_dynamo.textproto @@ -0,0 +1,15021 @@ +ir_version: 8 +producer_name: "pytorch" +producer_version: "2.2.0" +graph { + node { + input: "l_x_" + input: "conv1.weight" + output: "conv1_1" + name: "torch_nn_modules_conv_Conv2d_conv1_1_1" + op_type: "torch_nn_modules_conv_Conv2d_conv1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "conv1_1" + input: "bn1.weight" + input: "bn1.bias" + input: "bn1.running_mean" + input: "bn1.running_var" + output: "bn1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_bn1_1_2" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_bn1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "bn1_1" + output: "relu_1" + name: "torch_nn_modules_activation_ReLU_relu_1_3" + op_type: "torch_nn_modules_activation_ReLU_relu_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "relu_1" + output: "maxpool_1" + name: "torch_nn_modules_pooling_MaxPool2d_maxpool_1_4" + op_type: "torch_nn_modules_pooling_MaxPool2d_maxpool_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "maxpool_1" + input: "layer1.0.conv1.weight" + input: "layer1.0.bn1.weight" + input: "layer1.0.bn1.bias" + input: "layer1.0.bn1.running_mean" + input: "layer1.0.bn1.running_var" + input: "layer1.0.conv2.weight" + input: "layer1.0.bn2.weight" + input: "layer1.0.bn2.bias" + input: "layer1.0.bn2.running_mean" + input: "layer1.0.bn2.running_var" + input: "layer1.1.conv1.weight" + input: "layer1.1.bn1.weight" + input: "layer1.1.bn1.bias" + input: "layer1.1.bn1.running_mean" + input: "layer1.1.bn1.running_var" + input: "layer1.1.conv2.weight" + input: "layer1.1.bn2.weight" + input: "layer1.1.bn2.bias" + input: "layer1.1.bn2.running_mean" + input: "layer1.1.bn2.running_var" + output: "layer1_1" + name: "torch_nn_modules_container_Sequential_layer1_1_5" + op_type: "torch_nn_modules_container_Sequential_layer1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "layer1_1" + input: "layer2.0.conv1.weight" + input: "layer2.0.bn1.weight" + input: "layer2.0.bn1.bias" + input: "layer2.0.bn1.running_mean" + input: "layer2.0.bn1.running_var" + input: 
"layer2.0.conv2.weight" + input: "layer2.0.bn2.weight" + input: "layer2.0.bn2.bias" + input: "layer2.0.bn2.running_mean" + input: "layer2.0.bn2.running_var" + input: "layer2.0.downsample.0.weight" + input: "layer2.0.downsample.1.weight" + input: "layer2.0.downsample.1.bias" + input: "layer2.0.downsample.1.running_mean" + input: "layer2.0.downsample.1.running_var" + input: "layer2.1.conv1.weight" + input: "layer2.1.bn1.weight" + input: "layer2.1.bn1.bias" + input: "layer2.1.bn1.running_mean" + input: "layer2.1.bn1.running_var" + input: "layer2.1.conv2.weight" + input: "layer2.1.bn2.weight" + input: "layer2.1.bn2.bias" + input: "layer2.1.bn2.running_mean" + input: "layer2.1.bn2.running_var" + output: "layer2_1" + name: "torch_nn_modules_container_Sequential_layer2_1_6" + op_type: "torch_nn_modules_container_Sequential_layer2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "layer2_1" + input: "layer3.0.conv1.weight" + input: "layer3.0.bn1.weight" + input: "layer3.0.bn1.bias" + input: "layer3.0.bn1.running_mean" + input: "layer3.0.bn1.running_var" + input: "layer3.0.conv2.weight" + input: "layer3.0.bn2.weight" + input: "layer3.0.bn2.bias" + input: "layer3.0.bn2.running_mean" + input: "layer3.0.bn2.running_var" + input: "layer3.0.downsample.0.weight" + input: "layer3.0.downsample.1.weight" + input: "layer3.0.downsample.1.bias" + input: "layer3.0.downsample.1.running_mean" + input: "layer3.0.downsample.1.running_var" + input: "layer3.1.conv1.weight" + input: "layer3.1.bn1.weight" + input: "layer3.1.bn1.bias" + input: "layer3.1.bn1.running_mean" + input: "layer3.1.bn1.running_var" + input: "layer3.1.conv2.weight" + input: "layer3.1.bn2.weight" + input: "layer3.1.bn2.bias" + input: "layer3.1.bn2.running_mean" + input: "layer3.1.bn2.running_var" + output: "layer3_1" + name: "torch_nn_modules_container_Sequential_layer3_1_7" + op_type: "torch_nn_modules_container_Sequential_layer3_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "layer3_1" + input: "layer4.0.conv1.weight" + input: "layer4.0.bn1.weight" + input: "layer4.0.bn1.bias" + input: "layer4.0.bn1.running_mean" + input: "layer4.0.bn1.running_var" + input: "layer4.0.conv2.weight" + input: "layer4.0.bn2.weight" + input: "layer4.0.bn2.bias" + input: "layer4.0.bn2.running_mean" + input: "layer4.0.bn2.running_var" + input: "layer4.0.downsample.0.weight" + input: "layer4.0.downsample.1.weight" + input: "layer4.0.downsample.1.bias" + input: "layer4.0.downsample.1.running_mean" + input: "layer4.0.downsample.1.running_var" + input: "layer4.1.conv1.weight" + input: "layer4.1.bn1.weight" + input: "layer4.1.bn1.bias" + input: "layer4.1.bn1.running_mean" + input: "layer4.1.bn1.running_var" + input: "layer4.1.conv2.weight" + input: "layer4.1.bn2.weight" + input: "layer4.1.bn2.bias" + input: "layer4.1.bn2.running_mean" + input: "layer4.1.bn2.running_var" + output: "layer4_1" + name: "torch_nn_modules_container_Sequential_layer4_1_8" + op_type: "torch_nn_modules_container_Sequential_layer4_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "layer4_1" + output: "avgpool_1" + name: "torch_nn_modules_pooling_AdaptiveAvgPool2d_avgpool_1_9" + op_type: "torch_nn_modules_pooling_AdaptiveAvgPool2d_avgpool_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + output: "_val_110" + name: "Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "avgpool_1" 
+ input: "_val_110" + output: "view" + name: "aten_view_11" + op_type: "aten_view" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "view" + input: "fc.weight" + input: "fc.bias" + output: "fc_1" + name: "torch_nn_modules_linear_Linear_fc_1_12" + op_type: "torch_nn_modules_linear_Linear_fc_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + name: "main_graph" + initializer { + dims: 64 + dims: 3 + dims: 7 + dims: 7 + data_type: 1 + name: "conv1.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "bn1.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "bn1.bias" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "bn1.running_mean" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "bn1.running_var" + raw_data: "" + } + initializer { + dims: 64 + dims: 64 + dims: 3 + dims: 3 + data_type: 1 + name: "layer1.0.conv1.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 64 + dims: 64 + dims: 3 + dims: 3 + data_type: 1 + name: "layer1.0.conv2.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.0.bn2.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 64 + dims: 64 + dims: 3 + dims: 3 + data_type: 1 + name: "layer1.1.conv1.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.1.bn1.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.1.bn1.bias" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.1.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.1.bn1.running_var" + raw_data: "" + } + initializer { + dims: 64 + dims: 64 + dims: 3 + dims: 3 + data_type: 1 + name: "layer1.1.conv2.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.1.bn2.weight" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.1.bn2.bias" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.1.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 64 + data_type: 1 + name: "layer1.1.bn2.running_var" + raw_data: "" + } + initializer { + dims: 128 + dims: 64 + dims: 3 + dims: 3 + data_type: 1 + name: "layer2.0.conv1.weight" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 128 + dims: 128 + dims: 3 + dims: 3 + data_type: 1 + name: "layer2.0.conv2.weight" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.bn2.weight" + raw_data: "" + } + initializer { + dims: 
128 + data_type: 1 + name: "layer2.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 128 + dims: 64 + dims: 1 + dims: 1 + data_type: 1 + name: "layer2.0.downsample.0.weight" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.downsample.1.weight" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.downsample.1.bias" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.downsample.1.running_mean" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.0.downsample.1.running_var" + raw_data: "" + } + initializer { + dims: 128 + dims: 128 + dims: 3 + dims: 3 + data_type: 1 + name: "layer2.1.conv1.weight" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.1.bn1.weight" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.1.bn1.bias" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.1.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.1.bn1.running_var" + raw_data: "" + } + initializer { + dims: 128 + dims: 128 + dims: 3 + dims: 3 + data_type: 1 + name: "layer2.1.conv2.weight" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.1.bn2.weight" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.1.bn2.bias" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.1.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 128 + data_type: 1 + name: "layer2.1.bn2.running_var" + raw_data: "" + } + initializer { + dims: 256 + dims: 128 + dims: 3 + dims: 3 + data_type: 1 + name: "layer3.0.conv1.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + dims: 3 + dims: 3 + data_type: 1 + name: "layer3.0.conv2.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.bn2.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 256 + dims: 128 + dims: 1 + dims: 1 + data_type: 1 + name: "layer3.0.downsample.0.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.downsample.1.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.downsample.1.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.downsample.1.running_mean" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.0.downsample.1.running_var" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + dims: 3 + dims: 3 + data_type: 1 + name: "layer3.1.conv1.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: 
"layer3.1.bn1.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.1.bn1.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.1.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.1.bn1.running_var" + raw_data: "" + } + initializer { + dims: 256 + dims: 256 + dims: 3 + dims: 3 + data_type: 1 + name: "layer3.1.conv2.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.1.bn2.weight" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.1.bn2.bias" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.1.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 256 + data_type: 1 + name: "layer3.1.bn2.running_var" + raw_data: "" + } + initializer { + dims: 512 + dims: 256 + dims: 3 + dims: 3 + data_type: 1 + name: "layer4.0.conv1.weight" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.bn1.weight" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.bn1.bias" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.bn1.running_var" + raw_data: "" + } + initializer { + dims: 512 + dims: 512 + dims: 3 + dims: 3 + data_type: 1 + name: "layer4.0.conv2.weight" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.bn2.weight" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.bn2.bias" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.bn2.running_var" + raw_data: "" + } + initializer { + dims: 512 + dims: 256 + dims: 1 + dims: 1 + data_type: 1 + name: "layer4.0.downsample.0.weight" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.downsample.1.weight" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.downsample.1.bias" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.downsample.1.running_mean" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.0.downsample.1.running_var" + raw_data: "" + } + initializer { + dims: 512 + dims: 512 + dims: 3 + dims: 3 + data_type: 1 + name: "layer4.1.conv1.weight" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.1.bn1.weight" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.1.bn1.bias" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.1.bn1.running_mean" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.1.bn1.running_var" + raw_data: "" + } + initializer { + dims: 512 + dims: 512 + dims: 3 + dims: 3 + data_type: 1 + name: "layer4.1.conv2.weight" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.1.bn2.weight" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.1.bn2.bias" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.1.bn2.running_mean" + raw_data: "" + } + initializer { + dims: 512 + data_type: 1 + name: "layer4.1.bn2.running_var" + raw_data: "" + } + initializer { + dims: 1000 + dims: 512 + data_type: 1 + name: "fc.weight" + raw_data: "" + } + initializer { + dims: 1000 + data_type: 1 + name: "fc.bias" + 
raw_data: "" + } + input { + name: "l_x_" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 224 + } + dim { + dim_value: 224 + } + } + } + } + } + output { + name: "fc_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1000 + } + } + } + } + } + value_info { + name: "conv1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 3 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.0.conv1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 64 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer1.0.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.0.conv2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 64 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer1.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.1.conv1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 64 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer1.1.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.1.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.1.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.1.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.1.conv2.weight" + type { + tensor_type { + 
elem_type: 1 + shape { + dim { + dim_value: 64 + } + dim { + dim_value: 64 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer1.1.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.1.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.1.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer1.1.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 64 + } + } + } + } + } + value_info { + name: "layer2.0.conv1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer2.0.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.conv2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer2.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.downsample.0.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 64 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "layer2.0.downsample.1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.downsample.1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.downsample.1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.0.downsample.1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.1.conv1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer2.1.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.1.bn1.bias" + type { 
+ tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.1.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.1.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.1.conv2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + dim { + dim_value: 128 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer2.1.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.1.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.1.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer2.1.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 128 + } + } + } + } + } + value_info { + name: "layer3.0.conv1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 128 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer3.0.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.conv2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer3.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.downsample.0.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 128 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "layer3.0.downsample.1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.downsample.1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.downsample.1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.0.downsample.1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + 
dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.1.conv1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer3.1.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.1.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.1.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.1.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.1.conv2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + dim { + dim_value: 256 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer3.1.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.1.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.1.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer3.1.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 256 + } + } + } + } + } + value_info { + name: "layer4.0.conv1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + dim { + dim_value: 256 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer4.0.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.conv2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + dim { + dim_value: 512 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer4.0.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.downsample.0.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + dim { + dim_value: 256 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "layer4.0.downsample.1.weight" + type { + tensor_type { + elem_type: 1 + shape { 
+ dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.downsample.1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.downsample.1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.0.downsample.1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.1.conv1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + dim { + dim_value: 512 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer4.1.bn1.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.1.bn1.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.1.bn1.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.1.bn1.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.1.conv2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + dim { + dim_value: 512 + } + dim { + dim_value: 3 + } + dim { + dim_value: 3 + } + } + } + } + } + value_info { + name: "layer4.1.bn2.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.1.bn2.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.1.bn2.running_mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "layer4.1.bn2.running_var" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "fc.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1000 + } + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "fc.bias" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1000 + } + } + } + } + } + value_info { + name: "conv1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "maxpool_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "layer1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "layer2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + 
dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "layer3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "layer4_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "avgpool_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "_val_110" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 2 + } + } + } + } + } + value_info { + name: "view" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_conv1_1/l_x_" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 3 + } + dim { + dim_value: 224 + } + dim { + dim_value: 224 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_conv1_1/convolution" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_bn1_1/convolution" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_bn1_1/_native_batch_norm_legit_no_training" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_bn1_1/_native_batch_norm_legit_no_training_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_bn1_1/_native_batch_norm_legit_no_training_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_relu_1/getitem" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_relu_1/relu" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_relu_1/copy" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_pooling_MaxPool2d_maxpool_1/copy" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 112 + } + dim { + dim_value: 112 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_pooling_MaxPool2d_maxpool_1/max_pool2d_with_indices" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_pooling_MaxPool2d_maxpool_1/max_pool2d_with_indices_1" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___0___conv1_1/getitem_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___0___conv1_1/convolution_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn1_1/convolution_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn1_1/_native_batch_norm_legit_no_training_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn1_1/_native_batch_norm_legit_no_training_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn1_1/_native_batch_norm_legit_no_training_1_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_1/getitem_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_1/relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_1/copy_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim 
{ + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___0___conv2_1/copy_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___0___conv2_1/convolution_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn2_1/convolution_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn2_1/_native_batch_norm_legit_no_training_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn2_1/_native_batch_norm_legit_no_training_2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn2_1/_native_batch_norm_legit_no_training_2_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_2/add" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_2/relu_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_2/copy_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_0_1/getitem_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_0_1/getattr_l__self___layer1___0___conv1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: 
"pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_0_1/getattr_l__self___layer1___0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_0_1/getattr_l__self___layer1___0___relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_0_1/getattr_l__self___layer1___0___conv2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_0_1/getattr_l__self___layer1___0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_0_1/add" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_0_1/getattr_l__self___layer1___0___relu_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___1___conv1_1/copy_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___1___conv1_1/convolution_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn1_1/convolution_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn1_1/_native_batch_norm_legit_no_training_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn1_1/_native_batch_norm_legit_no_training_3_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn1_1/_native_batch_norm_legit_no_training_3_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_1/getitem_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_1/relu_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_1/copy_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___1___conv2_1/copy_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___1___conv2_1/convolution_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn2_1/convolution_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn2_1/_native_batch_norm_legit_no_training_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn2_1/_native_batch_norm_legit_no_training_4_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn2_1/_native_batch_norm_legit_no_training_4_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_2/add_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_2/relu_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + 
dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_2/copy_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_1_1/copy_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_1_1/getattr_l__self___layer1___1___conv1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_1_1/getattr_l__self___layer1___1___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_1_1/getattr_l__self___layer1___1___relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_1_1/getattr_l__self___layer1___1___conv2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_1_1/getattr_l__self___layer1___1___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_1_1/add_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer1_1_1/getattr_l__self___layer1___1___relu_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer1_1/getitem_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer1_1/layer1_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer1_1/layer1_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___conv1_1/copy_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___conv1_1/convolution_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn1_1/convolution_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn1_1/_native_batch_norm_legit_no_training_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn1_1/_native_batch_norm_legit_no_training_5_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn1_1/_native_batch_norm_legit_no_training_5_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_1/getitem_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_1/relu_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_1/copy_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___conv2_1/copy_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___conv2_1/convolution_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim 
{ + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn2_1/convolution_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn2_1/_native_batch_norm_legit_no_training_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn2_1/_native_batch_norm_legit_no_training_6_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn2_1/_native_batch_norm_legit_no_training_6_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___downsample_0_1/copy_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___downsample_0_1/convolution_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___downsample_1_1/convolution_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___downsample_1_1/_native_batch_norm_legit_no_training_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___downsample_1_1/_native_batch_norm_legit_no_training_7_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___downsample_1_1/_native_batch_norm_legit_no_training_7_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_getattr_L__self___layer2___0___downsample_1/copy_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_getattr_L__self___layer2___0___downsample_1/getattr_l__self___layer2___0___downsample_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_getattr_L__self___layer2___0___downsample_1/getattr_l__self___layer2___0___downsample_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_2/add_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_2/relu_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_2/copy_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_0_1/copy_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_0_1/getattr_l__self___layer2___0___conv1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_0_1/getattr_l__self___layer2___0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_0_1/getattr_l__self___layer2___0___relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_0_1/getattr_l__self___layer2___0___conv2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_0_1/getattr_l__self___layer2___0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + 
name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_0_1/getattr_l__self___layer2___0___downsample_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_0_1/add_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_0_1/getattr_l__self___layer2___0___relu_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___1___conv1_1/copy_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___1___conv1_1/convolution_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn1_1/convolution_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn1_1/_native_batch_norm_legit_no_training_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn1_1/_native_batch_norm_legit_no_training_8_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn1_1/_native_batch_norm_legit_no_training_8_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_1/getitem_26" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_1/relu_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_1/copy_7" + type { + tensor_type { + elem_type: 1 + 
shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___1___conv2_1/copy_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___1___conv2_1/convolution_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn2_1/convolution_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn2_1/_native_batch_norm_legit_no_training_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn2_1/_native_batch_norm_legit_no_training_9_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn2_1/_native_batch_norm_legit_no_training_9_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_2/add_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_2/relu_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_2/copy_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_1_1/copy_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_1_1/getattr_l__self___layer2___1___conv1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: 
"pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_1_1/getattr_l__self___layer2___1___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_1_1/getattr_l__self___layer2___1___relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_1_1/getattr_l__self___layer2___1___conv2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_1_1/getattr_l__self___layer2___1___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_1_1/add_3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer2_1_1/getattr_l__self___layer2___1___relu_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer2_1/copy_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 64 + } + dim { + dim_value: 56 + } + dim { + dim_value: 56 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer2_1/layer2_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer2_1/layer2_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___conv1_1/copy_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___conv1_1/convolution_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn1_1/convolution_10" + type { + tensor_type { + 
elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn1_1/_native_batch_norm_legit_no_training_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn1_1/_native_batch_norm_legit_no_training_10_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn1_1/_native_batch_norm_legit_no_training_10_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_1/getitem_32" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_1/relu_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_1/copy_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___conv2_1/copy_9" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___conv2_1/convolution_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn2_1/convolution_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn2_1/_native_batch_norm_legit_no_training_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn2_1/_native_batch_norm_legit_no_training_11_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn2_1/_native_batch_norm_legit_no_training_11_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___downsample_0_1/copy_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___downsample_0_1/convolution_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___downsample_1_1/convolution_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___downsample_1_1/_native_batch_norm_legit_no_training_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___downsample_1_1/_native_batch_norm_legit_no_training_12_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___downsample_1_1/_native_batch_norm_legit_no_training_12_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_getattr_L__self___layer3___0___downsample_1/copy_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_getattr_L__self___layer3___0___downsample_1/getattr_l__self___layer3___0___downsample_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_getattr_L__self___layer3___0___downsample_1/getattr_l__self___layer3___0___downsample_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_2/add_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_2/relu_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_2/copy_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_0_1/copy_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_0_1/getattr_l__self___layer3___0___conv1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_0_1/getattr_l__self___layer3___0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_0_1/getattr_l__self___layer3___0___relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_0_1/getattr_l__self___layer3___0___conv2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_0_1/getattr_l__self___layer3___0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_0_1/getattr_l__self___layer3___0___downsample_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_0_1/add_4" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_0_1/getattr_l__self___layer3___0___relu_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___1___conv1_1/copy_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___1___conv1_1/convolution_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn1_1/convolution_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn1_1/_native_batch_norm_legit_no_training_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn1_1/_native_batch_norm_legit_no_training_13_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn1_1/_native_batch_norm_legit_no_training_13_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_1/getitem_41" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_1/relu_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_1/copy_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___1___conv2_1/copy_11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___1___conv2_1/convolution_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn2_1/convolution_14" + type { + tensor_type { + elem_type: 
1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn2_1/_native_batch_norm_legit_no_training_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn2_1/_native_batch_norm_legit_no_training_14_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn2_1/_native_batch_norm_legit_no_training_14_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_2/add_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_2/relu_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_2/copy_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_1_1/copy_10" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_1_1/getattr_l__self___layer3___1___conv1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_1_1/getattr_l__self___layer3___1___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_1_1/getattr_l__self___layer3___1___relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_1_1/getattr_l__self___layer3___1___conv2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info 
{ + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_1_1/getattr_l__self___layer3___1___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_1_1/add_5" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer3_1_1/getattr_l__self___layer3___1___relu_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer3_1/copy_8" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 128 + } + dim { + dim_value: 28 + } + dim { + dim_value: 28 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer3_1/layer3_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer3_1/layer3_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___conv1_1/copy_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___conv1_1/convolution_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn1_1/convolution_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn1_1/_native_batch_norm_legit_no_training_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn1_1/_native_batch_norm_legit_no_training_15_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn1_1/_native_batch_norm_legit_no_training_15_2" + type { + tensor_type { + 
elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_1/getitem_47" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_1/relu_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_1/copy_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___conv2_1/copy_13" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___conv2_1/convolution_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn2_1/convolution_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn2_1/_native_batch_norm_legit_no_training_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn2_1/_native_batch_norm_legit_no_training_16_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn2_1/_native_batch_norm_legit_no_training_16_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___downsample_0_1/copy_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___downsample_0_1/convolution_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___downsample_1_1/convolution_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___downsample_1_1/_native_batch_norm_legit_no_training_17" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___downsample_1_1/_native_batch_norm_legit_no_training_17_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___downsample_1_1/_native_batch_norm_legit_no_training_17_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_getattr_L__self___layer4___0___downsample_1/copy_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_getattr_L__self___layer4___0___downsample_1/getattr_l__self___layer4___0___downsample_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_getattr_L__self___layer4___0___downsample_1/getattr_l__self___layer4___0___downsample_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_2/add_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_2/relu_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_2/copy_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_0_1/copy_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: 
"pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_0_1/getattr_l__self___layer4___0___conv1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_0_1/getattr_l__self___layer4___0___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_0_1/getattr_l__self___layer4___0___relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_0_1/getattr_l__self___layer4___0___conv2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_0_1/getattr_l__self___layer4___0___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_0_1/getattr_l__self___layer4___0___downsample_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_0_1/add_6" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_0_1/getattr_l__self___layer4___0___relu_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___1___conv1_1/copy_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___1___conv1_1/convolution_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn1_1/convolution_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn1_1/_native_batch_norm_legit_no_training_18" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn1_1/_native_batch_norm_legit_no_training_18_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn1_1/_native_batch_norm_legit_no_training_18_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_1/getitem_56" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_1/relu_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_1/copy_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___1___conv2_1/copy_15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___1___conv2_1/convolution_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn2_1/convolution_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn2_1/_native_batch_norm_legit_no_training_19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn2_1/_native_batch_norm_legit_no_training_19_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn2_1/_native_batch_norm_legit_no_training_19_2" + type { + tensor_type { + 
elem_type: 1 + shape { + dim { + dim_value: 0 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_2/add_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_2/relu_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_2/copy_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_1_1/copy_14" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_1_1/getattr_l__self___layer4___1___conv1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_1_1/getattr_l__self___layer4___1___bn1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_1_1/getattr_l__self___layer4___1___relu_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_1_1/getattr_l__self___layer4___1___conv2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_1_1/getattr_l__self___layer4___1___bn2_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_1_1/add_7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torchvision.0.16.0a0+0370134::torchvision_models_resnet_BasicBlock_layer4_1_1/getattr_l__self___layer4___1___relu_2" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: 
"pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer4_1/copy_12" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 256 + } + dim { + dim_value: 14 + } + dim { + dim_value: 14 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer4_1/layer4_0_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_container_Sequential_layer4_1/layer4_1_1" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_pooling_AdaptiveAvgPool2d_avgpool_1/copy_16" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 7 + } + dim { + dim_value: 7 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_pooling_AdaptiveAvgPool2d_avgpool_1/mean" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_linear_Linear_fc_1/view" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 512 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_linear_Linear_fc_1/t" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 512 + } + dim { + dim_value: 1000 + } + } + } + } + } + value_info { + name: "pkg.torch.2.2.0a0+git4ab5507::torch_nn_modules_linear_Linear_fc_1/addmm" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1000 + } + } + } + } + } +} +opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 +} +opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 +} +opset_import { + domain: "pkg.torchvision.0.16.0a0+0370134" + version: 1 +} +opset_import { + domain: "" + version: 18 +} +opset_import { + domain: "pkg.onnxscript.torch_lib.common" + version: 1 +} +functions { + name: "_aten_convolution_onnx" + input: "input" + input: "weight" + input: "bias" + input: "transposed" + output: "result_12" + attribute: "strides" + attribute: "pads" + attribute: "dilations" + node { + input: "weight" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "weight_size" + name: "n1" + op_type: "Size" + } + node { + input: "input" + output: "tmp_0" + name: "n2" + op_type: "Shape" + } + node { + input: "tmp_0" + output: "tmp_1" + name: "n3" + op_type: "Size" + } + node { + input: "tmp_1" + input: "weight_size" + output: "tmp_2" + name: "n4" + op_type: "Equal" + } + node { + input: "tmp_2" + output: "no_batch" + name: "n5" + op_type: "Not" + } + node { + input: "no_batch" + output: "input_6" + name: "n6" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "tmp_3" + name: "n0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + type: INTS + } + } + node { + input: "input" + input: "tmp_3" + output: "input_4" + name: "n1" + op_type: "Unsqueeze" + } + name: "thenGraph_23" + output { + name: "input_4" + } + } + 
type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "input" + output: "input_5" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_23" + output { + name: "input_5" + } + } + type: GRAPH + } + } + node { + input: "transposed" + output: "result_8" + name: "n7" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "input_6" + input: "weight" + input: "bias" + output: "result" + name: "n0" + op_type: "ConvTranspose" + attribute { + name: "dilations" + type: INTS + ref_attr_name: "dilations" + } + attribute { + name: "group" + type: INT + ref_attr_name: "groups" + } + attribute { + name: "output_padding" + type: INTS + ref_attr_name: "output_padding" + } + attribute { + name: "pads" + type: INTS + ref_attr_name: "pads" + } + attribute { + name: "strides" + type: INTS + ref_attr_name: "strides" + } + } + name: "thenGraph_26" + output { + name: "result" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "input_6" + input: "weight" + input: "bias" + output: "result_7" + name: "n0" + op_type: "Conv" + attribute { + name: "dilations" + type: INTS + ref_attr_name: "dilations" + } + attribute { + name: "group" + type: INT + ref_attr_name: "groups" + } + attribute { + name: "pads" + type: INTS + ref_attr_name: "pads" + } + attribute { + name: "strides" + type: INTS + ref_attr_name: "strides" + } + } + name: "elseGraph_26" + output { + name: "result_7" + } + } + type: GRAPH + } + } + node { + input: "no_batch" + output: "result_12" + name: "n8" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "tmp_9" + name: "n0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + type: INTS + } + } + node { + input: "result_8" + input: "tmp_9" + output: "result_10" + name: "n1" + op_type: "Squeeze" + } + name: "thenGraph_48" + output { + name: "result_10" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "result_8" + output: "result_11" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_48" + output { + name: "result_11" + } + } + type: GRAPH + } + } + doc_string: "ConvXd with attributes pre-computed to fit the ONNX spec." 
+ opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "output_padding" + ints: 0 + type: INTS + } + attribute_proto { + name: "groups" + i: 1 + type: INT + } +} +functions { + name: "torch_nn_modules_conv_Conv2d_conv1_1" + input: "l_x_" + input: "conv1.weight" + output: "convolution" + node { + input: "conv1.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "l_x_" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "l_x_" + input: "conv1.weight" + input: "_val_7" + input: "_val_8" + output: "convolution" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 3 + ints: 3 + ints: 3 + ints: 3 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "_aten_native_batch_norm_inference_onnx" + input: "input" + input: "weight" + input: "bias" + input: "running_mean" + input: "running_var" + output: "norm" + output: "empty_mean" + output: "empty_var" + attribute: "training" + attribute: "momentum" + attribute: "eps" + node { + input: "input" + input: "weight" + input: "bias" + input: "running_mean" + input: "running_var" + output: "norm" + name: "n0" + op_type: "BatchNormalization" + attribute { + name: "epsilon" + type: FLOAT + ref_attr_name: "eps" + } + attribute { + name: "momentum" + type: FLOAT + ref_attr_name: "momentum" + } + attribute { + name: "training_mode" + type: INT + ref_attr_name: "training" + } + } + node { + input: "input" + output: "tmp" + name: "n1" + op_type: "Shape" + attribute { + name: "end" + i: 0 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + input: "tmp" + output: "empty_mean" + name: "n2" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "input" + output: "tmp_0" + name: "n3" + op_type: "Shape" + attribute { + name: "end" + i: 0 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + input: "tmp_0" + output: "empty_var" + name: "n4" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: 
"torch_nn_modules_batchnorm_BatchNorm2d_bn1_1" + input: "convolution" + input: "bn1.weight" + input: "bn1.bias" + input: "bn1.running_mean" + input: "bn1.running_var" + output: "_native_batch_norm_legit_no_training" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution" + input: "bn1.weight" + input: "bn1.bias" + input: "bn1.running_mean" + input: "bn1.running_var" + output: "_native_batch_norm_legit_no_training" + output: "_native_batch_norm_legit_no_training_1" + output: "_native_batch_norm_legit_no_training_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "aten_relu" + input: "self" + output: "return_val" + node { + input: "self" + output: "return_val" + name: "n0" + op_type: "Relu" + } + doc_string: "relu(Tensor self) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_copy" + input: "self" + input: "src" + output: "self_0" + node { + input: "src" + output: "self_0" + name: "n0" + op_type: "Identity" + } + doc_string: "copy(Tensor self, Tensor src, bool non_blocking=False) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "non_blocking" + i: 0 + type: INT + } +} +functions { + name: "torch_nn_modules_activation_ReLU_relu_1" + input: "getitem" + output: "copy" + node { + input: "getitem" + output: "relu" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem" + input: "relu" + output: "copy" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "_aten_max_pool_with_indices_onnx" + input: "self" + output: "pool_result_13" + output: "indices_12" + attribute: "kernel_size" + attribute: "stride" + attribute: "padding" + attribute: "dilation" + attribute: "ceil_mode" + attribute: "unbatched_rank" + attribute: "n_dims_one" + attribute: "n_dims_zero" + attribute: "n_dims_axes" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "self_rank" + name: "n1" + op_type: "Size" + } + node { + output: "unbatched_rank" + name: "n2" + op_type: "Constant" + attribute { + name: "value_int" + type: INT + ref_attr_name: "unbatched_rank" + } + } + node { + input: "unbatched_rank" + input: "self_rank" + output: "unbatched_rank_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "self_rank" + input: "unbatched_rank_cast" + output: "cond" + name: "n4" + op_type: "Equal" + } + node { + input: "cond" + output: "self_2" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "int64_0" + name: "n0" + op_type: "Constant" + attribute { + name: "value" + t { + 
data_type: 7 + int64_data: 0 + name: "int64_0" + } + type: TENSOR + } + } + node { + input: "self" + input: "int64_0" + output: "self_0" + name: "n1" + op_type: "Unsqueeze" + } + name: "thenGraph_15" + output { + name: "self_0" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "self_1" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_15" + output { + name: "self_1" + } + } + type: GRAPH + } + } + node { + input: "self_2" + output: "pool_result" + output: "indices" + name: "n6" + op_type: "MaxPool" + attribute { + name: "ceil_mode" + type: INT + ref_attr_name: "ceil_mode" + } + attribute { + name: "dilations" + type: INTS + ref_attr_name: "dilation" + } + attribute { + name: "kernel_shape" + type: INTS + ref_attr_name: "kernel_size" + } + attribute { + name: "pads" + type: INTS + ref_attr_name: "padding" + } + attribute { + name: "strides" + type: INTS + ref_attr_name: "stride" + } + } + node { + input: "self_2" + output: "_" + output: "flatten_indices" + name: "n7" + op_type: "MaxPool" + attribute { + name: "dilations" + type: INTS + ref_attr_name: "dilation" + } + attribute { + name: "kernel_shape" + type: INTS + ref_attr_name: "n_dims_one" + } + attribute { + name: "strides" + type: INTS + ref_attr_name: "n_dims_one" + } + } + node { + output: "ends" + name: "n8" + op_type: "Constant" + attribute { + name: "value_ints" + type: INTS + ref_attr_name: "n_dims_one" + } + } + node { + output: "starts" + name: "n9" + op_type: "Constant" + attribute { + name: "value_ints" + type: INTS + ref_attr_name: "n_dims_zero" + } + } + node { + output: "axes" + name: "n10" + op_type: "Constant" + attribute { + name: "value_ints" + type: INTS + ref_attr_name: "n_dims_axes" + } + } + node { + input: "flatten_indices" + input: "starts" + input: "ends" + input: "axes" + output: "delta" + name: "n11" + op_type: "Slice" + } + node { + input: "indices" + input: "delta" + output: "indices_3" + name: "n12" + op_type: "Sub" + } + node { + output: "unbatched_rank_4" + name: "n13" + op_type: "Constant" + attribute { + name: "value_int" + type: INT + ref_attr_name: "unbatched_rank" + } + } + node { + input: "unbatched_rank_4" + input: "self_rank" + output: "unbatched_rank_4_cast" + name: "n14" + op_type: "CastLike" + } + node { + input: "self_rank" + input: "unbatched_rank_4_cast" + output: "cond_5" + name: "n15" + op_type: "Equal" + } + node { + input: "cond_5" + output: "indices_12" + output: "pool_result_13" + name: "n16" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "tmp_6" + name: "n0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + type: INTS + } + } + node { + input: "pool_result" + input: "tmp_6" + output: "pool_result_7" + name: "n1" + op_type: "Squeeze" + } + node { + output: "tmp_8" + name: "n2" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + type: INTS + } + } + node { + input: "indices_3" + input: "tmp_8" + output: "indices_9" + name: "n3" + op_type: "Squeeze" + } + name: "thenGraph_70" + output { + name: "indices_9" + } + output { + name: "pool_result_7" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "indices_3" + output: "indices_10" + name: "n0" + op_type: "Identity" + } + node { + input: "pool_result" + output: "pool_result_11" + name: "n1" + op_type: "Identity" + } + name: "elseGraph_70" + output { + name: "indices_10" + } + output { + name: "pool_result_11" + } + } + type: GRAPH + } + } + opset_import { + domain: "" + version: 
18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "torch_nn_modules_pooling_MaxPool2d_maxpool_1" + input: "copy" + output: "max_pool2d_with_indices" + node { + input: "copy" + output: "max_pool2d_with_indices" + output: "max_pool2d_with_indices_1" + name: "_aten_max_pool_with_indices_onnx_0" + op_type: "_aten_max_pool_with_indices_onnx" + attribute { + name: "ceil_mode" + i: 0 + type: INT + } + attribute { + name: "dilation" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "kernel_size" + ints: 3 + ints: 3 + type: INTS + } + attribute { + name: "n_dims_axes" + ints: 2 + ints: 3 + type: INTS + } + attribute { + name: "n_dims_one" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "n_dims_zero" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "padding" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "stride" + ints: 2 + ints: 2 + type: INTS + } + attribute { + name: "unbatched_rank" + i: 3 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___0___conv1_1" + input: "getitem_3" + input: "layer1.0.conv1.weight" + output: "convolution_1" + node { + input: "layer1.0.conv1.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "getitem_3" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "getitem_3" + input: "layer1.0.conv1.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_1" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn1_1" + input: "convolution_1" + input: "layer1.0.bn1.weight" + input: "layer1.0.bn1.bias" + input: "layer1.0.bn1.running_mean" + input: "layer1.0.bn1.running_var" + output: "_native_batch_norm_legit_no_training_1" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: 
"value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_1" + input: "layer1.0.bn1.weight" + input: "layer1.0.bn1.bias" + input: "layer1.0.bn1.running_mean" + input: "layer1.0.bn1.running_var" + output: "_native_batch_norm_legit_no_training_1" + output: "_native_batch_norm_legit_no_training_1_1" + output: "_native_batch_norm_legit_no_training_1_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_1" + input: "getitem_5" + output: "copy_1" + node { + input: "getitem_5" + output: "relu_1" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_5" + input: "relu_1" + output: "copy_1" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___0___conv2_1" + input: "copy_1" + input: "layer1.0.conv2.weight" + output: "convolution_2" + node { + input: "layer1.0.conv2.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_1" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_1" + input: "layer1.0.conv2.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_2" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn2_1" + 
input: "convolution_2" + input: "layer1.0.bn2.weight" + input: "layer1.0.bn2.bias" + input: "layer1.0.bn2.running_mean" + input: "layer1.0.bn2.running_var" + output: "_native_batch_norm_legit_no_training_2" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_2" + input: "layer1.0.bn2.weight" + input: "layer1.0.bn2.bias" + input: "layer1.0.bn2.running_mean" + input: "layer1.0.bn2.running_var" + output: "_native_batch_norm_legit_no_training_2" + output: "_native_batch_norm_legit_no_training_2_1" + output: "_native_batch_norm_legit_no_training_2_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_2" + input: "add" + output: "copy_2" + node { + input: "add" + output: "relu_2" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add" + input: "relu_2" + output: "copy_2" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "aten_add" + input: "self" + input: "other" + output: "return_val" + node { + output: "alpha" + name: "n0" + op_type: "Constant" + attribute { + name: "value_float" + type: FLOAT + ref_attr_name: "alpha" + } + } + node { + input: "alpha" + input: "other" + output: "alpha_0" + name: "n1" + op_type: "CastLike" + } + node { + input: "other" + input: "alpha_0" + output: "other_1" + name: "n2" + op_type: "Mul" + } + node { + input: "self" + input: "other_1" + output: "return_val" + name: "n3" + op_type: "Add" + } + doc_string: "add.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "alpha" + f: 1.0 + type: FLOAT + } +} +functions { + name: "torchvision_models_resnet_BasicBlock_layer1_0_1" + input: "getitem_3" + input: "layer1.0.conv1.weight" + input: "layer1.0.bn1.weight" + input: "layer1.0.bn1.bias" + input: "layer1.0.bn1.running_mean" + input: "layer1.0.bn1.running_var" + input: "layer1.0.conv2.weight" + input: "layer1.0.bn2.weight" + input: "layer1.0.bn2.bias" + input: "layer1.0.bn2.running_mean" + input: "layer1.0.bn2.running_var" + output: "getattr_l__self___layer1___0___relu_2" + node { + input: "getitem_3" + input: "layer1.0.conv1.weight" + output: "getattr_l__self___layer1___0___conv1_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___0___conv1_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___0___conv1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer1___0___conv1_1" + input: "layer1.0.bn1.weight" + input: "layer1.0.bn1.bias" + input: "layer1.0.bn1.running_mean" + input: "layer1.0.bn1.running_var" + 
output: "getattr_l__self___layer1___0___bn1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer1___0___bn1_1" + output: "getattr_l__self___layer1___0___relu_1" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_1_2" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer1___0___relu_1" + input: "layer1.0.conv2.weight" + output: "getattr_l__self___layer1___0___conv2_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___0___conv2_1_3" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___0___conv2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer1___0___conv2_1" + input: "layer1.0.bn2.weight" + input: "layer1.0.bn2.bias" + input: "layer1.0.bn2.running_mean" + input: "layer1.0.bn2.running_var" + output: "getattr_l__self___layer1___0___bn2_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn2_1_4" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___0___bn2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer1___0___bn2_1" + input: "getitem_3" + output: "add" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add" + output: "getattr_l__self___layer1___0___relu_2" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_2_6" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___0___relu_2" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torchvision.0.16.0a0+0370134" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___1___conv1_1" + input: "copy_2" + input: "layer1.1.conv1.weight" + output: "convolution_3" + node { + input: "layer1.1.conv1.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_2" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_2" + input: "layer1.1.conv1.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_3" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + 
name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn1_1" + input: "convolution_3" + input: "layer1.1.bn1.weight" + input: "layer1.1.bn1.bias" + input: "layer1.1.bn1.running_mean" + input: "layer1.1.bn1.running_var" + output: "_native_batch_norm_legit_no_training_3" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_3" + input: "layer1.1.bn1.weight" + input: "layer1.1.bn1.bias" + input: "layer1.1.bn1.running_mean" + input: "layer1.1.bn1.running_var" + output: "_native_batch_norm_legit_no_training_3" + output: "_native_batch_norm_legit_no_training_3_1" + output: "_native_batch_norm_legit_no_training_3_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_1" + input: "getitem_11" + output: "copy_3" + node { + input: "getitem_11" + output: "relu_3" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_11" + input: "relu_3" + output: "copy_3" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___1___conv2_1" + input: "copy_3" + input: "layer1.1.conv2.weight" + output: "convolution_4" + node { + input: "layer1.1.conv2.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_3" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + 
attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_3" + input: "layer1.1.conv2.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_4" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn2_1" + input: "convolution_4" + input: "layer1.1.bn2.weight" + input: "layer1.1.bn2.bias" + input: "layer1.1.bn2.running_mean" + input: "layer1.1.bn2.running_var" + output: "_native_batch_norm_legit_no_training_4" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_4" + input: "layer1.1.bn2.weight" + input: "layer1.1.bn2.bias" + input: "layer1.1.bn2.running_mean" + input: "layer1.1.bn2.running_var" + output: "_native_batch_norm_legit_no_training_4" + output: "_native_batch_norm_legit_no_training_4_1" + output: "_native_batch_norm_legit_no_training_4_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_2" + input: "add_1" + output: "copy_4" + node { + input: "add_1" + output: "relu_4" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_1" + input: "relu_4" + output: "copy_4" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torchvision_models_resnet_BasicBlock_layer1_1_1" + input: "copy_2" + input: "layer1.1.conv1.weight" + input: "layer1.1.bn1.weight" + input: "layer1.1.bn1.bias" + input: "layer1.1.bn1.running_mean" + input: "layer1.1.bn1.running_var" + input: "layer1.1.conv2.weight" + input: "layer1.1.bn2.weight" + input: "layer1.1.bn2.bias" + input: "layer1.1.bn2.running_mean" + input: "layer1.1.bn2.running_var" + output: "getattr_l__self___layer1___1___relu_2" + node { + input: "copy_2" + input: "layer1.1.conv1.weight" + output: "getattr_l__self___layer1___1___conv1_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___1___conv1_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___1___conv1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + 
input: "getattr_l__self___layer1___1___conv1_1" + input: "layer1.1.bn1.weight" + input: "layer1.1.bn1.bias" + input: "layer1.1.bn1.running_mean" + input: "layer1.1.bn1.running_var" + output: "getattr_l__self___layer1___1___bn1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer1___1___bn1_1" + output: "getattr_l__self___layer1___1___relu_1" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_1_2" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer1___1___relu_1" + input: "layer1.1.conv2.weight" + output: "getattr_l__self___layer1___1___conv2_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___1___conv2_1_3" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer1___1___conv2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer1___1___conv2_1" + input: "layer1.1.bn2.weight" + input: "layer1.1.bn2.bias" + input: "layer1.1.bn2.running_mean" + input: "layer1.1.bn2.running_var" + output: "getattr_l__self___layer1___1___bn2_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn2_1_4" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer1___1___bn2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer1___1___bn2_1" + input: "copy_2" + output: "add_1" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_1" + output: "getattr_l__self___layer1___1___relu_2" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_2_6" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer1___1___relu_2" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torchvision.0.16.0a0+0370134" +} +functions { + name: "torch_nn_modules_container_Sequential_layer1_1" + input: "getitem_3" + input: "layer1.0.conv1.weight" + input: "layer1.0.bn1.weight" + input: "layer1.0.bn1.bias" + input: "layer1.0.bn1.running_mean" + input: "layer1.0.bn1.running_var" + input: "layer1.0.conv2.weight" + input: "layer1.0.bn2.weight" + input: "layer1.0.bn2.bias" + input: "layer1.0.bn2.running_mean" + input: "layer1.0.bn2.running_var" + input: "layer1.1.conv1.weight" + input: "layer1.1.bn1.weight" + input: "layer1.1.bn1.bias" + input: "layer1.1.bn1.running_mean" + input: "layer1.1.bn1.running_var" + input: "layer1.1.conv2.weight" + input: "layer1.1.bn2.weight" + input: "layer1.1.bn2.bias" + input: "layer1.1.bn2.running_mean" + input: "layer1.1.bn2.running_var" + output: "layer1_1_1" + node { + input: "getitem_3" + input: "layer1.0.conv1.weight" + input: "layer1.0.bn1.weight" + input: "layer1.0.bn1.bias" + input: "layer1.0.bn1.running_mean" + input: "layer1.0.bn1.running_var" + input: "layer1.0.conv2.weight" + input: "layer1.0.bn2.weight" + input: "layer1.0.bn2.bias" + input: "layer1.0.bn2.running_mean" + input: "layer1.0.bn2.running_var" + output: "layer1_0_1" + name: 
"torchvision_models_resnet_BasicBlock_layer1_0_1_0" + op_type: "torchvision_models_resnet_BasicBlock_layer1_0_1" + domain: "pkg.torchvision.0.16.0a0+0370134" + } + node { + input: "layer1_0_1" + input: "layer1.1.conv1.weight" + input: "layer1.1.bn1.weight" + input: "layer1.1.bn1.bias" + input: "layer1.1.bn1.running_mean" + input: "layer1.1.bn1.running_var" + input: "layer1.1.conv2.weight" + input: "layer1.1.bn2.weight" + input: "layer1.1.bn2.bias" + input: "layer1.1.bn2.running_mean" + input: "layer1.1.bn2.running_var" + output: "layer1_1_1" + name: "torchvision_models_resnet_BasicBlock_layer1_1_1_1" + op_type: "torchvision_models_resnet_BasicBlock_layer1_1_1" + domain: "pkg.torchvision.0.16.0a0+0370134" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.torchvision.0.16.0a0+0370134" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___conv1_1" + input: "copy_4" + input: "layer2.0.conv1.weight" + output: "convolution_5" + node { + input: "layer2.0.conv1.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_4" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_4" + input: "layer2.0.conv1.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_5" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn1_1" + input: "convolution_5" + input: "layer2.0.bn1.weight" + input: "layer2.0.bn1.bias" + input: "layer2.0.bn1.running_mean" + input: "layer2.0.bn1.running_var" + output: "_native_batch_norm_legit_no_training_5" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_5" + input: "layer2.0.bn1.weight" + input: "layer2.0.bn1.bias" + input: "layer2.0.bn1.running_mean" + input: "layer2.0.bn1.running_var" + output: "_native_batch_norm_legit_no_training_5" + output: 
"_native_batch_norm_legit_no_training_5_1" + output: "_native_batch_norm_legit_no_training_5_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_1" + input: "getitem_17" + output: "copy_5" + node { + input: "getitem_17" + output: "relu_5" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_17" + input: "relu_5" + output: "copy_5" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___conv2_1" + input: "copy_5" + input: "layer2.0.conv2.weight" + output: "convolution_6" + node { + input: "layer2.0.conv2.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_5" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_5" + input: "layer2.0.conv2.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_6" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn2_1" + input: "convolution_6" + input: "layer2.0.bn2.weight" + input: "layer2.0.bn2.bias" + input: "layer2.0.bn2.running_mean" + input: "layer2.0.bn2.running_var" + output: "_native_batch_norm_legit_no_training_6" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { 
+ name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_6" + input: "layer2.0.bn2.weight" + input: "layer2.0.bn2.bias" + input: "layer2.0.bn2.running_mean" + input: "layer2.0.bn2.running_var" + output: "_native_batch_norm_legit_no_training_6" + output: "_native_batch_norm_legit_no_training_6_1" + output: "_native_batch_norm_legit_no_training_6_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___downsample_0_1" + input: "copy_4" + input: "layer2.0.downsample.0.weight" + output: "convolution_7" + node { + input: "layer2.0.downsample.0.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_4" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_4" + input: "layer2.0.downsample.0.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_7" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___downsample_1_1" + input: "convolution_7" + input: "layer2.0.downsample.1.weight" + input: "layer2.0.downsample.1.bias" + input: "layer2.0.downsample.1.running_mean" + input: "layer2.0.downsample.1.running_var" + output: "_native_batch_norm_legit_no_training_7" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_7" + input: "layer2.0.downsample.1.weight" + input: "layer2.0.downsample.1.bias" + input: "layer2.0.downsample.1.running_mean" + input: "layer2.0.downsample.1.running_var" 
+ output: "_native_batch_norm_legit_no_training_7" + output: "_native_batch_norm_legit_no_training_7_1" + output: "_native_batch_norm_legit_no_training_7_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_container_Sequential_getattr_L__self___layer2___0___downsample_1" + input: "copy_4" + input: "layer2.0.downsample.0.weight" + input: "layer2.0.downsample.1.weight" + input: "layer2.0.downsample.1.bias" + input: "layer2.0.downsample.1.running_mean" + input: "layer2.0.downsample.1.running_var" + output: "getattr_l__self___layer2___0___downsample_1_1" + node { + input: "copy_4" + input: "layer2.0.downsample.0.weight" + output: "getattr_l__self___layer2___0___downsample_0_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___downsample_0_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___downsample_0_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___0___downsample_0_1" + input: "layer2.0.downsample.1.weight" + input: "layer2.0.downsample.1.bias" + input: "layer2.0.downsample.1.running_mean" + input: "layer2.0.downsample.1.running_var" + output: "getattr_l__self___layer2___0___downsample_1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___downsample_1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___downsample_1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_2" + input: "add_2" + output: "copy_6" + node { + input: "add_2" + output: "relu_6" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_2" + input: "relu_6" + output: "copy_6" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torchvision_models_resnet_BasicBlock_layer2_0_1" + input: "copy_4" + input: "layer2.0.conv1.weight" + input: "layer2.0.bn1.weight" + input: "layer2.0.bn1.bias" + input: "layer2.0.bn1.running_mean" + input: "layer2.0.bn1.running_var" + input: "layer2.0.conv2.weight" + input: "layer2.0.bn2.weight" + input: "layer2.0.bn2.bias" + input: "layer2.0.bn2.running_mean" + input: "layer2.0.bn2.running_var" + input: "layer2.0.downsample.0.weight" + input: "layer2.0.downsample.1.weight" + input: "layer2.0.downsample.1.bias" + input: "layer2.0.downsample.1.running_mean" + input: "layer2.0.downsample.1.running_var" + output: "getattr_l__self___layer2___0___relu_2" + node { + input: "copy_4" + input: "layer2.0.conv1.weight" + output: "getattr_l__self___layer2___0___conv1_1" + name: 
"torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___conv1_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___conv1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___0___conv1_1" + input: "layer2.0.bn1.weight" + input: "layer2.0.bn1.bias" + input: "layer2.0.bn1.running_mean" + input: "layer2.0.bn1.running_var" + output: "getattr_l__self___layer2___0___bn1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___0___bn1_1" + output: "getattr_l__self___layer2___0___relu_1" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_1_2" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___0___relu_1" + input: "layer2.0.conv2.weight" + output: "getattr_l__self___layer2___0___conv2_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___conv2_1_3" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___0___conv2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___0___conv2_1" + input: "layer2.0.bn2.weight" + input: "layer2.0.bn2.bias" + input: "layer2.0.bn2.running_mean" + input: "layer2.0.bn2.running_var" + output: "getattr_l__self___layer2___0___bn2_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn2_1_4" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___0___bn2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "copy_4" + input: "layer2.0.downsample.0.weight" + input: "layer2.0.downsample.1.weight" + input: "layer2.0.downsample.1.bias" + input: "layer2.0.downsample.1.running_mean" + input: "layer2.0.downsample.1.running_var" + output: "getattr_l__self___layer2___0___downsample_1" + name: "torch_nn_modules_container_Sequential_getattr_L__self___layer2___0___downsample_1_5" + op_type: "torch_nn_modules_container_Sequential_getattr_L__self___layer2___0___downsample_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___0___bn2_1" + input: "getattr_l__self___layer2___0___downsample_1" + output: "add_2" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_2" + output: "getattr_l__self___layer2___0___relu_2" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_2_7" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___0___relu_2" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torchvision.0.16.0a0+0370134" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___1___conv1_1" + input: "copy_6" + input: "layer2.1.conv1.weight" + output: "convolution_8" + node { + input: "layer2.1.conv1.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: 
"Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_6" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_6" + input: "layer2.1.conv1.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_8" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn1_1" + input: "convolution_8" + input: "layer2.1.bn1.weight" + input: "layer2.1.bn1.bias" + input: "layer2.1.bn1.running_mean" + input: "layer2.1.bn1.running_var" + output: "_native_batch_norm_legit_no_training_8" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_8" + input: "layer2.1.bn1.weight" + input: "layer2.1.bn1.bias" + input: "layer2.1.bn1.running_mean" + input: "layer2.1.bn1.running_var" + output: "_native_batch_norm_legit_no_training_8" + output: "_native_batch_norm_legit_no_training_8_1" + output: "_native_batch_norm_legit_no_training_8_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_1" + input: "getitem_26" + output: "copy_7" + node { + input: "getitem_26" + output: "relu_7" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_26" + input: "relu_7" + output: "copy_7" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___1___conv2_1" + input: "copy_7" + input: 
"layer2.1.conv2.weight" + output: "convolution_9" + node { + input: "layer2.1.conv2.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_7" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_7" + input: "layer2.1.conv2.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_9" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn2_1" + input: "convolution_9" + input: "layer2.1.bn2.weight" + input: "layer2.1.bn2.bias" + input: "layer2.1.bn2.running_mean" + input: "layer2.1.bn2.running_var" + output: "_native_batch_norm_legit_no_training_9" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_9" + input: "layer2.1.bn2.weight" + input: "layer2.1.bn2.bias" + input: "layer2.1.bn2.running_mean" + input: "layer2.1.bn2.running_var" + output: "_native_batch_norm_legit_no_training_9" + output: "_native_batch_norm_legit_no_training_9_1" + output: "_native_batch_norm_legit_no_training_9_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_2" + input: "add_3" + output: "copy_8" + node { + input: "add_3" + output: "relu_8" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_3" + input: "relu_8" + output: "copy_8" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" 
+ } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torchvision_models_resnet_BasicBlock_layer2_1_1" + input: "copy_6" + input: "layer2.1.conv1.weight" + input: "layer2.1.bn1.weight" + input: "layer2.1.bn1.bias" + input: "layer2.1.bn1.running_mean" + input: "layer2.1.bn1.running_var" + input: "layer2.1.conv2.weight" + input: "layer2.1.bn2.weight" + input: "layer2.1.bn2.bias" + input: "layer2.1.bn2.running_mean" + input: "layer2.1.bn2.running_var" + output: "getattr_l__self___layer2___1___relu_2" + node { + input: "copy_6" + input: "layer2.1.conv1.weight" + output: "getattr_l__self___layer2___1___conv1_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___1___conv1_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___1___conv1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___1___conv1_1" + input: "layer2.1.bn1.weight" + input: "layer2.1.bn1.bias" + input: "layer2.1.bn1.running_mean" + input: "layer2.1.bn1.running_var" + output: "getattr_l__self___layer2___1___bn1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___1___bn1_1" + output: "getattr_l__self___layer2___1___relu_1" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_1_2" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___1___relu_1" + input: "layer2.1.conv2.weight" + output: "getattr_l__self___layer2___1___conv2_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___1___conv2_1_3" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer2___1___conv2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___1___conv2_1" + input: "layer2.1.bn2.weight" + input: "layer2.1.bn2.bias" + input: "layer2.1.bn2.running_mean" + input: "layer2.1.bn2.running_var" + output: "getattr_l__self___layer2___1___bn2_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn2_1_4" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer2___1___bn2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer2___1___bn2_1" + input: "copy_6" + output: "add_3" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_3" + output: "getattr_l__self___layer2___1___relu_2" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_2_6" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer2___1___relu_2" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torchvision.0.16.0a0+0370134" +} +functions { + name: "torch_nn_modules_container_Sequential_layer2_1" + input: "copy_4" + input: "layer2.0.conv1.weight" + input: "layer2.0.bn1.weight" + input: "layer2.0.bn1.bias" + input: "layer2.0.bn1.running_mean" + input: 
"layer2.0.bn1.running_var" + input: "layer2.0.conv2.weight" + input: "layer2.0.bn2.weight" + input: "layer2.0.bn2.bias" + input: "layer2.0.bn2.running_mean" + input: "layer2.0.bn2.running_var" + input: "layer2.0.downsample.0.weight" + input: "layer2.0.downsample.1.weight" + input: "layer2.0.downsample.1.bias" + input: "layer2.0.downsample.1.running_mean" + input: "layer2.0.downsample.1.running_var" + input: "layer2.1.conv1.weight" + input: "layer2.1.bn1.weight" + input: "layer2.1.bn1.bias" + input: "layer2.1.bn1.running_mean" + input: "layer2.1.bn1.running_var" + input: "layer2.1.conv2.weight" + input: "layer2.1.bn2.weight" + input: "layer2.1.bn2.bias" + input: "layer2.1.bn2.running_mean" + input: "layer2.1.bn2.running_var" + output: "layer2_1_1" + node { + input: "copy_4" + input: "layer2.0.conv1.weight" + input: "layer2.0.bn1.weight" + input: "layer2.0.bn1.bias" + input: "layer2.0.bn1.running_mean" + input: "layer2.0.bn1.running_var" + input: "layer2.0.conv2.weight" + input: "layer2.0.bn2.weight" + input: "layer2.0.bn2.bias" + input: "layer2.0.bn2.running_mean" + input: "layer2.0.bn2.running_var" + input: "layer2.0.downsample.0.weight" + input: "layer2.0.downsample.1.weight" + input: "layer2.0.downsample.1.bias" + input: "layer2.0.downsample.1.running_mean" + input: "layer2.0.downsample.1.running_var" + output: "layer2_0_1" + name: "torchvision_models_resnet_BasicBlock_layer2_0_1_0" + op_type: "torchvision_models_resnet_BasicBlock_layer2_0_1" + domain: "pkg.torchvision.0.16.0a0+0370134" + } + node { + input: "layer2_0_1" + input: "layer2.1.conv1.weight" + input: "layer2.1.bn1.weight" + input: "layer2.1.bn1.bias" + input: "layer2.1.bn1.running_mean" + input: "layer2.1.bn1.running_var" + input: "layer2.1.conv2.weight" + input: "layer2.1.bn2.weight" + input: "layer2.1.bn2.bias" + input: "layer2.1.bn2.running_mean" + input: "layer2.1.bn2.running_var" + output: "layer2_1_1" + name: "torchvision_models_resnet_BasicBlock_layer2_1_1_1" + op_type: "torchvision_models_resnet_BasicBlock_layer2_1_1" + domain: "pkg.torchvision.0.16.0a0+0370134" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.torchvision.0.16.0a0+0370134" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___conv1_1" + input: "copy_8" + input: "layer3.0.conv1.weight" + output: "convolution_10" + node { + input: "layer3.0.conv1.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_8" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_8" + input: "layer3.0.conv1.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_10" + 
name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn1_1" + input: "convolution_10" + input: "layer3.0.bn1.weight" + input: "layer3.0.bn1.bias" + input: "layer3.0.bn1.running_mean" + input: "layer3.0.bn1.running_var" + output: "_native_batch_norm_legit_no_training_10" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_10" + input: "layer3.0.bn1.weight" + input: "layer3.0.bn1.bias" + input: "layer3.0.bn1.running_mean" + input: "layer3.0.bn1.running_var" + output: "_native_batch_norm_legit_no_training_10" + output: "_native_batch_norm_legit_no_training_10_1" + output: "_native_batch_norm_legit_no_training_10_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_1" + input: "getitem_32" + output: "copy_9" + node { + input: "getitem_32" + output: "relu_9" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_32" + input: "relu_9" + output: "copy_9" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___conv2_1" + input: "copy_9" + input: "layer3.0.conv2.weight" + output: "convolution_11" + node { + input: "layer3.0.conv2.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_9" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + 
op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_9" + input: "layer3.0.conv2.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_11" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn2_1" + input: "convolution_11" + input: "layer3.0.bn2.weight" + input: "layer3.0.bn2.bias" + input: "layer3.0.bn2.running_mean" + input: "layer3.0.bn2.running_var" + output: "_native_batch_norm_legit_no_training_11" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_11" + input: "layer3.0.bn2.weight" + input: "layer3.0.bn2.bias" + input: "layer3.0.bn2.running_mean" + input: "layer3.0.bn2.running_var" + output: "_native_batch_norm_legit_no_training_11" + output: "_native_batch_norm_legit_no_training_11_1" + output: "_native_batch_norm_legit_no_training_11_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___downsample_0_1" + input: "copy_8" + input: "layer3.0.downsample.0.weight" + output: "convolution_12" + node { + input: "layer3.0.downsample.0.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_8" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_8" + input: "layer3.0.downsample.0.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_12" 
+ name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___downsample_1_1" + input: "convolution_12" + input: "layer3.0.downsample.1.weight" + input: "layer3.0.downsample.1.bias" + input: "layer3.0.downsample.1.running_mean" + input: "layer3.0.downsample.1.running_var" + output: "_native_batch_norm_legit_no_training_12" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_12" + input: "layer3.0.downsample.1.weight" + input: "layer3.0.downsample.1.bias" + input: "layer3.0.downsample.1.running_mean" + input: "layer3.0.downsample.1.running_var" + output: "_native_batch_norm_legit_no_training_12" + output: "_native_batch_norm_legit_no_training_12_1" + output: "_native_batch_norm_legit_no_training_12_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_container_Sequential_getattr_L__self___layer3___0___downsample_1" + input: "copy_8" + input: "layer3.0.downsample.0.weight" + input: "layer3.0.downsample.1.weight" + input: "layer3.0.downsample.1.bias" + input: "layer3.0.downsample.1.running_mean" + input: "layer3.0.downsample.1.running_var" + output: "getattr_l__self___layer3___0___downsample_1_1" + node { + input: "copy_8" + input: "layer3.0.downsample.0.weight" + output: "getattr_l__self___layer3___0___downsample_0_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___downsample_0_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___downsample_0_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer3___0___downsample_0_1" + input: "layer3.0.downsample.1.weight" + input: "layer3.0.downsample.1.bias" + input: "layer3.0.downsample.1.running_mean" + input: "layer3.0.downsample.1.running_var" + output: "getattr_l__self___layer3___0___downsample_1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___downsample_1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___downsample_1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_2" + input: "add_4" + output: "copy_10" + 
node { + input: "add_4" + output: "relu_10" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_4" + input: "relu_10" + output: "copy_10" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torchvision_models_resnet_BasicBlock_layer3_0_1" + input: "copy_8" + input: "layer3.0.conv1.weight" + input: "layer3.0.bn1.weight" + input: "layer3.0.bn1.bias" + input: "layer3.0.bn1.running_mean" + input: "layer3.0.bn1.running_var" + input: "layer3.0.conv2.weight" + input: "layer3.0.bn2.weight" + input: "layer3.0.bn2.bias" + input: "layer3.0.bn2.running_mean" + input: "layer3.0.bn2.running_var" + input: "layer3.0.downsample.0.weight" + input: "layer3.0.downsample.1.weight" + input: "layer3.0.downsample.1.bias" + input: "layer3.0.downsample.1.running_mean" + input: "layer3.0.downsample.1.running_var" + output: "getattr_l__self___layer3___0___relu_2" + node { + input: "copy_8" + input: "layer3.0.conv1.weight" + output: "getattr_l__self___layer3___0___conv1_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___conv1_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___conv1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer3___0___conv1_1" + input: "layer3.0.bn1.weight" + input: "layer3.0.bn1.bias" + input: "layer3.0.bn1.running_mean" + input: "layer3.0.bn1.running_var" + output: "getattr_l__self___layer3___0___bn1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer3___0___bn1_1" + output: "getattr_l__self___layer3___0___relu_1" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_1_2" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer3___0___relu_1" + input: "layer3.0.conv2.weight" + output: "getattr_l__self___layer3___0___conv2_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___conv2_1_3" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___0___conv2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer3___0___conv2_1" + input: "layer3.0.bn2.weight" + input: "layer3.0.bn2.bias" + input: "layer3.0.bn2.running_mean" + input: "layer3.0.bn2.running_var" + output: "getattr_l__self___layer3___0___bn2_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn2_1_4" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___0___bn2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "copy_8" + input: "layer3.0.downsample.0.weight" + input: "layer3.0.downsample.1.weight" + input: "layer3.0.downsample.1.bias" + input: "layer3.0.downsample.1.running_mean" + input: "layer3.0.downsample.1.running_var" + output: "getattr_l__self___layer3___0___downsample_1" + name: "torch_nn_modules_container_Sequential_getattr_L__self___layer3___0___downsample_1_5" + op_type: 
"torch_nn_modules_container_Sequential_getattr_L__self___layer3___0___downsample_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer3___0___bn2_1" + input: "getattr_l__self___layer3___0___downsample_1" + output: "add_4" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_4" + output: "getattr_l__self___layer3___0___relu_2" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_2_7" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___0___relu_2" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torchvision.0.16.0a0+0370134" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___1___conv1_1" + input: "copy_10" + input: "layer3.1.conv1.weight" + output: "convolution_13" + node { + input: "layer3.1.conv1.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_10" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_10" + input: "layer3.1.conv1.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_13" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn1_1" + input: "convolution_13" + input: "layer3.1.bn1.weight" + input: "layer3.1.bn1.bias" + input: "layer3.1.bn1.running_mean" + input: "layer3.1.bn1.running_var" + output: "_native_batch_norm_legit_no_training_13" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_13" + input: "layer3.1.bn1.weight" + input: "layer3.1.bn1.bias" + input: "layer3.1.bn1.running_mean" + input: "layer3.1.bn1.running_var" + output: 
"_native_batch_norm_legit_no_training_13" + output: "_native_batch_norm_legit_no_training_13_1" + output: "_native_batch_norm_legit_no_training_13_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_1" + input: "getitem_41" + output: "copy_11" + node { + input: "getitem_41" + output: "relu_11" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_41" + input: "relu_11" + output: "copy_11" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___1___conv2_1" + input: "copy_11" + input: "layer3.1.conv2.weight" + output: "convolution_14" + node { + input: "layer3.1.conv2.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_11" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_11" + input: "layer3.1.conv2.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_14" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn2_1" + input: "convolution_14" + input: "layer3.1.bn2.weight" + input: "layer3.1.bn2.bias" + input: "layer3.1.bn2.running_mean" + input: "layer3.1.bn2.running_var" + output: "_native_batch_norm_legit_no_training_14" + node { + output: 
"_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_14" + input: "layer3.1.bn2.weight" + input: "layer3.1.bn2.bias" + input: "layer3.1.bn2.running_mean" + input: "layer3.1.bn2.running_var" + output: "_native_batch_norm_legit_no_training_14" + output: "_native_batch_norm_legit_no_training_14_1" + output: "_native_batch_norm_legit_no_training_14_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_2" + input: "add_5" + output: "copy_12" + node { + input: "add_5" + output: "relu_12" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_5" + input: "relu_12" + output: "copy_12" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torchvision_models_resnet_BasicBlock_layer3_1_1" + input: "copy_10" + input: "layer3.1.conv1.weight" + input: "layer3.1.bn1.weight" + input: "layer3.1.bn1.bias" + input: "layer3.1.bn1.running_mean" + input: "layer3.1.bn1.running_var" + input: "layer3.1.conv2.weight" + input: "layer3.1.bn2.weight" + input: "layer3.1.bn2.bias" + input: "layer3.1.bn2.running_mean" + input: "layer3.1.bn2.running_var" + output: "getattr_l__self___layer3___1___relu_2" + node { + input: "copy_10" + input: "layer3.1.conv1.weight" + output: "getattr_l__self___layer3___1___conv1_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___1___conv1_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___1___conv1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer3___1___conv1_1" + input: "layer3.1.bn1.weight" + input: "layer3.1.bn1.bias" + input: "layer3.1.bn1.running_mean" + input: "layer3.1.bn1.running_var" + output: "getattr_l__self___layer3___1___bn1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer3___1___bn1_1" + output: "getattr_l__self___layer3___1___relu_1" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_1_2" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer3___1___relu_1" + input: "layer3.1.conv2.weight" + output: "getattr_l__self___layer3___1___conv2_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___1___conv2_1_3" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer3___1___conv2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: 
"getattr_l__self___layer3___1___conv2_1" + input: "layer3.1.bn2.weight" + input: "layer3.1.bn2.bias" + input: "layer3.1.bn2.running_mean" + input: "layer3.1.bn2.running_var" + output: "getattr_l__self___layer3___1___bn2_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn2_1_4" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer3___1___bn2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer3___1___bn2_1" + input: "copy_10" + output: "add_5" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_5" + output: "getattr_l__self___layer3___1___relu_2" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_2_6" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer3___1___relu_2" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torchvision.0.16.0a0+0370134" +} +functions { + name: "torch_nn_modules_container_Sequential_layer3_1" + input: "copy_8" + input: "layer3.0.conv1.weight" + input: "layer3.0.bn1.weight" + input: "layer3.0.bn1.bias" + input: "layer3.0.bn1.running_mean" + input: "layer3.0.bn1.running_var" + input: "layer3.0.conv2.weight" + input: "layer3.0.bn2.weight" + input: "layer3.0.bn2.bias" + input: "layer3.0.bn2.running_mean" + input: "layer3.0.bn2.running_var" + input: "layer3.0.downsample.0.weight" + input: "layer3.0.downsample.1.weight" + input: "layer3.0.downsample.1.bias" + input: "layer3.0.downsample.1.running_mean" + input: "layer3.0.downsample.1.running_var" + input: "layer3.1.conv1.weight" + input: "layer3.1.bn1.weight" + input: "layer3.1.bn1.bias" + input: "layer3.1.bn1.running_mean" + input: "layer3.1.bn1.running_var" + input: "layer3.1.conv2.weight" + input: "layer3.1.bn2.weight" + input: "layer3.1.bn2.bias" + input: "layer3.1.bn2.running_mean" + input: "layer3.1.bn2.running_var" + output: "layer3_1_1" + node { + input: "copy_8" + input: "layer3.0.conv1.weight" + input: "layer3.0.bn1.weight" + input: "layer3.0.bn1.bias" + input: "layer3.0.bn1.running_mean" + input: "layer3.0.bn1.running_var" + input: "layer3.0.conv2.weight" + input: "layer3.0.bn2.weight" + input: "layer3.0.bn2.bias" + input: "layer3.0.bn2.running_mean" + input: "layer3.0.bn2.running_var" + input: "layer3.0.downsample.0.weight" + input: "layer3.0.downsample.1.weight" + input: "layer3.0.downsample.1.bias" + input: "layer3.0.downsample.1.running_mean" + input: "layer3.0.downsample.1.running_var" + output: "layer3_0_1" + name: "torchvision_models_resnet_BasicBlock_layer3_0_1_0" + op_type: "torchvision_models_resnet_BasicBlock_layer3_0_1" + domain: "pkg.torchvision.0.16.0a0+0370134" + } + node { + input: "layer3_0_1" + input: "layer3.1.conv1.weight" + input: "layer3.1.bn1.weight" + input: "layer3.1.bn1.bias" + input: "layer3.1.bn1.running_mean" + input: "layer3.1.bn1.running_var" + input: "layer3.1.conv2.weight" + input: "layer3.1.bn2.weight" + input: "layer3.1.bn2.bias" + input: "layer3.1.bn2.running_mean" + input: "layer3.1.bn2.running_var" + output: "layer3_1_1" + name: "torchvision_models_resnet_BasicBlock_layer3_1_1_1" + op_type: "torchvision_models_resnet_BasicBlock_layer3_1_1" + domain: "pkg.torchvision.0.16.0a0+0370134" + } + opset_import { + domain: "" + version: 18 
+ } + opset_import { + domain: "pkg.torchvision.0.16.0a0+0370134" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___conv1_1" + input: "copy_12" + input: "layer4.0.conv1.weight" + output: "convolution_15" + node { + input: "layer4.0.conv1.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_12" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_12" + input: "layer4.0.conv1.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_15" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn1_1" + input: "convolution_15" + input: "layer4.0.bn1.weight" + input: "layer4.0.bn1.bias" + input: "layer4.0.bn1.running_mean" + input: "layer4.0.bn1.running_var" + output: "_native_batch_norm_legit_no_training_15" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_15" + input: "layer4.0.bn1.weight" + input: "layer4.0.bn1.bias" + input: "layer4.0.bn1.running_mean" + input: "layer4.0.bn1.running_var" + output: "_native_batch_norm_legit_no_training_15" + output: "_native_batch_norm_legit_no_training_15_1" + output: "_native_batch_norm_legit_no_training_15_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_1" + input: "getitem_47" + output: "copy_13" + node { + input: "getitem_47" + output: "relu_13" + name: "aten_relu_0" + 
op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_47" + input: "relu_13" + output: "copy_13" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___conv2_1" + input: "copy_13" + input: "layer4.0.conv2.weight" + output: "convolution_16" + node { + input: "layer4.0.conv2.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_13" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_13" + input: "layer4.0.conv2.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_16" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn2_1" + input: "convolution_16" + input: "layer4.0.bn2.weight" + input: "layer4.0.bn2.bias" + input: "layer4.0.bn2.running_mean" + input: "layer4.0.bn2.running_var" + output: "_native_batch_norm_legit_no_training_16" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_16" + input: "layer4.0.bn2.weight" + input: "layer4.0.bn2.bias" + input: "layer4.0.bn2.running_mean" + input: "layer4.0.bn2.running_var" + output: "_native_batch_norm_legit_no_training_16" + output: "_native_batch_norm_legit_no_training_16_1" + output: "_native_batch_norm_legit_no_training_16_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + 
opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___downsample_0_1" + input: "copy_12" + input: "layer4.0.downsample.0.weight" + output: "convolution_17" + node { + input: "layer4.0.downsample.0.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_12" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_12" + input: "layer4.0.downsample.0.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_17" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 0 + ints: 0 + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "strides" + ints: 2 + ints: 2 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___downsample_1_1" + input: "convolution_17" + input: "layer4.0.downsample.1.weight" + input: "layer4.0.downsample.1.bias" + input: "layer4.0.downsample.1.running_mean" + input: "layer4.0.downsample.1.running_var" + output: "_native_batch_norm_legit_no_training_17" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_17" + input: "layer4.0.downsample.1.weight" + input: "layer4.0.downsample.1.bias" + input: "layer4.0.downsample.1.running_mean" + input: "layer4.0.downsample.1.running_var" + output: "_native_batch_norm_legit_no_training_17" + output: "_native_batch_norm_legit_no_training_17_1" + output: "_native_batch_norm_legit_no_training_17_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_container_Sequential_getattr_L__self___layer4___0___downsample_1" + input: 
"copy_12" + input: "layer4.0.downsample.0.weight" + input: "layer4.0.downsample.1.weight" + input: "layer4.0.downsample.1.bias" + input: "layer4.0.downsample.1.running_mean" + input: "layer4.0.downsample.1.running_var" + output: "getattr_l__self___layer4___0___downsample_1_1" + node { + input: "copy_12" + input: "layer4.0.downsample.0.weight" + output: "getattr_l__self___layer4___0___downsample_0_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___downsample_0_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___downsample_0_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___0___downsample_0_1" + input: "layer4.0.downsample.1.weight" + input: "layer4.0.downsample.1.bias" + input: "layer4.0.downsample.1.running_mean" + input: "layer4.0.downsample.1.running_var" + output: "getattr_l__self___layer4___0___downsample_1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___downsample_1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___downsample_1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_2" + input: "add_6" + output: "copy_14" + node { + input: "add_6" + output: "relu_14" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_6" + input: "relu_14" + output: "copy_14" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torchvision_models_resnet_BasicBlock_layer4_0_1" + input: "copy_12" + input: "layer4.0.conv1.weight" + input: "layer4.0.bn1.weight" + input: "layer4.0.bn1.bias" + input: "layer4.0.bn1.running_mean" + input: "layer4.0.bn1.running_var" + input: "layer4.0.conv2.weight" + input: "layer4.0.bn2.weight" + input: "layer4.0.bn2.bias" + input: "layer4.0.bn2.running_mean" + input: "layer4.0.bn2.running_var" + input: "layer4.0.downsample.0.weight" + input: "layer4.0.downsample.1.weight" + input: "layer4.0.downsample.1.bias" + input: "layer4.0.downsample.1.running_mean" + input: "layer4.0.downsample.1.running_var" + output: "getattr_l__self___layer4___0___relu_2" + node { + input: "copy_12" + input: "layer4.0.conv1.weight" + output: "getattr_l__self___layer4___0___conv1_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___conv1_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___conv1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___0___conv1_1" + input: "layer4.0.bn1.weight" + input: "layer4.0.bn1.bias" + input: "layer4.0.bn1.running_mean" + input: "layer4.0.bn1.running_var" + output: "getattr_l__self___layer4___0___bn1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___0___bn1_1" + output: "getattr_l__self___layer4___0___relu_1" + name: 
"torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_1_2" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___0___relu_1" + input: "layer4.0.conv2.weight" + output: "getattr_l__self___layer4___0___conv2_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___conv2_1_3" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___0___conv2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___0___conv2_1" + input: "layer4.0.bn2.weight" + input: "layer4.0.bn2.bias" + input: "layer4.0.bn2.running_mean" + input: "layer4.0.bn2.running_var" + output: "getattr_l__self___layer4___0___bn2_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn2_1_4" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___0___bn2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "copy_12" + input: "layer4.0.downsample.0.weight" + input: "layer4.0.downsample.1.weight" + input: "layer4.0.downsample.1.bias" + input: "layer4.0.downsample.1.running_mean" + input: "layer4.0.downsample.1.running_var" + output: "getattr_l__self___layer4___0___downsample_1" + name: "torch_nn_modules_container_Sequential_getattr_L__self___layer4___0___downsample_1_5" + op_type: "torch_nn_modules_container_Sequential_getattr_L__self___layer4___0___downsample_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___0___bn2_1" + input: "getattr_l__self___layer4___0___downsample_1" + output: "add_6" + name: "aten_add_6" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_6" + output: "getattr_l__self___layer4___0___relu_2" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_2_7" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___0___relu_2" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torchvision.0.16.0a0+0370134" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___1___conv1_1" + input: "copy_14" + input: "layer4.1.conv1.weight" + output: "convolution_18" + node { + input: "layer4.1.conv1.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_14" + output: "_val_6" + name: "CastLike_6" + op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: 
"copy_14" + input: "layer4.1.conv1.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_18" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn1_1" + input: "convolution_18" + input: "layer4.1.bn1.weight" + input: "layer4.1.bn1.bias" + input: "layer4.1.bn1.running_mean" + input: "layer4.1.bn1.running_var" + output: "_native_batch_norm_legit_no_training_18" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_18" + input: "layer4.1.bn1.weight" + input: "layer4.1.bn1.bias" + input: "layer4.1.bn1.running_mean" + input: "layer4.1.bn1.running_var" + output: "_native_batch_norm_legit_no_training_18" + output: "_native_batch_norm_legit_no_training_18_1" + output: "_native_batch_norm_legit_no_training_18_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_1" + input: "getitem_56" + output: "copy_15" + node { + input: "getitem_56" + output: "relu_15" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "getitem_56" + input: "relu_15" + output: "copy_15" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___1___conv2_1" + input: "copy_15" + input: "layer4.1.conv2.weight" + output: "convolution_19" + node { + input: "layer4.1.conv2.weight" + output: "_val_2" + name: "Shape_2" + op_type: "Shape" + attribute { + name: "end" + i: 1 + type: INT + } + attribute { + name: "start" + i: 0 + type: INT + } + } + node { + output: "_val_3" + name: "Constant_3" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 1 + type: INTS + } + } + node { + input: "_val_2" + input: "_val_3" + output: "_val_4" + name: "Expand_4" + op_type: "Expand" + } + node { + output: "_val_5" + name: "Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "_val_5" + input: "copy_15" + output: "_val_6" + name: "CastLike_6" + 
op_type: "CastLike" + } + node { + input: "_val_6" + input: "_val_4" + output: "_val_7" + name: "Expand_7" + op_type: "Expand" + } + node { + output: "_val_8" + name: "Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 9 + raw_data: "\000" + } + type: TENSOR + } + } + node { + input: "copy_15" + input: "layer4.1.conv2.weight" + input: "_val_7" + input: "_val_8" + output: "convolution_19" + name: "_aten_convolution_onnx_9" + op_type: "_aten_convolution_onnx" + attribute { + name: "dilations" + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "groups" + i: 1 + type: INT + } + attribute { + name: "output_padding" + ints: 0 + ints: 0 + type: INTS + } + attribute { + name: "pads" + ints: 1 + ints: 1 + ints: 1 + ints: 1 + type: INTS + } + attribute { + name: "strides" + ints: 1 + ints: 1 + type: INTS + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn2_1" + input: "convolution_19" + input: "layer4.1.bn2.weight" + input: "layer4.1.bn2.bias" + input: "layer4.1.bn2.running_mean" + input: "layer4.1.bn2.running_var" + output: "_native_batch_norm_legit_no_training_19" + node { + output: "_val_5" + name: "Constant_0" + op_type: "Constant" + attribute { + name: "value_ints" + ints: 0 + ints: 2 + ints: 3 + type: INTS + } + } + node { + input: "convolution_19" + input: "layer4.1.bn2.weight" + input: "layer4.1.bn2.bias" + input: "layer4.1.bn2.running_mean" + input: "layer4.1.bn2.running_var" + output: "_native_batch_norm_legit_no_training_19" + output: "_native_batch_norm_legit_no_training_19_1" + output: "_native_batch_norm_legit_no_training_19_2" + name: "_aten_native_batch_norm_inference_onnx_1" + op_type: "_aten_native_batch_norm_inference_onnx" + attribute { + name: "eps" + f: 1e-05 + type: FLOAT + } + attribute { + name: "momentum" + f: 0.1 + type: FLOAT + } + attribute { + name: "training" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_2" + input: "add_7" + output: "copy_16" + node { + input: "add_7" + output: "relu_16" + name: "aten_relu_0" + op_type: "aten_relu" + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_7" + input: "relu_16" + output: "copy_16" + name: "aten_copy_1" + op_type: "aten_copy" + attribute { + name: "non_blocking" + i: 0 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "torchvision_models_resnet_BasicBlock_layer4_1_1" + input: "copy_14" + input: "layer4.1.conv1.weight" + input: "layer4.1.bn1.weight" + input: "layer4.1.bn1.bias" + input: "layer4.1.bn1.running_mean" + input: "layer4.1.bn1.running_var" + input: "layer4.1.conv2.weight" + input: "layer4.1.bn2.weight" + input: "layer4.1.bn2.bias" + input: "layer4.1.bn2.running_mean" + input: "layer4.1.bn2.running_var" + output: "getattr_l__self___layer4___1___relu_2" + node { + input: "copy_14" + input: "layer4.1.conv1.weight" + output: "getattr_l__self___layer4___1___conv1_1" + 
name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___1___conv1_1_0" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___1___conv1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___1___conv1_1" + input: "layer4.1.bn1.weight" + input: "layer4.1.bn1.bias" + input: "layer4.1.bn1.running_mean" + input: "layer4.1.bn1.running_var" + output: "getattr_l__self___layer4___1___bn1_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn1_1_1" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn1_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___1___bn1_1" + output: "getattr_l__self___layer4___1___relu_1" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_1_2" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___1___relu_1" + input: "layer4.1.conv2.weight" + output: "getattr_l__self___layer4___1___conv2_1" + name: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___1___conv2_1_3" + op_type: "torch_nn_modules_conv_Conv2d_getattr_L__self___layer4___1___conv2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___1___conv2_1" + input: "layer4.1.bn2.weight" + input: "layer4.1.bn2.bias" + input: "layer4.1.bn2.running_mean" + input: "layer4.1.bn2.running_var" + output: "getattr_l__self___layer4___1___bn2_1" + name: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn2_1_4" + op_type: "torch_nn_modules_batchnorm_BatchNorm2d_getattr_L__self___layer4___1___bn2_1" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + node { + input: "getattr_l__self___layer4___1___bn2_1" + input: "copy_14" + output: "add_7" + name: "aten_add_5" + op_type: "aten_add" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + node { + input: "add_7" + output: "getattr_l__self___layer4___1___relu_2" + name: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_2_6" + op_type: "torch_nn_modules_activation_ReLU_getattr_L__self___layer4___1___relu_2" + domain: "pkg.torch.2.2.0a0+git4ab5507" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + opset_import { + domain: "pkg.torch.2.2.0a0+git4ab5507" + version: 1 + } + domain: "pkg.torchvision.0.16.0a0+0370134" +} +functions { + name: "torch_nn_modules_container_Sequential_layer4_1" + input: "copy_12" + input: "layer4.0.conv1.weight" + input: "layer4.0.bn1.weight" + input: "layer4.0.bn1.bias" + input: "layer4.0.bn1.running_mean" + input: "layer4.0.bn1.running_var" + input: "layer4.0.conv2.weight" + input: "layer4.0.bn2.weight" + input: "layer4.0.bn2.bias" + input: "layer4.0.bn2.running_mean" + input: "layer4.0.bn2.running_var" + input: "layer4.0.downsample.0.weight" + input: "layer4.0.downsample.1.weight" + input: "layer4.0.downsample.1.bias" + input: "layer4.0.downsample.1.running_mean" + input: "layer4.0.downsample.1.running_var" + input: "layer4.1.conv1.weight" + input: "layer4.1.bn1.weight" + input: "layer4.1.bn1.bias" + input: "layer4.1.bn1.running_mean" + input: "layer4.1.bn1.running_var" + input: "layer4.1.conv2.weight" + input: "layer4.1.bn2.weight" + input: "layer4.1.bn2.bias" + input: "layer4.1.bn2.running_mean" + input: "layer4.1.bn2.running_var" + output: "layer4_1_1" + node 
{ + input: "copy_12" + input: "layer4.0.conv1.weight" + input: "layer4.0.bn1.weight" + input: "layer4.0.bn1.bias" + input: "layer4.0.bn1.running_mean" + input: "layer4.0.bn1.running_var" + input: "layer4.0.conv2.weight" + input: "layer4.0.bn2.weight" + input: "layer4.0.bn2.bias" + input: "layer4.0.bn2.running_mean" + input: "layer4.0.bn2.running_var" + input: "layer4.0.downsample.0.weight" + input: "layer4.0.downsample.1.weight" + input: "layer4.0.downsample.1.bias" + input: "layer4.0.downsample.1.running_mean" + input: "layer4.0.downsample.1.running_var" + output: "layer4_0_1" + name: "torchvision_models_resnet_BasicBlock_layer4_0_1_0" + op_type: "torchvision_models_resnet_BasicBlock_layer4_0_1" + domain: "pkg.torchvision.0.16.0a0+0370134" + } + node { + input: "layer4_0_1" + input: "layer4.1.conv1.weight" + input: "layer4.1.bn1.weight" + input: "layer4.1.bn1.bias" + input: "layer4.1.bn1.running_mean" + input: "layer4.1.bn1.running_var" + input: "layer4.1.conv2.weight" + input: "layer4.1.bn2.weight" + input: "layer4.1.bn2.bias" + input: "layer4.1.bn2.running_mean" + input: "layer4.1.bn2.running_var" + output: "layer4_1_1" + name: "torchvision_models_resnet_BasicBlock_layer4_1_1_1" + op_type: "torchvision_models_resnet_BasicBlock_layer4_1_1" + domain: "pkg.torchvision.0.16.0a0+0370134" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.torchvision.0.16.0a0+0370134" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "aten_mean_dim" + input: "self" + input: "dim" + output: "result_10" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0" + } + type: TENSOR + } + } + node { + input: "int64_0" + input: "tmp_0" + output: "int64_0_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_0" + input: "int64_0_cast" + output: "cond" + name: "n4" + op_type: "Equal" + } + node { + input: "cond" + output: "result_10" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "self" + output: "result" + name: "n0" + op_type: "Identity" + } + name: "thenGraph_5" + output { + name: "result" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "dim" + output: "tmp_1" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp_1" + output: "tmp_2" + name: "n1" + op_type: "Size" + } + node { + output: "int64_0_3" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0_3" + } + type: TENSOR + } + } + node { + input: "int64_0_3" + input: "tmp_2" + output: "int64_0_3_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "tmp_2" + input: "int64_0_3_cast" + output: "cond_4" + name: "n4" + op_type: "Equal" + } + node { + input: "cond_4" + output: "dim_8" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + output: "int64_0_5" + name: "n0" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 0 + name: "int64_0_5" + } + type: TENSOR + } + } + node { + input: "dim" + input: "int64_0_5" + output: "dim_6" + name: "n1" + op_type: "Unsqueeze" + } + name: "thenGraph_8" + output { + name: "dim_6" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "dim" + output: "dim_7" + 
name: "n0" + op_type: "Identity" + } + name: "elseGraph_8" + output { + name: "dim_7" + } + } + type: GRAPH + } + } + node { + input: "self" + input: "dim_8" + output: "result_9" + name: "n6" + op_type: "ReduceMean" + attribute { + name: "keepdims" + type: INT + ref_attr_name: "keepdim" + } + } + name: "elseGraph_5" + output { + name: "result_9" + } + } + type: GRAPH + } + } + doc_string: "mean.dim(Tensor self, int[1]? dim, bool keepdim=False, *, ScalarType? dtype=None) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "keepdim" + i: 0 + type: INT + } +} +functions { + name: "torch_nn_modules_pooling_AdaptiveAvgPool2d_avgpool_1" + input: "copy_16" + output: "mean" + node { + output: "_val_1" + name: "Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 2 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\377\376\377\377\377\377\377\377\377" + } + type: TENSOR + } + } + node { + input: "copy_16" + input: "_val_1" + output: "mean" + name: "aten_mean_dim_2" + op_type: "aten_mean_dim" + attribute { + name: "keepdim" + i: 1 + type: INT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "aten_t" + input: "self" + output: "result_1" + node { + input: "self" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "rank" + name: "n1" + op_type: "Size" + } + node { + output: "int64_2" + name: "n2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + int64_data: 2 + name: "int64_2" + } + type: TENSOR + } + } + node { + input: "int64_2" + input: "rank" + output: "int64_2_cast" + name: "n3" + op_type: "CastLike" + } + node { + input: "rank" + input: "int64_2_cast" + output: "cond" + name: "n4" + op_type: "Equal" + } + node { + input: "cond" + output: "result_1" + name: "n5" + op_type: "If" + attribute { + name: "then_branch" + g { + node { + input: "self" + output: "result" + name: "n0" + op_type: "Transpose" + attribute { + name: "perm" + ints: 1 + ints: 0 + type: INTS + } + } + name: "thenGraph_6" + output { + name: "result" + } + } + type: GRAPH + } + attribute { + name: "else_branch" + g { + node { + input: "self" + output: "result_0" + name: "n0" + op_type: "Identity" + } + name: "elseGraph_6" + output { + name: "result_0" + } + } + type: GRAPH + } + } + doc_string: "t(Tensor(a) self) -> Tensor(a)" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "aten_addmm" + input: "self" + input: "mat1" + input: "mat2" + output: "return_val" + node { + input: "mat1" + input: "mat2" + input: "self" + output: "return_val" + name: "n0" + op_type: "Gemm" + attribute { + name: "alpha" + type: FLOAT + ref_attr_name: "alpha" + } + attribute { + name: "beta" + type: FLOAT + ref_attr_name: "beta" + } + } + doc_string: "addmm(Tensor self, Tensor mat1, Tensor mat2, *, Scalar beta=1, Scalar alpha=1) -> Tensor" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" + attribute_proto { + name: "beta" + f: 1.0 + type: FLOAT + } + attribute_proto { + name: "alpha" + f: 1.0 + type: FLOAT + } +} +functions { + name: "torch_nn_modules_linear_Linear_fc_1" + input: "view" + input: "fc.weight" + input: "fc.bias" + output: "addmm" + node { + input: "fc.weight" + output: "t" + name: "aten_t_0" + op_type: "aten_t" + 
domain: "pkg.onnxscript.torch_lib" + } + node { + input: "fc.bias" + input: "view" + input: "t" + output: "addmm" + name: "aten_addmm_1" + op_type: "aten_addmm" + attribute { + name: "alpha" + f: 1.0 + type: FLOAT + } + attribute { + name: "beta" + f: 1.0 + type: FLOAT + } + domain: "pkg.onnxscript.torch_lib" + } + opset_import { + domain: "" + version: 18 + } + opset_import { + domain: "pkg.onnxscript.torch_lib" + version: 1 + } + domain: "pkg.torch.2.2.0a0+git4ab5507" +} +functions { + name: "aten_view" + input: "self" + input: "size" + output: "return_val" + node { + input: "size" + output: "size_0" + name: "n0" + op_type: "Cast" + attribute { + name: "to" + i: 7 + type: INT + } + } + node { + input: "self" + input: "size_0" + output: "return_val" + name: "n1" + op_type: "Reshape" + } + doc_string: "view(Tensor(a) self, SymInt[] size) -> Tensor(a)" + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib" +} +functions { + name: "Rank" + input: "input" + output: "return_val" + node { + input: "input" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "return_val" + name: "n1" + op_type: "Size" + } + doc_string: "Take the rank of the input tensor." + opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib.common" +} +functions { + name: "IsScalar" + input: "input" + output: "return_val" + node { + input: "input" + output: "tmp" + name: "n0" + op_type: "Shape" + } + node { + input: "tmp" + output: "tmp_0" + name: "n1" + op_type: "Size" + } + node { + output: "tmp_1" + name: "n2" + op_type: "Constant" + attribute { + name: "value_int" + i: 0 + type: INT + } + } + node { + input: "tmp_0" + input: "tmp_1" + output: "return_val" + name: "n3" + op_type: "Equal" + } + doc_string: "Return whether the input has rank 0, or is a scalar." 
+ opset_import { + domain: "" + version: 18 + } + domain: "pkg.onnxscript.torch_lib.common" +} diff --git a/testdata/e2e_models/torchscript_model/torchscript_model.textproto b/testdata/e2e_models/torchscript_model/torchscript_model.textproto new file mode 100644 index 00000000..d654a98c --- /dev/null +++ b/testdata/e2e_models/torchscript_model/torchscript_model.textproto @@ -0,0 +1,12820 @@ +ir_version: 8 +producer_name: "pytorch" +producer_version: "2.3.0" +graph { + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.norm.weight" + name: "Identity_326" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.9.post_attention_layernorm.weight" + name: "Identity_327" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.9.input_layernorm.weight" + name: "Identity_328" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.8.post_attention_layernorm.weight" + name: "Identity_329" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.8.input_layernorm.weight" + name: "Identity_330" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.7.post_attention_layernorm.weight" + name: "Identity_331" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.7.input_layernorm.weight" + name: "Identity_332" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.6.post_attention_layernorm.weight" + name: "Identity_333" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.6.input_layernorm.weight" + name: "Identity_334" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.5.post_attention_layernorm.weight" + name: "Identity_335" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.5.input_layernorm.weight" + name: "Identity_336" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.4.post_attention_layernorm.weight" + name: "Identity_337" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.4.input_layernorm.weight" + name: "Identity_338" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.3.post_attention_layernorm.weight" + name: "Identity_339" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.3.input_layernorm.weight" + name: "Identity_340" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.2.post_attention_layernorm.weight" + name: "Identity_341" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.2.input_layernorm.weight" + name: "Identity_342" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.1.post_attention_layernorm.weight" + name: "Identity_343" + op_type: "Identity" + } + node { + input: "model.layers.0.input_layernorm.weight" + output: "model.layers.1.input_layernorm.weight" + name: "Identity_344" + op_type: "Identity" + } + node { + input: 
"model.layers.0.input_layernorm.weight" + output: "model.layers.0.post_attention_layernorm.weight" + name: "Identity_345" + op_type: "Identity" + } + node { + output: "/model/Constant_output_0" + name: "/model/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + dims: 8 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\003\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\005\000\000\000\000\000\000\000\006\000\000\000\000\000\000\000\007\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "model.embed_tokens.weight" + input: "input.1" + output: "/model/embed_tokens/Gather_output_0" + name: "/model/embed_tokens/Gather" + op_type: "Gather" + } + node { + output: "/model/Constant_1_output_0" + name: "/model/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + dims: 1 + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\377\377\177\377\377\377\177\377\377\377\177\377\377\377\177\377\377\377\177\377\377\377\177\377\377\377\177\377\000\000\000\000\000\000\000\000\377\377\177\377\377\377\177\377\377\377\177\377\377\377\177\377\377\377\177\377\377\377\177\377\000\000\000\000\000\000\000\000\000\000\000\000\377\377\177\377\377\377\177\377\377\377\177\377\377\377\177\377\377\377\177\377\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\377\377\177\377\377\377\177\377\377\377\177\377\377\377\177\377\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\377\377\177\377\377\377\177\377\377\377\177\377\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\377\377\177\377\377\377\177\377\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\377\377\177\377\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/Constant_2_output_0" + name: "/model/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/Constant_3_output_0" + name: "/model/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/Constant_3_output_0" + output: "/model/ConstantOfShape_output_0" + name: "/model/ConstantOfShape" + op_type: "ConstantOfShape" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/Constant_4_output_0" + name: "/model/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\377" + } + type: TENSOR + } + } + node { + input: "/model/ConstantOfShape_output_0" + input: "/model/Constant_4_output_0" + output: "/model/Mul_output_0" + name: "/model/Mul" + op_type: "Mul" + } + node { + input: "/model/Constant_2_output_0" + input: "/model/Mul_output_0" + output: "/model/Equal_output_0" + name: "/model/Equal" + op_type: "Equal" + } + node { + input: "/model/Equal_output_0" + input: "/model/ConstantOfShape_output_0" + input: 
"/model/Constant_2_output_0" + output: "/model/Where_output_0" + name: "/model/Where" + op_type: "Where" + } + node { + input: "/model/Constant_1_output_0" + input: "/model/Where_output_0" + output: "/model/Expand_output_0" + name: "/model/Expand" + op_type: "Expand" + } + node { + output: "/model/Constant_5_output_0" + name: "/model/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "attention_mask" + input: "/model/Constant_5_output_0" + output: "/model/Unsqueeze_output_0" + name: "/model/Unsqueeze" + op_type: "Unsqueeze" + } + node { + output: "/model/Constant_6_output_0" + name: "/model/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/Unsqueeze_output_0" + input: "/model/Constant_6_output_0" + output: "/model/Unsqueeze_1_output_0" + name: "/model/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + output: "/model/Constant_7_output_0" + name: "/model/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/Constant_8_output_0" + name: "/model/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/Constant_8_output_0" + output: "/model/ConstantOfShape_1_output_0" + name: "/model/ConstantOfShape_1" + op_type: "ConstantOfShape" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/Constant_9_output_0" + name: "/model/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\377" + } + type: TENSOR + } + } + node { + input: "/model/ConstantOfShape_1_output_0" + input: "/model/Constant_9_output_0" + output: "/model/Mul_1_output_0" + name: "/model/Mul_1" + op_type: "Mul" + } + node { + input: "/model/Constant_7_output_0" + input: "/model/Mul_1_output_0" + output: "/model/Equal_1_output_0" + name: "/model/Equal_1" + op_type: "Equal" + } + node { + input: "/model/Equal_1_output_0" + input: "/model/ConstantOfShape_1_output_0" + input: "/model/Constant_7_output_0" + output: "/model/Where_1_output_0" + name: "/model/Where_1" + op_type: "Where" + } + node { + input: "/model/Unsqueeze_1_output_0" + input: "/model/Where_1_output_0" + output: "/model/Expand_1_output_0" + name: "/model/Expand_1" + op_type: "Expand" + } + node { + input: "/model/Expand_1_output_0" + output: "/model/Cast_output_0" + name: "/model/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/Constant_10_output_0" + name: "/model/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/Constant_10_output_0" + input: "/model/Cast_output_0" + output: "/model/Sub_output_0" + name: "/model/Sub" + op_type: "Sub" + } + node { + input: "/model/Sub_output_0" + output: "/model/Cast_1_output_0" + name: "/model/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 9 + type: INT + } + } + node { + input: "/model/Cast_1_output_0" + output: "/model/Cast_2_output_0" + name: "/model/Cast_2" + op_type: "Cast" + attribute { + name: "to" + i: 9 + type: INT + } + } + node { + output: "/model/Constant_11_output_0" + name: "/model/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\377\377\177\377" + } + type: TENSOR + } + } + node { + input: "/model/Cast_2_output_0" + input: "/model/Constant_11_output_0" + input: "/model/Sub_output_0" + output: "/model/Where_2_output_0" + name: "/model/Where_2" + op_type: "Where" + } + node { + input: "/model/Where_2_output_0" + output: "/model/Cast_3_output_0" + name: "/model/Cast_3" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/Cast_3_output_0" + output: "/model/Cast_4_output_0" + name: "/model/Cast_4" + op_type: "Cast" + attribute { + name: "to" + i: 9 + type: INT + } + } + node { + input: "/model/Cast_4_output_0" + output: "/model/Cast_5_output_0" + name: "/model/Cast_5" + op_type: "Cast" + attribute { + name: "to" + i: 9 + type: INT + } + } + node { + output: "/model/Constant_12_output_0" + name: "/model/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\377\377\177\377" + } + type: TENSOR + } + } + node { + input: "/model/Cast_5_output_0" + input: "/model/Constant_12_output_0" + input: "/model/Expand_output_0" + output: "/model/Where_3_output_0" + name: "/model/Where_3" + op_type: "Where" + } + node { + input: "/model/embed_tokens/Gather_output_0" + output: "/model/layers.0/input_layernorm/Cast_output_0" + name: "/model/layers.0/input_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.0/input_layernorm/Constant_output_0" + name: "/model/layers.0/input_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/input_layernorm/Cast_output_0" + input: "/model/layers.0/input_layernorm/Constant_output_0" + output: "/model/layers.0/input_layernorm/Pow_output_0" + name: "/model/layers.0/input_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.0/input_layernorm/Pow_output_0" + output: "/model/layers.0/input_layernorm/ReduceMean_output_0" + name: "/model/layers.0/input_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.0/input_layernorm/Constant_1_output_0" + name: "/model/layers.0/input_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/input_layernorm/ReduceMean_output_0" + input: "/model/layers.0/input_layernorm/Constant_1_output_0" + output: "/model/layers.0/input_layernorm/Add_output_0" + name: "/model/layers.0/input_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.0/input_layernorm/Add_output_0" + output: "/model/layers.0/input_layernorm/Sqrt_output_0" 
+ name: "/model/layers.0/input_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.0/input_layernorm/Constant_2_output_0" + name: "/model/layers.0/input_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/input_layernorm/Constant_2_output_0" + input: "/model/layers.0/input_layernorm/Sqrt_output_0" + output: "/model/layers.0/input_layernorm/Div_output_0" + name: "/model/layers.0/input_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.0/input_layernorm/Cast_output_0" + input: "/model/layers.0/input_layernorm/Div_output_0" + output: "/model/layers.0/input_layernorm/Mul_output_0" + name: "/model/layers.0/input_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.0/input_layernorm/Mul_output_0" + output: "/model/layers.0/input_layernorm/Cast_1_output_0" + name: "/model/layers.0/input_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.0.input_layernorm.weight" + input: "/model/layers.0/input_layernorm/Cast_1_output_0" + output: "/model/layers.0/input_layernorm/Mul_1_output_0" + name: "/model/layers.0/input_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.0/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1734" + output: "/model/layers.0/self_attn/q_proj/MatMul_output_0" + name: "/model/layers.0/self_attn/q_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.0/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1735" + output: "/model/layers.0/self_attn/k_proj/MatMul_output_0" + name: "/model/layers.0/self_attn/k_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.0/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1736" + output: "/model/layers.0/self_attn/v_proj/MatMul_output_0" + name: "/model/layers.0/self_attn/v_proj/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.0/self_attn/Constant_output_0" + name: "/model/layers.0/self_attn/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_1_output_0" + name: "/model/layers.0/self_attn/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_2_output_0" + name: "/model/layers.0/self_attn/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/self_attn/q_proj/MatMul_output_0" + input: "/model/layers.0/self_attn/Constant_output_0" + output: "/model/layers.0/self_attn/Reshape_output_0" + name: "/model/layers.0/self_attn/Reshape" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.0/self_attn/Reshape_output_0" + output: "/model/layers.0/self_attn/Transpose_output_0" + name: 
"/model/layers.0/self_attn/Transpose" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.0/self_attn/k_proj/MatMul_output_0" + input: "/model/layers.0/self_attn/Constant_1_output_0" + output: "/model/layers.0/self_attn/Reshape_1_output_0" + name: "/model/layers.0/self_attn/Reshape_1" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.0/self_attn/Reshape_1_output_0" + output: "/model/layers.0/self_attn/Transpose_1_output_0" + name: "/model/layers.0/self_attn/Transpose_1" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.0/self_attn/v_proj/MatMul_output_0" + input: "/model/layers.0/self_attn/Constant_2_output_0" + output: "/model/layers.0/self_attn/Reshape_2_output_0" + name: "/model/layers.0/self_attn/Reshape_2" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.0/self_attn/Reshape_2_output_0" + output: "value_states" + name: "/model/layers.0/self_attn/Transpose_2" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.0/self_attn/rotary_emb/Constant_output_0" + name: "/model/layers.0/self_attn/rotary_emb/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?AQ\n?\230\270~?\271\374\177?\370\377\177?AQ\n?\230\270~?\271\374\177?\370\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?,<\221>@\251`?\031\256\177?.\377\177?,<\221>@\251`?\031\256\177?.\377\177?\270\315u?2IS?\022\212\177?\322\376\177?\270\315u?2IS?\022\212\177?\322\376\177?\275\377@?\263\314C?\201_\177?e\376\177?\275\377@?\263\314C?\201_\177?e\376\177?" 
+ } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/rotary_emb/Constant_1_output_0" + name: "/model/layers.0/self_attn/rotary_emb/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\244jW?wu\314=W\326#>\324\243>\324\243mN\227>\037\271\365<\223\233D;\303\201\020>mN\227>\037\271\365<\223\233D;\317\275A\277\330a\307>\333\313#=X\022\203;\317\275A\277\330a\307>\333\313#=X\022\203;\020|u\277Dw\365>\364\266L=\336\326\243;\020|u\277Dw\365>\364\266L=\336\326\243;\214\017\217\276i\214\020?\321\234u=Y\233\304;\214\017\217\276i\214\020?\321\234u=Y\233\304;F0(?s\353$?2>\217=\307_\345;F0(?s\353$?2>\217=\307_\345;" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/self_attn/rotary_emb/Constant_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.0/self_attn/Gather_output_0" + name: "/model/layers.0/self_attn/Gather" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.0/self_attn/Constant_3_output_0" + name: "/model/layers.0/self_attn/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/self_attn/Gather_output_0" + input: "/model/layers.0/self_attn/Constant_3_output_0" + output: "/model/layers.0/self_attn/Unsqueeze_output_0" + name: "/model/layers.0/self_attn/Unsqueeze" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.0/self_attn/rotary_emb/Constant_1_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.0/self_attn/Gather_1_output_0" + name: "/model/layers.0/self_attn/Gather_1" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.0/self_attn/Constant_4_output_0" + name: "/model/layers.0/self_attn/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/self_attn/Gather_1_output_0" + input: "/model/layers.0/self_attn/Constant_4_output_0" + output: "/model/layers.0/self_attn/Unsqueeze_1_output_0" + name: "/model/layers.0/self_attn/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.0/self_attn/Transpose_output_0" + input: "/model/layers.0/self_attn/Unsqueeze_output_0" + output: "/model/layers.0/self_attn/Mul_output_0" + name: "/model/layers.0/self_attn/Mul" + op_type: "Mul" + } + node { + output: "/model/layers.0/self_attn/Constant_5_output_0" + name: "/model/layers.0/self_attn/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_6_output_0" + name: "/model/layers.0/self_attn/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_7_output_0" + name: "/model/layers.0/self_attn/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: 
"/model/layers.0/self_attn/Constant_8_output_0" + name: "/model/layers.0/self_attn/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/self_attn/Transpose_output_0" + input: "/model/layers.0/self_attn/Constant_6_output_0" + input: "/model/layers.0/self_attn/Constant_7_output_0" + input: "/model/layers.0/self_attn/Constant_5_output_0" + input: "/model/layers.0/self_attn/Constant_8_output_0" + output: "/model/layers.0/self_attn/Slice_output_0" + name: "/model/layers.0/self_attn/Slice" + op_type: "Slice" + } + node { + output: "/model/layers.0/self_attn/Constant_9_output_0" + name: "/model/layers.0/self_attn/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_10_output_0" + name: "/model/layers.0/self_attn/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_11_output_0" + name: "/model/layers.0/self_attn/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_12_output_0" + name: "/model/layers.0/self_attn/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/self_attn/Transpose_output_0" + input: "/model/layers.0/self_attn/Constant_10_output_0" + input: "/model/layers.0/self_attn/Constant_11_output_0" + input: "/model/layers.0/self_attn/Constant_9_output_0" + input: "/model/layers.0/self_attn/Constant_12_output_0" + output: "/model/layers.0/self_attn/Slice_1_output_0" + name: "/model/layers.0/self_attn/Slice_1" + op_type: "Slice" + } + node { + input: "/model/layers.0/self_attn/Slice_1_output_0" + output: "/model/layers.0/self_attn/Neg_output_0" + name: "/model/layers.0/self_attn/Neg" + op_type: "Neg" + } + node { + input: "/model/layers.0/self_attn/Neg_output_0" + input: "/model/layers.0/self_attn/Slice_output_0" + output: "/model/layers.0/self_attn/Concat_output_0" + name: "/model/layers.0/self_attn/Concat" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.0/self_attn/Concat_output_0" + input: "/model/layers.0/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.0/self_attn/Mul_1_output_0" + name: "/model/layers.0/self_attn/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.0/self_attn/Mul_output_0" + input: "/model/layers.0/self_attn/Mul_1_output_0" + output: "/model/layers.0/self_attn/Add_output_0" + name: "/model/layers.0/self_attn/Add" + op_type: "Add" + } + node { + input: "/model/layers.0/self_attn/Transpose_1_output_0" + input: "/model/layers.0/self_attn/Unsqueeze_output_0" + output: "/model/layers.0/self_attn/Mul_2_output_0" + name: "/model/layers.0/self_attn/Mul_2" + op_type: "Mul" + } + node { + output: "/model/layers.0/self_attn/Constant_13_output_0" + name: "/model/layers.0/self_attn/Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: 
"\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_14_output_0" + name: "/model/layers.0/self_attn/Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_15_output_0" + name: "/model/layers.0/self_attn/Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_16_output_0" + name: "/model/layers.0/self_attn/Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/self_attn/Transpose_1_output_0" + input: "/model/layers.0/self_attn/Constant_14_output_0" + input: "/model/layers.0/self_attn/Constant_15_output_0" + input: "/model/layers.0/self_attn/Constant_13_output_0" + input: "/model/layers.0/self_attn/Constant_16_output_0" + output: "/model/layers.0/self_attn/Slice_2_output_0" + name: "/model/layers.0/self_attn/Slice_2" + op_type: "Slice" + } + node { + output: "/model/layers.0/self_attn/Constant_17_output_0" + name: "/model/layers.0/self_attn/Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_18_output_0" + name: "/model/layers.0/self_attn/Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_19_output_0" + name: "/model/layers.0/self_attn/Constant_19" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.0/self_attn/Constant_20_output_0" + name: "/model/layers.0/self_attn/Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/self_attn/Transpose_1_output_0" + input: "/model/layers.0/self_attn/Constant_18_output_0" + input: "/model/layers.0/self_attn/Constant_19_output_0" + input: "/model/layers.0/self_attn/Constant_17_output_0" + input: "/model/layers.0/self_attn/Constant_20_output_0" + output: "/model/layers.0/self_attn/Slice_3_output_0" + name: "/model/layers.0/self_attn/Slice_3" + op_type: "Slice" + } + node { + input: "/model/layers.0/self_attn/Slice_3_output_0" + output: "/model/layers.0/self_attn/Neg_1_output_0" + name: "/model/layers.0/self_attn/Neg_1" + op_type: "Neg" + } + node { + input: "/model/layers.0/self_attn/Neg_1_output_0" + input: "/model/layers.0/self_attn/Slice_2_output_0" + output: "/model/layers.0/self_attn/Concat_1_output_0" + name: "/model/layers.0/self_attn/Concat_1" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.0/self_attn/Concat_1_output_0" + input: "/model/layers.0/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.0/self_attn/Mul_3_output_0" + name: "/model/layers.0/self_attn/Mul_3" + op_type: "Mul" + } + node { + input: 
"/model/layers.0/self_attn/Mul_2_output_0" + input: "/model/layers.0/self_attn/Mul_3_output_0" + output: "key_states.3" + name: "/model/layers.0/self_attn/Add_1" + op_type: "Add" + } + node { + input: "key_states.3" + output: "/model/layers.0/self_attn/Transpose_3_output_0" + name: "/model/layers.0/self_attn/Transpose_3" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 1 + ints: 3 + ints: 2 + type: INTS + } + } + node { + input: "/model/layers.0/self_attn/Add_output_0" + input: "/model/layers.0/self_attn/Transpose_3_output_0" + output: "/model/layers.0/self_attn/MatMul_output_0" + name: "/model/layers.0/self_attn/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.0/self_attn/Constant_21_output_0" + name: "/model/layers.0/self_attn/Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\363\0045@" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/self_attn/MatMul_output_0" + input: "/model/layers.0/self_attn/Constant_21_output_0" + output: "/model/layers.0/self_attn/Div_output_0" + name: "/model/layers.0/self_attn/Div" + op_type: "Div" + } + node { + input: "/model/layers.0/self_attn/Div_output_0" + input: "/model/Where_3_output_0" + output: "/model/layers.0/self_attn/Add_2_output_0" + name: "/model/layers.0/self_attn/Add_2" + op_type: "Add" + } + node { + input: "/model/layers.0/self_attn/Add_2_output_0" + output: "/model/layers.0/self_attn/Softmax_output_0" + name: "/model/layers.0/self_attn/Softmax" + op_type: "Softmax" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.0/self_attn/Softmax_output_0" + output: "/model/layers.0/self_attn/Cast_output_0" + name: "/model/layers.0/self_attn/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.0/self_attn/Cast_output_0" + output: "/model/layers.0/self_attn/Cast_1_output_0" + name: "/model/layers.0/self_attn/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.0/self_attn/Cast_1_output_0" + input: "value_states" + output: "/model/layers.0/self_attn/MatMul_1_output_0" + name: "/model/layers.0/self_attn/MatMul_1" + op_type: "MatMul" + } + node { + input: "/model/layers.0/self_attn/MatMul_1_output_0" + output: "/model/layers.0/self_attn/Transpose_4_output_0" + name: "/model/layers.0/self_attn/Transpose_4" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.0/self_attn/Constant_22_output_0" + name: "/model/layers.0/self_attn/Constant_22" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/self_attn/Transpose_4_output_0" + input: "/model/layers.0/self_attn/Constant_22_output_0" + output: "/model/layers.0/self_attn/Reshape_3_output_0" + name: "/model/layers.0/self_attn/Reshape_3" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.0/self_attn/Reshape_3_output_0" + input: "onnx::MatMul_1772" + output: "/model/layers.0/self_attn/o_proj/MatMul_output_0" + name: "/model/layers.0/self_attn/o_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.0/input_layernorm/Cast_output_0" + input: "/model/layers.0/self_attn/o_proj/MatMul_output_0" + 
output: "/model/layers.0/Add_output_0" + name: "/model/layers.0/Add" + op_type: "Add" + } + node { + input: "/model/layers.0/Add_output_0" + output: "/model/layers.0/post_attention_layernorm/Cast_output_0" + name: "/model/layers.0/post_attention_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.0/post_attention_layernorm/Constant_output_0" + name: "/model/layers.0/post_attention_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/post_attention_layernorm/Cast_output_0" + input: "/model/layers.0/post_attention_layernorm/Constant_output_0" + output: "/model/layers.0/post_attention_layernorm/Pow_output_0" + name: "/model/layers.0/post_attention_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.0/post_attention_layernorm/Pow_output_0" + output: "/model/layers.0/post_attention_layernorm/ReduceMean_output_0" + name: "/model/layers.0/post_attention_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.0/post_attention_layernorm/Constant_1_output_0" + name: "/model/layers.0/post_attention_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.0/post_attention_layernorm/ReduceMean_output_0" + input: "/model/layers.0/post_attention_layernorm/Constant_1_output_0" + output: "/model/layers.0/post_attention_layernorm/Add_output_0" + name: "/model/layers.0/post_attention_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.0/post_attention_layernorm/Add_output_0" + output: "/model/layers.0/post_attention_layernorm/Sqrt_output_0" + name: "/model/layers.0/post_attention_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.0/post_attention_layernorm/Constant_2_output_0" + name: "/model/layers.0/post_attention_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/layers.0/post_attention_layernorm/Constant_2_output_0" + input: "/model/layers.0/post_attention_layernorm/Sqrt_output_0" + output: "/model/layers.0/post_attention_layernorm/Div_output_0" + name: "/model/layers.0/post_attention_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.0/post_attention_layernorm/Cast_output_0" + input: "/model/layers.0/post_attention_layernorm/Div_output_0" + output: "/model/layers.0/post_attention_layernorm/Mul_output_0" + name: "/model/layers.0/post_attention_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.0/post_attention_layernorm/Mul_output_0" + output: "/model/layers.0/post_attention_layernorm/Cast_1_output_0" + name: "/model/layers.0/post_attention_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.0.post_attention_layernorm.weight" + input: "/model/layers.0/post_attention_layernorm/Cast_1_output_0" + output: "/model/layers.0/post_attention_layernorm/Mul_1_output_0" + name: "/model/layers.0/post_attention_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.0/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1773" + output: "/model/layers.0/mlp/gate_proj/MatMul_output_0" + name: "/model/layers.0/mlp/gate_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.0/mlp/gate_proj/MatMul_output_0" + output: "/model/layers.0/mlp/act_fn/Sigmoid_output_0" + name: "/model/layers.0/mlp/act_fn/Sigmoid" + op_type: "Sigmoid" + } + node { + input: "/model/layers.0/mlp/gate_proj/MatMul_output_0" + input: "/model/layers.0/mlp/act_fn/Sigmoid_output_0" + output: "/model/layers.0/mlp/act_fn/Mul_output_0" + name: "/model/layers.0/mlp/act_fn/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.0/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1774" + output: "/model/layers.0/mlp/up_proj/MatMul_output_0" + name: "/model/layers.0/mlp/up_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.0/mlp/act_fn/Mul_output_0" + input: "/model/layers.0/mlp/up_proj/MatMul_output_0" + output: "/model/layers.0/mlp/Mul_output_0" + name: "/model/layers.0/mlp/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.0/mlp/Mul_output_0" + input: "onnx::MatMul_1775" + output: "/model/layers.0/mlp/down_proj/MatMul_output_0" + name: "/model/layers.0/mlp/down_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.0/post_attention_layernorm/Cast_output_0" + input: "/model/layers.0/mlp/down_proj/MatMul_output_0" + output: "/model/layers.0/Add_1_output_0" + name: "/model/layers.0/Add_1" + op_type: "Add" + } + node { + input: "/model/layers.0/Add_1_output_0" + output: "/model/layers.1/input_layernorm/Cast_output_0" + name: "/model/layers.1/input_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.1/input_layernorm/Constant_output_0" + name: "/model/layers.1/input_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/input_layernorm/Cast_output_0" + input: "/model/layers.1/input_layernorm/Constant_output_0" + output: "/model/layers.1/input_layernorm/Pow_output_0" + name: "/model/layers.1/input_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.1/input_layernorm/Pow_output_0" + output: "/model/layers.1/input_layernorm/ReduceMean_output_0" 
+ name: "/model/layers.1/input_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.1/input_layernorm/Constant_1_output_0" + name: "/model/layers.1/input_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/input_layernorm/ReduceMean_output_0" + input: "/model/layers.1/input_layernorm/Constant_1_output_0" + output: "/model/layers.1/input_layernorm/Add_output_0" + name: "/model/layers.1/input_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.1/input_layernorm/Add_output_0" + output: "/model/layers.1/input_layernorm/Sqrt_output_0" + name: "/model/layers.1/input_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.1/input_layernorm/Constant_2_output_0" + name: "/model/layers.1/input_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/input_layernorm/Constant_2_output_0" + input: "/model/layers.1/input_layernorm/Sqrt_output_0" + output: "/model/layers.1/input_layernorm/Div_output_0" + name: "/model/layers.1/input_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.1/input_layernorm/Cast_output_0" + input: "/model/layers.1/input_layernorm/Div_output_0" + output: "/model/layers.1/input_layernorm/Mul_output_0" + name: "/model/layers.1/input_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.1/input_layernorm/Mul_output_0" + output: "/model/layers.1/input_layernorm/Cast_1_output_0" + name: "/model/layers.1/input_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.1.input_layernorm.weight" + input: "/model/layers.1/input_layernorm/Cast_1_output_0" + output: "/model/layers.1/input_layernorm/Mul_1_output_0" + name: "/model/layers.1/input_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.1/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1776" + output: "/model/layers.1/self_attn/q_proj/MatMul_output_0" + name: "/model/layers.1/self_attn/q_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.1/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1777" + output: "/model/layers.1/self_attn/k_proj/MatMul_output_0" + name: "/model/layers.1/self_attn/k_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.1/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1778" + output: "/model/layers.1/self_attn/v_proj/MatMul_output_0" + name: "/model/layers.1/self_attn/v_proj/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.1/self_attn/Constant_output_0" + name: "/model/layers.1/self_attn/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_1_output_0" + name: "/model/layers.1/self_attn/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR 
+ } + } + node { + output: "/model/layers.1/self_attn/Constant_2_output_0" + name: "/model/layers.1/self_attn/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/self_attn/q_proj/MatMul_output_0" + input: "/model/layers.1/self_attn/Constant_output_0" + output: "/model/layers.1/self_attn/Reshape_output_0" + name: "/model/layers.1/self_attn/Reshape" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.1/self_attn/Reshape_output_0" + output: "/model/layers.1/self_attn/Transpose_output_0" + name: "/model/layers.1/self_attn/Transpose" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.1/self_attn/k_proj/MatMul_output_0" + input: "/model/layers.1/self_attn/Constant_1_output_0" + output: "/model/layers.1/self_attn/Reshape_1_output_0" + name: "/model/layers.1/self_attn/Reshape_1" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.1/self_attn/Reshape_1_output_0" + output: "/model/layers.1/self_attn/Transpose_1_output_0" + name: "/model/layers.1/self_attn/Transpose_1" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.1/self_attn/v_proj/MatMul_output_0" + input: "/model/layers.1/self_attn/Constant_2_output_0" + output: "/model/layers.1/self_attn/Reshape_2_output_0" + name: "/model/layers.1/self_attn/Reshape_2" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.1/self_attn/Reshape_2_output_0" + output: "value_states.3" + name: "/model/layers.1/self_attn/Transpose_2" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.1/self_attn/rotary_emb/Constant_output_0" + name: "/model/layers.1/self_attn/rotary_emb/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?AQ\n?\230\270~?\271\374\177?\370\377\177?AQ\n?\230\270~?\271\374\177?\370\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?,<\221>@\251`?\031\256\177?.\377\177?,<\221>@\251`?\031\256\177?.\377\177?\270\315u?2IS?\022\212\177?\322\376\177?\270\315u?2IS?\022\212\177?\322\376\177?\275\377@?\263\314C?\201_\177?e\376\177?\275\377@?\263\314C?\201_\177?e\376\177?" 
+ } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/rotary_emb/Constant_1_output_0" + name: "/model/layers.1/self_attn/rotary_emb/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\244jW?wu\314=W\326#>\324\243>\324\243mN\227>\037\271\365<\223\233D;\303\201\020>mN\227>\037\271\365<\223\233D;\317\275A\277\330a\307>\333\313#=X\022\203;\317\275A\277\330a\307>\333\313#=X\022\203;\020|u\277Dw\365>\364\266L=\336\326\243;\020|u\277Dw\365>\364\266L=\336\326\243;\214\017\217\276i\214\020?\321\234u=Y\233\304;\214\017\217\276i\214\020?\321\234u=Y\233\304;F0(?s\353$?2>\217=\307_\345;F0(?s\353$?2>\217=\307_\345;" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/self_attn/rotary_emb/Constant_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.1/self_attn/Gather_output_0" + name: "/model/layers.1/self_attn/Gather" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.1/self_attn/Constant_3_output_0" + name: "/model/layers.1/self_attn/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/self_attn/Gather_output_0" + input: "/model/layers.1/self_attn/Constant_3_output_0" + output: "/model/layers.1/self_attn/Unsqueeze_output_0" + name: "/model/layers.1/self_attn/Unsqueeze" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.1/self_attn/rotary_emb/Constant_1_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.1/self_attn/Gather_1_output_0" + name: "/model/layers.1/self_attn/Gather_1" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.1/self_attn/Constant_4_output_0" + name: "/model/layers.1/self_attn/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/self_attn/Gather_1_output_0" + input: "/model/layers.1/self_attn/Constant_4_output_0" + output: "/model/layers.1/self_attn/Unsqueeze_1_output_0" + name: "/model/layers.1/self_attn/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.1/self_attn/Transpose_output_0" + input: "/model/layers.1/self_attn/Unsqueeze_output_0" + output: "/model/layers.1/self_attn/Mul_output_0" + name: "/model/layers.1/self_attn/Mul" + op_type: "Mul" + } + node { + output: "/model/layers.1/self_attn/Constant_5_output_0" + name: "/model/layers.1/self_attn/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_6_output_0" + name: "/model/layers.1/self_attn/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_7_output_0" + name: "/model/layers.1/self_attn/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: 
"/model/layers.1/self_attn/Constant_8_output_0" + name: "/model/layers.1/self_attn/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/self_attn/Transpose_output_0" + input: "/model/layers.1/self_attn/Constant_6_output_0" + input: "/model/layers.1/self_attn/Constant_7_output_0" + input: "/model/layers.1/self_attn/Constant_5_output_0" + input: "/model/layers.1/self_attn/Constant_8_output_0" + output: "/model/layers.1/self_attn/Slice_output_0" + name: "/model/layers.1/self_attn/Slice" + op_type: "Slice" + } + node { + output: "/model/layers.1/self_attn/Constant_9_output_0" + name: "/model/layers.1/self_attn/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_10_output_0" + name: "/model/layers.1/self_attn/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_11_output_0" + name: "/model/layers.1/self_attn/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_12_output_0" + name: "/model/layers.1/self_attn/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/self_attn/Transpose_output_0" + input: "/model/layers.1/self_attn/Constant_10_output_0" + input: "/model/layers.1/self_attn/Constant_11_output_0" + input: "/model/layers.1/self_attn/Constant_9_output_0" + input: "/model/layers.1/self_attn/Constant_12_output_0" + output: "/model/layers.1/self_attn/Slice_1_output_0" + name: "/model/layers.1/self_attn/Slice_1" + op_type: "Slice" + } + node { + input: "/model/layers.1/self_attn/Slice_1_output_0" + output: "/model/layers.1/self_attn/Neg_output_0" + name: "/model/layers.1/self_attn/Neg" + op_type: "Neg" + } + node { + input: "/model/layers.1/self_attn/Neg_output_0" + input: "/model/layers.1/self_attn/Slice_output_0" + output: "/model/layers.1/self_attn/Concat_output_0" + name: "/model/layers.1/self_attn/Concat" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.1/self_attn/Concat_output_0" + input: "/model/layers.1/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.1/self_attn/Mul_1_output_0" + name: "/model/layers.1/self_attn/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.1/self_attn/Mul_output_0" + input: "/model/layers.1/self_attn/Mul_1_output_0" + output: "/model/layers.1/self_attn/Add_output_0" + name: "/model/layers.1/self_attn/Add" + op_type: "Add" + } + node { + input: "/model/layers.1/self_attn/Transpose_1_output_0" + input: "/model/layers.1/self_attn/Unsqueeze_output_0" + output: "/model/layers.1/self_attn/Mul_2_output_0" + name: "/model/layers.1/self_attn/Mul_2" + op_type: "Mul" + } + node { + output: "/model/layers.1/self_attn/Constant_13_output_0" + name: "/model/layers.1/self_attn/Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: 
"\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_14_output_0" + name: "/model/layers.1/self_attn/Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_15_output_0" + name: "/model/layers.1/self_attn/Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_16_output_0" + name: "/model/layers.1/self_attn/Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/self_attn/Transpose_1_output_0" + input: "/model/layers.1/self_attn/Constant_14_output_0" + input: "/model/layers.1/self_attn/Constant_15_output_0" + input: "/model/layers.1/self_attn/Constant_13_output_0" + input: "/model/layers.1/self_attn/Constant_16_output_0" + output: "/model/layers.1/self_attn/Slice_2_output_0" + name: "/model/layers.1/self_attn/Slice_2" + op_type: "Slice" + } + node { + output: "/model/layers.1/self_attn/Constant_17_output_0" + name: "/model/layers.1/self_attn/Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_18_output_0" + name: "/model/layers.1/self_attn/Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_19_output_0" + name: "/model/layers.1/self_attn/Constant_19" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.1/self_attn/Constant_20_output_0" + name: "/model/layers.1/self_attn/Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/self_attn/Transpose_1_output_0" + input: "/model/layers.1/self_attn/Constant_18_output_0" + input: "/model/layers.1/self_attn/Constant_19_output_0" + input: "/model/layers.1/self_attn/Constant_17_output_0" + input: "/model/layers.1/self_attn/Constant_20_output_0" + output: "/model/layers.1/self_attn/Slice_3_output_0" + name: "/model/layers.1/self_attn/Slice_3" + op_type: "Slice" + } + node { + input: "/model/layers.1/self_attn/Slice_3_output_0" + output: "/model/layers.1/self_attn/Neg_1_output_0" + name: "/model/layers.1/self_attn/Neg_1" + op_type: "Neg" + } + node { + input: "/model/layers.1/self_attn/Neg_1_output_0" + input: "/model/layers.1/self_attn/Slice_2_output_0" + output: "/model/layers.1/self_attn/Concat_1_output_0" + name: "/model/layers.1/self_attn/Concat_1" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.1/self_attn/Concat_1_output_0" + input: "/model/layers.1/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.1/self_attn/Mul_3_output_0" + name: "/model/layers.1/self_attn/Mul_3" + op_type: "Mul" + } + node { + input: 
"/model/layers.1/self_attn/Mul_2_output_0" + input: "/model/layers.1/self_attn/Mul_3_output_0" + output: "key_states.11" + name: "/model/layers.1/self_attn/Add_1" + op_type: "Add" + } + node { + input: "key_states.11" + output: "/model/layers.1/self_attn/Transpose_3_output_0" + name: "/model/layers.1/self_attn/Transpose_3" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 1 + ints: 3 + ints: 2 + type: INTS + } + } + node { + input: "/model/layers.1/self_attn/Add_output_0" + input: "/model/layers.1/self_attn/Transpose_3_output_0" + output: "/model/layers.1/self_attn/MatMul_output_0" + name: "/model/layers.1/self_attn/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.1/self_attn/Constant_21_output_0" + name: "/model/layers.1/self_attn/Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\363\0045@" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/self_attn/MatMul_output_0" + input: "/model/layers.1/self_attn/Constant_21_output_0" + output: "/model/layers.1/self_attn/Div_output_0" + name: "/model/layers.1/self_attn/Div" + op_type: "Div" + } + node { + input: "/model/layers.1/self_attn/Div_output_0" + input: "/model/Where_3_output_0" + output: "/model/layers.1/self_attn/Add_2_output_0" + name: "/model/layers.1/self_attn/Add_2" + op_type: "Add" + } + node { + input: "/model/layers.1/self_attn/Add_2_output_0" + output: "/model/layers.1/self_attn/Softmax_output_0" + name: "/model/layers.1/self_attn/Softmax" + op_type: "Softmax" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.1/self_attn/Softmax_output_0" + output: "/model/layers.1/self_attn/Cast_output_0" + name: "/model/layers.1/self_attn/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.1/self_attn/Cast_output_0" + output: "/model/layers.1/self_attn/Cast_1_output_0" + name: "/model/layers.1/self_attn/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.1/self_attn/Cast_1_output_0" + input: "value_states.3" + output: "/model/layers.1/self_attn/MatMul_1_output_0" + name: "/model/layers.1/self_attn/MatMul_1" + op_type: "MatMul" + } + node { + input: "/model/layers.1/self_attn/MatMul_1_output_0" + output: "/model/layers.1/self_attn/Transpose_4_output_0" + name: "/model/layers.1/self_attn/Transpose_4" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.1/self_attn/Constant_22_output_0" + name: "/model/layers.1/self_attn/Constant_22" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/self_attn/Transpose_4_output_0" + input: "/model/layers.1/self_attn/Constant_22_output_0" + output: "/model/layers.1/self_attn/Reshape_3_output_0" + name: "/model/layers.1/self_attn/Reshape_3" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.1/self_attn/Reshape_3_output_0" + input: "onnx::MatMul_1814" + output: "/model/layers.1/self_attn/o_proj/MatMul_output_0" + name: "/model/layers.1/self_attn/o_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.1/input_layernorm/Cast_output_0" + input: "/model/layers.1/self_attn/o_proj/MatMul_output_0" + 
output: "/model/layers.1/Add_output_0" + name: "/model/layers.1/Add" + op_type: "Add" + } + node { + input: "/model/layers.1/Add_output_0" + output: "/model/layers.1/post_attention_layernorm/Cast_output_0" + name: "/model/layers.1/post_attention_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.1/post_attention_layernorm/Constant_output_0" + name: "/model/layers.1/post_attention_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/post_attention_layernorm/Cast_output_0" + input: "/model/layers.1/post_attention_layernorm/Constant_output_0" + output: "/model/layers.1/post_attention_layernorm/Pow_output_0" + name: "/model/layers.1/post_attention_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.1/post_attention_layernorm/Pow_output_0" + output: "/model/layers.1/post_attention_layernorm/ReduceMean_output_0" + name: "/model/layers.1/post_attention_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.1/post_attention_layernorm/Constant_1_output_0" + name: "/model/layers.1/post_attention_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.1/post_attention_layernorm/ReduceMean_output_0" + input: "/model/layers.1/post_attention_layernorm/Constant_1_output_0" + output: "/model/layers.1/post_attention_layernorm/Add_output_0" + name: "/model/layers.1/post_attention_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.1/post_attention_layernorm/Add_output_0" + output: "/model/layers.1/post_attention_layernorm/Sqrt_output_0" + name: "/model/layers.1/post_attention_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.1/post_attention_layernorm/Constant_2_output_0" + name: "/model/layers.1/post_attention_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/layers.1/post_attention_layernorm/Constant_2_output_0" + input: "/model/layers.1/post_attention_layernorm/Sqrt_output_0" + output: "/model/layers.1/post_attention_layernorm/Div_output_0" + name: "/model/layers.1/post_attention_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.1/post_attention_layernorm/Cast_output_0" + input: "/model/layers.1/post_attention_layernorm/Div_output_0" + output: "/model/layers.1/post_attention_layernorm/Mul_output_0" + name: "/model/layers.1/post_attention_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.1/post_attention_layernorm/Mul_output_0" + output: "/model/layers.1/post_attention_layernorm/Cast_1_output_0" + name: "/model/layers.1/post_attention_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.1.post_attention_layernorm.weight" + input: "/model/layers.1/post_attention_layernorm/Cast_1_output_0" + output: "/model/layers.1/post_attention_layernorm/Mul_1_output_0" + name: "/model/layers.1/post_attention_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.1/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1815" + output: "/model/layers.1/mlp/gate_proj/MatMul_output_0" + name: "/model/layers.1/mlp/gate_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.1/mlp/gate_proj/MatMul_output_0" + output: "/model/layers.1/mlp/act_fn/Sigmoid_output_0" + name: "/model/layers.1/mlp/act_fn/Sigmoid" + op_type: "Sigmoid" + } + node { + input: "/model/layers.1/mlp/gate_proj/MatMul_output_0" + input: "/model/layers.1/mlp/act_fn/Sigmoid_output_0" + output: "/model/layers.1/mlp/act_fn/Mul_output_0" + name: "/model/layers.1/mlp/act_fn/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.1/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1816" + output: "/model/layers.1/mlp/up_proj/MatMul_output_0" + name: "/model/layers.1/mlp/up_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.1/mlp/act_fn/Mul_output_0" + input: "/model/layers.1/mlp/up_proj/MatMul_output_0" + output: "/model/layers.1/mlp/Mul_output_0" + name: "/model/layers.1/mlp/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.1/mlp/Mul_output_0" + input: "onnx::MatMul_1817" + output: "/model/layers.1/mlp/down_proj/MatMul_output_0" + name: "/model/layers.1/mlp/down_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.1/post_attention_layernorm/Cast_output_0" + input: "/model/layers.1/mlp/down_proj/MatMul_output_0" + output: "/model/layers.1/Add_1_output_0" + name: "/model/layers.1/Add_1" + op_type: "Add" + } + node { + input: "/model/layers.1/Add_1_output_0" + output: "/model/layers.2/input_layernorm/Cast_output_0" + name: "/model/layers.2/input_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.2/input_layernorm/Constant_output_0" + name: "/model/layers.2/input_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/input_layernorm/Cast_output_0" + input: "/model/layers.2/input_layernorm/Constant_output_0" + output: "/model/layers.2/input_layernorm/Pow_output_0" + name: "/model/layers.2/input_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.2/input_layernorm/Pow_output_0" + output: "/model/layers.2/input_layernorm/ReduceMean_output_0" 
+ name: "/model/layers.2/input_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.2/input_layernorm/Constant_1_output_0" + name: "/model/layers.2/input_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/input_layernorm/ReduceMean_output_0" + input: "/model/layers.2/input_layernorm/Constant_1_output_0" + output: "/model/layers.2/input_layernorm/Add_output_0" + name: "/model/layers.2/input_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.2/input_layernorm/Add_output_0" + output: "/model/layers.2/input_layernorm/Sqrt_output_0" + name: "/model/layers.2/input_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.2/input_layernorm/Constant_2_output_0" + name: "/model/layers.2/input_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/input_layernorm/Constant_2_output_0" + input: "/model/layers.2/input_layernorm/Sqrt_output_0" + output: "/model/layers.2/input_layernorm/Div_output_0" + name: "/model/layers.2/input_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.2/input_layernorm/Cast_output_0" + input: "/model/layers.2/input_layernorm/Div_output_0" + output: "/model/layers.2/input_layernorm/Mul_output_0" + name: "/model/layers.2/input_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.2/input_layernorm/Mul_output_0" + output: "/model/layers.2/input_layernorm/Cast_1_output_0" + name: "/model/layers.2/input_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.2.input_layernorm.weight" + input: "/model/layers.2/input_layernorm/Cast_1_output_0" + output: "/model/layers.2/input_layernorm/Mul_1_output_0" + name: "/model/layers.2/input_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.2/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1818" + output: "/model/layers.2/self_attn/q_proj/MatMul_output_0" + name: "/model/layers.2/self_attn/q_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.2/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1819" + output: "/model/layers.2/self_attn/k_proj/MatMul_output_0" + name: "/model/layers.2/self_attn/k_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.2/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1820" + output: "/model/layers.2/self_attn/v_proj/MatMul_output_0" + name: "/model/layers.2/self_attn/v_proj/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.2/self_attn/Constant_output_0" + name: "/model/layers.2/self_attn/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_1_output_0" + name: "/model/layers.2/self_attn/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR 
+ } + } + node { + output: "/model/layers.2/self_attn/Constant_2_output_0" + name: "/model/layers.2/self_attn/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/self_attn/q_proj/MatMul_output_0" + input: "/model/layers.2/self_attn/Constant_output_0" + output: "/model/layers.2/self_attn/Reshape_output_0" + name: "/model/layers.2/self_attn/Reshape" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.2/self_attn/Reshape_output_0" + output: "/model/layers.2/self_attn/Transpose_output_0" + name: "/model/layers.2/self_attn/Transpose" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.2/self_attn/k_proj/MatMul_output_0" + input: "/model/layers.2/self_attn/Constant_1_output_0" + output: "/model/layers.2/self_attn/Reshape_1_output_0" + name: "/model/layers.2/self_attn/Reshape_1" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.2/self_attn/Reshape_1_output_0" + output: "/model/layers.2/self_attn/Transpose_1_output_0" + name: "/model/layers.2/self_attn/Transpose_1" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.2/self_attn/v_proj/MatMul_output_0" + input: "/model/layers.2/self_attn/Constant_2_output_0" + output: "/model/layers.2/self_attn/Reshape_2_output_0" + name: "/model/layers.2/self_attn/Reshape_2" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.2/self_attn/Reshape_2_output_0" + output: "value_states.7" + name: "/model/layers.2/self_attn/Transpose_2" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.2/self_attn/rotary_emb/Constant_output_0" + name: "/model/layers.2/self_attn/rotary_emb/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?AQ\n?\230\270~?\271\374\177?\370\377\177?AQ\n?\230\270~?\271\374\177?\370\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?,<\221>@\251`?\031\256\177?.\377\177?,<\221>@\251`?\031\256\177?.\377\177?\270\315u?2IS?\022\212\177?\322\376\177?\270\315u?2IS?\022\212\177?\322\376\177?\275\377@?\263\314C?\201_\177?e\376\177?\275\377@?\263\314C?\201_\177?e\376\177?" 
+ } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/rotary_emb/Constant_1_output_0" + name: "/model/layers.2/self_attn/rotary_emb/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\244jW?wu\314=W\326#>\324\243>\324\243mN\227>\037\271\365<\223\233D;\303\201\020>mN\227>\037\271\365<\223\233D;\317\275A\277\330a\307>\333\313#=X\022\203;\317\275A\277\330a\307>\333\313#=X\022\203;\020|u\277Dw\365>\364\266L=\336\326\243;\020|u\277Dw\365>\364\266L=\336\326\243;\214\017\217\276i\214\020?\321\234u=Y\233\304;\214\017\217\276i\214\020?\321\234u=Y\233\304;F0(?s\353$?2>\217=\307_\345;F0(?s\353$?2>\217=\307_\345;" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/self_attn/rotary_emb/Constant_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.2/self_attn/Gather_output_0" + name: "/model/layers.2/self_attn/Gather" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.2/self_attn/Constant_3_output_0" + name: "/model/layers.2/self_attn/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/self_attn/Gather_output_0" + input: "/model/layers.2/self_attn/Constant_3_output_0" + output: "/model/layers.2/self_attn/Unsqueeze_output_0" + name: "/model/layers.2/self_attn/Unsqueeze" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.2/self_attn/rotary_emb/Constant_1_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.2/self_attn/Gather_1_output_0" + name: "/model/layers.2/self_attn/Gather_1" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.2/self_attn/Constant_4_output_0" + name: "/model/layers.2/self_attn/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/self_attn/Gather_1_output_0" + input: "/model/layers.2/self_attn/Constant_4_output_0" + output: "/model/layers.2/self_attn/Unsqueeze_1_output_0" + name: "/model/layers.2/self_attn/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.2/self_attn/Transpose_output_0" + input: "/model/layers.2/self_attn/Unsqueeze_output_0" + output: "/model/layers.2/self_attn/Mul_output_0" + name: "/model/layers.2/self_attn/Mul" + op_type: "Mul" + } + node { + output: "/model/layers.2/self_attn/Constant_5_output_0" + name: "/model/layers.2/self_attn/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_6_output_0" + name: "/model/layers.2/self_attn/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_7_output_0" + name: "/model/layers.2/self_attn/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: 
"/model/layers.2/self_attn/Constant_8_output_0" + name: "/model/layers.2/self_attn/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/self_attn/Transpose_output_0" + input: "/model/layers.2/self_attn/Constant_6_output_0" + input: "/model/layers.2/self_attn/Constant_7_output_0" + input: "/model/layers.2/self_attn/Constant_5_output_0" + input: "/model/layers.2/self_attn/Constant_8_output_0" + output: "/model/layers.2/self_attn/Slice_output_0" + name: "/model/layers.2/self_attn/Slice" + op_type: "Slice" + } + node { + output: "/model/layers.2/self_attn/Constant_9_output_0" + name: "/model/layers.2/self_attn/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_10_output_0" + name: "/model/layers.2/self_attn/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_11_output_0" + name: "/model/layers.2/self_attn/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_12_output_0" + name: "/model/layers.2/self_attn/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/self_attn/Transpose_output_0" + input: "/model/layers.2/self_attn/Constant_10_output_0" + input: "/model/layers.2/self_attn/Constant_11_output_0" + input: "/model/layers.2/self_attn/Constant_9_output_0" + input: "/model/layers.2/self_attn/Constant_12_output_0" + output: "/model/layers.2/self_attn/Slice_1_output_0" + name: "/model/layers.2/self_attn/Slice_1" + op_type: "Slice" + } + node { + input: "/model/layers.2/self_attn/Slice_1_output_0" + output: "/model/layers.2/self_attn/Neg_output_0" + name: "/model/layers.2/self_attn/Neg" + op_type: "Neg" + } + node { + input: "/model/layers.2/self_attn/Neg_output_0" + input: "/model/layers.2/self_attn/Slice_output_0" + output: "/model/layers.2/self_attn/Concat_output_0" + name: "/model/layers.2/self_attn/Concat" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.2/self_attn/Concat_output_0" + input: "/model/layers.2/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.2/self_attn/Mul_1_output_0" + name: "/model/layers.2/self_attn/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.2/self_attn/Mul_output_0" + input: "/model/layers.2/self_attn/Mul_1_output_0" + output: "/model/layers.2/self_attn/Add_output_0" + name: "/model/layers.2/self_attn/Add" + op_type: "Add" + } + node { + input: "/model/layers.2/self_attn/Transpose_1_output_0" + input: "/model/layers.2/self_attn/Unsqueeze_output_0" + output: "/model/layers.2/self_attn/Mul_2_output_0" + name: "/model/layers.2/self_attn/Mul_2" + op_type: "Mul" + } + node { + output: "/model/layers.2/self_attn/Constant_13_output_0" + name: "/model/layers.2/self_attn/Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: 
"\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_14_output_0" + name: "/model/layers.2/self_attn/Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_15_output_0" + name: "/model/layers.2/self_attn/Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_16_output_0" + name: "/model/layers.2/self_attn/Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/self_attn/Transpose_1_output_0" + input: "/model/layers.2/self_attn/Constant_14_output_0" + input: "/model/layers.2/self_attn/Constant_15_output_0" + input: "/model/layers.2/self_attn/Constant_13_output_0" + input: "/model/layers.2/self_attn/Constant_16_output_0" + output: "/model/layers.2/self_attn/Slice_2_output_0" + name: "/model/layers.2/self_attn/Slice_2" + op_type: "Slice" + } + node { + output: "/model/layers.2/self_attn/Constant_17_output_0" + name: "/model/layers.2/self_attn/Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_18_output_0" + name: "/model/layers.2/self_attn/Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_19_output_0" + name: "/model/layers.2/self_attn/Constant_19" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.2/self_attn/Constant_20_output_0" + name: "/model/layers.2/self_attn/Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/self_attn/Transpose_1_output_0" + input: "/model/layers.2/self_attn/Constant_18_output_0" + input: "/model/layers.2/self_attn/Constant_19_output_0" + input: "/model/layers.2/self_attn/Constant_17_output_0" + input: "/model/layers.2/self_attn/Constant_20_output_0" + output: "/model/layers.2/self_attn/Slice_3_output_0" + name: "/model/layers.2/self_attn/Slice_3" + op_type: "Slice" + } + node { + input: "/model/layers.2/self_attn/Slice_3_output_0" + output: "/model/layers.2/self_attn/Neg_1_output_0" + name: "/model/layers.2/self_attn/Neg_1" + op_type: "Neg" + } + node { + input: "/model/layers.2/self_attn/Neg_1_output_0" + input: "/model/layers.2/self_attn/Slice_2_output_0" + output: "/model/layers.2/self_attn/Concat_1_output_0" + name: "/model/layers.2/self_attn/Concat_1" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.2/self_attn/Concat_1_output_0" + input: "/model/layers.2/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.2/self_attn/Mul_3_output_0" + name: "/model/layers.2/self_attn/Mul_3" + op_type: "Mul" + } + node { + input: 
"/model/layers.2/self_attn/Mul_2_output_0" + input: "/model/layers.2/self_attn/Mul_3_output_0" + output: "key_states.19" + name: "/model/layers.2/self_attn/Add_1" + op_type: "Add" + } + node { + input: "key_states.19" + output: "/model/layers.2/self_attn/Transpose_3_output_0" + name: "/model/layers.2/self_attn/Transpose_3" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 1 + ints: 3 + ints: 2 + type: INTS + } + } + node { + input: "/model/layers.2/self_attn/Add_output_0" + input: "/model/layers.2/self_attn/Transpose_3_output_0" + output: "/model/layers.2/self_attn/MatMul_output_0" + name: "/model/layers.2/self_attn/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.2/self_attn/Constant_21_output_0" + name: "/model/layers.2/self_attn/Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\363\0045@" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/self_attn/MatMul_output_0" + input: "/model/layers.2/self_attn/Constant_21_output_0" + output: "/model/layers.2/self_attn/Div_output_0" + name: "/model/layers.2/self_attn/Div" + op_type: "Div" + } + node { + input: "/model/layers.2/self_attn/Div_output_0" + input: "/model/Where_3_output_0" + output: "/model/layers.2/self_attn/Add_2_output_0" + name: "/model/layers.2/self_attn/Add_2" + op_type: "Add" + } + node { + input: "/model/layers.2/self_attn/Add_2_output_0" + output: "/model/layers.2/self_attn/Softmax_output_0" + name: "/model/layers.2/self_attn/Softmax" + op_type: "Softmax" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.2/self_attn/Softmax_output_0" + output: "/model/layers.2/self_attn/Cast_output_0" + name: "/model/layers.2/self_attn/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.2/self_attn/Cast_output_0" + output: "/model/layers.2/self_attn/Cast_1_output_0" + name: "/model/layers.2/self_attn/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.2/self_attn/Cast_1_output_0" + input: "value_states.7" + output: "/model/layers.2/self_attn/MatMul_1_output_0" + name: "/model/layers.2/self_attn/MatMul_1" + op_type: "MatMul" + } + node { + input: "/model/layers.2/self_attn/MatMul_1_output_0" + output: "/model/layers.2/self_attn/Transpose_4_output_0" + name: "/model/layers.2/self_attn/Transpose_4" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.2/self_attn/Constant_22_output_0" + name: "/model/layers.2/self_attn/Constant_22" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/self_attn/Transpose_4_output_0" + input: "/model/layers.2/self_attn/Constant_22_output_0" + output: "/model/layers.2/self_attn/Reshape_3_output_0" + name: "/model/layers.2/self_attn/Reshape_3" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.2/self_attn/Reshape_3_output_0" + input: "onnx::MatMul_1856" + output: "/model/layers.2/self_attn/o_proj/MatMul_output_0" + name: "/model/layers.2/self_attn/o_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.2/input_layernorm/Cast_output_0" + input: "/model/layers.2/self_attn/o_proj/MatMul_output_0" + 
output: "/model/layers.2/Add_output_0" + name: "/model/layers.2/Add" + op_type: "Add" + } + node { + input: "/model/layers.2/Add_output_0" + output: "/model/layers.2/post_attention_layernorm/Cast_output_0" + name: "/model/layers.2/post_attention_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.2/post_attention_layernorm/Constant_output_0" + name: "/model/layers.2/post_attention_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/post_attention_layernorm/Cast_output_0" + input: "/model/layers.2/post_attention_layernorm/Constant_output_0" + output: "/model/layers.2/post_attention_layernorm/Pow_output_0" + name: "/model/layers.2/post_attention_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.2/post_attention_layernorm/Pow_output_0" + output: "/model/layers.2/post_attention_layernorm/ReduceMean_output_0" + name: "/model/layers.2/post_attention_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.2/post_attention_layernorm/Constant_1_output_0" + name: "/model/layers.2/post_attention_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.2/post_attention_layernorm/ReduceMean_output_0" + input: "/model/layers.2/post_attention_layernorm/Constant_1_output_0" + output: "/model/layers.2/post_attention_layernorm/Add_output_0" + name: "/model/layers.2/post_attention_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.2/post_attention_layernorm/Add_output_0" + output: "/model/layers.2/post_attention_layernorm/Sqrt_output_0" + name: "/model/layers.2/post_attention_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.2/post_attention_layernorm/Constant_2_output_0" + name: "/model/layers.2/post_attention_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/layers.2/post_attention_layernorm/Constant_2_output_0" + input: "/model/layers.2/post_attention_layernorm/Sqrt_output_0" + output: "/model/layers.2/post_attention_layernorm/Div_output_0" + name: "/model/layers.2/post_attention_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.2/post_attention_layernorm/Cast_output_0" + input: "/model/layers.2/post_attention_layernorm/Div_output_0" + output: "/model/layers.2/post_attention_layernorm/Mul_output_0" + name: "/model/layers.2/post_attention_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.2/post_attention_layernorm/Mul_output_0" + output: "/model/layers.2/post_attention_layernorm/Cast_1_output_0" + name: "/model/layers.2/post_attention_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.2.post_attention_layernorm.weight" + input: "/model/layers.2/post_attention_layernorm/Cast_1_output_0" + output: "/model/layers.2/post_attention_layernorm/Mul_1_output_0" + name: "/model/layers.2/post_attention_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.2/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1857" + output: "/model/layers.2/mlp/gate_proj/MatMul_output_0" + name: "/model/layers.2/mlp/gate_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.2/mlp/gate_proj/MatMul_output_0" + output: "/model/layers.2/mlp/act_fn/Sigmoid_output_0" + name: "/model/layers.2/mlp/act_fn/Sigmoid" + op_type: "Sigmoid" + } + node { + input: "/model/layers.2/mlp/gate_proj/MatMul_output_0" + input: "/model/layers.2/mlp/act_fn/Sigmoid_output_0" + output: "/model/layers.2/mlp/act_fn/Mul_output_0" + name: "/model/layers.2/mlp/act_fn/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.2/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1858" + output: "/model/layers.2/mlp/up_proj/MatMul_output_0" + name: "/model/layers.2/mlp/up_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.2/mlp/act_fn/Mul_output_0" + input: "/model/layers.2/mlp/up_proj/MatMul_output_0" + output: "/model/layers.2/mlp/Mul_output_0" + name: "/model/layers.2/mlp/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.2/mlp/Mul_output_0" + input: "onnx::MatMul_1859" + output: "/model/layers.2/mlp/down_proj/MatMul_output_0" + name: "/model/layers.2/mlp/down_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.2/post_attention_layernorm/Cast_output_0" + input: "/model/layers.2/mlp/down_proj/MatMul_output_0" + output: "/model/layers.2/Add_1_output_0" + name: "/model/layers.2/Add_1" + op_type: "Add" + } + node { + input: "/model/layers.2/Add_1_output_0" + output: "/model/layers.3/input_layernorm/Cast_output_0" + name: "/model/layers.3/input_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.3/input_layernorm/Constant_output_0" + name: "/model/layers.3/input_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/input_layernorm/Cast_output_0" + input: "/model/layers.3/input_layernorm/Constant_output_0" + output: "/model/layers.3/input_layernorm/Pow_output_0" + name: "/model/layers.3/input_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.3/input_layernorm/Pow_output_0" + output: "/model/layers.3/input_layernorm/ReduceMean_output_0" 
+ name: "/model/layers.3/input_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.3/input_layernorm/Constant_1_output_0" + name: "/model/layers.3/input_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/input_layernorm/ReduceMean_output_0" + input: "/model/layers.3/input_layernorm/Constant_1_output_0" + output: "/model/layers.3/input_layernorm/Add_output_0" + name: "/model/layers.3/input_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.3/input_layernorm/Add_output_0" + output: "/model/layers.3/input_layernorm/Sqrt_output_0" + name: "/model/layers.3/input_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.3/input_layernorm/Constant_2_output_0" + name: "/model/layers.3/input_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/input_layernorm/Constant_2_output_0" + input: "/model/layers.3/input_layernorm/Sqrt_output_0" + output: "/model/layers.3/input_layernorm/Div_output_0" + name: "/model/layers.3/input_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.3/input_layernorm/Cast_output_0" + input: "/model/layers.3/input_layernorm/Div_output_0" + output: "/model/layers.3/input_layernorm/Mul_output_0" + name: "/model/layers.3/input_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.3/input_layernorm/Mul_output_0" + output: "/model/layers.3/input_layernorm/Cast_1_output_0" + name: "/model/layers.3/input_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.3.input_layernorm.weight" + input: "/model/layers.3/input_layernorm/Cast_1_output_0" + output: "/model/layers.3/input_layernorm/Mul_1_output_0" + name: "/model/layers.3/input_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.3/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1860" + output: "/model/layers.3/self_attn/q_proj/MatMul_output_0" + name: "/model/layers.3/self_attn/q_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.3/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1861" + output: "/model/layers.3/self_attn/k_proj/MatMul_output_0" + name: "/model/layers.3/self_attn/k_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.3/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1862" + output: "/model/layers.3/self_attn/v_proj/MatMul_output_0" + name: "/model/layers.3/self_attn/v_proj/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.3/self_attn/Constant_output_0" + name: "/model/layers.3/self_attn/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_1_output_0" + name: "/model/layers.3/self_attn/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR 
+ } + } + node { + output: "/model/layers.3/self_attn/Constant_2_output_0" + name: "/model/layers.3/self_attn/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/self_attn/q_proj/MatMul_output_0" + input: "/model/layers.3/self_attn/Constant_output_0" + output: "/model/layers.3/self_attn/Reshape_output_0" + name: "/model/layers.3/self_attn/Reshape" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.3/self_attn/Reshape_output_0" + output: "/model/layers.3/self_attn/Transpose_output_0" + name: "/model/layers.3/self_attn/Transpose" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.3/self_attn/k_proj/MatMul_output_0" + input: "/model/layers.3/self_attn/Constant_1_output_0" + output: "/model/layers.3/self_attn/Reshape_1_output_0" + name: "/model/layers.3/self_attn/Reshape_1" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.3/self_attn/Reshape_1_output_0" + output: "/model/layers.3/self_attn/Transpose_1_output_0" + name: "/model/layers.3/self_attn/Transpose_1" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.3/self_attn/v_proj/MatMul_output_0" + input: "/model/layers.3/self_attn/Constant_2_output_0" + output: "/model/layers.3/self_attn/Reshape_2_output_0" + name: "/model/layers.3/self_attn/Reshape_2" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.3/self_attn/Reshape_2_output_0" + output: "value_states.11" + name: "/model/layers.3/self_attn/Transpose_2" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.3/self_attn/rotary_emb/Constant_output_0" + name: "/model/layers.3/self_attn/rotary_emb/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?AQ\n?\230\270~?\271\374\177?\370\377\177?AQ\n?\230\270~?\271\374\177?\370\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?,<\221>@\251`?\031\256\177?.\377\177?,<\221>@\251`?\031\256\177?.\377\177?\270\315u?2IS?\022\212\177?\322\376\177?\270\315u?2IS?\022\212\177?\322\376\177?\275\377@?\263\314C?\201_\177?e\376\177?\275\377@?\263\314C?\201_\177?e\376\177?" 
+ } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/rotary_emb/Constant_1_output_0" + name: "/model/layers.3/self_attn/rotary_emb/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\244jW?wu\314=W\326#>\324\243>\324\243mN\227>\037\271\365<\223\233D;\303\201\020>mN\227>\037\271\365<\223\233D;\317\275A\277\330a\307>\333\313#=X\022\203;\317\275A\277\330a\307>\333\313#=X\022\203;\020|u\277Dw\365>\364\266L=\336\326\243;\020|u\277Dw\365>\364\266L=\336\326\243;\214\017\217\276i\214\020?\321\234u=Y\233\304;\214\017\217\276i\214\020?\321\234u=Y\233\304;F0(?s\353$?2>\217=\307_\345;F0(?s\353$?2>\217=\307_\345;" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/self_attn/rotary_emb/Constant_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.3/self_attn/Gather_output_0" + name: "/model/layers.3/self_attn/Gather" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.3/self_attn/Constant_3_output_0" + name: "/model/layers.3/self_attn/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/self_attn/Gather_output_0" + input: "/model/layers.3/self_attn/Constant_3_output_0" + output: "/model/layers.3/self_attn/Unsqueeze_output_0" + name: "/model/layers.3/self_attn/Unsqueeze" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.3/self_attn/rotary_emb/Constant_1_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.3/self_attn/Gather_1_output_0" + name: "/model/layers.3/self_attn/Gather_1" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.3/self_attn/Constant_4_output_0" + name: "/model/layers.3/self_attn/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/self_attn/Gather_1_output_0" + input: "/model/layers.3/self_attn/Constant_4_output_0" + output: "/model/layers.3/self_attn/Unsqueeze_1_output_0" + name: "/model/layers.3/self_attn/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.3/self_attn/Transpose_output_0" + input: "/model/layers.3/self_attn/Unsqueeze_output_0" + output: "/model/layers.3/self_attn/Mul_output_0" + name: "/model/layers.3/self_attn/Mul" + op_type: "Mul" + } + node { + output: "/model/layers.3/self_attn/Constant_5_output_0" + name: "/model/layers.3/self_attn/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_6_output_0" + name: "/model/layers.3/self_attn/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_7_output_0" + name: "/model/layers.3/self_attn/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: 
"/model/layers.3/self_attn/Constant_8_output_0" + name: "/model/layers.3/self_attn/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/self_attn/Transpose_output_0" + input: "/model/layers.3/self_attn/Constant_6_output_0" + input: "/model/layers.3/self_attn/Constant_7_output_0" + input: "/model/layers.3/self_attn/Constant_5_output_0" + input: "/model/layers.3/self_attn/Constant_8_output_0" + output: "/model/layers.3/self_attn/Slice_output_0" + name: "/model/layers.3/self_attn/Slice" + op_type: "Slice" + } + node { + output: "/model/layers.3/self_attn/Constant_9_output_0" + name: "/model/layers.3/self_attn/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_10_output_0" + name: "/model/layers.3/self_attn/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_11_output_0" + name: "/model/layers.3/self_attn/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_12_output_0" + name: "/model/layers.3/self_attn/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/self_attn/Transpose_output_0" + input: "/model/layers.3/self_attn/Constant_10_output_0" + input: "/model/layers.3/self_attn/Constant_11_output_0" + input: "/model/layers.3/self_attn/Constant_9_output_0" + input: "/model/layers.3/self_attn/Constant_12_output_0" + output: "/model/layers.3/self_attn/Slice_1_output_0" + name: "/model/layers.3/self_attn/Slice_1" + op_type: "Slice" + } + node { + input: "/model/layers.3/self_attn/Slice_1_output_0" + output: "/model/layers.3/self_attn/Neg_output_0" + name: "/model/layers.3/self_attn/Neg" + op_type: "Neg" + } + node { + input: "/model/layers.3/self_attn/Neg_output_0" + input: "/model/layers.3/self_attn/Slice_output_0" + output: "/model/layers.3/self_attn/Concat_output_0" + name: "/model/layers.3/self_attn/Concat" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.3/self_attn/Concat_output_0" + input: "/model/layers.3/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.3/self_attn/Mul_1_output_0" + name: "/model/layers.3/self_attn/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.3/self_attn/Mul_output_0" + input: "/model/layers.3/self_attn/Mul_1_output_0" + output: "/model/layers.3/self_attn/Add_output_0" + name: "/model/layers.3/self_attn/Add" + op_type: "Add" + } + node { + input: "/model/layers.3/self_attn/Transpose_1_output_0" + input: "/model/layers.3/self_attn/Unsqueeze_output_0" + output: "/model/layers.3/self_attn/Mul_2_output_0" + name: "/model/layers.3/self_attn/Mul_2" + op_type: "Mul" + } + node { + output: "/model/layers.3/self_attn/Constant_13_output_0" + name: "/model/layers.3/self_attn/Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: 
"\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_14_output_0" + name: "/model/layers.3/self_attn/Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_15_output_0" + name: "/model/layers.3/self_attn/Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_16_output_0" + name: "/model/layers.3/self_attn/Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/self_attn/Transpose_1_output_0" + input: "/model/layers.3/self_attn/Constant_14_output_0" + input: "/model/layers.3/self_attn/Constant_15_output_0" + input: "/model/layers.3/self_attn/Constant_13_output_0" + input: "/model/layers.3/self_attn/Constant_16_output_0" + output: "/model/layers.3/self_attn/Slice_2_output_0" + name: "/model/layers.3/self_attn/Slice_2" + op_type: "Slice" + } + node { + output: "/model/layers.3/self_attn/Constant_17_output_0" + name: "/model/layers.3/self_attn/Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_18_output_0" + name: "/model/layers.3/self_attn/Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_19_output_0" + name: "/model/layers.3/self_attn/Constant_19" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.3/self_attn/Constant_20_output_0" + name: "/model/layers.3/self_attn/Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/self_attn/Transpose_1_output_0" + input: "/model/layers.3/self_attn/Constant_18_output_0" + input: "/model/layers.3/self_attn/Constant_19_output_0" + input: "/model/layers.3/self_attn/Constant_17_output_0" + input: "/model/layers.3/self_attn/Constant_20_output_0" + output: "/model/layers.3/self_attn/Slice_3_output_0" + name: "/model/layers.3/self_attn/Slice_3" + op_type: "Slice" + } + node { + input: "/model/layers.3/self_attn/Slice_3_output_0" + output: "/model/layers.3/self_attn/Neg_1_output_0" + name: "/model/layers.3/self_attn/Neg_1" + op_type: "Neg" + } + node { + input: "/model/layers.3/self_attn/Neg_1_output_0" + input: "/model/layers.3/self_attn/Slice_2_output_0" + output: "/model/layers.3/self_attn/Concat_1_output_0" + name: "/model/layers.3/self_attn/Concat_1" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.3/self_attn/Concat_1_output_0" + input: "/model/layers.3/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.3/self_attn/Mul_3_output_0" + name: "/model/layers.3/self_attn/Mul_3" + op_type: "Mul" + } + node { + input: 
"/model/layers.3/self_attn/Mul_2_output_0" + input: "/model/layers.3/self_attn/Mul_3_output_0" + output: "key_states.27" + name: "/model/layers.3/self_attn/Add_1" + op_type: "Add" + } + node { + input: "key_states.27" + output: "/model/layers.3/self_attn/Transpose_3_output_0" + name: "/model/layers.3/self_attn/Transpose_3" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 1 + ints: 3 + ints: 2 + type: INTS + } + } + node { + input: "/model/layers.3/self_attn/Add_output_0" + input: "/model/layers.3/self_attn/Transpose_3_output_0" + output: "/model/layers.3/self_attn/MatMul_output_0" + name: "/model/layers.3/self_attn/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.3/self_attn/Constant_21_output_0" + name: "/model/layers.3/self_attn/Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\363\0045@" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/self_attn/MatMul_output_0" + input: "/model/layers.3/self_attn/Constant_21_output_0" + output: "/model/layers.3/self_attn/Div_output_0" + name: "/model/layers.3/self_attn/Div" + op_type: "Div" + } + node { + input: "/model/layers.3/self_attn/Div_output_0" + input: "/model/Where_3_output_0" + output: "/model/layers.3/self_attn/Add_2_output_0" + name: "/model/layers.3/self_attn/Add_2" + op_type: "Add" + } + node { + input: "/model/layers.3/self_attn/Add_2_output_0" + output: "/model/layers.3/self_attn/Softmax_output_0" + name: "/model/layers.3/self_attn/Softmax" + op_type: "Softmax" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.3/self_attn/Softmax_output_0" + output: "/model/layers.3/self_attn/Cast_output_0" + name: "/model/layers.3/self_attn/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.3/self_attn/Cast_output_0" + output: "/model/layers.3/self_attn/Cast_1_output_0" + name: "/model/layers.3/self_attn/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.3/self_attn/Cast_1_output_0" + input: "value_states.11" + output: "/model/layers.3/self_attn/MatMul_1_output_0" + name: "/model/layers.3/self_attn/MatMul_1" + op_type: "MatMul" + } + node { + input: "/model/layers.3/self_attn/MatMul_1_output_0" + output: "/model/layers.3/self_attn/Transpose_4_output_0" + name: "/model/layers.3/self_attn/Transpose_4" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.3/self_attn/Constant_22_output_0" + name: "/model/layers.3/self_attn/Constant_22" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/self_attn/Transpose_4_output_0" + input: "/model/layers.3/self_attn/Constant_22_output_0" + output: "/model/layers.3/self_attn/Reshape_3_output_0" + name: "/model/layers.3/self_attn/Reshape_3" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.3/self_attn/Reshape_3_output_0" + input: "onnx::MatMul_1898" + output: "/model/layers.3/self_attn/o_proj/MatMul_output_0" + name: "/model/layers.3/self_attn/o_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.3/input_layernorm/Cast_output_0" + input: "/model/layers.3/self_attn/o_proj/MatMul_output_0" 
+ output: "/model/layers.3/Add_output_0" + name: "/model/layers.3/Add" + op_type: "Add" + } + node { + input: "/model/layers.3/Add_output_0" + output: "/model/layers.3/post_attention_layernorm/Cast_output_0" + name: "/model/layers.3/post_attention_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.3/post_attention_layernorm/Constant_output_0" + name: "/model/layers.3/post_attention_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/post_attention_layernorm/Cast_output_0" + input: "/model/layers.3/post_attention_layernorm/Constant_output_0" + output: "/model/layers.3/post_attention_layernorm/Pow_output_0" + name: "/model/layers.3/post_attention_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.3/post_attention_layernorm/Pow_output_0" + output: "/model/layers.3/post_attention_layernorm/ReduceMean_output_0" + name: "/model/layers.3/post_attention_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.3/post_attention_layernorm/Constant_1_output_0" + name: "/model/layers.3/post_attention_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.3/post_attention_layernorm/ReduceMean_output_0" + input: "/model/layers.3/post_attention_layernorm/Constant_1_output_0" + output: "/model/layers.3/post_attention_layernorm/Add_output_0" + name: "/model/layers.3/post_attention_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.3/post_attention_layernorm/Add_output_0" + output: "/model/layers.3/post_attention_layernorm/Sqrt_output_0" + name: "/model/layers.3/post_attention_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.3/post_attention_layernorm/Constant_2_output_0" + name: "/model/layers.3/post_attention_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/layers.3/post_attention_layernorm/Constant_2_output_0" + input: "/model/layers.3/post_attention_layernorm/Sqrt_output_0" + output: "/model/layers.3/post_attention_layernorm/Div_output_0" + name: "/model/layers.3/post_attention_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.3/post_attention_layernorm/Cast_output_0" + input: "/model/layers.3/post_attention_layernorm/Div_output_0" + output: "/model/layers.3/post_attention_layernorm/Mul_output_0" + name: "/model/layers.3/post_attention_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.3/post_attention_layernorm/Mul_output_0" + output: "/model/layers.3/post_attention_layernorm/Cast_1_output_0" + name: "/model/layers.3/post_attention_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.3.post_attention_layernorm.weight" + input: "/model/layers.3/post_attention_layernorm/Cast_1_output_0" + output: "/model/layers.3/post_attention_layernorm/Mul_1_output_0" + name: "/model/layers.3/post_attention_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.3/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1899" + output: "/model/layers.3/mlp/gate_proj/MatMul_output_0" + name: "/model/layers.3/mlp/gate_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.3/mlp/gate_proj/MatMul_output_0" + output: "/model/layers.3/mlp/act_fn/Sigmoid_output_0" + name: "/model/layers.3/mlp/act_fn/Sigmoid" + op_type: "Sigmoid" + } + node { + input: "/model/layers.3/mlp/gate_proj/MatMul_output_0" + input: "/model/layers.3/mlp/act_fn/Sigmoid_output_0" + output: "/model/layers.3/mlp/act_fn/Mul_output_0" + name: "/model/layers.3/mlp/act_fn/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.3/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1900" + output: "/model/layers.3/mlp/up_proj/MatMul_output_0" + name: "/model/layers.3/mlp/up_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.3/mlp/act_fn/Mul_output_0" + input: "/model/layers.3/mlp/up_proj/MatMul_output_0" + output: "/model/layers.3/mlp/Mul_output_0" + name: "/model/layers.3/mlp/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.3/mlp/Mul_output_0" + input: "onnx::MatMul_1901" + output: "/model/layers.3/mlp/down_proj/MatMul_output_0" + name: "/model/layers.3/mlp/down_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.3/post_attention_layernorm/Cast_output_0" + input: "/model/layers.3/mlp/down_proj/MatMul_output_0" + output: "/model/layers.3/Add_1_output_0" + name: "/model/layers.3/Add_1" + op_type: "Add" + } + node { + input: "/model/layers.3/Add_1_output_0" + output: "/model/layers.4/input_layernorm/Cast_output_0" + name: "/model/layers.4/input_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.4/input_layernorm/Constant_output_0" + name: "/model/layers.4/input_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/input_layernorm/Cast_output_0" + input: "/model/layers.4/input_layernorm/Constant_output_0" + output: "/model/layers.4/input_layernorm/Pow_output_0" + name: "/model/layers.4/input_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.4/input_layernorm/Pow_output_0" + output: "/model/layers.4/input_layernorm/ReduceMean_output_0" 
+ name: "/model/layers.4/input_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.4/input_layernorm/Constant_1_output_0" + name: "/model/layers.4/input_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/input_layernorm/ReduceMean_output_0" + input: "/model/layers.4/input_layernorm/Constant_1_output_0" + output: "/model/layers.4/input_layernorm/Add_output_0" + name: "/model/layers.4/input_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.4/input_layernorm/Add_output_0" + output: "/model/layers.4/input_layernorm/Sqrt_output_0" + name: "/model/layers.4/input_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.4/input_layernorm/Constant_2_output_0" + name: "/model/layers.4/input_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/input_layernorm/Constant_2_output_0" + input: "/model/layers.4/input_layernorm/Sqrt_output_0" + output: "/model/layers.4/input_layernorm/Div_output_0" + name: "/model/layers.4/input_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.4/input_layernorm/Cast_output_0" + input: "/model/layers.4/input_layernorm/Div_output_0" + output: "/model/layers.4/input_layernorm/Mul_output_0" + name: "/model/layers.4/input_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.4/input_layernorm/Mul_output_0" + output: "/model/layers.4/input_layernorm/Cast_1_output_0" + name: "/model/layers.4/input_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.4.input_layernorm.weight" + input: "/model/layers.4/input_layernorm/Cast_1_output_0" + output: "/model/layers.4/input_layernorm/Mul_1_output_0" + name: "/model/layers.4/input_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.4/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1902" + output: "/model/layers.4/self_attn/q_proj/MatMul_output_0" + name: "/model/layers.4/self_attn/q_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.4/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1903" + output: "/model/layers.4/self_attn/k_proj/MatMul_output_0" + name: "/model/layers.4/self_attn/k_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.4/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1904" + output: "/model/layers.4/self_attn/v_proj/MatMul_output_0" + name: "/model/layers.4/self_attn/v_proj/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.4/self_attn/Constant_output_0" + name: "/model/layers.4/self_attn/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_1_output_0" + name: "/model/layers.4/self_attn/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR 
+ } + } + node { + output: "/model/layers.4/self_attn/Constant_2_output_0" + name: "/model/layers.4/self_attn/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/self_attn/q_proj/MatMul_output_0" + input: "/model/layers.4/self_attn/Constant_output_0" + output: "/model/layers.4/self_attn/Reshape_output_0" + name: "/model/layers.4/self_attn/Reshape" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.4/self_attn/Reshape_output_0" + output: "/model/layers.4/self_attn/Transpose_output_0" + name: "/model/layers.4/self_attn/Transpose" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.4/self_attn/k_proj/MatMul_output_0" + input: "/model/layers.4/self_attn/Constant_1_output_0" + output: "/model/layers.4/self_attn/Reshape_1_output_0" + name: "/model/layers.4/self_attn/Reshape_1" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.4/self_attn/Reshape_1_output_0" + output: "/model/layers.4/self_attn/Transpose_1_output_0" + name: "/model/layers.4/self_attn/Transpose_1" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.4/self_attn/v_proj/MatMul_output_0" + input: "/model/layers.4/self_attn/Constant_2_output_0" + output: "/model/layers.4/self_attn/Reshape_2_output_0" + name: "/model/layers.4/self_attn/Reshape_2" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.4/self_attn/Reshape_2_output_0" + output: "value_states.15" + name: "/model/layers.4/self_attn/Transpose_2" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.4/self_attn/rotary_emb/Constant_output_0" + name: "/model/layers.4/self_attn/rotary_emb/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?AQ\n?\230\270~?\271\374\177?\370\377\177?AQ\n?\230\270~?\271\374\177?\370\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?,<\221>@\251`?\031\256\177?.\377\177?,<\221>@\251`?\031\256\177?.\377\177?\270\315u?2IS?\022\212\177?\322\376\177?\270\315u?2IS?\022\212\177?\322\376\177?\275\377@?\263\314C?\201_\177?e\376\177?\275\377@?\263\314C?\201_\177?e\376\177?" 
+ } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/rotary_emb/Constant_1_output_0" + name: "/model/layers.4/self_attn/rotary_emb/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\244jW?wu\314=W\326#>\324\243>\324\243mN\227>\037\271\365<\223\233D;\303\201\020>mN\227>\037\271\365<\223\233D;\317\275A\277\330a\307>\333\313#=X\022\203;\317\275A\277\330a\307>\333\313#=X\022\203;\020|u\277Dw\365>\364\266L=\336\326\243;\020|u\277Dw\365>\364\266L=\336\326\243;\214\017\217\276i\214\020?\321\234u=Y\233\304;\214\017\217\276i\214\020?\321\234u=Y\233\304;F0(?s\353$?2>\217=\307_\345;F0(?s\353$?2>\217=\307_\345;" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/self_attn/rotary_emb/Constant_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.4/self_attn/Gather_output_0" + name: "/model/layers.4/self_attn/Gather" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.4/self_attn/Constant_3_output_0" + name: "/model/layers.4/self_attn/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/self_attn/Gather_output_0" + input: "/model/layers.4/self_attn/Constant_3_output_0" + output: "/model/layers.4/self_attn/Unsqueeze_output_0" + name: "/model/layers.4/self_attn/Unsqueeze" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.4/self_attn/rotary_emb/Constant_1_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.4/self_attn/Gather_1_output_0" + name: "/model/layers.4/self_attn/Gather_1" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.4/self_attn/Constant_4_output_0" + name: "/model/layers.4/self_attn/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/self_attn/Gather_1_output_0" + input: "/model/layers.4/self_attn/Constant_4_output_0" + output: "/model/layers.4/self_attn/Unsqueeze_1_output_0" + name: "/model/layers.4/self_attn/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.4/self_attn/Transpose_output_0" + input: "/model/layers.4/self_attn/Unsqueeze_output_0" + output: "/model/layers.4/self_attn/Mul_output_0" + name: "/model/layers.4/self_attn/Mul" + op_type: "Mul" + } + node { + output: "/model/layers.4/self_attn/Constant_5_output_0" + name: "/model/layers.4/self_attn/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_6_output_0" + name: "/model/layers.4/self_attn/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_7_output_0" + name: "/model/layers.4/self_attn/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: 
"/model/layers.4/self_attn/Constant_8_output_0" + name: "/model/layers.4/self_attn/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/self_attn/Transpose_output_0" + input: "/model/layers.4/self_attn/Constant_6_output_0" + input: "/model/layers.4/self_attn/Constant_7_output_0" + input: "/model/layers.4/self_attn/Constant_5_output_0" + input: "/model/layers.4/self_attn/Constant_8_output_0" + output: "/model/layers.4/self_attn/Slice_output_0" + name: "/model/layers.4/self_attn/Slice" + op_type: "Slice" + } + node { + output: "/model/layers.4/self_attn/Constant_9_output_0" + name: "/model/layers.4/self_attn/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_10_output_0" + name: "/model/layers.4/self_attn/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_11_output_0" + name: "/model/layers.4/self_attn/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_12_output_0" + name: "/model/layers.4/self_attn/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/self_attn/Transpose_output_0" + input: "/model/layers.4/self_attn/Constant_10_output_0" + input: "/model/layers.4/self_attn/Constant_11_output_0" + input: "/model/layers.4/self_attn/Constant_9_output_0" + input: "/model/layers.4/self_attn/Constant_12_output_0" + output: "/model/layers.4/self_attn/Slice_1_output_0" + name: "/model/layers.4/self_attn/Slice_1" + op_type: "Slice" + } + node { + input: "/model/layers.4/self_attn/Slice_1_output_0" + output: "/model/layers.4/self_attn/Neg_output_0" + name: "/model/layers.4/self_attn/Neg" + op_type: "Neg" + } + node { + input: "/model/layers.4/self_attn/Neg_output_0" + input: "/model/layers.4/self_attn/Slice_output_0" + output: "/model/layers.4/self_attn/Concat_output_0" + name: "/model/layers.4/self_attn/Concat" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.4/self_attn/Concat_output_0" + input: "/model/layers.4/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.4/self_attn/Mul_1_output_0" + name: "/model/layers.4/self_attn/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.4/self_attn/Mul_output_0" + input: "/model/layers.4/self_attn/Mul_1_output_0" + output: "/model/layers.4/self_attn/Add_output_0" + name: "/model/layers.4/self_attn/Add" + op_type: "Add" + } + node { + input: "/model/layers.4/self_attn/Transpose_1_output_0" + input: "/model/layers.4/self_attn/Unsqueeze_output_0" + output: "/model/layers.4/self_attn/Mul_2_output_0" + name: "/model/layers.4/self_attn/Mul_2" + op_type: "Mul" + } + node { + output: "/model/layers.4/self_attn/Constant_13_output_0" + name: "/model/layers.4/self_attn/Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: 
"\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_14_output_0" + name: "/model/layers.4/self_attn/Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_15_output_0" + name: "/model/layers.4/self_attn/Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_16_output_0" + name: "/model/layers.4/self_attn/Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/self_attn/Transpose_1_output_0" + input: "/model/layers.4/self_attn/Constant_14_output_0" + input: "/model/layers.4/self_attn/Constant_15_output_0" + input: "/model/layers.4/self_attn/Constant_13_output_0" + input: "/model/layers.4/self_attn/Constant_16_output_0" + output: "/model/layers.4/self_attn/Slice_2_output_0" + name: "/model/layers.4/self_attn/Slice_2" + op_type: "Slice" + } + node { + output: "/model/layers.4/self_attn/Constant_17_output_0" + name: "/model/layers.4/self_attn/Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_18_output_0" + name: "/model/layers.4/self_attn/Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_19_output_0" + name: "/model/layers.4/self_attn/Constant_19" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.4/self_attn/Constant_20_output_0" + name: "/model/layers.4/self_attn/Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/self_attn/Transpose_1_output_0" + input: "/model/layers.4/self_attn/Constant_18_output_0" + input: "/model/layers.4/self_attn/Constant_19_output_0" + input: "/model/layers.4/self_attn/Constant_17_output_0" + input: "/model/layers.4/self_attn/Constant_20_output_0" + output: "/model/layers.4/self_attn/Slice_3_output_0" + name: "/model/layers.4/self_attn/Slice_3" + op_type: "Slice" + } + node { + input: "/model/layers.4/self_attn/Slice_3_output_0" + output: "/model/layers.4/self_attn/Neg_1_output_0" + name: "/model/layers.4/self_attn/Neg_1" + op_type: "Neg" + } + node { + input: "/model/layers.4/self_attn/Neg_1_output_0" + input: "/model/layers.4/self_attn/Slice_2_output_0" + output: "/model/layers.4/self_attn/Concat_1_output_0" + name: "/model/layers.4/self_attn/Concat_1" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.4/self_attn/Concat_1_output_0" + input: "/model/layers.4/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.4/self_attn/Mul_3_output_0" + name: "/model/layers.4/self_attn/Mul_3" + op_type: "Mul" + } + node { + input: 
"/model/layers.4/self_attn/Mul_2_output_0" + input: "/model/layers.4/self_attn/Mul_3_output_0" + output: "key_states.35" + name: "/model/layers.4/self_attn/Add_1" + op_type: "Add" + } + node { + input: "key_states.35" + output: "/model/layers.4/self_attn/Transpose_3_output_0" + name: "/model/layers.4/self_attn/Transpose_3" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 1 + ints: 3 + ints: 2 + type: INTS + } + } + node { + input: "/model/layers.4/self_attn/Add_output_0" + input: "/model/layers.4/self_attn/Transpose_3_output_0" + output: "/model/layers.4/self_attn/MatMul_output_0" + name: "/model/layers.4/self_attn/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.4/self_attn/Constant_21_output_0" + name: "/model/layers.4/self_attn/Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\363\0045@" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/self_attn/MatMul_output_0" + input: "/model/layers.4/self_attn/Constant_21_output_0" + output: "/model/layers.4/self_attn/Div_output_0" + name: "/model/layers.4/self_attn/Div" + op_type: "Div" + } + node { + input: "/model/layers.4/self_attn/Div_output_0" + input: "/model/Where_3_output_0" + output: "/model/layers.4/self_attn/Add_2_output_0" + name: "/model/layers.4/self_attn/Add_2" + op_type: "Add" + } + node { + input: "/model/layers.4/self_attn/Add_2_output_0" + output: "/model/layers.4/self_attn/Softmax_output_0" + name: "/model/layers.4/self_attn/Softmax" + op_type: "Softmax" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.4/self_attn/Softmax_output_0" + output: "/model/layers.4/self_attn/Cast_output_0" + name: "/model/layers.4/self_attn/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.4/self_attn/Cast_output_0" + output: "/model/layers.4/self_attn/Cast_1_output_0" + name: "/model/layers.4/self_attn/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.4/self_attn/Cast_1_output_0" + input: "value_states.15" + output: "/model/layers.4/self_attn/MatMul_1_output_0" + name: "/model/layers.4/self_attn/MatMul_1" + op_type: "MatMul" + } + node { + input: "/model/layers.4/self_attn/MatMul_1_output_0" + output: "/model/layers.4/self_attn/Transpose_4_output_0" + name: "/model/layers.4/self_attn/Transpose_4" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.4/self_attn/Constant_22_output_0" + name: "/model/layers.4/self_attn/Constant_22" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/self_attn/Transpose_4_output_0" + input: "/model/layers.4/self_attn/Constant_22_output_0" + output: "/model/layers.4/self_attn/Reshape_3_output_0" + name: "/model/layers.4/self_attn/Reshape_3" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.4/self_attn/Reshape_3_output_0" + input: "onnx::MatMul_1940" + output: "/model/layers.4/self_attn/o_proj/MatMul_output_0" + name: "/model/layers.4/self_attn/o_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.4/input_layernorm/Cast_output_0" + input: "/model/layers.4/self_attn/o_proj/MatMul_output_0" 
+ output: "/model/layers.4/Add_output_0" + name: "/model/layers.4/Add" + op_type: "Add" + } + node { + input: "/model/layers.4/Add_output_0" + output: "/model/layers.4/post_attention_layernorm/Cast_output_0" + name: "/model/layers.4/post_attention_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.4/post_attention_layernorm/Constant_output_0" + name: "/model/layers.4/post_attention_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/post_attention_layernorm/Cast_output_0" + input: "/model/layers.4/post_attention_layernorm/Constant_output_0" + output: "/model/layers.4/post_attention_layernorm/Pow_output_0" + name: "/model/layers.4/post_attention_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.4/post_attention_layernorm/Pow_output_0" + output: "/model/layers.4/post_attention_layernorm/ReduceMean_output_0" + name: "/model/layers.4/post_attention_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.4/post_attention_layernorm/Constant_1_output_0" + name: "/model/layers.4/post_attention_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.4/post_attention_layernorm/ReduceMean_output_0" + input: "/model/layers.4/post_attention_layernorm/Constant_1_output_0" + output: "/model/layers.4/post_attention_layernorm/Add_output_0" + name: "/model/layers.4/post_attention_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.4/post_attention_layernorm/Add_output_0" + output: "/model/layers.4/post_attention_layernorm/Sqrt_output_0" + name: "/model/layers.4/post_attention_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.4/post_attention_layernorm/Constant_2_output_0" + name: "/model/layers.4/post_attention_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/layers.4/post_attention_layernorm/Constant_2_output_0" + input: "/model/layers.4/post_attention_layernorm/Sqrt_output_0" + output: "/model/layers.4/post_attention_layernorm/Div_output_0" + name: "/model/layers.4/post_attention_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.4/post_attention_layernorm/Cast_output_0" + input: "/model/layers.4/post_attention_layernorm/Div_output_0" + output: "/model/layers.4/post_attention_layernorm/Mul_output_0" + name: "/model/layers.4/post_attention_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.4/post_attention_layernorm/Mul_output_0" + output: "/model/layers.4/post_attention_layernorm/Cast_1_output_0" + name: "/model/layers.4/post_attention_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.4.post_attention_layernorm.weight" + input: "/model/layers.4/post_attention_layernorm/Cast_1_output_0" + output: "/model/layers.4/post_attention_layernorm/Mul_1_output_0" + name: "/model/layers.4/post_attention_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.4/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1941" + output: "/model/layers.4/mlp/gate_proj/MatMul_output_0" + name: "/model/layers.4/mlp/gate_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.4/mlp/gate_proj/MatMul_output_0" + output: "/model/layers.4/mlp/act_fn/Sigmoid_output_0" + name: "/model/layers.4/mlp/act_fn/Sigmoid" + op_type: "Sigmoid" + } + node { + input: "/model/layers.4/mlp/gate_proj/MatMul_output_0" + input: "/model/layers.4/mlp/act_fn/Sigmoid_output_0" + output: "/model/layers.4/mlp/act_fn/Mul_output_0" + name: "/model/layers.4/mlp/act_fn/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.4/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1942" + output: "/model/layers.4/mlp/up_proj/MatMul_output_0" + name: "/model/layers.4/mlp/up_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.4/mlp/act_fn/Mul_output_0" + input: "/model/layers.4/mlp/up_proj/MatMul_output_0" + output: "/model/layers.4/mlp/Mul_output_0" + name: "/model/layers.4/mlp/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.4/mlp/Mul_output_0" + input: "onnx::MatMul_1943" + output: "/model/layers.4/mlp/down_proj/MatMul_output_0" + name: "/model/layers.4/mlp/down_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.4/post_attention_layernorm/Cast_output_0" + input: "/model/layers.4/mlp/down_proj/MatMul_output_0" + output: "/model/layers.4/Add_1_output_0" + name: "/model/layers.4/Add_1" + op_type: "Add" + } + node { + input: "/model/layers.4/Add_1_output_0" + output: "/model/layers.5/input_layernorm/Cast_output_0" + name: "/model/layers.5/input_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.5/input_layernorm/Constant_output_0" + name: "/model/layers.5/input_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/input_layernorm/Cast_output_0" + input: "/model/layers.5/input_layernorm/Constant_output_0" + output: "/model/layers.5/input_layernorm/Pow_output_0" + name: "/model/layers.5/input_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.5/input_layernorm/Pow_output_0" + output: "/model/layers.5/input_layernorm/ReduceMean_output_0" 
+ name: "/model/layers.5/input_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.5/input_layernorm/Constant_1_output_0" + name: "/model/layers.5/input_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/input_layernorm/ReduceMean_output_0" + input: "/model/layers.5/input_layernorm/Constant_1_output_0" + output: "/model/layers.5/input_layernorm/Add_output_0" + name: "/model/layers.5/input_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.5/input_layernorm/Add_output_0" + output: "/model/layers.5/input_layernorm/Sqrt_output_0" + name: "/model/layers.5/input_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.5/input_layernorm/Constant_2_output_0" + name: "/model/layers.5/input_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/input_layernorm/Constant_2_output_0" + input: "/model/layers.5/input_layernorm/Sqrt_output_0" + output: "/model/layers.5/input_layernorm/Div_output_0" + name: "/model/layers.5/input_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.5/input_layernorm/Cast_output_0" + input: "/model/layers.5/input_layernorm/Div_output_0" + output: "/model/layers.5/input_layernorm/Mul_output_0" + name: "/model/layers.5/input_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.5/input_layernorm/Mul_output_0" + output: "/model/layers.5/input_layernorm/Cast_1_output_0" + name: "/model/layers.5/input_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.5.input_layernorm.weight" + input: "/model/layers.5/input_layernorm/Cast_1_output_0" + output: "/model/layers.5/input_layernorm/Mul_1_output_0" + name: "/model/layers.5/input_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.5/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1944" + output: "/model/layers.5/self_attn/q_proj/MatMul_output_0" + name: "/model/layers.5/self_attn/q_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.5/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1945" + output: "/model/layers.5/self_attn/k_proj/MatMul_output_0" + name: "/model/layers.5/self_attn/k_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.5/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1946" + output: "/model/layers.5/self_attn/v_proj/MatMul_output_0" + name: "/model/layers.5/self_attn/v_proj/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.5/self_attn/Constant_output_0" + name: "/model/layers.5/self_attn/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_1_output_0" + name: "/model/layers.5/self_attn/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR 
+ } + } + node { + output: "/model/layers.5/self_attn/Constant_2_output_0" + name: "/model/layers.5/self_attn/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/self_attn/q_proj/MatMul_output_0" + input: "/model/layers.5/self_attn/Constant_output_0" + output: "/model/layers.5/self_attn/Reshape_output_0" + name: "/model/layers.5/self_attn/Reshape" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.5/self_attn/Reshape_output_0" + output: "/model/layers.5/self_attn/Transpose_output_0" + name: "/model/layers.5/self_attn/Transpose" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.5/self_attn/k_proj/MatMul_output_0" + input: "/model/layers.5/self_attn/Constant_1_output_0" + output: "/model/layers.5/self_attn/Reshape_1_output_0" + name: "/model/layers.5/self_attn/Reshape_1" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.5/self_attn/Reshape_1_output_0" + output: "/model/layers.5/self_attn/Transpose_1_output_0" + name: "/model/layers.5/self_attn/Transpose_1" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.5/self_attn/v_proj/MatMul_output_0" + input: "/model/layers.5/self_attn/Constant_2_output_0" + output: "/model/layers.5/self_attn/Reshape_2_output_0" + name: "/model/layers.5/self_attn/Reshape_2" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.5/self_attn/Reshape_2_output_0" + output: "value_states.19" + name: "/model/layers.5/self_attn/Transpose_2" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.5/self_attn/rotary_emb/Constant_output_0" + name: "/model/layers.5/self_attn/rotary_emb/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?AQ\n?\230\270~?\271\374\177?\370\377\177?AQ\n?\230\270~?\271\374\177?\370\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?,<\221>@\251`?\031\256\177?.\377\177?,<\221>@\251`?\031\256\177?.\377\177?\270\315u?2IS?\022\212\177?\322\376\177?\270\315u?2IS?\022\212\177?\322\376\177?\275\377@?\263\314C?\201_\177?e\376\177?\275\377@?\263\314C?\201_\177?e\376\177?" 
+ } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/rotary_emb/Constant_1_output_0" + name: "/model/layers.5/self_attn/rotary_emb/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\244jW?wu\314=W\326#>\324\243>\324\243mN\227>\037\271\365<\223\233D;\303\201\020>mN\227>\037\271\365<\223\233D;\317\275A\277\330a\307>\333\313#=X\022\203;\317\275A\277\330a\307>\333\313#=X\022\203;\020|u\277Dw\365>\364\266L=\336\326\243;\020|u\277Dw\365>\364\266L=\336\326\243;\214\017\217\276i\214\020?\321\234u=Y\233\304;\214\017\217\276i\214\020?\321\234u=Y\233\304;F0(?s\353$?2>\217=\307_\345;F0(?s\353$?2>\217=\307_\345;" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/self_attn/rotary_emb/Constant_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.5/self_attn/Gather_output_0" + name: "/model/layers.5/self_attn/Gather" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.5/self_attn/Constant_3_output_0" + name: "/model/layers.5/self_attn/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/self_attn/Gather_output_0" + input: "/model/layers.5/self_attn/Constant_3_output_0" + output: "/model/layers.5/self_attn/Unsqueeze_output_0" + name: "/model/layers.5/self_attn/Unsqueeze" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.5/self_attn/rotary_emb/Constant_1_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.5/self_attn/Gather_1_output_0" + name: "/model/layers.5/self_attn/Gather_1" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.5/self_attn/Constant_4_output_0" + name: "/model/layers.5/self_attn/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/self_attn/Gather_1_output_0" + input: "/model/layers.5/self_attn/Constant_4_output_0" + output: "/model/layers.5/self_attn/Unsqueeze_1_output_0" + name: "/model/layers.5/self_attn/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.5/self_attn/Transpose_output_0" + input: "/model/layers.5/self_attn/Unsqueeze_output_0" + output: "/model/layers.5/self_attn/Mul_output_0" + name: "/model/layers.5/self_attn/Mul" + op_type: "Mul" + } + node { + output: "/model/layers.5/self_attn/Constant_5_output_0" + name: "/model/layers.5/self_attn/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_6_output_0" + name: "/model/layers.5/self_attn/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_7_output_0" + name: "/model/layers.5/self_attn/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: 
"/model/layers.5/self_attn/Constant_8_output_0" + name: "/model/layers.5/self_attn/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/self_attn/Transpose_output_0" + input: "/model/layers.5/self_attn/Constant_6_output_0" + input: "/model/layers.5/self_attn/Constant_7_output_0" + input: "/model/layers.5/self_attn/Constant_5_output_0" + input: "/model/layers.5/self_attn/Constant_8_output_0" + output: "/model/layers.5/self_attn/Slice_output_0" + name: "/model/layers.5/self_attn/Slice" + op_type: "Slice" + } + node { + output: "/model/layers.5/self_attn/Constant_9_output_0" + name: "/model/layers.5/self_attn/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_10_output_0" + name: "/model/layers.5/self_attn/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_11_output_0" + name: "/model/layers.5/self_attn/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_12_output_0" + name: "/model/layers.5/self_attn/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/self_attn/Transpose_output_0" + input: "/model/layers.5/self_attn/Constant_10_output_0" + input: "/model/layers.5/self_attn/Constant_11_output_0" + input: "/model/layers.5/self_attn/Constant_9_output_0" + input: "/model/layers.5/self_attn/Constant_12_output_0" + output: "/model/layers.5/self_attn/Slice_1_output_0" + name: "/model/layers.5/self_attn/Slice_1" + op_type: "Slice" + } + node { + input: "/model/layers.5/self_attn/Slice_1_output_0" + output: "/model/layers.5/self_attn/Neg_output_0" + name: "/model/layers.5/self_attn/Neg" + op_type: "Neg" + } + node { + input: "/model/layers.5/self_attn/Neg_output_0" + input: "/model/layers.5/self_attn/Slice_output_0" + output: "/model/layers.5/self_attn/Concat_output_0" + name: "/model/layers.5/self_attn/Concat" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.5/self_attn/Concat_output_0" + input: "/model/layers.5/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.5/self_attn/Mul_1_output_0" + name: "/model/layers.5/self_attn/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.5/self_attn/Mul_output_0" + input: "/model/layers.5/self_attn/Mul_1_output_0" + output: "/model/layers.5/self_attn/Add_output_0" + name: "/model/layers.5/self_attn/Add" + op_type: "Add" + } + node { + input: "/model/layers.5/self_attn/Transpose_1_output_0" + input: "/model/layers.5/self_attn/Unsqueeze_output_0" + output: "/model/layers.5/self_attn/Mul_2_output_0" + name: "/model/layers.5/self_attn/Mul_2" + op_type: "Mul" + } + node { + output: "/model/layers.5/self_attn/Constant_13_output_0" + name: "/model/layers.5/self_attn/Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: 
"\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_14_output_0" + name: "/model/layers.5/self_attn/Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_15_output_0" + name: "/model/layers.5/self_attn/Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_16_output_0" + name: "/model/layers.5/self_attn/Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/self_attn/Transpose_1_output_0" + input: "/model/layers.5/self_attn/Constant_14_output_0" + input: "/model/layers.5/self_attn/Constant_15_output_0" + input: "/model/layers.5/self_attn/Constant_13_output_0" + input: "/model/layers.5/self_attn/Constant_16_output_0" + output: "/model/layers.5/self_attn/Slice_2_output_0" + name: "/model/layers.5/self_attn/Slice_2" + op_type: "Slice" + } + node { + output: "/model/layers.5/self_attn/Constant_17_output_0" + name: "/model/layers.5/self_attn/Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_18_output_0" + name: "/model/layers.5/self_attn/Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_19_output_0" + name: "/model/layers.5/self_attn/Constant_19" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.5/self_attn/Constant_20_output_0" + name: "/model/layers.5/self_attn/Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/self_attn/Transpose_1_output_0" + input: "/model/layers.5/self_attn/Constant_18_output_0" + input: "/model/layers.5/self_attn/Constant_19_output_0" + input: "/model/layers.5/self_attn/Constant_17_output_0" + input: "/model/layers.5/self_attn/Constant_20_output_0" + output: "/model/layers.5/self_attn/Slice_3_output_0" + name: "/model/layers.5/self_attn/Slice_3" + op_type: "Slice" + } + node { + input: "/model/layers.5/self_attn/Slice_3_output_0" + output: "/model/layers.5/self_attn/Neg_1_output_0" + name: "/model/layers.5/self_attn/Neg_1" + op_type: "Neg" + } + node { + input: "/model/layers.5/self_attn/Neg_1_output_0" + input: "/model/layers.5/self_attn/Slice_2_output_0" + output: "/model/layers.5/self_attn/Concat_1_output_0" + name: "/model/layers.5/self_attn/Concat_1" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.5/self_attn/Concat_1_output_0" + input: "/model/layers.5/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.5/self_attn/Mul_3_output_0" + name: "/model/layers.5/self_attn/Mul_3" + op_type: "Mul" + } + node { + input: 
"/model/layers.5/self_attn/Mul_2_output_0" + input: "/model/layers.5/self_attn/Mul_3_output_0" + output: "key_states.43" + name: "/model/layers.5/self_attn/Add_1" + op_type: "Add" + } + node { + input: "key_states.43" + output: "/model/layers.5/self_attn/Transpose_3_output_0" + name: "/model/layers.5/self_attn/Transpose_3" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 1 + ints: 3 + ints: 2 + type: INTS + } + } + node { + input: "/model/layers.5/self_attn/Add_output_0" + input: "/model/layers.5/self_attn/Transpose_3_output_0" + output: "/model/layers.5/self_attn/MatMul_output_0" + name: "/model/layers.5/self_attn/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.5/self_attn/Constant_21_output_0" + name: "/model/layers.5/self_attn/Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\363\0045@" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/self_attn/MatMul_output_0" + input: "/model/layers.5/self_attn/Constant_21_output_0" + output: "/model/layers.5/self_attn/Div_output_0" + name: "/model/layers.5/self_attn/Div" + op_type: "Div" + } + node { + input: "/model/layers.5/self_attn/Div_output_0" + input: "/model/Where_3_output_0" + output: "/model/layers.5/self_attn/Add_2_output_0" + name: "/model/layers.5/self_attn/Add_2" + op_type: "Add" + } + node { + input: "/model/layers.5/self_attn/Add_2_output_0" + output: "/model/layers.5/self_attn/Softmax_output_0" + name: "/model/layers.5/self_attn/Softmax" + op_type: "Softmax" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.5/self_attn/Softmax_output_0" + output: "/model/layers.5/self_attn/Cast_output_0" + name: "/model/layers.5/self_attn/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.5/self_attn/Cast_output_0" + output: "/model/layers.5/self_attn/Cast_1_output_0" + name: "/model/layers.5/self_attn/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.5/self_attn/Cast_1_output_0" + input: "value_states.19" + output: "/model/layers.5/self_attn/MatMul_1_output_0" + name: "/model/layers.5/self_attn/MatMul_1" + op_type: "MatMul" + } + node { + input: "/model/layers.5/self_attn/MatMul_1_output_0" + output: "/model/layers.5/self_attn/Transpose_4_output_0" + name: "/model/layers.5/self_attn/Transpose_4" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.5/self_attn/Constant_22_output_0" + name: "/model/layers.5/self_attn/Constant_22" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/self_attn/Transpose_4_output_0" + input: "/model/layers.5/self_attn/Constant_22_output_0" + output: "/model/layers.5/self_attn/Reshape_3_output_0" + name: "/model/layers.5/self_attn/Reshape_3" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.5/self_attn/Reshape_3_output_0" + input: "onnx::MatMul_1982" + output: "/model/layers.5/self_attn/o_proj/MatMul_output_0" + name: "/model/layers.5/self_attn/o_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.5/input_layernorm/Cast_output_0" + input: "/model/layers.5/self_attn/o_proj/MatMul_output_0" 
+ output: "/model/layers.5/Add_output_0" + name: "/model/layers.5/Add" + op_type: "Add" + } + node { + input: "/model/layers.5/Add_output_0" + output: "/model/layers.5/post_attention_layernorm/Cast_output_0" + name: "/model/layers.5/post_attention_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.5/post_attention_layernorm/Constant_output_0" + name: "/model/layers.5/post_attention_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/post_attention_layernorm/Cast_output_0" + input: "/model/layers.5/post_attention_layernorm/Constant_output_0" + output: "/model/layers.5/post_attention_layernorm/Pow_output_0" + name: "/model/layers.5/post_attention_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.5/post_attention_layernorm/Pow_output_0" + output: "/model/layers.5/post_attention_layernorm/ReduceMean_output_0" + name: "/model/layers.5/post_attention_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.5/post_attention_layernorm/Constant_1_output_0" + name: "/model/layers.5/post_attention_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.5/post_attention_layernorm/ReduceMean_output_0" + input: "/model/layers.5/post_attention_layernorm/Constant_1_output_0" + output: "/model/layers.5/post_attention_layernorm/Add_output_0" + name: "/model/layers.5/post_attention_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.5/post_attention_layernorm/Add_output_0" + output: "/model/layers.5/post_attention_layernorm/Sqrt_output_0" + name: "/model/layers.5/post_attention_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.5/post_attention_layernorm/Constant_2_output_0" + name: "/model/layers.5/post_attention_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/layers.5/post_attention_layernorm/Constant_2_output_0" + input: "/model/layers.5/post_attention_layernorm/Sqrt_output_0" + output: "/model/layers.5/post_attention_layernorm/Div_output_0" + name: "/model/layers.5/post_attention_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.5/post_attention_layernorm/Cast_output_0" + input: "/model/layers.5/post_attention_layernorm/Div_output_0" + output: "/model/layers.5/post_attention_layernorm/Mul_output_0" + name: "/model/layers.5/post_attention_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.5/post_attention_layernorm/Mul_output_0" + output: "/model/layers.5/post_attention_layernorm/Cast_1_output_0" + name: "/model/layers.5/post_attention_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.5.post_attention_layernorm.weight" + input: "/model/layers.5/post_attention_layernorm/Cast_1_output_0" + output: "/model/layers.5/post_attention_layernorm/Mul_1_output_0" + name: "/model/layers.5/post_attention_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.5/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1983" + output: "/model/layers.5/mlp/gate_proj/MatMul_output_0" + name: "/model/layers.5/mlp/gate_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.5/mlp/gate_proj/MatMul_output_0" + output: "/model/layers.5/mlp/act_fn/Sigmoid_output_0" + name: "/model/layers.5/mlp/act_fn/Sigmoid" + op_type: "Sigmoid" + } + node { + input: "/model/layers.5/mlp/gate_proj/MatMul_output_0" + input: "/model/layers.5/mlp/act_fn/Sigmoid_output_0" + output: "/model/layers.5/mlp/act_fn/Mul_output_0" + name: "/model/layers.5/mlp/act_fn/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.5/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1984" + output: "/model/layers.5/mlp/up_proj/MatMul_output_0" + name: "/model/layers.5/mlp/up_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.5/mlp/act_fn/Mul_output_0" + input: "/model/layers.5/mlp/up_proj/MatMul_output_0" + output: "/model/layers.5/mlp/Mul_output_0" + name: "/model/layers.5/mlp/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.5/mlp/Mul_output_0" + input: "onnx::MatMul_1985" + output: "/model/layers.5/mlp/down_proj/MatMul_output_0" + name: "/model/layers.5/mlp/down_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.5/post_attention_layernorm/Cast_output_0" + input: "/model/layers.5/mlp/down_proj/MatMul_output_0" + output: "/model/layers.5/Add_1_output_0" + name: "/model/layers.5/Add_1" + op_type: "Add" + } + node { + input: "/model/layers.5/Add_1_output_0" + output: "/model/layers.6/input_layernorm/Cast_output_0" + name: "/model/layers.6/input_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.6/input_layernorm/Constant_output_0" + name: "/model/layers.6/input_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/input_layernorm/Cast_output_0" + input: "/model/layers.6/input_layernorm/Constant_output_0" + output: "/model/layers.6/input_layernorm/Pow_output_0" + name: "/model/layers.6/input_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.6/input_layernorm/Pow_output_0" + output: "/model/layers.6/input_layernorm/ReduceMean_output_0" 
+ name: "/model/layers.6/input_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.6/input_layernorm/Constant_1_output_0" + name: "/model/layers.6/input_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/input_layernorm/ReduceMean_output_0" + input: "/model/layers.6/input_layernorm/Constant_1_output_0" + output: "/model/layers.6/input_layernorm/Add_output_0" + name: "/model/layers.6/input_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.6/input_layernorm/Add_output_0" + output: "/model/layers.6/input_layernorm/Sqrt_output_0" + name: "/model/layers.6/input_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.6/input_layernorm/Constant_2_output_0" + name: "/model/layers.6/input_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/input_layernorm/Constant_2_output_0" + input: "/model/layers.6/input_layernorm/Sqrt_output_0" + output: "/model/layers.6/input_layernorm/Div_output_0" + name: "/model/layers.6/input_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.6/input_layernorm/Cast_output_0" + input: "/model/layers.6/input_layernorm/Div_output_0" + output: "/model/layers.6/input_layernorm/Mul_output_0" + name: "/model/layers.6/input_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.6/input_layernorm/Mul_output_0" + output: "/model/layers.6/input_layernorm/Cast_1_output_0" + name: "/model/layers.6/input_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.6.input_layernorm.weight" + input: "/model/layers.6/input_layernorm/Cast_1_output_0" + output: "/model/layers.6/input_layernorm/Mul_1_output_0" + name: "/model/layers.6/input_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.6/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1986" + output: "/model/layers.6/self_attn/q_proj/MatMul_output_0" + name: "/model/layers.6/self_attn/q_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.6/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1987" + output: "/model/layers.6/self_attn/k_proj/MatMul_output_0" + name: "/model/layers.6/self_attn/k_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.6/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_1988" + output: "/model/layers.6/self_attn/v_proj/MatMul_output_0" + name: "/model/layers.6/self_attn/v_proj/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.6/self_attn/Constant_output_0" + name: "/model/layers.6/self_attn/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_1_output_0" + name: "/model/layers.6/self_attn/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR 
+ } + } + node { + output: "/model/layers.6/self_attn/Constant_2_output_0" + name: "/model/layers.6/self_attn/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/self_attn/q_proj/MatMul_output_0" + input: "/model/layers.6/self_attn/Constant_output_0" + output: "/model/layers.6/self_attn/Reshape_output_0" + name: "/model/layers.6/self_attn/Reshape" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.6/self_attn/Reshape_output_0" + output: "/model/layers.6/self_attn/Transpose_output_0" + name: "/model/layers.6/self_attn/Transpose" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.6/self_attn/k_proj/MatMul_output_0" + input: "/model/layers.6/self_attn/Constant_1_output_0" + output: "/model/layers.6/self_attn/Reshape_1_output_0" + name: "/model/layers.6/self_attn/Reshape_1" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.6/self_attn/Reshape_1_output_0" + output: "/model/layers.6/self_attn/Transpose_1_output_0" + name: "/model/layers.6/self_attn/Transpose_1" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.6/self_attn/v_proj/MatMul_output_0" + input: "/model/layers.6/self_attn/Constant_2_output_0" + output: "/model/layers.6/self_attn/Reshape_2_output_0" + name: "/model/layers.6/self_attn/Reshape_2" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.6/self_attn/Reshape_2_output_0" + output: "value_states.23" + name: "/model/layers.6/self_attn/Transpose_2" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.6/self_attn/rotary_emb/Constant_output_0" + name: "/model/layers.6/self_attn/rotary_emb/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?AQ\n?\230\270~?\271\374\177?\370\377\177?AQ\n?\230\270~?\271\374\177?\370\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?,<\221>@\251`?\031\256\177?.\377\177?,<\221>@\251`?\031\256\177?.\377\177?\270\315u?2IS?\022\212\177?\322\376\177?\270\315u?2IS?\022\212\177?\322\376\177?\275\377@?\263\314C?\201_\177?e\376\177?\275\377@?\263\314C?\201_\177?e\376\177?" 
+ } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/rotary_emb/Constant_1_output_0" + name: "/model/layers.6/self_attn/rotary_emb/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\244jW?wu\314=W\326#>\324\243>\324\243mN\227>\037\271\365<\223\233D;\303\201\020>mN\227>\037\271\365<\223\233D;\317\275A\277\330a\307>\333\313#=X\022\203;\317\275A\277\330a\307>\333\313#=X\022\203;\020|u\277Dw\365>\364\266L=\336\326\243;\020|u\277Dw\365>\364\266L=\336\326\243;\214\017\217\276i\214\020?\321\234u=Y\233\304;\214\017\217\276i\214\020?\321\234u=Y\233\304;F0(?s\353$?2>\217=\307_\345;F0(?s\353$?2>\217=\307_\345;" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/self_attn/rotary_emb/Constant_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.6/self_attn/Gather_output_0" + name: "/model/layers.6/self_attn/Gather" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.6/self_attn/Constant_3_output_0" + name: "/model/layers.6/self_attn/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/self_attn/Gather_output_0" + input: "/model/layers.6/self_attn/Constant_3_output_0" + output: "/model/layers.6/self_attn/Unsqueeze_output_0" + name: "/model/layers.6/self_attn/Unsqueeze" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.6/self_attn/rotary_emb/Constant_1_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.6/self_attn/Gather_1_output_0" + name: "/model/layers.6/self_attn/Gather_1" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.6/self_attn/Constant_4_output_0" + name: "/model/layers.6/self_attn/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/self_attn/Gather_1_output_0" + input: "/model/layers.6/self_attn/Constant_4_output_0" + output: "/model/layers.6/self_attn/Unsqueeze_1_output_0" + name: "/model/layers.6/self_attn/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.6/self_attn/Transpose_output_0" + input: "/model/layers.6/self_attn/Unsqueeze_output_0" + output: "/model/layers.6/self_attn/Mul_output_0" + name: "/model/layers.6/self_attn/Mul" + op_type: "Mul" + } + node { + output: "/model/layers.6/self_attn/Constant_5_output_0" + name: "/model/layers.6/self_attn/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_6_output_0" + name: "/model/layers.6/self_attn/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_7_output_0" + name: "/model/layers.6/self_attn/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: 
"/model/layers.6/self_attn/Constant_8_output_0" + name: "/model/layers.6/self_attn/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/self_attn/Transpose_output_0" + input: "/model/layers.6/self_attn/Constant_6_output_0" + input: "/model/layers.6/self_attn/Constant_7_output_0" + input: "/model/layers.6/self_attn/Constant_5_output_0" + input: "/model/layers.6/self_attn/Constant_8_output_0" + output: "/model/layers.6/self_attn/Slice_output_0" + name: "/model/layers.6/self_attn/Slice" + op_type: "Slice" + } + node { + output: "/model/layers.6/self_attn/Constant_9_output_0" + name: "/model/layers.6/self_attn/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_10_output_0" + name: "/model/layers.6/self_attn/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_11_output_0" + name: "/model/layers.6/self_attn/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_12_output_0" + name: "/model/layers.6/self_attn/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/self_attn/Transpose_output_0" + input: "/model/layers.6/self_attn/Constant_10_output_0" + input: "/model/layers.6/self_attn/Constant_11_output_0" + input: "/model/layers.6/self_attn/Constant_9_output_0" + input: "/model/layers.6/self_attn/Constant_12_output_0" + output: "/model/layers.6/self_attn/Slice_1_output_0" + name: "/model/layers.6/self_attn/Slice_1" + op_type: "Slice" + } + node { + input: "/model/layers.6/self_attn/Slice_1_output_0" + output: "/model/layers.6/self_attn/Neg_output_0" + name: "/model/layers.6/self_attn/Neg" + op_type: "Neg" + } + node { + input: "/model/layers.6/self_attn/Neg_output_0" + input: "/model/layers.6/self_attn/Slice_output_0" + output: "/model/layers.6/self_attn/Concat_output_0" + name: "/model/layers.6/self_attn/Concat" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.6/self_attn/Concat_output_0" + input: "/model/layers.6/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.6/self_attn/Mul_1_output_0" + name: "/model/layers.6/self_attn/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.6/self_attn/Mul_output_0" + input: "/model/layers.6/self_attn/Mul_1_output_0" + output: "/model/layers.6/self_attn/Add_output_0" + name: "/model/layers.6/self_attn/Add" + op_type: "Add" + } + node { + input: "/model/layers.6/self_attn/Transpose_1_output_0" + input: "/model/layers.6/self_attn/Unsqueeze_output_0" + output: "/model/layers.6/self_attn/Mul_2_output_0" + name: "/model/layers.6/self_attn/Mul_2" + op_type: "Mul" + } + node { + output: "/model/layers.6/self_attn/Constant_13_output_0" + name: "/model/layers.6/self_attn/Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: 
"\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_14_output_0" + name: "/model/layers.6/self_attn/Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_15_output_0" + name: "/model/layers.6/self_attn/Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_16_output_0" + name: "/model/layers.6/self_attn/Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/self_attn/Transpose_1_output_0" + input: "/model/layers.6/self_attn/Constant_14_output_0" + input: "/model/layers.6/self_attn/Constant_15_output_0" + input: "/model/layers.6/self_attn/Constant_13_output_0" + input: "/model/layers.6/self_attn/Constant_16_output_0" + output: "/model/layers.6/self_attn/Slice_2_output_0" + name: "/model/layers.6/self_attn/Slice_2" + op_type: "Slice" + } + node { + output: "/model/layers.6/self_attn/Constant_17_output_0" + name: "/model/layers.6/self_attn/Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_18_output_0" + name: "/model/layers.6/self_attn/Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_19_output_0" + name: "/model/layers.6/self_attn/Constant_19" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.6/self_attn/Constant_20_output_0" + name: "/model/layers.6/self_attn/Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/self_attn/Transpose_1_output_0" + input: "/model/layers.6/self_attn/Constant_18_output_0" + input: "/model/layers.6/self_attn/Constant_19_output_0" + input: "/model/layers.6/self_attn/Constant_17_output_0" + input: "/model/layers.6/self_attn/Constant_20_output_0" + output: "/model/layers.6/self_attn/Slice_3_output_0" + name: "/model/layers.6/self_attn/Slice_3" + op_type: "Slice" + } + node { + input: "/model/layers.6/self_attn/Slice_3_output_0" + output: "/model/layers.6/self_attn/Neg_1_output_0" + name: "/model/layers.6/self_attn/Neg_1" + op_type: "Neg" + } + node { + input: "/model/layers.6/self_attn/Neg_1_output_0" + input: "/model/layers.6/self_attn/Slice_2_output_0" + output: "/model/layers.6/self_attn/Concat_1_output_0" + name: "/model/layers.6/self_attn/Concat_1" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.6/self_attn/Concat_1_output_0" + input: "/model/layers.6/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.6/self_attn/Mul_3_output_0" + name: "/model/layers.6/self_attn/Mul_3" + op_type: "Mul" + } + node { + input: 
"/model/layers.6/self_attn/Mul_2_output_0" + input: "/model/layers.6/self_attn/Mul_3_output_0" + output: "key_states.51" + name: "/model/layers.6/self_attn/Add_1" + op_type: "Add" + } + node { + input: "key_states.51" + output: "/model/layers.6/self_attn/Transpose_3_output_0" + name: "/model/layers.6/self_attn/Transpose_3" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 1 + ints: 3 + ints: 2 + type: INTS + } + } + node { + input: "/model/layers.6/self_attn/Add_output_0" + input: "/model/layers.6/self_attn/Transpose_3_output_0" + output: "/model/layers.6/self_attn/MatMul_output_0" + name: "/model/layers.6/self_attn/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.6/self_attn/Constant_21_output_0" + name: "/model/layers.6/self_attn/Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\363\0045@" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/self_attn/MatMul_output_0" + input: "/model/layers.6/self_attn/Constant_21_output_0" + output: "/model/layers.6/self_attn/Div_output_0" + name: "/model/layers.6/self_attn/Div" + op_type: "Div" + } + node { + input: "/model/layers.6/self_attn/Div_output_0" + input: "/model/Where_3_output_0" + output: "/model/layers.6/self_attn/Add_2_output_0" + name: "/model/layers.6/self_attn/Add_2" + op_type: "Add" + } + node { + input: "/model/layers.6/self_attn/Add_2_output_0" + output: "/model/layers.6/self_attn/Softmax_output_0" + name: "/model/layers.6/self_attn/Softmax" + op_type: "Softmax" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.6/self_attn/Softmax_output_0" + output: "/model/layers.6/self_attn/Cast_output_0" + name: "/model/layers.6/self_attn/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.6/self_attn/Cast_output_0" + output: "/model/layers.6/self_attn/Cast_1_output_0" + name: "/model/layers.6/self_attn/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.6/self_attn/Cast_1_output_0" + input: "value_states.23" + output: "/model/layers.6/self_attn/MatMul_1_output_0" + name: "/model/layers.6/self_attn/MatMul_1" + op_type: "MatMul" + } + node { + input: "/model/layers.6/self_attn/MatMul_1_output_0" + output: "/model/layers.6/self_attn/Transpose_4_output_0" + name: "/model/layers.6/self_attn/Transpose_4" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.6/self_attn/Constant_22_output_0" + name: "/model/layers.6/self_attn/Constant_22" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/self_attn/Transpose_4_output_0" + input: "/model/layers.6/self_attn/Constant_22_output_0" + output: "/model/layers.6/self_attn/Reshape_3_output_0" + name: "/model/layers.6/self_attn/Reshape_3" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.6/self_attn/Reshape_3_output_0" + input: "onnx::MatMul_2024" + output: "/model/layers.6/self_attn/o_proj/MatMul_output_0" + name: "/model/layers.6/self_attn/o_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.6/input_layernorm/Cast_output_0" + input: "/model/layers.6/self_attn/o_proj/MatMul_output_0" 
+ output: "/model/layers.6/Add_output_0" + name: "/model/layers.6/Add" + op_type: "Add" + } + node { + input: "/model/layers.6/Add_output_0" + output: "/model/layers.6/post_attention_layernorm/Cast_output_0" + name: "/model/layers.6/post_attention_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.6/post_attention_layernorm/Constant_output_0" + name: "/model/layers.6/post_attention_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/post_attention_layernorm/Cast_output_0" + input: "/model/layers.6/post_attention_layernorm/Constant_output_0" + output: "/model/layers.6/post_attention_layernorm/Pow_output_0" + name: "/model/layers.6/post_attention_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.6/post_attention_layernorm/Pow_output_0" + output: "/model/layers.6/post_attention_layernorm/ReduceMean_output_0" + name: "/model/layers.6/post_attention_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.6/post_attention_layernorm/Constant_1_output_0" + name: "/model/layers.6/post_attention_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.6/post_attention_layernorm/ReduceMean_output_0" + input: "/model/layers.6/post_attention_layernorm/Constant_1_output_0" + output: "/model/layers.6/post_attention_layernorm/Add_output_0" + name: "/model/layers.6/post_attention_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.6/post_attention_layernorm/Add_output_0" + output: "/model/layers.6/post_attention_layernorm/Sqrt_output_0" + name: "/model/layers.6/post_attention_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.6/post_attention_layernorm/Constant_2_output_0" + name: "/model/layers.6/post_attention_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/layers.6/post_attention_layernorm/Constant_2_output_0" + input: "/model/layers.6/post_attention_layernorm/Sqrt_output_0" + output: "/model/layers.6/post_attention_layernorm/Div_output_0" + name: "/model/layers.6/post_attention_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.6/post_attention_layernorm/Cast_output_0" + input: "/model/layers.6/post_attention_layernorm/Div_output_0" + output: "/model/layers.6/post_attention_layernorm/Mul_output_0" + name: "/model/layers.6/post_attention_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.6/post_attention_layernorm/Mul_output_0" + output: "/model/layers.6/post_attention_layernorm/Cast_1_output_0" + name: "/model/layers.6/post_attention_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.6.post_attention_layernorm.weight" + input: "/model/layers.6/post_attention_layernorm/Cast_1_output_0" + output: "/model/layers.6/post_attention_layernorm/Mul_1_output_0" + name: "/model/layers.6/post_attention_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.6/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2025" + output: "/model/layers.6/mlp/gate_proj/MatMul_output_0" + name: "/model/layers.6/mlp/gate_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.6/mlp/gate_proj/MatMul_output_0" + output: "/model/layers.6/mlp/act_fn/Sigmoid_output_0" + name: "/model/layers.6/mlp/act_fn/Sigmoid" + op_type: "Sigmoid" + } + node { + input: "/model/layers.6/mlp/gate_proj/MatMul_output_0" + input: "/model/layers.6/mlp/act_fn/Sigmoid_output_0" + output: "/model/layers.6/mlp/act_fn/Mul_output_0" + name: "/model/layers.6/mlp/act_fn/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.6/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2026" + output: "/model/layers.6/mlp/up_proj/MatMul_output_0" + name: "/model/layers.6/mlp/up_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.6/mlp/act_fn/Mul_output_0" + input: "/model/layers.6/mlp/up_proj/MatMul_output_0" + output: "/model/layers.6/mlp/Mul_output_0" + name: "/model/layers.6/mlp/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.6/mlp/Mul_output_0" + input: "onnx::MatMul_2027" + output: "/model/layers.6/mlp/down_proj/MatMul_output_0" + name: "/model/layers.6/mlp/down_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.6/post_attention_layernorm/Cast_output_0" + input: "/model/layers.6/mlp/down_proj/MatMul_output_0" + output: "/model/layers.6/Add_1_output_0" + name: "/model/layers.6/Add_1" + op_type: "Add" + } + node { + input: "/model/layers.6/Add_1_output_0" + output: "/model/layers.7/input_layernorm/Cast_output_0" + name: "/model/layers.7/input_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.7/input_layernorm/Constant_output_0" + name: "/model/layers.7/input_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/input_layernorm/Cast_output_0" + input: "/model/layers.7/input_layernorm/Constant_output_0" + output: "/model/layers.7/input_layernorm/Pow_output_0" + name: "/model/layers.7/input_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.7/input_layernorm/Pow_output_0" + output: "/model/layers.7/input_layernorm/ReduceMean_output_0" 
+ name: "/model/layers.7/input_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.7/input_layernorm/Constant_1_output_0" + name: "/model/layers.7/input_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/input_layernorm/ReduceMean_output_0" + input: "/model/layers.7/input_layernorm/Constant_1_output_0" + output: "/model/layers.7/input_layernorm/Add_output_0" + name: "/model/layers.7/input_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.7/input_layernorm/Add_output_0" + output: "/model/layers.7/input_layernorm/Sqrt_output_0" + name: "/model/layers.7/input_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.7/input_layernorm/Constant_2_output_0" + name: "/model/layers.7/input_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/input_layernorm/Constant_2_output_0" + input: "/model/layers.7/input_layernorm/Sqrt_output_0" + output: "/model/layers.7/input_layernorm/Div_output_0" + name: "/model/layers.7/input_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.7/input_layernorm/Cast_output_0" + input: "/model/layers.7/input_layernorm/Div_output_0" + output: "/model/layers.7/input_layernorm/Mul_output_0" + name: "/model/layers.7/input_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.7/input_layernorm/Mul_output_0" + output: "/model/layers.7/input_layernorm/Cast_1_output_0" + name: "/model/layers.7/input_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.7.input_layernorm.weight" + input: "/model/layers.7/input_layernorm/Cast_1_output_0" + output: "/model/layers.7/input_layernorm/Mul_1_output_0" + name: "/model/layers.7/input_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.7/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2028" + output: "/model/layers.7/self_attn/q_proj/MatMul_output_0" + name: "/model/layers.7/self_attn/q_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.7/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2029" + output: "/model/layers.7/self_attn/k_proj/MatMul_output_0" + name: "/model/layers.7/self_attn/k_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.7/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2030" + output: "/model/layers.7/self_attn/v_proj/MatMul_output_0" + name: "/model/layers.7/self_attn/v_proj/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.7/self_attn/Constant_output_0" + name: "/model/layers.7/self_attn/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_1_output_0" + name: "/model/layers.7/self_attn/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR 
+ } + } + node { + output: "/model/layers.7/self_attn/Constant_2_output_0" + name: "/model/layers.7/self_attn/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/self_attn/q_proj/MatMul_output_0" + input: "/model/layers.7/self_attn/Constant_output_0" + output: "/model/layers.7/self_attn/Reshape_output_0" + name: "/model/layers.7/self_attn/Reshape" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.7/self_attn/Reshape_output_0" + output: "/model/layers.7/self_attn/Transpose_output_0" + name: "/model/layers.7/self_attn/Transpose" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.7/self_attn/k_proj/MatMul_output_0" + input: "/model/layers.7/self_attn/Constant_1_output_0" + output: "/model/layers.7/self_attn/Reshape_1_output_0" + name: "/model/layers.7/self_attn/Reshape_1" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.7/self_attn/Reshape_1_output_0" + output: "/model/layers.7/self_attn/Transpose_1_output_0" + name: "/model/layers.7/self_attn/Transpose_1" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.7/self_attn/v_proj/MatMul_output_0" + input: "/model/layers.7/self_attn/Constant_2_output_0" + output: "/model/layers.7/self_attn/Reshape_2_output_0" + name: "/model/layers.7/self_attn/Reshape_2" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.7/self_attn/Reshape_2_output_0" + output: "value_states.27" + name: "/model/layers.7/self_attn/Transpose_2" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.7/self_attn/rotary_emb/Constant_output_0" + name: "/model/layers.7/self_attn/rotary_emb/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?AQ\n?\230\270~?\271\374\177?\370\377\177?AQ\n?\230\270~?\271\374\177?\370\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?,<\221>@\251`?\031\256\177?.\377\177?,<\221>@\251`?\031\256\177?.\377\177?\270\315u?2IS?\022\212\177?\322\376\177?\270\315u?2IS?\022\212\177?\322\376\177?\275\377@?\263\314C?\201_\177?e\376\177?\275\377@?\263\314C?\201_\177?e\376\177?" 
+ } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/rotary_emb/Constant_1_output_0" + name: "/model/layers.7/self_attn/rotary_emb/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\244jW?wu\314=W\326#>\324\243>\324\243mN\227>\037\271\365<\223\233D;\303\201\020>mN\227>\037\271\365<\223\233D;\317\275A\277\330a\307>\333\313#=X\022\203;\317\275A\277\330a\307>\333\313#=X\022\203;\020|u\277Dw\365>\364\266L=\336\326\243;\020|u\277Dw\365>\364\266L=\336\326\243;\214\017\217\276i\214\020?\321\234u=Y\233\304;\214\017\217\276i\214\020?\321\234u=Y\233\304;F0(?s\353$?2>\217=\307_\345;F0(?s\353$?2>\217=\307_\345;" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/self_attn/rotary_emb/Constant_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.7/self_attn/Gather_output_0" + name: "/model/layers.7/self_attn/Gather" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.7/self_attn/Constant_3_output_0" + name: "/model/layers.7/self_attn/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/self_attn/Gather_output_0" + input: "/model/layers.7/self_attn/Constant_3_output_0" + output: "/model/layers.7/self_attn/Unsqueeze_output_0" + name: "/model/layers.7/self_attn/Unsqueeze" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.7/self_attn/rotary_emb/Constant_1_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.7/self_attn/Gather_1_output_0" + name: "/model/layers.7/self_attn/Gather_1" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.7/self_attn/Constant_4_output_0" + name: "/model/layers.7/self_attn/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/self_attn/Gather_1_output_0" + input: "/model/layers.7/self_attn/Constant_4_output_0" + output: "/model/layers.7/self_attn/Unsqueeze_1_output_0" + name: "/model/layers.7/self_attn/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.7/self_attn/Transpose_output_0" + input: "/model/layers.7/self_attn/Unsqueeze_output_0" + output: "/model/layers.7/self_attn/Mul_output_0" + name: "/model/layers.7/self_attn/Mul" + op_type: "Mul" + } + node { + output: "/model/layers.7/self_attn/Constant_5_output_0" + name: "/model/layers.7/self_attn/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_6_output_0" + name: "/model/layers.7/self_attn/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_7_output_0" + name: "/model/layers.7/self_attn/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: 
"/model/layers.7/self_attn/Constant_8_output_0" + name: "/model/layers.7/self_attn/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/self_attn/Transpose_output_0" + input: "/model/layers.7/self_attn/Constant_6_output_0" + input: "/model/layers.7/self_attn/Constant_7_output_0" + input: "/model/layers.7/self_attn/Constant_5_output_0" + input: "/model/layers.7/self_attn/Constant_8_output_0" + output: "/model/layers.7/self_attn/Slice_output_0" + name: "/model/layers.7/self_attn/Slice" + op_type: "Slice" + } + node { + output: "/model/layers.7/self_attn/Constant_9_output_0" + name: "/model/layers.7/self_attn/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_10_output_0" + name: "/model/layers.7/self_attn/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_11_output_0" + name: "/model/layers.7/self_attn/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_12_output_0" + name: "/model/layers.7/self_attn/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/self_attn/Transpose_output_0" + input: "/model/layers.7/self_attn/Constant_10_output_0" + input: "/model/layers.7/self_attn/Constant_11_output_0" + input: "/model/layers.7/self_attn/Constant_9_output_0" + input: "/model/layers.7/self_attn/Constant_12_output_0" + output: "/model/layers.7/self_attn/Slice_1_output_0" + name: "/model/layers.7/self_attn/Slice_1" + op_type: "Slice" + } + node { + input: "/model/layers.7/self_attn/Slice_1_output_0" + output: "/model/layers.7/self_attn/Neg_output_0" + name: "/model/layers.7/self_attn/Neg" + op_type: "Neg" + } + node { + input: "/model/layers.7/self_attn/Neg_output_0" + input: "/model/layers.7/self_attn/Slice_output_0" + output: "/model/layers.7/self_attn/Concat_output_0" + name: "/model/layers.7/self_attn/Concat" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.7/self_attn/Concat_output_0" + input: "/model/layers.7/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.7/self_attn/Mul_1_output_0" + name: "/model/layers.7/self_attn/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.7/self_attn/Mul_output_0" + input: "/model/layers.7/self_attn/Mul_1_output_0" + output: "/model/layers.7/self_attn/Add_output_0" + name: "/model/layers.7/self_attn/Add" + op_type: "Add" + } + node { + input: "/model/layers.7/self_attn/Transpose_1_output_0" + input: "/model/layers.7/self_attn/Unsqueeze_output_0" + output: "/model/layers.7/self_attn/Mul_2_output_0" + name: "/model/layers.7/self_attn/Mul_2" + op_type: "Mul" + } + node { + output: "/model/layers.7/self_attn/Constant_13_output_0" + name: "/model/layers.7/self_attn/Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: 
"\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_14_output_0" + name: "/model/layers.7/self_attn/Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_15_output_0" + name: "/model/layers.7/self_attn/Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_16_output_0" + name: "/model/layers.7/self_attn/Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/self_attn/Transpose_1_output_0" + input: "/model/layers.7/self_attn/Constant_14_output_0" + input: "/model/layers.7/self_attn/Constant_15_output_0" + input: "/model/layers.7/self_attn/Constant_13_output_0" + input: "/model/layers.7/self_attn/Constant_16_output_0" + output: "/model/layers.7/self_attn/Slice_2_output_0" + name: "/model/layers.7/self_attn/Slice_2" + op_type: "Slice" + } + node { + output: "/model/layers.7/self_attn/Constant_17_output_0" + name: "/model/layers.7/self_attn/Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_18_output_0" + name: "/model/layers.7/self_attn/Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_19_output_0" + name: "/model/layers.7/self_attn/Constant_19" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.7/self_attn/Constant_20_output_0" + name: "/model/layers.7/self_attn/Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/self_attn/Transpose_1_output_0" + input: "/model/layers.7/self_attn/Constant_18_output_0" + input: "/model/layers.7/self_attn/Constant_19_output_0" + input: "/model/layers.7/self_attn/Constant_17_output_0" + input: "/model/layers.7/self_attn/Constant_20_output_0" + output: "/model/layers.7/self_attn/Slice_3_output_0" + name: "/model/layers.7/self_attn/Slice_3" + op_type: "Slice" + } + node { + input: "/model/layers.7/self_attn/Slice_3_output_0" + output: "/model/layers.7/self_attn/Neg_1_output_0" + name: "/model/layers.7/self_attn/Neg_1" + op_type: "Neg" + } + node { + input: "/model/layers.7/self_attn/Neg_1_output_0" + input: "/model/layers.7/self_attn/Slice_2_output_0" + output: "/model/layers.7/self_attn/Concat_1_output_0" + name: "/model/layers.7/self_attn/Concat_1" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.7/self_attn/Concat_1_output_0" + input: "/model/layers.7/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.7/self_attn/Mul_3_output_0" + name: "/model/layers.7/self_attn/Mul_3" + op_type: "Mul" + } + node { + input: 
"/model/layers.7/self_attn/Mul_2_output_0" + input: "/model/layers.7/self_attn/Mul_3_output_0" + output: "key_states.59" + name: "/model/layers.7/self_attn/Add_1" + op_type: "Add" + } + node { + input: "key_states.59" + output: "/model/layers.7/self_attn/Transpose_3_output_0" + name: "/model/layers.7/self_attn/Transpose_3" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 1 + ints: 3 + ints: 2 + type: INTS + } + } + node { + input: "/model/layers.7/self_attn/Add_output_0" + input: "/model/layers.7/self_attn/Transpose_3_output_0" + output: "/model/layers.7/self_attn/MatMul_output_0" + name: "/model/layers.7/self_attn/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.7/self_attn/Constant_21_output_0" + name: "/model/layers.7/self_attn/Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\363\0045@" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/self_attn/MatMul_output_0" + input: "/model/layers.7/self_attn/Constant_21_output_0" + output: "/model/layers.7/self_attn/Div_output_0" + name: "/model/layers.7/self_attn/Div" + op_type: "Div" + } + node { + input: "/model/layers.7/self_attn/Div_output_0" + input: "/model/Where_3_output_0" + output: "/model/layers.7/self_attn/Add_2_output_0" + name: "/model/layers.7/self_attn/Add_2" + op_type: "Add" + } + node { + input: "/model/layers.7/self_attn/Add_2_output_0" + output: "/model/layers.7/self_attn/Softmax_output_0" + name: "/model/layers.7/self_attn/Softmax" + op_type: "Softmax" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.7/self_attn/Softmax_output_0" + output: "/model/layers.7/self_attn/Cast_output_0" + name: "/model/layers.7/self_attn/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.7/self_attn/Cast_output_0" + output: "/model/layers.7/self_attn/Cast_1_output_0" + name: "/model/layers.7/self_attn/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.7/self_attn/Cast_1_output_0" + input: "value_states.27" + output: "/model/layers.7/self_attn/MatMul_1_output_0" + name: "/model/layers.7/self_attn/MatMul_1" + op_type: "MatMul" + } + node { + input: "/model/layers.7/self_attn/MatMul_1_output_0" + output: "/model/layers.7/self_attn/Transpose_4_output_0" + name: "/model/layers.7/self_attn/Transpose_4" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.7/self_attn/Constant_22_output_0" + name: "/model/layers.7/self_attn/Constant_22" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/self_attn/Transpose_4_output_0" + input: "/model/layers.7/self_attn/Constant_22_output_0" + output: "/model/layers.7/self_attn/Reshape_3_output_0" + name: "/model/layers.7/self_attn/Reshape_3" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.7/self_attn/Reshape_3_output_0" + input: "onnx::MatMul_2066" + output: "/model/layers.7/self_attn/o_proj/MatMul_output_0" + name: "/model/layers.7/self_attn/o_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.7/input_layernorm/Cast_output_0" + input: "/model/layers.7/self_attn/o_proj/MatMul_output_0" 
+ output: "/model/layers.7/Add_output_0" + name: "/model/layers.7/Add" + op_type: "Add" + } + node { + input: "/model/layers.7/Add_output_0" + output: "/model/layers.7/post_attention_layernorm/Cast_output_0" + name: "/model/layers.7/post_attention_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.7/post_attention_layernorm/Constant_output_0" + name: "/model/layers.7/post_attention_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/post_attention_layernorm/Cast_output_0" + input: "/model/layers.7/post_attention_layernorm/Constant_output_0" + output: "/model/layers.7/post_attention_layernorm/Pow_output_0" + name: "/model/layers.7/post_attention_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.7/post_attention_layernorm/Pow_output_0" + output: "/model/layers.7/post_attention_layernorm/ReduceMean_output_0" + name: "/model/layers.7/post_attention_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.7/post_attention_layernorm/Constant_1_output_0" + name: "/model/layers.7/post_attention_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.7/post_attention_layernorm/ReduceMean_output_0" + input: "/model/layers.7/post_attention_layernorm/Constant_1_output_0" + output: "/model/layers.7/post_attention_layernorm/Add_output_0" + name: "/model/layers.7/post_attention_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.7/post_attention_layernorm/Add_output_0" + output: "/model/layers.7/post_attention_layernorm/Sqrt_output_0" + name: "/model/layers.7/post_attention_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.7/post_attention_layernorm/Constant_2_output_0" + name: "/model/layers.7/post_attention_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/layers.7/post_attention_layernorm/Constant_2_output_0" + input: "/model/layers.7/post_attention_layernorm/Sqrt_output_0" + output: "/model/layers.7/post_attention_layernorm/Div_output_0" + name: "/model/layers.7/post_attention_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.7/post_attention_layernorm/Cast_output_0" + input: "/model/layers.7/post_attention_layernorm/Div_output_0" + output: "/model/layers.7/post_attention_layernorm/Mul_output_0" + name: "/model/layers.7/post_attention_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.7/post_attention_layernorm/Mul_output_0" + output: "/model/layers.7/post_attention_layernorm/Cast_1_output_0" + name: "/model/layers.7/post_attention_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.7.post_attention_layernorm.weight" + input: "/model/layers.7/post_attention_layernorm/Cast_1_output_0" + output: "/model/layers.7/post_attention_layernorm/Mul_1_output_0" + name: "/model/layers.7/post_attention_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.7/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2067" + output: "/model/layers.7/mlp/gate_proj/MatMul_output_0" + name: "/model/layers.7/mlp/gate_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.7/mlp/gate_proj/MatMul_output_0" + output: "/model/layers.7/mlp/act_fn/Sigmoid_output_0" + name: "/model/layers.7/mlp/act_fn/Sigmoid" + op_type: "Sigmoid" + } + node { + input: "/model/layers.7/mlp/gate_proj/MatMul_output_0" + input: "/model/layers.7/mlp/act_fn/Sigmoid_output_0" + output: "/model/layers.7/mlp/act_fn/Mul_output_0" + name: "/model/layers.7/mlp/act_fn/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.7/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2068" + output: "/model/layers.7/mlp/up_proj/MatMul_output_0" + name: "/model/layers.7/mlp/up_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.7/mlp/act_fn/Mul_output_0" + input: "/model/layers.7/mlp/up_proj/MatMul_output_0" + output: "/model/layers.7/mlp/Mul_output_0" + name: "/model/layers.7/mlp/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.7/mlp/Mul_output_0" + input: "onnx::MatMul_2069" + output: "/model/layers.7/mlp/down_proj/MatMul_output_0" + name: "/model/layers.7/mlp/down_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.7/post_attention_layernorm/Cast_output_0" + input: "/model/layers.7/mlp/down_proj/MatMul_output_0" + output: "/model/layers.7/Add_1_output_0" + name: "/model/layers.7/Add_1" + op_type: "Add" + } + node { + input: "/model/layers.7/Add_1_output_0" + output: "/model/layers.8/input_layernorm/Cast_output_0" + name: "/model/layers.8/input_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.8/input_layernorm/Constant_output_0" + name: "/model/layers.8/input_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/input_layernorm/Cast_output_0" + input: "/model/layers.8/input_layernorm/Constant_output_0" + output: "/model/layers.8/input_layernorm/Pow_output_0" + name: "/model/layers.8/input_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.8/input_layernorm/Pow_output_0" + output: "/model/layers.8/input_layernorm/ReduceMean_output_0" 
+ name: "/model/layers.8/input_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.8/input_layernorm/Constant_1_output_0" + name: "/model/layers.8/input_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/input_layernorm/ReduceMean_output_0" + input: "/model/layers.8/input_layernorm/Constant_1_output_0" + output: "/model/layers.8/input_layernorm/Add_output_0" + name: "/model/layers.8/input_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.8/input_layernorm/Add_output_0" + output: "/model/layers.8/input_layernorm/Sqrt_output_0" + name: "/model/layers.8/input_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.8/input_layernorm/Constant_2_output_0" + name: "/model/layers.8/input_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/input_layernorm/Constant_2_output_0" + input: "/model/layers.8/input_layernorm/Sqrt_output_0" + output: "/model/layers.8/input_layernorm/Div_output_0" + name: "/model/layers.8/input_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.8/input_layernorm/Cast_output_0" + input: "/model/layers.8/input_layernorm/Div_output_0" + output: "/model/layers.8/input_layernorm/Mul_output_0" + name: "/model/layers.8/input_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.8/input_layernorm/Mul_output_0" + output: "/model/layers.8/input_layernorm/Cast_1_output_0" + name: "/model/layers.8/input_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.8.input_layernorm.weight" + input: "/model/layers.8/input_layernorm/Cast_1_output_0" + output: "/model/layers.8/input_layernorm/Mul_1_output_0" + name: "/model/layers.8/input_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.8/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2070" + output: "/model/layers.8/self_attn/q_proj/MatMul_output_0" + name: "/model/layers.8/self_attn/q_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.8/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2071" + output: "/model/layers.8/self_attn/k_proj/MatMul_output_0" + name: "/model/layers.8/self_attn/k_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.8/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2072" + output: "/model/layers.8/self_attn/v_proj/MatMul_output_0" + name: "/model/layers.8/self_attn/v_proj/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.8/self_attn/Constant_output_0" + name: "/model/layers.8/self_attn/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_1_output_0" + name: "/model/layers.8/self_attn/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR 
+ } + } + node { + output: "/model/layers.8/self_attn/Constant_2_output_0" + name: "/model/layers.8/self_attn/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/self_attn/q_proj/MatMul_output_0" + input: "/model/layers.8/self_attn/Constant_output_0" + output: "/model/layers.8/self_attn/Reshape_output_0" + name: "/model/layers.8/self_attn/Reshape" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.8/self_attn/Reshape_output_0" + output: "/model/layers.8/self_attn/Transpose_output_0" + name: "/model/layers.8/self_attn/Transpose" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.8/self_attn/k_proj/MatMul_output_0" + input: "/model/layers.8/self_attn/Constant_1_output_0" + output: "/model/layers.8/self_attn/Reshape_1_output_0" + name: "/model/layers.8/self_attn/Reshape_1" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.8/self_attn/Reshape_1_output_0" + output: "/model/layers.8/self_attn/Transpose_1_output_0" + name: "/model/layers.8/self_attn/Transpose_1" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.8/self_attn/v_proj/MatMul_output_0" + input: "/model/layers.8/self_attn/Constant_2_output_0" + output: "/model/layers.8/self_attn/Reshape_2_output_0" + name: "/model/layers.8/self_attn/Reshape_2" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.8/self_attn/Reshape_2_output_0" + output: "value_states.31" + name: "/model/layers.8/self_attn/Transpose_2" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.8/self_attn/rotary_emb/Constant_output_0" + name: "/model/layers.8/self_attn/rotary_emb/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?AQ\n?\230\270~?\271\374\177?\370\377\177?AQ\n?\230\270~?\271\374\177?\370\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?,<\221>@\251`?\031\256\177?.\377\177?,<\221>@\251`?\031\256\177?.\377\177?\270\315u?2IS?\022\212\177?\322\376\177?\270\315u?2IS?\022\212\177?\322\376\177?\275\377@?\263\314C?\201_\177?e\376\177?\275\377@?\263\314C?\201_\177?e\376\177?" 
+ } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/rotary_emb/Constant_1_output_0" + name: "/model/layers.8/self_attn/rotary_emb/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\244jW?wu\314=W\326#>\324\243>\324\243mN\227>\037\271\365<\223\233D;\303\201\020>mN\227>\037\271\365<\223\233D;\317\275A\277\330a\307>\333\313#=X\022\203;\317\275A\277\330a\307>\333\313#=X\022\203;\020|u\277Dw\365>\364\266L=\336\326\243;\020|u\277Dw\365>\364\266L=\336\326\243;\214\017\217\276i\214\020?\321\234u=Y\233\304;\214\017\217\276i\214\020?\321\234u=Y\233\304;F0(?s\353$?2>\217=\307_\345;F0(?s\353$?2>\217=\307_\345;" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/self_attn/rotary_emb/Constant_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.8/self_attn/Gather_output_0" + name: "/model/layers.8/self_attn/Gather" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.8/self_attn/Constant_3_output_0" + name: "/model/layers.8/self_attn/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/self_attn/Gather_output_0" + input: "/model/layers.8/self_attn/Constant_3_output_0" + output: "/model/layers.8/self_attn/Unsqueeze_output_0" + name: "/model/layers.8/self_attn/Unsqueeze" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.8/self_attn/rotary_emb/Constant_1_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.8/self_attn/Gather_1_output_0" + name: "/model/layers.8/self_attn/Gather_1" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.8/self_attn/Constant_4_output_0" + name: "/model/layers.8/self_attn/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/self_attn/Gather_1_output_0" + input: "/model/layers.8/self_attn/Constant_4_output_0" + output: "/model/layers.8/self_attn/Unsqueeze_1_output_0" + name: "/model/layers.8/self_attn/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.8/self_attn/Transpose_output_0" + input: "/model/layers.8/self_attn/Unsqueeze_output_0" + output: "/model/layers.8/self_attn/Mul_output_0" + name: "/model/layers.8/self_attn/Mul" + op_type: "Mul" + } + node { + output: "/model/layers.8/self_attn/Constant_5_output_0" + name: "/model/layers.8/self_attn/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_6_output_0" + name: "/model/layers.8/self_attn/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_7_output_0" + name: "/model/layers.8/self_attn/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: 
"/model/layers.8/self_attn/Constant_8_output_0" + name: "/model/layers.8/self_attn/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/self_attn/Transpose_output_0" + input: "/model/layers.8/self_attn/Constant_6_output_0" + input: "/model/layers.8/self_attn/Constant_7_output_0" + input: "/model/layers.8/self_attn/Constant_5_output_0" + input: "/model/layers.8/self_attn/Constant_8_output_0" + output: "/model/layers.8/self_attn/Slice_output_0" + name: "/model/layers.8/self_attn/Slice" + op_type: "Slice" + } + node { + output: "/model/layers.8/self_attn/Constant_9_output_0" + name: "/model/layers.8/self_attn/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_10_output_0" + name: "/model/layers.8/self_attn/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_11_output_0" + name: "/model/layers.8/self_attn/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_12_output_0" + name: "/model/layers.8/self_attn/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/self_attn/Transpose_output_0" + input: "/model/layers.8/self_attn/Constant_10_output_0" + input: "/model/layers.8/self_attn/Constant_11_output_0" + input: "/model/layers.8/self_attn/Constant_9_output_0" + input: "/model/layers.8/self_attn/Constant_12_output_0" + output: "/model/layers.8/self_attn/Slice_1_output_0" + name: "/model/layers.8/self_attn/Slice_1" + op_type: "Slice" + } + node { + input: "/model/layers.8/self_attn/Slice_1_output_0" + output: "/model/layers.8/self_attn/Neg_output_0" + name: "/model/layers.8/self_attn/Neg" + op_type: "Neg" + } + node { + input: "/model/layers.8/self_attn/Neg_output_0" + input: "/model/layers.8/self_attn/Slice_output_0" + output: "/model/layers.8/self_attn/Concat_output_0" + name: "/model/layers.8/self_attn/Concat" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.8/self_attn/Concat_output_0" + input: "/model/layers.8/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.8/self_attn/Mul_1_output_0" + name: "/model/layers.8/self_attn/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.8/self_attn/Mul_output_0" + input: "/model/layers.8/self_attn/Mul_1_output_0" + output: "/model/layers.8/self_attn/Add_output_0" + name: "/model/layers.8/self_attn/Add" + op_type: "Add" + } + node { + input: "/model/layers.8/self_attn/Transpose_1_output_0" + input: "/model/layers.8/self_attn/Unsqueeze_output_0" + output: "/model/layers.8/self_attn/Mul_2_output_0" + name: "/model/layers.8/self_attn/Mul_2" + op_type: "Mul" + } + node { + output: "/model/layers.8/self_attn/Constant_13_output_0" + name: "/model/layers.8/self_attn/Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: 
"\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_14_output_0" + name: "/model/layers.8/self_attn/Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_15_output_0" + name: "/model/layers.8/self_attn/Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_16_output_0" + name: "/model/layers.8/self_attn/Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/self_attn/Transpose_1_output_0" + input: "/model/layers.8/self_attn/Constant_14_output_0" + input: "/model/layers.8/self_attn/Constant_15_output_0" + input: "/model/layers.8/self_attn/Constant_13_output_0" + input: "/model/layers.8/self_attn/Constant_16_output_0" + output: "/model/layers.8/self_attn/Slice_2_output_0" + name: "/model/layers.8/self_attn/Slice_2" + op_type: "Slice" + } + node { + output: "/model/layers.8/self_attn/Constant_17_output_0" + name: "/model/layers.8/self_attn/Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_18_output_0" + name: "/model/layers.8/self_attn/Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_19_output_0" + name: "/model/layers.8/self_attn/Constant_19" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.8/self_attn/Constant_20_output_0" + name: "/model/layers.8/self_attn/Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/self_attn/Transpose_1_output_0" + input: "/model/layers.8/self_attn/Constant_18_output_0" + input: "/model/layers.8/self_attn/Constant_19_output_0" + input: "/model/layers.8/self_attn/Constant_17_output_0" + input: "/model/layers.8/self_attn/Constant_20_output_0" + output: "/model/layers.8/self_attn/Slice_3_output_0" + name: "/model/layers.8/self_attn/Slice_3" + op_type: "Slice" + } + node { + input: "/model/layers.8/self_attn/Slice_3_output_0" + output: "/model/layers.8/self_attn/Neg_1_output_0" + name: "/model/layers.8/self_attn/Neg_1" + op_type: "Neg" + } + node { + input: "/model/layers.8/self_attn/Neg_1_output_0" + input: "/model/layers.8/self_attn/Slice_2_output_0" + output: "/model/layers.8/self_attn/Concat_1_output_0" + name: "/model/layers.8/self_attn/Concat_1" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.8/self_attn/Concat_1_output_0" + input: "/model/layers.8/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.8/self_attn/Mul_3_output_0" + name: "/model/layers.8/self_attn/Mul_3" + op_type: "Mul" + } + node { + input: 
"/model/layers.8/self_attn/Mul_2_output_0" + input: "/model/layers.8/self_attn/Mul_3_output_0" + output: "key_states.67" + name: "/model/layers.8/self_attn/Add_1" + op_type: "Add" + } + node { + input: "key_states.67" + output: "/model/layers.8/self_attn/Transpose_3_output_0" + name: "/model/layers.8/self_attn/Transpose_3" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 1 + ints: 3 + ints: 2 + type: INTS + } + } + node { + input: "/model/layers.8/self_attn/Add_output_0" + input: "/model/layers.8/self_attn/Transpose_3_output_0" + output: "/model/layers.8/self_attn/MatMul_output_0" + name: "/model/layers.8/self_attn/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.8/self_attn/Constant_21_output_0" + name: "/model/layers.8/self_attn/Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\363\0045@" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/self_attn/MatMul_output_0" + input: "/model/layers.8/self_attn/Constant_21_output_0" + output: "/model/layers.8/self_attn/Div_output_0" + name: "/model/layers.8/self_attn/Div" + op_type: "Div" + } + node { + input: "/model/layers.8/self_attn/Div_output_0" + input: "/model/Where_3_output_0" + output: "/model/layers.8/self_attn/Add_2_output_0" + name: "/model/layers.8/self_attn/Add_2" + op_type: "Add" + } + node { + input: "/model/layers.8/self_attn/Add_2_output_0" + output: "/model/layers.8/self_attn/Softmax_output_0" + name: "/model/layers.8/self_attn/Softmax" + op_type: "Softmax" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.8/self_attn/Softmax_output_0" + output: "/model/layers.8/self_attn/Cast_output_0" + name: "/model/layers.8/self_attn/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.8/self_attn/Cast_output_0" + output: "/model/layers.8/self_attn/Cast_1_output_0" + name: "/model/layers.8/self_attn/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.8/self_attn/Cast_1_output_0" + input: "value_states.31" + output: "/model/layers.8/self_attn/MatMul_1_output_0" + name: "/model/layers.8/self_attn/MatMul_1" + op_type: "MatMul" + } + node { + input: "/model/layers.8/self_attn/MatMul_1_output_0" + output: "/model/layers.8/self_attn/Transpose_4_output_0" + name: "/model/layers.8/self_attn/Transpose_4" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.8/self_attn/Constant_22_output_0" + name: "/model/layers.8/self_attn/Constant_22" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/self_attn/Transpose_4_output_0" + input: "/model/layers.8/self_attn/Constant_22_output_0" + output: "/model/layers.8/self_attn/Reshape_3_output_0" + name: "/model/layers.8/self_attn/Reshape_3" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.8/self_attn/Reshape_3_output_0" + input: "onnx::MatMul_2108" + output: "/model/layers.8/self_attn/o_proj/MatMul_output_0" + name: "/model/layers.8/self_attn/o_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.8/input_layernorm/Cast_output_0" + input: "/model/layers.8/self_attn/o_proj/MatMul_output_0" 
+ output: "/model/layers.8/Add_output_0" + name: "/model/layers.8/Add" + op_type: "Add" + } + node { + input: "/model/layers.8/Add_output_0" + output: "/model/layers.8/post_attention_layernorm/Cast_output_0" + name: "/model/layers.8/post_attention_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.8/post_attention_layernorm/Constant_output_0" + name: "/model/layers.8/post_attention_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/post_attention_layernorm/Cast_output_0" + input: "/model/layers.8/post_attention_layernorm/Constant_output_0" + output: "/model/layers.8/post_attention_layernorm/Pow_output_0" + name: "/model/layers.8/post_attention_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.8/post_attention_layernorm/Pow_output_0" + output: "/model/layers.8/post_attention_layernorm/ReduceMean_output_0" + name: "/model/layers.8/post_attention_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.8/post_attention_layernorm/Constant_1_output_0" + name: "/model/layers.8/post_attention_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.8/post_attention_layernorm/ReduceMean_output_0" + input: "/model/layers.8/post_attention_layernorm/Constant_1_output_0" + output: "/model/layers.8/post_attention_layernorm/Add_output_0" + name: "/model/layers.8/post_attention_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.8/post_attention_layernorm/Add_output_0" + output: "/model/layers.8/post_attention_layernorm/Sqrt_output_0" + name: "/model/layers.8/post_attention_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.8/post_attention_layernorm/Constant_2_output_0" + name: "/model/layers.8/post_attention_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/layers.8/post_attention_layernorm/Constant_2_output_0" + input: "/model/layers.8/post_attention_layernorm/Sqrt_output_0" + output: "/model/layers.8/post_attention_layernorm/Div_output_0" + name: "/model/layers.8/post_attention_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.8/post_attention_layernorm/Cast_output_0" + input: "/model/layers.8/post_attention_layernorm/Div_output_0" + output: "/model/layers.8/post_attention_layernorm/Mul_output_0" + name: "/model/layers.8/post_attention_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.8/post_attention_layernorm/Mul_output_0" + output: "/model/layers.8/post_attention_layernorm/Cast_1_output_0" + name: "/model/layers.8/post_attention_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.8.post_attention_layernorm.weight" + input: "/model/layers.8/post_attention_layernorm/Cast_1_output_0" + output: "/model/layers.8/post_attention_layernorm/Mul_1_output_0" + name: "/model/layers.8/post_attention_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.8/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2109" + output: "/model/layers.8/mlp/gate_proj/MatMul_output_0" + name: "/model/layers.8/mlp/gate_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.8/mlp/gate_proj/MatMul_output_0" + output: "/model/layers.8/mlp/act_fn/Sigmoid_output_0" + name: "/model/layers.8/mlp/act_fn/Sigmoid" + op_type: "Sigmoid" + } + node { + input: "/model/layers.8/mlp/gate_proj/MatMul_output_0" + input: "/model/layers.8/mlp/act_fn/Sigmoid_output_0" + output: "/model/layers.8/mlp/act_fn/Mul_output_0" + name: "/model/layers.8/mlp/act_fn/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.8/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2110" + output: "/model/layers.8/mlp/up_proj/MatMul_output_0" + name: "/model/layers.8/mlp/up_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.8/mlp/act_fn/Mul_output_0" + input: "/model/layers.8/mlp/up_proj/MatMul_output_0" + output: "/model/layers.8/mlp/Mul_output_0" + name: "/model/layers.8/mlp/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.8/mlp/Mul_output_0" + input: "onnx::MatMul_2111" + output: "/model/layers.8/mlp/down_proj/MatMul_output_0" + name: "/model/layers.8/mlp/down_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.8/post_attention_layernorm/Cast_output_0" + input: "/model/layers.8/mlp/down_proj/MatMul_output_0" + output: "/model/layers.8/Add_1_output_0" + name: "/model/layers.8/Add_1" + op_type: "Add" + } + node { + input: "/model/layers.8/Add_1_output_0" + output: "/model/layers.9/input_layernorm/Cast_output_0" + name: "/model/layers.9/input_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.9/input_layernorm/Constant_output_0" + name: "/model/layers.9/input_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/input_layernorm/Cast_output_0" + input: "/model/layers.9/input_layernorm/Constant_output_0" + output: "/model/layers.9/input_layernorm/Pow_output_0" + name: "/model/layers.9/input_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.9/input_layernorm/Pow_output_0" + output: "/model/layers.9/input_layernorm/ReduceMean_output_0" 
+ name: "/model/layers.9/input_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.9/input_layernorm/Constant_1_output_0" + name: "/model/layers.9/input_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/input_layernorm/ReduceMean_output_0" + input: "/model/layers.9/input_layernorm/Constant_1_output_0" + output: "/model/layers.9/input_layernorm/Add_output_0" + name: "/model/layers.9/input_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.9/input_layernorm/Add_output_0" + output: "/model/layers.9/input_layernorm/Sqrt_output_0" + name: "/model/layers.9/input_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.9/input_layernorm/Constant_2_output_0" + name: "/model/layers.9/input_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/input_layernorm/Constant_2_output_0" + input: "/model/layers.9/input_layernorm/Sqrt_output_0" + output: "/model/layers.9/input_layernorm/Div_output_0" + name: "/model/layers.9/input_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.9/input_layernorm/Cast_output_0" + input: "/model/layers.9/input_layernorm/Div_output_0" + output: "/model/layers.9/input_layernorm/Mul_output_0" + name: "/model/layers.9/input_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.9/input_layernorm/Mul_output_0" + output: "/model/layers.9/input_layernorm/Cast_1_output_0" + name: "/model/layers.9/input_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.9.input_layernorm.weight" + input: "/model/layers.9/input_layernorm/Cast_1_output_0" + output: "/model/layers.9/input_layernorm/Mul_1_output_0" + name: "/model/layers.9/input_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.9/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2112" + output: "/model/layers.9/self_attn/q_proj/MatMul_output_0" + name: "/model/layers.9/self_attn/q_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.9/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2113" + output: "/model/layers.9/self_attn/k_proj/MatMul_output_0" + name: "/model/layers.9/self_attn/k_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.9/input_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2114" + output: "/model/layers.9/self_attn/v_proj/MatMul_output_0" + name: "/model/layers.9/self_attn/v_proj/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.9/self_attn/Constant_output_0" + name: "/model/layers.9/self_attn/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_1_output_0" + name: "/model/layers.9/self_attn/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR 
+ } + } + node { + output: "/model/layers.9/self_attn/Constant_2_output_0" + name: "/model/layers.9/self_attn/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 4 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/self_attn/q_proj/MatMul_output_0" + input: "/model/layers.9/self_attn/Constant_output_0" + output: "/model/layers.9/self_attn/Reshape_output_0" + name: "/model/layers.9/self_attn/Reshape" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.9/self_attn/Reshape_output_0" + output: "/model/layers.9/self_attn/Transpose_output_0" + name: "/model/layers.9/self_attn/Transpose" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.9/self_attn/k_proj/MatMul_output_0" + input: "/model/layers.9/self_attn/Constant_1_output_0" + output: "/model/layers.9/self_attn/Reshape_1_output_0" + name: "/model/layers.9/self_attn/Reshape_1" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.9/self_attn/Reshape_1_output_0" + output: "/model/layers.9/self_attn/Transpose_1_output_0" + name: "/model/layers.9/self_attn/Transpose_1" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + input: "/model/layers.9/self_attn/v_proj/MatMul_output_0" + input: "/model/layers.9/self_attn/Constant_2_output_0" + output: "/model/layers.9/self_attn/Reshape_2_output_0" + name: "/model/layers.9/self_attn/Reshape_2" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.9/self_attn/Reshape_2_output_0" + output: "value_states.35" + name: "/model/layers.9/self_attn/Transpose_2" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.9/self_attn/rotary_emb/Constant_output_0" + name: "/model/layers.9/self_attn/rotary_emb/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?AQ\n?\230\270~?\271\374\177?\370\377\177?AQ\n?\230\270~?\271\374\177?\370\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?3\021\325\276\245\345z?\345\362\177?\337\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?&p}\277\357\220t?\203\342\177?\265\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?0U\'\277\247\312k?\224\313\177?z\377\177?,<\221>@\251`?\031\256\177?.\377\177?,<\221>@\251`?\031\256\177?.\377\177?\270\315u?2IS?\022\212\177?\322\376\177?\270\315u?2IS?\022\212\177?\322\376\177?\275\377@?\263\314C?\201_\177?e\376\177?\275\377@?\263\314C?\201_\177?e\376\177?" 
+ } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/rotary_emb/Constant_1_output_0" + name: "/model/layers.9/self_attn/rotary_emb/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 8 + dims: 8 + data_type: 1 + raw_data: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\244jW?wu\314=W\326#>\324\243>\324\243mN\227>\037\271\365<\223\233D;\303\201\020>mN\227>\037\271\365<\223\233D;\317\275A\277\330a\307>\333\313#=X\022\203;\317\275A\277\330a\307>\333\313#=X\022\203;\020|u\277Dw\365>\364\266L=\336\326\243;\020|u\277Dw\365>\364\266L=\336\326\243;\214\017\217\276i\214\020?\321\234u=Y\233\304;\214\017\217\276i\214\020?\321\234u=Y\233\304;F0(?s\353$?2>\217=\307_\345;F0(?s\353$?2>\217=\307_\345;" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/self_attn/rotary_emb/Constant_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.9/self_attn/Gather_output_0" + name: "/model/layers.9/self_attn/Gather" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.9/self_attn/Constant_3_output_0" + name: "/model/layers.9/self_attn/Constant_3" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/self_attn/Gather_output_0" + input: "/model/layers.9/self_attn/Constant_3_output_0" + output: "/model/layers.9/self_attn/Unsqueeze_output_0" + name: "/model/layers.9/self_attn/Unsqueeze" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.9/self_attn/rotary_emb/Constant_1_output_0" + input: "/model/Constant_output_0" + output: "/model/layers.9/self_attn/Gather_1_output_0" + name: "/model/layers.9/self_attn/Gather_1" + op_type: "Gather" + attribute { + name: "axis" + i: 0 + type: INT + } + } + node { + output: "/model/layers.9/self_attn/Constant_4_output_0" + name: "/model/layers.9/self_attn/Constant_4" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/self_attn/Gather_1_output_0" + input: "/model/layers.9/self_attn/Constant_4_output_0" + output: "/model/layers.9/self_attn/Unsqueeze_1_output_0" + name: "/model/layers.9/self_attn/Unsqueeze_1" + op_type: "Unsqueeze" + } + node { + input: "/model/layers.9/self_attn/Transpose_output_0" + input: "/model/layers.9/self_attn/Unsqueeze_output_0" + output: "/model/layers.9/self_attn/Mul_output_0" + name: "/model/layers.9/self_attn/Mul" + op_type: "Mul" + } + node { + output: "/model/layers.9/self_attn/Constant_5_output_0" + name: "/model/layers.9/self_attn/Constant_5" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_6_output_0" + name: "/model/layers.9/self_attn/Constant_6" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_7_output_0" + name: "/model/layers.9/self_attn/Constant_7" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: 
"/model/layers.9/self_attn/Constant_8_output_0" + name: "/model/layers.9/self_attn/Constant_8" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/self_attn/Transpose_output_0" + input: "/model/layers.9/self_attn/Constant_6_output_0" + input: "/model/layers.9/self_attn/Constant_7_output_0" + input: "/model/layers.9/self_attn/Constant_5_output_0" + input: "/model/layers.9/self_attn/Constant_8_output_0" + output: "/model/layers.9/self_attn/Slice_output_0" + name: "/model/layers.9/self_attn/Slice" + op_type: "Slice" + } + node { + output: "/model/layers.9/self_attn/Constant_9_output_0" + name: "/model/layers.9/self_attn/Constant_9" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_10_output_0" + name: "/model/layers.9/self_attn/Constant_10" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_11_output_0" + name: "/model/layers.9/self_attn/Constant_11" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_12_output_0" + name: "/model/layers.9/self_attn/Constant_12" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/self_attn/Transpose_output_0" + input: "/model/layers.9/self_attn/Constant_10_output_0" + input: "/model/layers.9/self_attn/Constant_11_output_0" + input: "/model/layers.9/self_attn/Constant_9_output_0" + input: "/model/layers.9/self_attn/Constant_12_output_0" + output: "/model/layers.9/self_attn/Slice_1_output_0" + name: "/model/layers.9/self_attn/Slice_1" + op_type: "Slice" + } + node { + input: "/model/layers.9/self_attn/Slice_1_output_0" + output: "/model/layers.9/self_attn/Neg_output_0" + name: "/model/layers.9/self_attn/Neg" + op_type: "Neg" + } + node { + input: "/model/layers.9/self_attn/Neg_output_0" + input: "/model/layers.9/self_attn/Slice_output_0" + output: "/model/layers.9/self_attn/Concat_output_0" + name: "/model/layers.9/self_attn/Concat" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.9/self_attn/Concat_output_0" + input: "/model/layers.9/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.9/self_attn/Mul_1_output_0" + name: "/model/layers.9/self_attn/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.9/self_attn/Mul_output_0" + input: "/model/layers.9/self_attn/Mul_1_output_0" + output: "/model/layers.9/self_attn/Add_output_0" + name: "/model/layers.9/self_attn/Add" + op_type: "Add" + } + node { + input: "/model/layers.9/self_attn/Transpose_1_output_0" + input: "/model/layers.9/self_attn/Unsqueeze_output_0" + output: "/model/layers.9/self_attn/Mul_2_output_0" + name: "/model/layers.9/self_attn/Mul_2" + op_type: "Mul" + } + node { + output: "/model/layers.9/self_attn/Constant_13_output_0" + name: "/model/layers.9/self_attn/Constant_13" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: 
"\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_14_output_0" + name: "/model/layers.9/self_attn/Constant_14" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\000\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_15_output_0" + name: "/model/layers.9/self_attn/Constant_15" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_16_output_0" + name: "/model/layers.9/self_attn/Constant_16" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/self_attn/Transpose_1_output_0" + input: "/model/layers.9/self_attn/Constant_14_output_0" + input: "/model/layers.9/self_attn/Constant_15_output_0" + input: "/model/layers.9/self_attn/Constant_13_output_0" + input: "/model/layers.9/self_attn/Constant_16_output_0" + output: "/model/layers.9/self_attn/Slice_2_output_0" + name: "/model/layers.9/self_attn/Slice_2" + op_type: "Slice" + } + node { + output: "/model/layers.9/self_attn/Constant_17_output_0" + name: "/model/layers.9/self_attn/Constant_17" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\003\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_18_output_0" + name: "/model/layers.9/self_attn/Constant_18" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\004\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_19_output_0" + name: "/model/layers.9/self_attn/Constant_19" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\377\377\377\377\377\377\377\177" + } + type: TENSOR + } + } + node { + output: "/model/layers.9/self_attn/Constant_20_output_0" + name: "/model/layers.9/self_attn/Constant_20" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 7 + raw_data: "\001\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/self_attn/Transpose_1_output_0" + input: "/model/layers.9/self_attn/Constant_18_output_0" + input: "/model/layers.9/self_attn/Constant_19_output_0" + input: "/model/layers.9/self_attn/Constant_17_output_0" + input: "/model/layers.9/self_attn/Constant_20_output_0" + output: "/model/layers.9/self_attn/Slice_3_output_0" + name: "/model/layers.9/self_attn/Slice_3" + op_type: "Slice" + } + node { + input: "/model/layers.9/self_attn/Slice_3_output_0" + output: "/model/layers.9/self_attn/Neg_1_output_0" + name: "/model/layers.9/self_attn/Neg_1" + op_type: "Neg" + } + node { + input: "/model/layers.9/self_attn/Neg_1_output_0" + input: "/model/layers.9/self_attn/Slice_2_output_0" + output: "/model/layers.9/self_attn/Concat_1_output_0" + name: "/model/layers.9/self_attn/Concat_1" + op_type: "Concat" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.9/self_attn/Concat_1_output_0" + input: "/model/layers.9/self_attn/Unsqueeze_1_output_0" + output: "/model/layers.9/self_attn/Mul_3_output_0" + name: "/model/layers.9/self_attn/Mul_3" + op_type: "Mul" + } + node { + input: 
"/model/layers.9/self_attn/Mul_2_output_0" + input: "/model/layers.9/self_attn/Mul_3_output_0" + output: "key_states.75" + name: "/model/layers.9/self_attn/Add_1" + op_type: "Add" + } + node { + input: "key_states.75" + output: "/model/layers.9/self_attn/Transpose_3_output_0" + name: "/model/layers.9/self_attn/Transpose_3" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 1 + ints: 3 + ints: 2 + type: INTS + } + } + node { + input: "/model/layers.9/self_attn/Add_output_0" + input: "/model/layers.9/self_attn/Transpose_3_output_0" + output: "/model/layers.9/self_attn/MatMul_output_0" + name: "/model/layers.9/self_attn/MatMul" + op_type: "MatMul" + } + node { + output: "/model/layers.9/self_attn/Constant_21_output_0" + name: "/model/layers.9/self_attn/Constant_21" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\363\0045@" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/self_attn/MatMul_output_0" + input: "/model/layers.9/self_attn/Constant_21_output_0" + output: "/model/layers.9/self_attn/Div_output_0" + name: "/model/layers.9/self_attn/Div" + op_type: "Div" + } + node { + input: "/model/layers.9/self_attn/Div_output_0" + input: "/model/Where_3_output_0" + output: "/model/layers.9/self_attn/Add_2_output_0" + name: "/model/layers.9/self_attn/Add_2" + op_type: "Add" + } + node { + input: "/model/layers.9/self_attn/Add_2_output_0" + output: "/model/layers.9/self_attn/Softmax_output_0" + name: "/model/layers.9/self_attn/Softmax" + op_type: "Softmax" + attribute { + name: "axis" + i: -1 + type: INT + } + } + node { + input: "/model/layers.9/self_attn/Softmax_output_0" + output: "/model/layers.9/self_attn/Cast_output_0" + name: "/model/layers.9/self_attn/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.9/self_attn/Cast_output_0" + output: "/model/layers.9/self_attn/Cast_1_output_0" + name: "/model/layers.9/self_attn/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "/model/layers.9/self_attn/Cast_1_output_0" + input: "value_states.35" + output: "/model/layers.9/self_attn/MatMul_1_output_0" + name: "/model/layers.9/self_attn/MatMul_1" + op_type: "MatMul" + } + node { + input: "/model/layers.9/self_attn/MatMul_1_output_0" + output: "/model/layers.9/self_attn/Transpose_4_output_0" + name: "/model/layers.9/self_attn/Transpose_4" + op_type: "Transpose" + attribute { + name: "perm" + ints: 0 + ints: 2 + ints: 1 + ints: 3 + type: INTS + } + } + node { + output: "/model/layers.9/self_attn/Constant_22_output_0" + name: "/model/layers.9/self_attn/Constant_22" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 3 + data_type: 7 + raw_data: "\002\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/self_attn/Transpose_4_output_0" + input: "/model/layers.9/self_attn/Constant_22_output_0" + output: "/model/layers.9/self_attn/Reshape_3_output_0" + name: "/model/layers.9/self_attn/Reshape_3" + op_type: "Reshape" + attribute { + name: "allowzero" + i: 0 + type: INT + } + } + node { + input: "/model/layers.9/self_attn/Reshape_3_output_0" + input: "onnx::MatMul_2150" + output: "/model/layers.9/self_attn/o_proj/MatMul_output_0" + name: "/model/layers.9/self_attn/o_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.9/input_layernorm/Cast_output_0" + input: "/model/layers.9/self_attn/o_proj/MatMul_output_0" 
+ output: "/model/layers.9/Add_output_0" + name: "/model/layers.9/Add" + op_type: "Add" + } + node { + input: "/model/layers.9/Add_output_0" + output: "/model/layers.9/post_attention_layernorm/Cast_output_0" + name: "/model/layers.9/post_attention_layernorm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/layers.9/post_attention_layernorm/Constant_output_0" + name: "/model/layers.9/post_attention_layernorm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/post_attention_layernorm/Cast_output_0" + input: "/model/layers.9/post_attention_layernorm/Constant_output_0" + output: "/model/layers.9/post_attention_layernorm/Pow_output_0" + name: "/model/layers.9/post_attention_layernorm/Pow" + op_type: "Pow" + } + node { + input: "/model/layers.9/post_attention_layernorm/Pow_output_0" + output: "/model/layers.9/post_attention_layernorm/ReduceMean_output_0" + name: "/model/layers.9/post_attention_layernorm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: "/model/layers.9/post_attention_layernorm/Constant_1_output_0" + name: "/model/layers.9/post_attention_layernorm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/layers.9/post_attention_layernorm/ReduceMean_output_0" + input: "/model/layers.9/post_attention_layernorm/Constant_1_output_0" + output: "/model/layers.9/post_attention_layernorm/Add_output_0" + name: "/model/layers.9/post_attention_layernorm/Add" + op_type: "Add" + } + node { + input: "/model/layers.9/post_attention_layernorm/Add_output_0" + output: "/model/layers.9/post_attention_layernorm/Sqrt_output_0" + name: "/model/layers.9/post_attention_layernorm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/layers.9/post_attention_layernorm/Constant_2_output_0" + name: "/model/layers.9/post_attention_layernorm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" 
+ } + type: TENSOR + } + } + node { + input: "/model/layers.9/post_attention_layernorm/Constant_2_output_0" + input: "/model/layers.9/post_attention_layernorm/Sqrt_output_0" + output: "/model/layers.9/post_attention_layernorm/Div_output_0" + name: "/model/layers.9/post_attention_layernorm/Div" + op_type: "Div" + } + node { + input: "/model/layers.9/post_attention_layernorm/Cast_output_0" + input: "/model/layers.9/post_attention_layernorm/Div_output_0" + output: "/model/layers.9/post_attention_layernorm/Mul_output_0" + name: "/model/layers.9/post_attention_layernorm/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.9/post_attention_layernorm/Mul_output_0" + output: "/model/layers.9/post_attention_layernorm/Cast_1_output_0" + name: "/model/layers.9/post_attention_layernorm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.layers.9.post_attention_layernorm.weight" + input: "/model/layers.9/post_attention_layernorm/Cast_1_output_0" + output: "/model/layers.9/post_attention_layernorm/Mul_1_output_0" + name: "/model/layers.9/post_attention_layernorm/Mul_1" + op_type: "Mul" + } + node { + input: "/model/layers.9/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2151" + output: "/model/layers.9/mlp/gate_proj/MatMul_output_0" + name: "/model/layers.9/mlp/gate_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.9/mlp/gate_proj/MatMul_output_0" + output: "/model/layers.9/mlp/act_fn/Sigmoid_output_0" + name: "/model/layers.9/mlp/act_fn/Sigmoid" + op_type: "Sigmoid" + } + node { + input: "/model/layers.9/mlp/gate_proj/MatMul_output_0" + input: "/model/layers.9/mlp/act_fn/Sigmoid_output_0" + output: "/model/layers.9/mlp/act_fn/Mul_output_0" + name: "/model/layers.9/mlp/act_fn/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.9/post_attention_layernorm/Mul_1_output_0" + input: "onnx::MatMul_2152" + output: "/model/layers.9/mlp/up_proj/MatMul_output_0" + name: "/model/layers.9/mlp/up_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.9/mlp/act_fn/Mul_output_0" + input: "/model/layers.9/mlp/up_proj/MatMul_output_0" + output: "/model/layers.9/mlp/Mul_output_0" + name: "/model/layers.9/mlp/Mul" + op_type: "Mul" + } + node { + input: "/model/layers.9/mlp/Mul_output_0" + input: "onnx::MatMul_2153" + output: "/model/layers.9/mlp/down_proj/MatMul_output_0" + name: "/model/layers.9/mlp/down_proj/MatMul" + op_type: "MatMul" + } + node { + input: "/model/layers.9/post_attention_layernorm/Cast_output_0" + input: "/model/layers.9/mlp/down_proj/MatMul_output_0" + output: "/model/layers.9/Add_1_output_0" + name: "/model/layers.9/Add_1" + op_type: "Add" + } + node { + input: "/model/layers.9/Add_1_output_0" + output: "/model/norm/Cast_output_0" + name: "/model/norm/Cast" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + output: "/model/norm/Constant_output_0" + name: "/model/norm/Constant" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\000@" + } + type: TENSOR + } + } + node { + input: "/model/norm/Cast_output_0" + input: "/model/norm/Constant_output_0" + output: "/model/norm/Pow_output_0" + name: "/model/norm/Pow" + op_type: "Pow" + } + node { + input: "/model/norm/Pow_output_0" + output: "/model/norm/ReduceMean_output_0" + name: "/model/norm/ReduceMean" + op_type: "ReduceMean" + attribute { + name: "axes" + ints: -1 + type: INTS + } + attribute { + name: "keepdims" + i: 1 + type: INT + } + } + node { + output: 
"/model/norm/Constant_1_output_0" + name: "/model/norm/Constant_1" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\2757\2065" + } + type: TENSOR + } + } + node { + input: "/model/norm/ReduceMean_output_0" + input: "/model/norm/Constant_1_output_0" + output: "/model/norm/Add_output_0" + name: "/model/norm/Add" + op_type: "Add" + } + node { + input: "/model/norm/Add_output_0" + output: "/model/norm/Sqrt_output_0" + name: "/model/norm/Sqrt" + op_type: "Sqrt" + } + node { + output: "/model/norm/Constant_2_output_0" + name: "/model/norm/Constant_2" + op_type: "Constant" + attribute { + name: "value" + t { + data_type: 1 + raw_data: "\000\000\200?" + } + type: TENSOR + } + } + node { + input: "/model/norm/Constant_2_output_0" + input: "/model/norm/Sqrt_output_0" + output: "/model/norm/Div_output_0" + name: "/model/norm/Div" + op_type: "Div" + } + node { + input: "/model/norm/Cast_output_0" + input: "/model/norm/Div_output_0" + output: "/model/norm/Mul_output_0" + name: "/model/norm/Mul" + op_type: "Mul" + } + node { + input: "/model/norm/Mul_output_0" + output: "/model/norm/Cast_1_output_0" + name: "/model/norm/Cast_1" + op_type: "Cast" + attribute { + name: "to" + i: 1 + type: INT + } + } + node { + input: "model.norm.weight" + input: "/model/norm/Cast_1_output_0" + output: "1709" + name: "/model/norm/Mul_1" + op_type: "Mul" + } + name: "main_graph" + initializer { + dims: 1024 + dims: 16 + data_type: 1 + name: "model.embed_tokens.weight" + raw_data: "" + } + initializer { + dims: 16 + data_type: 1 + name: "model.layers.0.input_layernorm.weight" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1734" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1735" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1736" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1772" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1773" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1774" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1775" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1776" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1777" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1778" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1814" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1815" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1816" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1817" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1818" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1819" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1820" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1856" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1857" + raw_data: "" + } + initializer { + dims: 16 + 
dims: 16 + data_type: 1 + name: "onnx::MatMul_1858" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1859" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1860" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1861" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1862" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1898" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1899" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1900" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1901" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1902" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1903" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1904" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1940" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1941" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1942" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1943" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1944" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1945" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1946" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1982" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1983" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1984" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1985" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1986" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1987" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_1988" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2024" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2025" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2026" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2027" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2028" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2029" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2030" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2066" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2067" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2068" + 
raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2069" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2070" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2071" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2072" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2108" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2109" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2110" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2111" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2112" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2113" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2114" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2150" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2151" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2152" + raw_data: "" + } + initializer { + dims: 16 + dims: 16 + data_type: 1 + name: "onnx::MatMul_2153" + raw_data: "" + } + input { + name: "input.1" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + } + } + } + } + input { + name: "attention_mask" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "1709" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 16 + } + } + } + } + } + output { + name: "key_states.3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "value_states" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "key_states.11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "value_states.3" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "key_states.19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "value_states.7" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "key_states.27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "value_states.11" + type { + tensor_type { + elem_type: 1 + shape { + dim { + 
dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "key_states.35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "value_states.15" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "key_states.43" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "value_states.19" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "key_states.51" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "value_states.23" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "key_states.59" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "value_states.27" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "key_states.67" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "value_states.31" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "key_states.75" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + output { + name: "value_states.35" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 2 + } + dim { + dim_value: 2 + } + dim { + dim_value: 8 + } + dim { + dim_value: 8 + } + } + } + } + } + value_info { + name: "model.embed_tokens.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1024 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "model.layers.0.input_layernorm.weight" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1734" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1735" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1736" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1772" + type { + tensor_type { + elem_type: 1 + shape { 
+ dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1773" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1774" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1775" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1776" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1777" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1778" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1814" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1815" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1816" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1817" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1818" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1819" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1820" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1856" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1857" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1858" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1859" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1860" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1861" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1862" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1898" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + 
name: "onnx::MatMul_1899" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1900" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1901" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1902" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1903" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1904" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1940" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1941" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1942" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1943" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1944" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1945" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1946" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1982" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1983" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1984" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1985" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1986" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1987" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_1988" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2024" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2025" + type { + tensor_type { + elem_type: 1 + shape { + dim { + 
dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2026" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2027" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2028" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2029" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2030" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2066" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2067" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2068" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2069" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2070" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2071" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2072" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2108" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2109" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2110" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2111" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2112" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2113" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2114" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2150" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2151" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: 
"onnx::MatMul_2152" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } + value_info { + name: "onnx::MatMul_2153" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 16 + } + dim { + dim_value: 16 + } + } + } + } + } +} +opset_import { + domain: "" + version: 17 +} diff --git a/tests/graph_view_test.py b/tests/graph_view_test.py index 719649fd..dadbcf62 100644 --- a/tests/graph_view_test.py +++ b/tests/graph_view_test.py @@ -9,11 +9,10 @@ class GraphViewTest(unittest.TestCase): - @unittest.skip("Test model is missing due to storage constraints") def test_it_can_be_serialized_as_graph_proto(self): data_path = ( pathlib.Path(__file__).parent.parent - / "testdata/torchscript_model/torchscript_model.onnx" + / "testdata/e2e_models/torchscript_model/torchscript_model.textproto" ) model_proto = onnx.load(data_path) model = ir.serde.deserialize_model(model_proto) diff --git a/tests/serde_roundtrip_test.py b/tests/serde_roundtrip_test.py index e71180b8..9bb9e3eb 100644 --- a/tests/serde_roundtrip_test.py +++ b/tests/serde_roundtrip_test.py @@ -13,20 +13,45 @@ import onnx_ir as ir import onnx_ir.testing +model_folder_path = pathlib.Path(__file__).resolve().parent.parent / "testdata" onnx_backend_test_path = pathlib.Path(onnx.backend.test.__file__).parent / "data" -model_paths = list(onnx_backend_test_path.rglob("*.onnx")) +assert model_folder_path.exists() +assert onnx_backend_test_path.exists() + +model_paths = [ + *model_folder_path.rglob("*.textproto"), + *onnx_backend_test_path.rglob("*.onnx"), +] test_args = [ (f"{model_path.parent.name}_{model_path.name}", model_path) for model_path in model_paths ] +def initialize_with_data(model: onnx.ModelProto) -> None: + for tensor_proto in model.graph.initializer: + if ( + tensor_proto.raw_data != b"" + or len(tensor_proto.float_data) != 0 + or len(tensor_proto.int32_data) != 0 + or len(tensor_proto.int64_data) != 0 + or len(tensor_proto.string_data) != 0 + or len(tensor_proto.uint64_data) != 0 + ): + continue + # This does not handle string tensors, but it's ok for our purposes + tensor = ir.from_proto(tensor_proto) + data = b"\0" * tensor.nbytes + tensor_proto.raw_data = data + + class SerdeTest(unittest.TestCase): @parameterized.parameterized.expand(test_args) def test_serialization_deserialization_produces_same_model( self, _: str, model_path: pathlib.Path ) -> None: model = onnx.load(model_path) + initialize_with_data(model) # Fix the missing graph name of some test models model.graph.name = "main_graph" onnx.checker.check_model(model) diff --git a/tools/create_test_model.py b/tools/create_test_model.py new file mode 100644 index 00000000..92bd1aa4 --- /dev/null +++ b/tools/create_test_model.py @@ -0,0 +1,45 @@ +# Copyright (c) ONNX Project Contributors +# SPDX-License-Identifier: Apache-2.0 +"""Turn an ONNX model into a textproto and strip all tensors. + +Usage: + python create_test_model.py +""" + +import argparse + +import onnx + + +def strip_tensor_data(tensor: onnx.TensorProto) -> None: + """Strip data from the tensor proto.""" + tensor.raw_data = b"" + del tensor.float_data[:] + del tensor.int32_data[:] + del tensor.int64_data[:] + del tensor.string_data[:] + del tensor.uint64_data[:] + + +def main(): + parser = argparse.ArgumentParser( + description="Convert ONNX model to textproto and strip tensor data." 
+ ) + parser.add_argument("onnx_model", type=str, help="Path to the ONNX model file.") + args = parser.parse_args() + + output_path = args.onnx_model.replace(".onnx", ".textproto") + + # Load the ONNX model + model = onnx.load(args.onnx_model, load_external_data=False) + + for tensor in model.graph.initializer: + strip_tensor_data(tensor) + + # Save the model as a textproto + onnx.save(model, output_path) + print(f"Model saved as {output_path}") + + +if __name__ == "__main__": + main()
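For reference, a minimal sketch (not part of the change set above) of how a test or script could consume one of the stripped textproto models this tool produces: load it, then zero-fill every empty initializer before use. The helper name load_stripped_model is hypothetical; the loading and zero-fill steps mirror initialize_with_data in tests/serde_roundtrip_test.py, simplified to check only raw_data, so string tensors are not handled.

# A minimal sketch, assuming it is run from the repository root.
# load_stripped_model is a hypothetical helper; the approach mirrors
# initialize_with_data from tests/serde_roundtrip_test.py.
import onnx

import onnx_ir as ir


def load_stripped_model(path: str) -> onnx.ModelProto:
    """Load a *.textproto test model and zero-fill its stripped initializers."""
    # onnx.load selects the textproto serializer from the file extension,
    # the same way the updated graph_view_test.py loads its test model.
    model = onnx.load(path)
    for tensor_proto in model.graph.initializer:
        if tensor_proto.raw_data:
            continue  # initializer already carries data
        # Use the IR tensor to size a dummy buffer from the dtype/shape.
        tensor = ir.from_proto(tensor_proto)
        tensor_proto.raw_data = b"\0" * tensor.nbytes
    return model


if __name__ == "__main__":
    proto = load_stripped_model(
        "testdata/e2e_models/torchscript_model/torchscript_model.textproto"
    )
    model = ir.serde.deserialize_model(proto)
    print(model.graph.name)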