author    Eric Dao <eric@erickhangdao.com>  2025-03-10 17:54:31 -0400
committer Eric Dao <eric@erickhangdao.com>  2025-03-10 17:54:31 -0400
commit    ab224e2e6ba65f5a369ec392f99cd8845ad06c98 (patch)
tree      a1e757e9341863ed52b8ad4c5a1c45933aab9da4 /python/openvino/demo/models/model_bmp.xml
parent    40da1752f2c8639186b72f6838aa415e854d0b1d (diff)
completed thesis (HEAD, master)
Diffstat (limited to 'python/openvino/demo/models/model_bmp.xml')
-rw-r--r--  python/openvino/demo/models/model_bmp.xml | 1782
1 file changed, 1782 insertions(+), 0 deletions(-)
diff --git a/python/openvino/demo/models/model_bmp.xml b/python/openvino/demo/models/model_bmp.xml
new file mode 100644
index 0000000..7cd188d
--- /dev/null
+++ b/python/openvino/demo/models/model_bmp.xml
@@ -0,0 +1,1782 @@
+<?xml version="1.0"?>
+<net name="main_graph" version="11">
+ <layers>
+ <layer id="0" name="input.1" type="Parameter" version="opset1">
+ <data shape="32,3,128,128" element_type="f32" />
+ <output>
+ <port id="0" precision="FP32" names="input.1">
+ <dim>32</dim>
+ <dim>3</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="1" name="onnx::Conv_174" type="Const" version="opset1">
+ <data element_type="f32" shape="16, 3, 3, 3" offset="0" size="1728" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_174">
+ <dim>16</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="2" name="/conv2x_0/conv2x_0.0/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>3</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>16</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="3" name="Reshape_44" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 16, 1, 1" offset="1728" size="64" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>16</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="4" name="/conv2x_0/conv2x_0.0/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>16</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_0/conv2x_0.0/Conv_output_0">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="5" name="/conv2x_0/conv2x_0.2/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_0/conv2x_0.2/Relu_output_0">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="6" name="onnx::Conv_177" type="Const" version="opset1">
+ <data element_type="f32" shape="16, 16, 3, 3" offset="1792" size="9216" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_177">
+ <dim>16</dim>
+ <dim>16</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="7" name="/conv2x_0/conv2x_0.3/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>16</dim>
+ <dim>16</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="8" name="Reshape_61" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 16, 1, 1" offset="11008" size="64" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>16</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="9" name="/conv2x_0/conv2x_0.3/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>16</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_0/conv2x_0.3/Conv_output_0">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="10" name="/conv2x_0/conv2x_0.5/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_0/conv2x_0.5/Relu_output_0">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="11" name="/pool/MaxPool" type="MaxPool" version="opset8">
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/pool/MaxPool_output_0">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="12" name="onnx::Conv_180" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 16, 3, 3" offset="11072" size="18432" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_180">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="13" name="/conv2x_1/conv2x_1.0/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="14" name="Reshape_79" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 32, 1, 1" offset="29504" size="128" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="15" name="/conv2x_1/conv2x_1.0/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_1/conv2x_1.0/Conv_output_0">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="16" name="/conv2x_1/conv2x_1.2/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_1/conv2x_1.2/Relu_output_0">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="17" name="onnx::Conv_183" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 3, 3" offset="29632" size="36864" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_183">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="18" name="/conv2x_1/conv2x_1.3/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="19" name="Reshape_96" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 32, 1, 1" offset="66496" size="128" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="20" name="/conv2x_1/conv2x_1.3/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_1/conv2x_1.3/Conv_output_0">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="21" name="/conv2x_1/conv2x_1.5/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_1/conv2x_1.5/Relu_output_0">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="22" name="/pool_1/MaxPool" type="MaxPool" version="opset8">
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/pool_1/MaxPool_output_0">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="23" name="onnx::Conv_186" type="Const" version="opset1">
+ <data element_type="f32" shape="64, 32, 3, 3" offset="66624" size="73728" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_186">
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="24" name="/conv2x_2/conv2x_2.0/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="25" name="Reshape_114" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 64, 1, 1" offset="140352" size="256" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="26" name="/conv2x_2/conv2x_2.0/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_2/conv2x_2.0/Conv_output_0">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="27" name="/conv2x_2/conv2x_2.2/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_2/conv2x_2.2/Relu_output_0">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="28" name="onnx::Conv_189" type="Const" version="opset1">
+ <data element_type="f32" shape="64, 64, 3, 3" offset="140608" size="147456" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_189">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="29" name="/conv2x_2/conv2x_2.3/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="30" name="Reshape_131" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 64, 1, 1" offset="288064" size="256" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="31" name="/conv2x_2/conv2x_2.3/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_2/conv2x_2.3/Conv_output_0">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="32" name="/conv2x_2/conv2x_2.5/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_2/conv2x_2.5/Relu_output_0">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="33" name="/pool_2/MaxPool" type="MaxPool" version="opset8">
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/pool_2/MaxPool_output_0">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="34" name="onnx::Conv_192" type="Const" version="opset1">
+ <data element_type="f32" shape="128, 64, 3, 3" offset="288320" size="294912" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_192">
+ <dim>128</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="35" name="/conv2x_3/conv2x_3.0/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>128</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="36" name="Reshape_149" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 128, 1, 1" offset="583232" size="512" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="37" name="/conv2x_3/conv2x_3.0/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_3/conv2x_3.0/Conv_output_0">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="38" name="/conv2x_3/conv2x_3.2/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_3/conv2x_3.2/Relu_output_0">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="39" name="onnx::Conv_195" type="Const" version="opset1">
+ <data element_type="f32" shape="128, 128, 3, 3" offset="583744" size="589824" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_195">
+ <dim>128</dim>
+ <dim>128</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="40" name="/conv2x_3/conv2x_3.3/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>128</dim>
+ <dim>128</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="41" name="Reshape_166" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 128, 1, 1" offset="1173568" size="512" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="42" name="/conv2x_3/conv2x_3.3/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_3/conv2x_3.3/Conv_output_0">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="43" name="/conv2x_3/conv2x_3.5/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_3/conv2x_3.5/Relu_output_0">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="44" name="/upsample/Constant" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="1174080" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="/upsample/Constant_output_0">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="45" name="/upsample/Resize" type="Interpolate" version="opset11">
+ <data mode="nearest" shape_calculation_mode="scales" coordinate_transformation_mode="asymmetric" nearest_mode="floor" antialias="false" pads_begin="0, 0, 0, 0" pads_end="0, 0, 0, 0" cube_coeff="-0.75" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>16</dim>
+ <dim>16</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/upsample/Resize_output_0">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="46" name="/Concat" type="Concat" version="opset1">
+ <data axis="1" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/Concat_output_0">
+ <dim>32</dim>
+ <dim>192</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="47" name="onnx::Conv_198" type="Const" version="opset1">
+ <data element_type="f32" shape="64, 192, 3, 3" offset="1174096" size="442368" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_198">
+ <dim>64</dim>
+ <dim>192</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="48" name="/conv2x_4/conv2x_4.0/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>192</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>192</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="49" name="Reshape_187" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 64, 1, 1" offset="1616464" size="256" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="50" name="/conv2x_4/conv2x_4.0/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_4/conv2x_4.0/Conv_output_0">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="51" name="/conv2x_4/conv2x_4.2/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_4/conv2x_4.2/Relu_output_0">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="52" name="onnx::Conv_201" type="Const" version="opset1">
+ <data element_type="f32" shape="64, 64, 3, 3" offset="1616720" size="147456" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_201">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="53" name="/conv2x_4/conv2x_4.3/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="54" name="Reshape_204" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 64, 1, 1" offset="1764176" size="256" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="55" name="/conv2x_4/conv2x_4.3/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_4/conv2x_4.3/Conv_output_0">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="56" name="/conv2x_4/conv2x_4.5/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_4/conv2x_4.5/Relu_output_0">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="57" name="/upsample_1/Constant" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="1174080" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="/upsample_1/Constant_output_0">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="58" name="/upsample_1/Resize" type="Interpolate" version="opset11">
+ <data mode="nearest" shape_calculation_mode="scales" coordinate_transformation_mode="asymmetric" nearest_mode="floor" antialias="false" pads_begin="0, 0, 0, 0" pads_end="0, 0, 0, 0" cube_coeff="-0.75" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>32</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/upsample_1/Resize_output_0">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="59" name="/Concat_1" type="Concat" version="opset1">
+ <data axis="1" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/Concat_1_output_0">
+ <dim>32</dim>
+ <dim>96</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="60" name="onnx::Conv_204" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 96, 3, 3" offset="1764432" size="110592" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_204">
+ <dim>32</dim>
+ <dim>96</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="61" name="/conv2x_5/conv2x_5.0/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>96</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>96</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="62" name="Reshape_225" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 32, 1, 1" offset="1875024" size="128" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="63" name="/conv2x_5/conv2x_5.0/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_5/conv2x_5.0/Conv_output_0">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="64" name="/conv2x_5/conv2x_5.2/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_5/conv2x_5.2/Relu_output_0">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="65" name="onnx::Conv_207" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 3, 3" offset="1875152" size="36864" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_207">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="66" name="/conv2x_5/conv2x_5.3/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="67" name="Reshape_242" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 32, 1, 1" offset="1912016" size="128" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="68" name="/conv2x_5/conv2x_5.3/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_5/conv2x_5.3/Conv_output_0">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="69" name="/conv2x_5/conv2x_5.5/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_5/conv2x_5.5/Relu_output_0">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="70" name="/upsample_2/Constant" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="1174080" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="/upsample_2/Constant_output_0">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="71" name="/upsample_2/Resize" type="Interpolate" version="opset11">
+ <data mode="nearest" shape_calculation_mode="scales" coordinate_transformation_mode="asymmetric" nearest_mode="floor" antialias="false" pads_begin="0, 0, 0, 0" pads_end="0, 0, 0, 0" cube_coeff="-0.75" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/upsample_2/Resize_output_0">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="72" name="/Concat_2" type="Concat" version="opset1">
+ <data axis="1" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/Concat_2_output_0">
+ <dim>32</dim>
+ <dim>48</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="73" name="onnx::Conv_210" type="Const" version="opset1">
+ <data element_type="f32" shape="16, 48, 3, 3" offset="1912144" size="27648" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_210">
+ <dim>16</dim>
+ <dim>48</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="74" name="/conv2x_6/conv2x_6.0/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>48</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>16</dim>
+ <dim>48</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="75" name="Reshape_263" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 16, 1, 1" offset="1939792" size="64" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>16</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="76" name="/conv2x_6/conv2x_6.0/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>16</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_6/conv2x_6.0/Conv_output_0">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="77" name="/conv2x_6/conv2x_6.2/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_6/conv2x_6.2/Relu_output_0">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="78" name="onnx::Conv_213" type="Const" version="opset1">
+ <data element_type="f32" shape="16, 16, 3, 3" offset="1939856" size="9216" />
+ <output>
+ <port id="0" precision="FP32" names="onnx::Conv_213">
+ <dim>16</dim>
+ <dim>16</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="79" name="/conv2x_6/conv2x_6.3/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>16</dim>
+ <dim>16</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="80" name="Reshape_280" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 16, 1, 1" offset="1949072" size="64" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>16</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="81" name="/conv2x_6/conv2x_6.3/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>16</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/conv2x_6/conv2x_6.3/Conv_output_0">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="82" name="/conv2x_6/conv2x_6.5/Relu" type="ReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/conv2x_6/conv2x_6.5/Relu_output_0">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="83" name="out.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 16, 1, 1" offset="1949136" size="64" />
+ <output>
+ <port id="0" precision="FP32" names="out.weight">
+ <dim>1</dim>
+ <dim>16</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="84" name="/out/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>16</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>16</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="85" name="Reshape_297" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 1, 1, 1" offset="1949200" size="4" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="86" name="/out/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/out/Conv_output_0">
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="87" name="Constant_1585" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="1949204" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="88" name="Constant_1588" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="1949220" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="89" name="Constant_1591" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="1949236" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="90" name="/Slice" type="StridedSlice" version="opset1">
+ <data begin_mask="1, 0" end_mask="1, 0" new_axis_mask="" shrink_axis_mask="" ellipsis_mask="" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>3</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>2</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>2</dim>
+ </port>
+ <port id="3" precision="I64">
+ <dim>2</dim>
+ </port>
+ </input>
+ <output>
+ <port id="4" precision="FP32" names="/Slice_output_0">
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="91" name="172" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="172">
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="92" name="172/sink_port_0" type="Result" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>32</dim>
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ </layer>
+ </layers>
+ <edges>
+ <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
+ <edge from-layer="0" from-port="0" to-layer="90" to-port="0" />
+ <edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
+ <edge from-layer="2" from-port="2" to-layer="4" to-port="0" />
+ <edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
+ <edge from-layer="4" from-port="2" to-layer="5" to-port="0" />
+ <edge from-layer="5" from-port="1" to-layer="7" to-port="0" />
+ <edge from-layer="6" from-port="0" to-layer="7" to-port="1" />
+ <edge from-layer="7" from-port="2" to-layer="9" to-port="0" />
+ <edge from-layer="8" from-port="0" to-layer="9" to-port="1" />
+ <edge from-layer="9" from-port="2" to-layer="10" to-port="0" />
+ <edge from-layer="10" from-port="1" to-layer="11" to-port="0" />
+ <edge from-layer="10" from-port="1" to-layer="72" to-port="1" />
+ <edge from-layer="11" from-port="1" to-layer="13" to-port="0" />
+ <edge from-layer="12" from-port="0" to-layer="13" to-port="1" />
+ <edge from-layer="13" from-port="2" to-layer="15" to-port="0" />
+ <edge from-layer="14" from-port="0" to-layer="15" to-port="1" />
+ <edge from-layer="15" from-port="2" to-layer="16" to-port="0" />
+ <edge from-layer="16" from-port="1" to-layer="18" to-port="0" />
+ <edge from-layer="17" from-port="0" to-layer="18" to-port="1" />
+ <edge from-layer="18" from-port="2" to-layer="20" to-port="0" />
+ <edge from-layer="19" from-port="0" to-layer="20" to-port="1" />
+ <edge from-layer="20" from-port="2" to-layer="21" to-port="0" />
+ <edge from-layer="21" from-port="1" to-layer="59" to-port="1" />
+ <edge from-layer="21" from-port="1" to-layer="22" to-port="0" />
+ <edge from-layer="22" from-port="1" to-layer="24" to-port="0" />
+ <edge from-layer="23" from-port="0" to-layer="24" to-port="1" />
+ <edge from-layer="24" from-port="2" to-layer="26" to-port="0" />
+ <edge from-layer="25" from-port="0" to-layer="26" to-port="1" />
+ <edge from-layer="26" from-port="2" to-layer="27" to-port="0" />
+ <edge from-layer="27" from-port="1" to-layer="29" to-port="0" />
+ <edge from-layer="28" from-port="0" to-layer="29" to-port="1" />
+ <edge from-layer="29" from-port="2" to-layer="31" to-port="0" />
+ <edge from-layer="30" from-port="0" to-layer="31" to-port="1" />
+ <edge from-layer="31" from-port="2" to-layer="32" to-port="0" />
+ <edge from-layer="32" from-port="1" to-layer="46" to-port="1" />
+ <edge from-layer="32" from-port="1" to-layer="33" to-port="0" />
+ <edge from-layer="33" from-port="1" to-layer="35" to-port="0" />
+ <edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
+ <edge from-layer="35" from-port="2" to-layer="37" to-port="0" />
+ <edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
+ <edge from-layer="37" from-port="2" to-layer="38" to-port="0" />
+ <edge from-layer="38" from-port="1" to-layer="40" to-port="0" />
+ <edge from-layer="39" from-port="0" to-layer="40" to-port="1" />
+ <edge from-layer="40" from-port="2" to-layer="42" to-port="0" />
+ <edge from-layer="41" from-port="0" to-layer="42" to-port="1" />
+ <edge from-layer="42" from-port="2" to-layer="43" to-port="0" />
+ <edge from-layer="43" from-port="1" to-layer="45" to-port="0" />
+ <edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
+ <edge from-layer="45" from-port="2" to-layer="46" to-port="0" />
+ <edge from-layer="46" from-port="2" to-layer="48" to-port="0" />
+ <edge from-layer="47" from-port="0" to-layer="48" to-port="1" />
+ <edge from-layer="48" from-port="2" to-layer="50" to-port="0" />
+ <edge from-layer="49" from-port="0" to-layer="50" to-port="1" />
+ <edge from-layer="50" from-port="2" to-layer="51" to-port="0" />
+ <edge from-layer="51" from-port="1" to-layer="53" to-port="0" />
+ <edge from-layer="52" from-port="0" to-layer="53" to-port="1" />
+ <edge from-layer="53" from-port="2" to-layer="55" to-port="0" />
+ <edge from-layer="54" from-port="0" to-layer="55" to-port="1" />
+ <edge from-layer="55" from-port="2" to-layer="56" to-port="0" />
+ <edge from-layer="56" from-port="1" to-layer="58" to-port="0" />
+ <edge from-layer="57" from-port="0" to-layer="58" to-port="1" />
+ <edge from-layer="58" from-port="2" to-layer="59" to-port="0" />
+ <edge from-layer="59" from-port="2" to-layer="61" to-port="0" />
+ <edge from-layer="60" from-port="0" to-layer="61" to-port="1" />
+ <edge from-layer="61" from-port="2" to-layer="63" to-port="0" />
+ <edge from-layer="62" from-port="0" to-layer="63" to-port="1" />
+ <edge from-layer="63" from-port="2" to-layer="64" to-port="0" />
+ <edge from-layer="64" from-port="1" to-layer="66" to-port="0" />
+ <edge from-layer="65" from-port="0" to-layer="66" to-port="1" />
+ <edge from-layer="66" from-port="2" to-layer="68" to-port="0" />
+ <edge from-layer="67" from-port="0" to-layer="68" to-port="1" />
+ <edge from-layer="68" from-port="2" to-layer="69" to-port="0" />
+ <edge from-layer="69" from-port="1" to-layer="71" to-port="0" />
+ <edge from-layer="70" from-port="0" to-layer="71" to-port="1" />
+ <edge from-layer="71" from-port="2" to-layer="72" to-port="0" />
+ <edge from-layer="72" from-port="2" to-layer="74" to-port="0" />
+ <edge from-layer="73" from-port="0" to-layer="74" to-port="1" />
+ <edge from-layer="74" from-port="2" to-layer="76" to-port="0" />
+ <edge from-layer="75" from-port="0" to-layer="76" to-port="1" />
+ <edge from-layer="76" from-port="2" to-layer="77" to-port="0" />
+ <edge from-layer="77" from-port="1" to-layer="79" to-port="0" />
+ <edge from-layer="78" from-port="0" to-layer="79" to-port="1" />
+ <edge from-layer="79" from-port="2" to-layer="81" to-port="0" />
+ <edge from-layer="80" from-port="0" to-layer="81" to-port="1" />
+ <edge from-layer="81" from-port="2" to-layer="82" to-port="0" />
+ <edge from-layer="82" from-port="1" to-layer="84" to-port="0" />
+ <edge from-layer="83" from-port="0" to-layer="84" to-port="1" />
+ <edge from-layer="84" from-port="2" to-layer="86" to-port="0" />
+ <edge from-layer="85" from-port="0" to-layer="86" to-port="1" />
+ <edge from-layer="86" from-port="2" to-layer="91" to-port="0" />
+ <edge from-layer="87" from-port="0" to-layer="90" to-port="1" />
+ <edge from-layer="88" from-port="0" to-layer="90" to-port="2" />
+ <edge from-layer="89" from-port="0" to-layer="90" to-port="3" />
+ <edge from-layer="90" from-port="4" to-layer="91" to-port="1" />
+ <edge from-layer="91" from-port="2" to-layer="92" to-port="0" />
+ </edges>
+ <rt_info>
+ <MO_version value="2024.6.0-17404-4c0f47d2335-releases/2024/6" />
+ <Runtime_version value="2023.3.0-13775-ceeafaf64f3-releases/2023/3" />
+ <conversion_parameters>
+ <input_model value="DIR/model_bmp.onnx" />
+ <is_python_api_used value="False" />
+ </conversion_parameters>
+ <legacy_frontend value="False" />
+ </rt_info>
+</net>
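
The committed IR (version 11) describes a small U-Net-style CNN: three conv/ReLU stages with 2x2 MaxPools taking the 128x128 input down to 16x16, three nearest-neighbour Interpolate upsampling stages with skip Concats back up to 128x128, and a final 1x1 convolution whose single-channel output is added to a one-channel StridedSlice of the input before the Result node. Per the rt_info block, it was converted from an ONNX export (DIR/model_bmp.onnx) with Model Optimizer 2024.6 via the command-line tool rather than the Python API.

A minimal inference sketch against this file, assuming the companion model_bmp.bin weights (referenced by the offset/size attributes) sits beside the XML and a recent openvino Python package (2023.1 or later) is installed; the random batch below is a stand-in for real data:

import numpy as np
import openvino as ov

core = ov.Core()
# read_model() loads model_bmp.bin from the same directory automatically
model = core.read_model("python/openvino/demo/models/model_bmp.xml")
compiled = core.compile_model(model, "CPU")

# the graph is frozen at a static f32 input of shape [32, 3, 128, 128] (NCHW)
batch = np.random.rand(32, 3, 128, 128).astype(np.float32)
out = compiled([batch])[compiled.output(0)]
print(out.shape)  # (32, 1, 128, 128): the network's map plus the sliced input channel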