<?xml version="1.0" ?>
2<net name="ResMobNet_v4 (LReLU) with single SSD head" version="10">
3 <layers>
4 <layer id="0" name="3284328819830" type="Const" version="opset1">
5 <data element_type="f16" offset="0" shape="" size="2"/>
6 <output>
7 <port id="0" precision="FP16"/>
8 </output>
9 </layer>
10 <layer id="1" name="3285328922773" type="Const" version="opset1">
11 <data element_type="f16" offset="2" shape="" size="2"/>
12 <output>
13 <port id="0" precision="FP16"/>
14 </output>
15 </layer>
16 <layer id="2" name="3286329020898" type="Const" version="opset1">
17 <data element_type="f16" offset="0" shape="" size="2"/>
18 <output>
19 <port id="0" precision="FP16"/>
20 </output>
21 </layer>
22 <layer id="3" name="3287329120439" type="Const" version="opset1">
23 <data element_type="f16" offset="2" shape="" size="2"/>
24 <output>
25 <port id="0" precision="FP16"/>
26 </output>
27 </layer>
28 <layer id="4" name="4284428822554" type="Const" version="opset1">
29 <data element_type="f16" offset="4" shape="" size="2"/>
30 <output>
31 <port id="0" precision="FP16"/>
32 </output>
33 </layer>
34 <layer id="5" name="4285428922101" type="Const" version="opset1">
35 <data element_type="f16" offset="6" shape="" size="2"/>
36 <output>
37 <port id="0" precision="FP16"/>
38 </output>
39 </layer>
40 <layer id="6" name="4286429021780" type="Const" version="opset1">
41 <data element_type="f16" offset="4" shape="" size="2"/>
42 <output>
43 <port id="0" precision="FP16"/>
44 </output>
45 </layer>
46 <layer id="7" name="4287429120886" type="Const" version="opset1">
47 <data element_type="f16" offset="6" shape="" size="2"/>
48 <output>
49 <port id="0" precision="FP16"/>
50 </output>
51 </layer>
52 <layer id="8" name="5444544820604" type="Const" version="opset1">
53 <data element_type="f16" offset="8" shape="" size="2"/>
54 <output>
55 <port id="0" precision="FP16"/>
56 </output>
57 </layer>
58 <layer id="9" name="5445544919815" type="Const" version="opset1">
59 <data element_type="f16" offset="10" shape="" size="2"/>
60 <output>
61 <port id="0" precision="FP16"/>
62 </output>
63 </layer>
64 <layer id="10" name="5446545020175" type="Const" version="opset1">
65 <data element_type="f16" offset="8" shape="" size="2"/>
66 <output>
67 <port id="0" precision="FP16"/>
68 </output>
69 </layer>
70 <layer id="11" name="5447545122041" type="Const" version="opset1">
71 <data element_type="f16" offset="10" shape="" size="2"/>
72 <output>
73 <port id="0" precision="FP16"/>
74 </output>
75 </layer>
76 <layer id="12" name="2964296819803" type="Const" version="opset1">
77 <data element_type="f16" offset="12" shape="" size="2"/>
78 <output>
79 <port id="0" precision="FP16"/>
80 </output>
81 </layer>
82 <layer id="13" name="2965296922185" type="Const" version="opset1">
83 <data element_type="f16" offset="14" shape="" size="2"/>
84 <output>
85 <port id="0" precision="FP16"/>
86 </output>
87 </layer>
88 <layer id="14" name="2966297020562" type="Const" version="opset1">
89 <data element_type="f16" offset="12" shape="" size="2"/>
90 <output>
91 <port id="0" precision="FP16"/>
92 </output>
93 </layer>
94 <layer id="15" name="2967297122368" type="Const" version="opset1">
95 <data element_type="f16" offset="14" shape="" size="2"/>
96 <output>
97 <port id="0" precision="FP16"/>
98 </output>
99 </layer>
100 <layer id="16" name="5084508822452" type="Const" version="opset1">
101 <data element_type="f16" offset="16" shape="" size="2"/>
102 <output>
103 <port id="0" precision="FP16"/>
104 </output>
105 </layer>
106 <layer id="17" name="5085508920694" type="Const" version="opset1">
107 <data element_type="f16" offset="18" shape="" size="2"/>
108 <output>
109 <port id="0" precision="FP16"/>
110 </output>
111 </layer>
112 <layer id="18" name="5086509022689" type="Const" version="opset1">
113 <data element_type="f16" offset="16" shape="" size="2"/>
114 <output>
115 <port id="0" precision="FP16"/>
116 </output>
117 </layer>
118 <layer id="19" name="5087509122584" type="Const" version="opset1">
119 <data element_type="f16" offset="18" shape="" size="2"/>
120 <output>
121 <port id="0" precision="FP16"/>
122 </output>
123 </layer>
124 <layer id="20" name="2704270821135" type="Const" version="opset1">
125 <data element_type="f16" offset="20" shape="" size="2"/>
126 <output>
127 <port id="0" precision="FP16"/>
128 </output>
129 </layer>
130 <layer id="21" name="2705270921897" type="Const" version="opset1">
131 <data element_type="f16" offset="22" shape="" size="2"/>
132 <output>
133 <port id="0" precision="FP16"/>
134 </output>
135 </layer>
136 <layer id="22" name="2706271019395" type="Const" version="opset1">
137 <data element_type="f16" offset="20" shape="" size="2"/>
138 <output>
139 <port id="0" precision="FP16"/>
140 </output>
141 </layer>
142 <layer id="23" name="2707271122200" type="Const" version="opset1">
143 <data element_type="f16" offset="22" shape="" size="2"/>
144 <output>
145 <port id="0" precision="FP16"/>
146 </output>
147 </layer>
148 <layer id="24" name="3644364819641" type="Const" version="opset1">
149 <data element_type="f16" offset="24" shape="" size="2"/>
150 <output>
151 <port id="0" precision="FP16"/>
152 </output>
153 </layer>
154 <layer id="25" name="3645364922491" type="Const" version="opset1">
155 <data element_type="f16" offset="26" shape="" size="2"/>
156 <output>
157 <port id="0" precision="FP16"/>
158 </output>
159 </layer>
160 <layer id="26" name="3646365020877" type="Const" version="opset1">
161 <data element_type="f16" offset="24" shape="" size="2"/>
162 <output>
163 <port id="0" precision="FP16"/>
164 </output>
165 </layer>
166 <layer id="27" name="3647365121072" type="Const" version="opset1">
167 <data element_type="f16" offset="26" shape="" size="2"/>
168 <output>
169 <port id="0" precision="FP16"/>
170 </output>
171 </layer>
172 <layer id="28" name="4264426819806" type="Const" version="opset1">
173 <data element_type="f16" offset="28" shape="" size="2"/>
174 <output>
175 <port id="0" precision="FP16"/>
176 </output>
177 </layer>
178 <layer id="29" name="4265426922113" type="Const" version="opset1">
179 <data element_type="f16" offset="30" shape="" size="2"/>
180 <output>
181 <port id="0" precision="FP16"/>
182 </output>
183 </layer>
184 <layer id="30" name="4266427022503" type="Const" version="opset1">
185 <data element_type="f16" offset="28" shape="" size="2"/>
186 <output>
187 <port id="0" precision="FP16"/>
188 </output>
189 </layer>
190 <layer id="31" name="4267427119686" type="Const" version="opset1">
191 <data element_type="f16" offset="30" shape="" size="2"/>
192 <output>
193 <port id="0" precision="FP16"/>
194 </output>
195 </layer>
196 <layer id="32" name="3544354821201" type="Const" version="opset1">
197 <data element_type="f16" offset="32" shape="" size="2"/>
198 <output>
199 <port id="0" precision="FP16"/>
200 </output>
201 </layer>
202 <layer id="33" name="3545354922467" type="Const" version="opset1">
203 <data element_type="f16" offset="34" shape="" size="2"/>
204 <output>
205 <port id="0" precision="FP16"/>
206 </output>
207 </layer>
208 <layer id="34" name="3546355019728" type="Const" version="opset1">
209 <data element_type="f16" offset="32" shape="" size="2"/>
210 <output>
211 <port id="0" precision="FP16"/>
212 </output>
213 </layer>
214 <layer id="35" name="3547355122629" type="Const" version="opset1">
215 <data element_type="f16" offset="34" shape="" size="2"/>
216 <output>
217 <port id="0" precision="FP16"/>
218 </output>
219 </layer>
220 <layer id="36" name="4184418820538" type="Const" version="opset1">
221 <data element_type="f16" offset="36" shape="" size="2"/>
222 <output>
223 <port id="0" precision="FP16"/>
224 </output>
225 </layer>
226 <layer id="37" name="4185418922371" type="Const" version="opset1">
227 <data element_type="f16" offset="38" shape="" size="2"/>
228 <output>
229 <port id="0" precision="FP16"/>
230 </output>
231 </layer>
232 <layer id="38" name="4186419021999" type="Const" version="opset1">
233 <data element_type="f16" offset="36" shape="" size="2"/>
234 <output>
235 <port id="0" precision="FP16"/>
236 </output>
237 </layer>
238 <layer id="39" name="4187419121210" type="Const" version="opset1">
239 <data element_type="f16" offset="38" shape="" size="2"/>
240 <output>
241 <port id="0" precision="FP16"/>
242 </output>
243 </layer>
244 <layer id="40" name="2684268821042" type="Const" version="opset1">
245 <data element_type="f16" offset="40" shape="" size="2"/>
246 <output>
247 <port id="0" precision="FP16"/>
248 </output>
249 </layer>
250 <layer id="41" name="2685268921624" type="Const" version="opset1">
251 <data element_type="f16" offset="42" shape="" size="2"/>
252 <output>
253 <port id="0" precision="FP16"/>
254 </output>
255 </layer>
256 <layer id="42" name="2686269019476" type="Const" version="opset1">
257 <data element_type="f16" offset="40" shape="" size="2"/>
258 <output>
259 <port id="0" precision="FP16"/>
260 </output>
261 </layer>
262 <layer id="43" name="2687269119869" type="Const" version="opset1">
263 <data element_type="f16" offset="42" shape="" size="2"/>
264 <output>
265 <port id="0" precision="FP16"/>
266 </output>
267 </layer>
268 <layer id="44" name="3964396821630" type="Const" version="opset1">
269 <data element_type="f16" offset="44" shape="" size="2"/>
270 <output>
271 <port id="0" precision="FP16"/>
272 </output>
273 </layer>
274 <layer id="45" name="3965396921000" type="Const" version="opset1">
275 <data element_type="f16" offset="46" shape="" size="2"/>
276 <output>
277 <port id="0" precision="FP16"/>
278 </output>
279 </layer>
280 <layer id="46" name="3966397020868" type="Const" version="opset1">
281 <data element_type="f16" offset="44" shape="" size="2"/>
282 <output>
283 <port id="0" precision="FP16"/>
284 </output>
285 </layer>
286 <layer id="47" name="3967397119716" type="Const" version="opset1">
287 <data element_type="f16" offset="46" shape="" size="2"/>
288 <output>
289 <port id="0" precision="FP16"/>
290 </output>
291 </layer>
292 <layer id="48" name="3104310821618" type="Const" version="opset1">
293 <data element_type="f16" offset="48" shape="" size="2"/>
294 <output>
295 <port id="0" precision="FP16"/>
296 </output>
297 </layer>
298 <layer id="49" name="3105310921432" type="Const" version="opset1">
299 <data element_type="f16" offset="50" shape="" size="2"/>
300 <output>
301 <port id="0" precision="FP16"/>
302 </output>
303 </layer>
304 <layer id="50" name="3106311020412" type="Const" version="opset1">
305 <data element_type="f16" offset="48" shape="" size="2"/>
306 <output>
307 <port id="0" precision="FP16"/>
308 </output>
309 </layer>
310 <layer id="51" name="3107311119836" type="Const" version="opset1">
311 <data element_type="f16" offset="50" shape="" size="2"/>
312 <output>
313 <port id="0" precision="FP16"/>
314 </output>
315 </layer>
316 <layer id="52" name="3424342820193" type="Const" version="opset1">
317 <data element_type="f16" offset="52" shape="" size="2"/>
318 <output>
319 <port id="0" precision="FP16"/>
320 </output>
321 </layer>
322 <layer id="53" name="3425342920448" type="Const" version="opset1">
323 <data element_type="f16" offset="54" shape="" size="2"/>
324 <output>
325 <port id="0" precision="FP16"/>
326 </output>
327 </layer>
328 <layer id="54" name="3426343022800" type="Const" version="opset1">
329 <data element_type="f16" offset="52" shape="" size="2"/>
330 <output>
331 <port id="0" precision="FP16"/>
332 </output>
333 </layer>
334 <layer id="55" name="3427343119422" type="Const" version="opset1">
335 <data element_type="f16" offset="54" shape="" size="2"/>
336 <output>
337 <port id="0" precision="FP16"/>
338 </output>
339 </layer>
340 <layer id="56" name="4464446821750" type="Const" version="opset1">
341 <data element_type="f16" offset="56" shape="" size="2"/>
342 <output>
343 <port id="0" precision="FP16"/>
344 </output>
345 </layer>
346 <layer id="57" name="4465446919674" type="Const" version="opset1">
347 <data element_type="f16" offset="58" shape="" size="2"/>
348 <output>
349 <port id="0" precision="FP16"/>
350 </output>
351 </layer>
352 <layer id="58" name="4466447022569" type="Const" version="opset1">
353 <data element_type="f16" offset="56" shape="" size="2"/>
354 <output>
355 <port id="0" precision="FP16"/>
356 </output>
357 </layer>
358 <layer id="59" name="4467447120724" type="Const" version="opset1">
359 <data element_type="f16" offset="58" shape="" size="2"/>
360 <output>
361 <port id="0" precision="FP16"/>
362 </output>
363 </layer>
364 <layer id="60" name="4504450820118" type="Const" version="opset1">
365 <data element_type="f16" offset="60" shape="" size="2"/>
366 <output>
367 <port id="0" precision="FP16"/>
368 </output>
369 </layer>
370 <layer id="61" name="4505450922344" type="Const" version="opset1">
371 <data element_type="f16" offset="62" shape="" size="2"/>
372 <output>
373 <port id="0" precision="FP16"/>
374 </output>
375 </layer>
376 <layer id="62" name="4506451022254" type="Const" version="opset1">
377 <data element_type="f16" offset="60" shape="" size="2"/>
378 <output>
379 <port id="0" precision="FP16"/>
380 </output>
381 </layer>
382 <layer id="63" name="4507451120142" type="Const" version="opset1">
383 <data element_type="f16" offset="62" shape="" size="2"/>
384 <output>
385 <port id="0" precision="FP16"/>
386 </output>
387 </layer>
388 <layer id="64" name="4784478821858" type="Const" version="opset1">
389 <data element_type="f16" offset="64" shape="" size="2"/>
390 <output>
391 <port id="0" precision="FP16"/>
392 </output>
393 </layer>
394 <layer id="65" name="4785478921141" type="Const" version="opset1">
395 <data element_type="f16" offset="66" shape="" size="2"/>
396 <output>
397 <port id="0" precision="FP16"/>
398 </output>
399 </layer>
400 <layer id="66" name="4786479020904" type="Const" version="opset1">
401 <data element_type="f16" offset="64" shape="" size="2"/>
402 <output>
403 <port id="0" precision="FP16"/>
404 </output>
405 </layer>
406 <layer id="67" name="4787479122665" type="Const" version="opset1">
407 <data element_type="f16" offset="66" shape="" size="2"/>
408 <output>
409 <port id="0" precision="FP16"/>
410 </output>
411 </layer>
412 <layer id="68" name="4664466822413" type="Const" version="opset1">
413 <data element_type="f16" offset="68" shape="" size="2"/>
414 <output>
415 <port id="0" precision="FP16"/>
416 </output>
417 </layer>
418 <layer id="69" name="4665466922935" type="Const" version="opset1">
419 <data element_type="f16" offset="70" shape="" size="2"/>
420 <output>
421 <port id="0" precision="FP16"/>
422 </output>
423 </layer>
424 <layer id="70" name="4666467021552" type="Const" version="opset1">
425 <data element_type="f16" offset="68" shape="" size="2"/>
426 <output>
427 <port id="0" precision="FP16"/>
428 </output>
429 </layer>
430 <layer id="71" name="4667467121786" type="Const" version="opset1">
431 <data element_type="f16" offset="70" shape="" size="2"/>
432 <output>
433 <port id="0" precision="FP16"/>
434 </output>
435 </layer>
436 <layer id="72" name="3824382821882" type="Const" version="opset1">
437 <data element_type="f16" offset="72" shape="" size="2"/>
438 <output>
439 <port id="0" precision="FP16"/>
440 </output>
441 </layer>
442 <layer id="73" name="3825382921681" type="Const" version="opset1">
443 <data element_type="f16" offset="74" shape="" size="2"/>
444 <output>
445 <port id="0" precision="FP16"/>
446 </output>
447 </layer>
448 <layer id="74" name="3826383022695" type="Const" version="opset1">
449 <data element_type="f16" offset="72" shape="" size="2"/>
450 <output>
451 <port id="0" precision="FP16"/>
452 </output>
453 </layer>
454 <layer id="75" name="3827383122338" type="Const" version="opset1">
455 <data element_type="f16" offset="74" shape="" size="2"/>
456 <output>
457 <port id="0" precision="FP16"/>
458 </output>
459 </layer>
460 <layer id="76" name="4024402821510" type="Const" version="opset1">
461 <data element_type="f16" offset="76" shape="" size="2"/>
462 <output>
463 <port id="0" precision="FP16"/>
464 </output>
465 </layer>
466 <layer id="77" name="4025402920178" type="Const" version="opset1">
467 <data element_type="f16" offset="78" shape="" size="2"/>
468 <output>
469 <port id="0" precision="FP16"/>
470 </output>
471 </layer>
472 <layer id="78" name="4026403019695" type="Const" version="opset1">
473 <data element_type="f16" offset="76" shape="" size="2"/>
474 <output>
475 <port id="0" precision="FP16"/>
476 </output>
477 </layer>
478 <layer id="79" name="4027403121276" type="Const" version="opset1">
479 <data element_type="f16" offset="78" shape="" size="2"/>
480 <output>
481 <port id="0" precision="FP16"/>
482 </output>
483 </layer>
484 <layer id="80" name="2624262820919" type="Const" version="opset1">
485 <data element_type="f16" offset="80" shape="" size="2"/>
486 <output>
487 <port id="0" precision="FP16"/>
488 </output>
489 </layer>
490 <layer id="81" name="2625262919911" type="Const" version="opset1">
491 <data element_type="f16" offset="82" shape="" size="2"/>
492 <output>
493 <port id="0" precision="FP16"/>
494 </output>
495 </layer>
496 <layer id="82" name="2626263020259" type="Const" version="opset1">
497 <data element_type="f16" offset="80" shape="" size="2"/>
498 <output>
499 <port id="0" precision="FP16"/>
500 </output>
501 </layer>
502 <layer id="83" name="2627263122752" type="Const" version="opset1">
503 <data element_type="f16" offset="82" shape="" size="2"/>
504 <output>
505 <port id="0" precision="FP16"/>
506 </output>
507 </layer>
508 <layer id="84" name="3404340820913" type="Const" version="opset1">
509 <data element_type="f16" offset="84" shape="" size="2"/>
510 <output>
511 <port id="0" precision="FP16"/>
512 </output>
513 </layer>
514 <layer id="85" name="3405340921429" type="Const" version="opset1">
515 <data element_type="f16" offset="86" shape="" size="2"/>
516 <output>
517 <port id="0" precision="FP16"/>
518 </output>
519 </layer>
520 <layer id="86" name="3406341021609" type="Const" version="opset1">
521 <data element_type="f16" offset="84" shape="" size="2"/>
522 <output>
523 <port id="0" precision="FP16"/>
524 </output>
525 </layer>
526 <layer id="87" name="3407341122446" type="Const" version="opset1">
527 <data element_type="f16" offset="86" shape="" size="2"/>
528 <output>
529 <port id="0" precision="FP16"/>
530 </output>
531 </layer>
532 <layer id="88" name="4404440822530" type="Const" version="opset1">
533 <data element_type="f16" offset="88" shape="" size="2"/>
534 <output>
535 <port id="0" precision="FP16"/>
536 </output>
537 </layer>
538 <layer id="89" name="4405440921777" type="Const" version="opset1">
539 <data element_type="f16" offset="90" shape="" size="2"/>
540 <output>
541 <port id="0" precision="FP16"/>
542 </output>
543 </layer>
544 <layer id="90" name="4406441021564" type="Const" version="opset1">
545 <data element_type="f16" offset="88" shape="" size="2"/>
546 <output>
547 <port id="0" precision="FP16"/>
548 </output>
549 </layer>
550 <layer id="91" name="4407441121870" type="Const" version="opset1">
551 <data element_type="f16" offset="90" shape="" size="2"/>
552 <output>
553 <port id="0" precision="FP16"/>
554 </output>
555 </layer>
556 <layer id="92" name="5204520822269" type="Const" version="opset1">
557 <data element_type="f16" offset="92" shape="" size="2"/>
558 <output>
559 <port id="0" precision="FP16"/>
560 </output>
561 </layer>
562 <layer id="93" name="5205520922212" type="Const" version="opset1">
563 <data element_type="f16" offset="94" shape="" size="2"/>
564 <output>
565 <port id="0" precision="FP16"/>
566 </output>
567 </layer>
568 <layer id="94" name="5206521022803" type="Const" version="opset1">
569 <data element_type="f16" offset="92" shape="" size="2"/>
570 <output>
571 <port id="0" precision="FP16"/>
572 </output>
573 </layer>
574 <layer id="95" name="5207521121507" type="Const" version="opset1">
575 <data element_type="f16" offset="94" shape="" size="2"/>
576 <output>
577 <port id="0" precision="FP16"/>
578 </output>
579 </layer>
580 <layer id="96" name="4944494822152" type="Const" version="opset1">
581 <data element_type="f16" offset="96" shape="" size="2"/>
582 <output>
583 <port id="0" precision="FP16"/>
584 </output>
585 </layer>
586 <layer id="97" name="4945494919998" type="Const" version="opset1">
587 <data element_type="f16" offset="98" shape="" size="2"/>
588 <output>
589 <port id="0" precision="FP16"/>
590 </output>
591 </layer>
592 <layer id="98" name="4946495021855" type="Const" version="opset1">
593 <data element_type="f16" offset="96" shape="" size="2"/>
594 <output>
595 <port id="0" precision="FP16"/>
596 </output>
597 </layer>
598 <layer id="99" name="4947495121708" type="Const" version="opset1">
599 <data element_type="f16" offset="98" shape="" size="2"/>
600 <output>
601 <port id="0" precision="FP16"/>
602 </output>
603 </layer>
604 <layer id="100" name="4484448821936" type="Const" version="opset1">
605 <data element_type="f16" offset="100" shape="" size="2"/>
606 <output>
607 <port id="0" precision="FP16"/>
608 </output>
609 </layer>
610 <layer id="101" name="4485448922644" type="Const" version="opset1">
611 <data element_type="f16" offset="102" shape="" size="2"/>
612 <output>
613 <port id="0" precision="FP16"/>
614 </output>
615 </layer>
616 <layer id="102" name="4486449019917" type="Const" version="opset1">
617 <data element_type="f16" offset="100" shape="" size="2"/>
618 <output>
619 <port id="0" precision="FP16"/>
620 </output>
621 </layer>
622 <layer id="103" name="4487449119587" type="Const" version="opset1">
623 <data element_type="f16" offset="102" shape="" size="2"/>
624 <output>
625 <port id="0" precision="FP16"/>
626 </output>
627 </layer>
628 <layer id="104" name="2744274822863" type="Const" version="opset1">
629 <data element_type="f16" offset="104" shape="" size="2"/>
630 <output>
631 <port id="0" precision="FP16"/>
632 </output>
633 </layer>
634 <layer id="105" name="2745274921102" type="Const" version="opset1">
635 <data element_type="f16" offset="106" shape="" size="2"/>
636 <output>
637 <port id="0" precision="FP16"/>
638 </output>
639 </layer>
640 <layer id="106" name="2746275020253" type="Const" version="opset1">
641 <data element_type="f16" offset="104" shape="" size="2"/>
642 <output>
643 <port id="0" precision="FP16"/>
644 </output>
645 </layer>
646 <layer id="107" name="2747275121492" type="Const" version="opset1">
647 <data element_type="f16" offset="106" shape="" size="2"/>
648 <output>
649 <port id="0" precision="FP16"/>
650 </output>
651 </layer>
652 <layer id="108" name="4444444819962" type="Const" version="opset1">
653 <data element_type="f16" offset="108" shape="" size="2"/>
654 <output>
655 <port id="0" precision="FP16"/>
656 </output>
657 </layer>
658 <layer id="109" name="4445444920199" type="Const" version="opset1">
659 <data element_type="f16" offset="110" shape="" size="2"/>
660 <output>
661 <port id="0" precision="FP16"/>
662 </output>
663 </layer>
664 <layer id="110" name="4446445022824" type="Const" version="opset1">
665 <data element_type="f16" offset="108" shape="" size="2"/>
666 <output>
667 <port id="0" precision="FP16"/>
668 </output>
669 </layer>
670 <layer id="111" name="4447445119440" type="Const" version="opset1">
671 <data element_type="f16" offset="110" shape="" size="2"/>
672 <output>
673 <port id="0" precision="FP16"/>
674 </output>
675 </layer>
676 <layer id="112" name="2904290821645" type="Const" version="opset1">
677 <data element_type="f16" offset="112" shape="" size="2"/>
678 <output>
679 <port id="0" precision="FP16"/>
680 </output>
681 </layer>
682 <layer id="113" name="2905290920598" type="Const" version="opset1">
683 <data element_type="f16" offset="114" shape="" size="2"/>
684 <output>
685 <port id="0" precision="FP16"/>
686 </output>
687 </layer>
688 <layer id="114" name="2906291022980" type="Const" version="opset1">
689 <data element_type="f16" offset="112" shape="" size="2"/>
690 <output>
691 <port id="0" precision="FP16"/>
692 </output>
693 </layer>
694 <layer id="115" name="2907291122245" type="Const" version="opset1">
695 <data element_type="f16" offset="114" shape="" size="2"/>
696 <output>
697 <port id="0" precision="FP16"/>
698 </output>
699 </layer>
700 <layer id="116" name="5044504820001" type="Const" version="opset1">
701 <data element_type="f16" offset="116" shape="" size="2"/>
702 <output>
703 <port id="0" precision="FP16"/>
704 </output>
705 </layer>
706 <layer id="117" name="5045504922119" type="Const" version="opset1">
707 <data element_type="f16" offset="118" shape="" size="2"/>
708 <output>
709 <port id="0" precision="FP16"/>
710 </output>
711 </layer>
712 <layer id="118" name="5046505019875" type="Const" version="opset1">
713 <data element_type="f16" offset="116" shape="" size="2"/>
714 <output>
715 <port id="0" precision="FP16"/>
716 </output>
717 </layer>
718 <layer id="119" name="5047505120280" type="Const" version="opset1">
719 <data element_type="f16" offset="118" shape="" size="2"/>
720 <output>
721 <port id="0" precision="FP16"/>
722 </output>
723 </layer>
724 <layer id="120" name="5024502822506" type="Const" version="opset1">
725 <data element_type="f16" offset="120" shape="" size="2"/>
726 <output>
727 <port id="0" precision="FP16"/>
728 </output>
729 </layer>
730 <layer id="121" name="5025502922287" type="Const" version="opset1">
731 <data element_type="f16" offset="122" shape="" size="2"/>
732 <output>
733 <port id="0" precision="FP16"/>
734 </output>
735 </layer>
736 <layer id="122" name="5026503020586" type="Const" version="opset1">
737 <data element_type="f16" offset="120" shape="" size="2"/>
738 <output>
739 <port id="0" precision="FP16"/>
740 </output>
741 </layer>
742 <layer id="123" name="5027503119461" type="Const" version="opset1">
743 <data element_type="f16" offset="122" shape="" size="2"/>
744 <output>
745 <port id="0" precision="FP16"/>
746 </output>
747 </layer>
748 <layer id="124" name="5404540821597" type="Const" version="opset1">
749 <data element_type="f16" offset="124" shape="" size="2"/>
750 <output>
751 <port id="0" precision="FP16"/>
752 </output>
753 </layer>
754 <layer id="125" name="5405540920799" type="Const" version="opset1">
755 <data element_type="f16" offset="126" shape="" size="2"/>
756 <output>
757 <port id="0" precision="FP16"/>
758 </output>
759 </layer>
760 <layer id="126" name="5406541022485" type="Const" version="opset1">
761 <data element_type="f16" offset="124" shape="" size="2"/>
762 <output>
763 <port id="0" precision="FP16"/>
764 </output>
765 </layer>
766 <layer id="127" name="5407541122536" type="Const" version="opset1">
767 <data element_type="f16" offset="126" shape="" size="2"/>
768 <output>
769 <port id="0" precision="FP16"/>
770 </output>
771 </layer>
772 <layer id="128" name="4964496821996" type="Const" version="opset1">
773 <data element_type="f16" offset="128" shape="" size="2"/>
774 <output>
775 <port id="0" precision="FP16"/>
776 </output>
777 </layer>
778 <layer id="129" name="4965496922401" type="Const" version="opset1">
779 <data element_type="f16" offset="130" shape="" size="2"/>
780 <output>
781 <port id="0" precision="FP16"/>
782 </output>
783 </layer>
784 <layer id="130" name="4966497020568" type="Const" version="opset1">
785 <data element_type="f16" offset="128" shape="" size="2"/>
786 <output>
787 <port id="0" precision="FP16"/>
788 </output>
789 </layer>
790 <layer id="131" name="4967497122734" type="Const" version="opset1">
791 <data element_type="f16" offset="130" shape="" size="2"/>
792 <output>
793 <port id="0" precision="FP16"/>
794 </output>
795 </layer>
796 <layer id="132" name="5304530820634" type="Const" version="opset1">
797 <data element_type="f16" offset="132" shape="" size="2"/>
798 <output>
799 <port id="0" precision="FP16"/>
800 </output>
801 </layer>
802 <layer id="133" name="5305530919428" type="Const" version="opset1">
803 <data element_type="f16" offset="134" shape="" size="2"/>
804 <output>
805 <port id="0" precision="FP16"/>
806 </output>
807 </layer>
808 <layer id="134" name="5306531021435" type="Const" version="opset1">
809 <data element_type="f16" offset="132" shape="" size="2"/>
810 <output>
811 <port id="0" precision="FP16"/>
812 </output>
813 </layer>
814 <layer id="135" name="5307531121369" type="Const" version="opset1">
815 <data element_type="f16" offset="134" shape="" size="2"/>
816 <output>
817 <port id="0" precision="FP16"/>
818 </output>
819 </layer>
820 <layer id="136" name="2724272819872" type="Const" version="opset1">
821 <data element_type="f16" offset="136" shape="" size="2"/>
822 <output>
823 <port id="0" precision="FP16"/>
824 </output>
825 </layer>
826 <layer id="137" name="2725272921912" type="Const" version="opset1">
827 <data element_type="f16" offset="138" shape="" size="2"/>
828 <output>
829 <port id="0" precision="FP16"/>
830 </output>
831 </layer>
832 <layer id="138" name="2726273022416" type="Const" version="opset1">
833 <data element_type="f16" offset="136" shape="" size="2"/>
834 <output>
835 <port id="0" precision="FP16"/>
836 </output>
837 </layer>
838 <layer id="139" name="2727273122437" type="Const" version="opset1">
839 <data element_type="f16" offset="138" shape="" size="2"/>
840 <output>
841 <port id="0" precision="FP16"/>
842 </output>
843 </layer>
844 <layer id="140" name="3324332819809" type="Const" version="opset1">
845 <data element_type="f16" offset="140" shape="" size="2"/>
846 <output>
847 <port id="0" precision="FP16"/>
848 </output>
849 </layer>
850 <layer id="141" name="3325332922389" type="Const" version="opset1">
851 <data element_type="f16" offset="142" shape="" size="2"/>
852 <output>
853 <port id="0" precision="FP16"/>
854 </output>
855 </layer>
856 <layer id="142" name="3326333020481" type="Const" version="opset1">
857 <data element_type="f16" offset="140" shape="" size="2"/>
858 <output>
859 <port id="0" precision="FP16"/>
860 </output>
861 </layer>
862 <layer id="143" name="3327333120748" type="Const" version="opset1">
863 <data element_type="f16" offset="142" shape="" size="2"/>
864 <output>
865 <port id="0" precision="FP16"/>
866 </output>
867 </layer>
868 <layer id="144" name="4004400822971" type="Const" version="opset1">
869 <data element_type="f16" offset="144" shape="" size="2"/>
870 <output>
871 <port id="0" precision="FP16"/>
872 </output>
873 </layer>
874 <layer id="145" name="4005400921327" type="Const" version="opset1">
875 <data element_type="f16" offset="146" shape="" size="2"/>
876 <output>
877 <port id="0" precision="FP16"/>
878 </output>
879 </layer>
880 <layer id="146" name="4006401020685" type="Const" version="opset1">
881 <data element_type="f16" offset="144" shape="" size="2"/>
882 <output>
883 <port id="0" precision="FP16"/>
884 </output>
885 </layer>
886 <layer id="147" name="4007401120556" type="Const" version="opset1">
887 <data element_type="f16" offset="146" shape="" size="2"/>
888 <output>
889 <port id="0" precision="FP16"/>
890 </output>
891 </layer>
892 <layer id="148" name="4764476820811" type="Const" version="opset1">
893 <data element_type="f16" offset="148" shape="" size="2"/>
894 <output>
895 <port id="0" precision="FP16"/>
896 </output>
897 </layer>
898 <layer id="149" name="4765476921228" type="Const" version="opset1">
899 <data element_type="f16" offset="150" shape="" size="2"/>
900 <output>
901 <port id="0" precision="FP16"/>
902 </output>
903 </layer>
904 <layer id="150" name="4766477020508" type="Const" version="opset1">
905 <data element_type="f16" offset="148" shape="" size="2"/>
906 <output>
907 <port id="0" precision="FP16"/>
908 </output>
909 </layer>
910 <layer id="151" name="4767477120679" type="Const" version="opset1">
911 <data element_type="f16" offset="150" shape="" size="2"/>
912 <output>
913 <port id="0" precision="FP16"/>
914 </output>
915 </layer>
916 <layer id="152" name="2824282819434" type="Const" version="opset1">
917 <data element_type="f16" offset="152" shape="" size="2"/>
918 <output>
919 <port id="0" precision="FP16"/>
920 </output>
921 </layer>
922 <layer id="153" name="2825282922851" type="Const" version="opset1">
923 <data element_type="f16" offset="154" shape="" size="2"/>
924 <output>
925 <port id="0" precision="FP16"/>
926 </output>
927 </layer>
928 <layer id="154" name="2826283021987" type="Const" version="opset1">
929 <data element_type="f16" offset="152" shape="" size="2"/>
930 <output>
931 <port id="0" precision="FP16"/>
932 </output>
933 </layer>
934 <layer id="155" name="2827283122194" type="Const" version="opset1">
935 <data element_type="f16" offset="154" shape="" size="2"/>
936 <output>
937 <port id="0" precision="FP16"/>
938 </output>
939 </layer>
940 <layer id="156" name="4064406822764" type="Const" version="opset1">
941 <data element_type="f16" offset="156" shape="" size="2"/>
942 <output>
943 <port id="0" precision="FP16"/>
944 </output>
945 </layer>
946 <layer id="157" name="4065406921414" type="Const" version="opset1">
947 <data element_type="f16" offset="158" shape="" size="2"/>
948 <output>
949 <port id="0" precision="FP16"/>
950 </output>
951 </layer>
952 <layer id="158" name="4066407019647" type="Const" version="opset1">
953 <data element_type="f16" offset="156" shape="" size="2"/>
954 <output>
955 <port id="0" precision="FP16"/>
956 </output>
957 </layer>
958 <layer id="159" name="4067407121186" type="Const" version="opset1">
959 <data element_type="f16" offset="158" shape="" size="2"/>
960 <output>
961 <port id="0" precision="FP16"/>
962 </output>
963 </layer>
964 <layer id="160" name="2984298821153" type="Const" version="opset1">
965 <data element_type="f16" offset="160" shape="" size="2"/>
966 <output>
967 <port id="0" precision="FP16"/>
968 </output>
969 </layer>
970 <layer id="161" name="2985298920082" type="Const" version="opset1">
971 <data element_type="f16" offset="162" shape="" size="2"/>
972 <output>
973 <port id="0" precision="FP16"/>
974 </output>
975 </layer>
976 <layer id="162" name="2986299020013" type="Const" version="opset1">
977 <data element_type="f16" offset="160" shape="" size="2"/>
978 <output>
979 <port id="0" precision="FP16"/>
980 </output>
981 </layer>
982 <layer id="163" name="2987299122623" type="Const" version="opset1">
983 <data element_type="f16" offset="162" shape="" size="2"/>
984 <output>
985 <port id="0" precision="FP16"/>
986 </output>
987 </layer>
988 <layer id="164" name="data" type="Parameter" version="opset1">
989 <data element_type="f16" shape="1,3,320,544"/>
990 <output>
991 <port id="0" names="data" precision="FP16">
992 <dim>1</dim>
993 <dim>3</dim>
994 <dim>320</dim>
995 <dim>544</dim>
996 </port>
997 </output>
998 </layer>
999 <layer id="165" name="data_mul_2364474720739" type="Const" version="opset1">
1000 <data element_type="f16" offset="164" shape="1,3,1,1" size="6"/>
1001 <output>
1002 <port id="0" precision="FP16">
1003 <dim>1</dim>
1004 <dim>3</dim>
1005 <dim>1</dim>
1006 <dim>1</dim>
1007 </port>
1008 </output>
1009 </layer>
1010 <layer id="166" name="data/norm/bn/mean/Fused_Mul_" type="Multiply" version="opset1">
1011 <data auto_broadcast="numpy"/>
1012 <input>
1013 <port id="0">
1014 <dim>1</dim>
1015 <dim>3</dim>
1016 <dim>320</dim>
1017 <dim>544</dim>
1018 </port>
1019 <port id="1">
1020 <dim>1</dim>
1021 <dim>3</dim>
1022 <dim>1</dim>
1023 <dim>1</dim>
1024 </port>
1025 </input>
1026 <output>
1027 <port id="2" precision="FP16">
1028 <dim>1</dim>
1029 <dim>3</dim>
1030 <dim>320</dim>
1031 <dim>544</dim>
1032 </port>
1033 </output>
1034 </layer>
1035 <layer id="167" name="data_add_2364674920346" type="Const" version="opset1">
1036 <data element_type="f16" offset="170" shape="1,3,1,1" size="6"/>
1037 <output>
1038 <port id="0" precision="FP16">
1039 <dim>1</dim>
1040 <dim>3</dim>
1041 <dim>1</dim>
1042 <dim>1</dim>
1043 </port>
1044 </output>
1045 </layer>
1046 <layer id="168" name="data/norm/bn/variance/Fused_Add_" type="Add" version="opset1">
1047 <data auto_broadcast="numpy"/>
1048 <input>
1049 <port id="0">
1050 <dim>1</dim>
1051 <dim>3</dim>
1052 <dim>320</dim>
1053 <dim>544</dim>
1054 </port>
1055 <port id="1">
1056 <dim>1</dim>
1057 <dim>3</dim>
1058 <dim>1</dim>
1059 <dim>1</dim>
1060 </port>
1061 </input>
1062 <output>
1063 <port id="2" names="data/norm/bn" precision="FP16">
1064 <dim>1</dim>
1065 <dim>3</dim>
1066 <dim>320</dim>
1067 <dim>544</dim>
1068 </port>
1069 </output>
1070 </layer>
1071 <layer id="169" name="init_block1/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
1072 <data auto_broadcast="numpy" levels="256"/>
1073 <input>
1074 <port id="0">
1075 <dim>1</dim>
1076 <dim>3</dim>
1077 <dim>320</dim>
1078 <dim>544</dim>
1079 </port>
1080 <port id="1"/>
1081 <port id="2"/>
1082 <port id="3"/>
1083 <port id="4"/>
1084 </input>
1085 <output>
1086 <port id="5" precision="FP16">
1087 <dim>1</dim>
1088 <dim>3</dim>
1089 <dim>320</dim>
1090 <dim>544</dim>
1091 </port>
1092 </output>
1093 </layer>
1094 <layer id="170" name="init_block1/dim_inc/bn/mean/Fused_Mul__copy75110080/quantized1156020379" type="Const" version="opset1">
1095 <data element_type="i8" offset="176" shape="32,3,3,3" size="864"/>
1096 <output>
1097 <port id="0" precision="I8">
1098 <dim>32</dim>
1099 <dim>3</dim>
1100 <dim>3</dim>
1101 <dim>3</dim>
1102 </port>
1103 </output>
1104 </layer>
1105 <layer id="171" name="init_block1/dim_inc/bn/mean/Fused_Mul__copy75110080/quantized/to_f16" type="Convert" version="opset1">
1106 <data destination_type="f16"/>
1107 <input>
1108 <port id="0">
1109 <dim>32</dim>
1110 <dim>3</dim>
1111 <dim>3</dim>
1112 <dim>3</dim>
1113 </port>
1114 </input>
1115 <output>
1116 <port id="1" precision="FP16">
1117 <dim>32</dim>
1118 <dim>3</dim>
1119 <dim>3</dim>
1120 <dim>3</dim>
1121 </port>
1122 </output>
1123 </layer>
1124 <layer id="172" name="init_block1/dim_inc/conv/fq_weights_1/zero_point1157320874" type="Const" version="opset1">
1125 <data element_type="f16" offset="1040" shape="32,1,1,1" size="64"/>
1126 <output>
1127 <port id="0" precision="FP16">
1128 <dim>32</dim>
1129 <dim>1</dim>
1130 <dim>1</dim>
1131 <dim>1</dim>
1132 </port>
1133 </output>
1134 </layer>
1135 <layer id="173" name="init_block1/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
1136 <data auto_broadcast="numpy"/>
1137 <input>
1138 <port id="0">
1139 <dim>32</dim>
1140 <dim>3</dim>
1141 <dim>3</dim>
1142 <dim>3</dim>
1143 </port>
1144 <port id="1">
1145 <dim>32</dim>
1146 <dim>1</dim>
1147 <dim>1</dim>
1148 <dim>1</dim>
1149 </port>
1150 </input>
1151 <output>
1152 <port id="2" precision="FP16">
1153 <dim>32</dim>
1154 <dim>3</dim>
1155 <dim>3</dim>
1156 <dim>3</dim>
1157 </port>
1158 </output>
1159 </layer>
1160 <layer id="174" name="init_block1/dim_inc/conv/fq_weights_1/scale1156821582" type="Const" version="opset1">
1161 <data element_type="f16" offset="1104" shape="32,1,1,1" size="64"/>
1162 <output>
1163 <port id="0" precision="FP16">
1164 <dim>32</dim>
1165 <dim>1</dim>
1166 <dim>1</dim>
1167 <dim>1</dim>
1168 </port>
1169 </output>
1170 </layer>
1171 <layer id="175" name="init_block1/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
1172 <data auto_broadcast="numpy"/>
1173 <input>
1174 <port id="0">
1175 <dim>32</dim>
1176 <dim>3</dim>
1177 <dim>3</dim>
1178 <dim>3</dim>
1179 </port>
1180 <port id="1">
1181 <dim>32</dim>
1182 <dim>1</dim>
1183 <dim>1</dim>
1184 <dim>1</dim>
1185 </port>
1186 </input>
1187 <output>
1188 <port id="2" precision="FP16">
1189 <dim>32</dim>
1190 <dim>3</dim>
1191 <dim>3</dim>
1192 <dim>3</dim>
1193 </port>
1194 </output>
1195 </layer>
1196 <layer id="176" name="init_block1/dim_inc/conv" type="Convolution" version="opset1">
1197 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="2,2"/>
1198 <input>
1199 <port id="0">
1200 <dim>1</dim>
1201 <dim>3</dim>
1202 <dim>320</dim>
1203 <dim>544</dim>
1204 </port>
1205 <port id="1">
1206 <dim>32</dim>
1207 <dim>3</dim>
1208 <dim>3</dim>
1209 <dim>3</dim>
1210 </port>
1211 </input>
1212 <output>
1213 <port id="2" precision="FP16">
1214 <dim>1</dim>
1215 <dim>32</dim>
1216 <dim>160</dim>
1217 <dim>272</dim>
1218 </port>
1219 </output>
1220 </layer>
1221 <layer id="177" name="data_add_236492365475319752" type="Const" version="opset1">
1222 <data element_type="f16" offset="1168" shape="1,32,1,1" size="64"/>
1223 <output>
1224 <port id="0" precision="FP16">
1225 <dim>1</dim>
1226 <dim>32</dim>
1227 <dim>1</dim>
1228 <dim>1</dim>
1229 </port>
1230 </output>
1231 </layer>
1232 <layer id="178" name="init_block1/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
1233 <data auto_broadcast="numpy"/>
1234 <input>
1235 <port id="0">
1236 <dim>1</dim>
1237 <dim>32</dim>
1238 <dim>160</dim>
1239 <dim>272</dim>
1240 </port>
1241 <port id="1">
1242 <dim>1</dim>
1243 <dim>32</dim>
1244 <dim>1</dim>
1245 <dim>1</dim>
1246 </port>
1247 </input>
1248 <output>
1249 <port id="2" names="init_block1/dim_inc/conv" precision="FP16">
1250 <dim>1</dim>
1251 <dim>32</dim>
1252 <dim>160</dim>
1253 <dim>272</dim>
1254 </port>
1255 </output>
1256 </layer>
1257 <layer id="179" name="init_block1/dim_inc/fn" type="ReLU" version="opset1">
1258 <input>
1259 <port id="0">
1260 <dim>1</dim>
1261 <dim>32</dim>
1262 <dim>160</dim>
1263 <dim>272</dim>
1264 </port>
1265 </input>
1266 <output>
1267 <port id="1" names="init_block1/dim_inc/conv" precision="FP16">
1268 <dim>1</dim>
1269 <dim>32</dim>
1270 <dim>160</dim>
1271 <dim>272</dim>
1272 </port>
1273 </output>
1274 </layer>
1275 <layer id="180" name="bottleneck1_1/add/fq_input_0" type="FakeQuantize" version="opset1">
1276 <data auto_broadcast="numpy" levels="256"/>
1277 <input>
1278 <port id="0">
1279 <dim>1</dim>
1280 <dim>32</dim>
1281 <dim>160</dim>
1282 <dim>272</dim>
1283 </port>
1284 <port id="1"/>
1285 <port id="2"/>
1286 <port id="3"/>
1287 <port id="4"/>
1288 </input>
1289 <output>
1290 <port id="5" precision="FP16">
1291 <dim>1</dim>
1292 <dim>32</dim>
1293 <dim>160</dim>
1294 <dim>272</dim>
1295 </port>
1296 </output>
1297 </layer>
1298 <layer id="181" name="4074407821579" type="Const" version="opset1">
1299 <data element_type="f16" offset="1232" shape="" size="2"/>
1300 <output>
1301 <port id="0" precision="FP16"/>
1302 </output>
1303 </layer>
1304 <layer id="182" name="4075407920544" type="Const" version="opset1">
1305 <data element_type="f16" offset="1234" shape="" size="2"/>
1306 <output>
1307 <port id="0" precision="FP16"/>
1308 </output>
1309 </layer>
1310 <layer id="183" name="4076408020457" type="Const" version="opset1">
1311 <data element_type="f16" offset="1232" shape="" size="2"/>
1312 <output>
1313 <port id="0" precision="FP16"/>
1314 </output>
1315 </layer>
1316 <layer id="184" name="4077408119959" type="Const" version="opset1">
1317 <data element_type="f16" offset="1234" shape="" size="2"/>
1318 <output>
1319 <port id="0" precision="FP16"/>
1320 </output>
1321 </layer>
1322 <layer id="185" name="5484548819965" type="Const" version="opset1">
1323 <data element_type="f16" offset="1236" shape="" size="2"/>
1324 <output>
1325 <port id="0" precision="FP16"/>
1326 </output>
1327 </layer>
1328 <layer id="186" name="5485548920790" type="Const" version="opset1">
1329 <data element_type="f16" offset="1238" shape="" size="2"/>
1330 <output>
1331 <port id="0" precision="FP16"/>
1332 </output>
1333 </layer>
1334 <layer id="187" name="5486549020364" type="Const" version="opset1">
1335 <data element_type="f16" offset="1236" shape="" size="2"/>
1336 <output>
1337 <port id="0" precision="FP16"/>
1338 </output>
1339 </layer>
1340 <layer id="188" name="5487549119788" type="Const" version="opset1">
1341 <data element_type="f16" offset="1238" shape="" size="2"/>
1342 <output>
1343 <port id="0" precision="FP16"/>
1344 </output>
1345 </layer>
1346 <layer id="189" name="3844384820577" type="Const" version="opset1">
1347 <data element_type="f16" offset="1240" shape="1,8,1,1" size="16"/>
1348 <output>
1349 <port id="0" precision="FP16">
1350 <dim>1</dim>
1351 <dim>8</dim>
1352 <dim>1</dim>
1353 <dim>1</dim>
1354 </port>
1355 </output>
1356 </layer>
1357 <layer id="190" name="3845384920994" type="Const" version="opset1">
1358 <data element_type="f16" offset="1256" shape="1,8,1,1" size="16"/>
1359 <output>
1360 <port id="0" precision="FP16">
1361 <dim>1</dim>
1362 <dim>8</dim>
1363 <dim>1</dim>
1364 <dim>1</dim>
1365 </port>
1366 </output>
1367 </layer>
1368 <layer id="191" name="3846385019611" type="Const" version="opset1">
1369 <data element_type="f16" offset="1240" shape="1,8,1,1" size="16"/>
1370 <output>
1371 <port id="0" precision="FP16">
1372 <dim>1</dim>
1373 <dim>8</dim>
1374 <dim>1</dim>
1375 <dim>1</dim>
1376 </port>
1377 </output>
1378 </layer>
1379 <layer id="192" name="3847385119518" type="Const" version="opset1">
1380 <data element_type="f16" offset="1256" shape="1,8,1,1" size="16"/>
1381 <output>
1382 <port id="0" precision="FP16">
1383 <dim>1</dim>
1384 <dim>8</dim>
1385 <dim>1</dim>
1386 <dim>1</dim>
1387 </port>
1388 </output>
1389 </layer>
1390 <layer id="193" name="bottleneck1_1/dim_red/bn/mean/Fused_Mul__copy75610082/quantized1376820805" type="Const" version="opset1">
1391 <data element_type="i8" offset="1272" shape="8,32,1,1" size="256"/>
1392 <output>
1393 <port id="0" precision="I8">
1394 <dim>8</dim>
1395 <dim>32</dim>
1396 <dim>1</dim>
1397 <dim>1</dim>
1398 </port>
1399 </output>
1400 </layer>
1401 <layer id="194" name="bottleneck1_1/dim_red/bn/mean/Fused_Mul__copy75610082/quantized/to_f16" type="Convert" version="opset1">
1402 <data destination_type="f16"/>
1403 <input>
1404 <port id="0">
1405 <dim>8</dim>
1406 <dim>32</dim>
1407 <dim>1</dim>
1408 <dim>1</dim>
1409 </port>
1410 </input>
1411 <output>
1412 <port id="1" precision="FP16">
1413 <dim>8</dim>
1414 <dim>32</dim>
1415 <dim>1</dim>
1416 <dim>1</dim>
1417 </port>
1418 </output>
1419 </layer>
1420 <layer id="195" name="bottleneck1_1/dim_red/conv/fq_weights_1/zero_point1378120046" type="Const" version="opset1">
1421 <data element_type="f16" offset="1528" shape="8,1,1,1" size="16"/>
1422 <output>
1423 <port id="0" precision="FP16">
1424 <dim>8</dim>
1425 <dim>1</dim>
1426 <dim>1</dim>
1427 <dim>1</dim>
1428 </port>
1429 </output>
1430 </layer>
1431 <layer id="196" name="bottleneck1_1/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
1432 <data auto_broadcast="numpy"/>
1433 <input>
1434 <port id="0">
1435 <dim>8</dim>
1436 <dim>32</dim>
1437 <dim>1</dim>
1438 <dim>1</dim>
1439 </port>
1440 <port id="1">
1441 <dim>8</dim>
1442 <dim>1</dim>
1443 <dim>1</dim>
1444 <dim>1</dim>
1445 </port>
1446 </input>
1447 <output>
1448 <port id="2" precision="FP16">
1449 <dim>8</dim>
1450 <dim>32</dim>
1451 <dim>1</dim>
1452 <dim>1</dim>
1453 </port>
1454 </output>
1455 </layer>
1456 <layer id="197" name="bottleneck1_1/dim_red/conv/fq_weights_1/scale1377620769" type="Const" version="opset1">
1457 <data element_type="f16" offset="1544" shape="8,1,1,1" size="16"/>
1458 <output>
1459 <port id="0" precision="FP16">
1460 <dim>8</dim>
1461 <dim>1</dim>
1462 <dim>1</dim>
1463 <dim>1</dim>
1464 </port>
1465 </output>
1466 </layer>
1467 <layer id="198" name="bottleneck1_1/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
1468 <data auto_broadcast="numpy"/>
1469 <input>
1470 <port id="0">
1471 <dim>8</dim>
1472 <dim>32</dim>
1473 <dim>1</dim>
1474 <dim>1</dim>
1475 </port>
1476 <port id="1">
1477 <dim>8</dim>
1478 <dim>1</dim>
1479 <dim>1</dim>
1480 <dim>1</dim>
1481 </port>
1482 </input>
1483 <output>
1484 <port id="2" precision="FP16">
1485 <dim>8</dim>
1486 <dim>32</dim>
1487 <dim>1</dim>
1488 <dim>1</dim>
1489 </port>
1490 </output>
1491 </layer>
1492 <layer id="199" name="bottleneck1_1/dim_red/conv" type="Convolution" version="opset1">
1493 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
1494 <input>
1495 <port id="0">
1496 <dim>1</dim>
1497 <dim>32</dim>
1498 <dim>160</dim>
1499 <dim>272</dim>
1500 </port>
1501 <port id="1">
1502 <dim>8</dim>
1503 <dim>32</dim>
1504 <dim>1</dim>
1505 <dim>1</dim>
1506 </port>
1507 </input>
1508 <output>
1509 <port id="2" precision="FP16">
1510 <dim>1</dim>
1511 <dim>8</dim>
1512 <dim>160</dim>
1513 <dim>272</dim>
1514 </port>
1515 </output>
1516 </layer>
1517 <layer id="200" name="data_add_236572366275820892" type="Const" version="opset1">
1518 <data element_type="f16" offset="1560" shape="1,8,1,1" size="16"/>
1519 <output>
1520 <port id="0" precision="FP16">
1521 <dim>1</dim>
1522 <dim>8</dim>
1523 <dim>1</dim>
1524 <dim>1</dim>
1525 </port>
1526 </output>
1527 </layer>
1528 <layer id="201" name="bottleneck1_1/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
1529 <data auto_broadcast="numpy"/>
1530 <input>
1531 <port id="0">
1532 <dim>1</dim>
1533 <dim>8</dim>
1534 <dim>160</dim>
1535 <dim>272</dim>
1536 </port>
1537 <port id="1">
1538 <dim>1</dim>
1539 <dim>8</dim>
1540 <dim>1</dim>
1541 <dim>1</dim>
1542 </port>
1543 </input>
1544 <output>
1545 <port id="2" names="bottleneck1_1/dim_red/conv" precision="FP16">
1546 <dim>1</dim>
1547 <dim>8</dim>
1548 <dim>160</dim>
1549 <dim>272</dim>
1550 </port>
1551 </output>
1552 </layer>
1553 <layer id="202" name="bottleneck1_1/dim_red/fn/weights3078040028760" type="Const" version="opset1">
1554 <data element_type="f32" offset="1576" shape="1" size="4"/>
1555 <output>
1556 <port id="0" precision="FP32">
1557 <dim>1</dim>
1558 </port>
1559 </output>
1560 </layer>
1561 <layer id="203" name="bottleneck1_1/dim_red/fn" type="PReLU" version="opset1">
1562 <input>
1563 <port id="0">
1564 <dim>1</dim>
1565 <dim>8</dim>
1566 <dim>160</dim>
1567 <dim>272</dim>
1568 </port>
1569 <port id="1">
1570 <dim>1</dim>
1571 </port>
1572 </input>
1573 <output>
1574 <port id="2" names="bottleneck1_1/dim_red/conv" precision="FP16">
1575 <dim>1</dim>
1576 <dim>8</dim>
1577 <dim>160</dim>
1578 <dim>272</dim>
1579 </port>
1580 </output>
1581 </layer>
1582 <layer id="204" name="bottleneck1_1/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
1583 <data auto_broadcast="numpy" levels="256"/>
1584 <input>
1585 <port id="0">
1586 <dim>1</dim>
1587 <dim>8</dim>
1588 <dim>160</dim>
1589 <dim>272</dim>
1590 </port>
1591 <port id="1">
1592 <dim>1</dim>
1593 <dim>8</dim>
1594 <dim>1</dim>
1595 <dim>1</dim>
1596 </port>
1597 <port id="2">
1598 <dim>1</dim>
1599 <dim>8</dim>
1600 <dim>1</dim>
1601 <dim>1</dim>
1602 </port>
1603 <port id="3">
1604 <dim>1</dim>
1605 <dim>8</dim>
1606 <dim>1</dim>
1607 <dim>1</dim>
1608 </port>
1609 <port id="4">
1610 <dim>1</dim>
1611 <dim>8</dim>
1612 <dim>1</dim>
1613 <dim>1</dim>
1614 </port>
1615 </input>
1616 <output>
1617 <port id="5" precision="FP16">
1618 <dim>1</dim>
1619 <dim>8</dim>
1620 <dim>160</dim>
1621 <dim>272</dim>
1622 </port>
1623 </output>
1624 </layer>
1625 <layer id="205" name="16855/value1685720643" type="Const" version="opset1">
1626 <data element_type="i64" offset="1580" shape="5" size="40"/>
1627 <output>
1628 <port id="0" precision="I64">
1629 <dim>5</dim>
1630 </port>
1631 </output>
1632 </layer>
1633 <layer id="206" name="bottleneck1_1/inner/dw1/bn/mean/Fused_Mul__copy76210085/quantized1374422218" type="Const" version="opset1">
1634 <data element_type="i8" offset="1620" shape="8,1,3,3" size="72"/>
1635 <output>
1636 <port id="0" precision="I8">
1637 <dim>8</dim>
1638 <dim>1</dim>
1639 <dim>3</dim>
1640 <dim>3</dim>
1641 </port>
1642 </output>
1643 </layer>
1644 <layer id="207" name="bottleneck1_1/inner/dw1/bn/mean/Fused_Mul__copy76210085/quantized/to_f16" type="Convert" version="opset1">
1645 <data destination_type="f16"/>
1646 <input>
1647 <port id="0">
1648 <dim>8</dim>
1649 <dim>1</dim>
1650 <dim>3</dim>
1651 <dim>3</dim>
1652 </port>
1653 </input>
1654 <output>
1655 <port id="1" precision="FP16">
1656 <dim>8</dim>
1657 <dim>1</dim>
1658 <dim>3</dim>
1659 <dim>3</dim>
1660 </port>
1661 </output>
1662 </layer>
1663 <layer id="208" name="bottleneck1_1/inner/dw1/conv/fq_weights_1/zero_point1375719575" type="Const" version="opset1">
1664 <data element_type="f16" offset="1692" shape="8,1,1,1" size="16"/>
1665 <output>
1666 <port id="0" precision="FP16">
1667 <dim>8</dim>
1668 <dim>1</dim>
1669 <dim>1</dim>
1670 <dim>1</dim>
1671 </port>
1672 </output>
1673 </layer>
1674 <layer id="209" name="bottleneck1_1/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
1675 <data auto_broadcast="numpy"/>
1676 <input>
1677 <port id="0">
1678 <dim>8</dim>
1679 <dim>1</dim>
1680 <dim>3</dim>
1681 <dim>3</dim>
1682 </port>
1683 <port id="1">
1684 <dim>8</dim>
1685 <dim>1</dim>
1686 <dim>1</dim>
1687 <dim>1</dim>
1688 </port>
1689 </input>
1690 <output>
1691 <port id="2" precision="FP16">
1692 <dim>8</dim>
1693 <dim>1</dim>
1694 <dim>3</dim>
1695 <dim>3</dim>
1696 </port>
1697 </output>
1698 </layer>
1699 <layer id="210" name="bottleneck1_1/inner/dw1/conv/fq_weights_1/scale1375220715" type="Const" version="opset1">
1700 <data element_type="f16" offset="1708" shape="8,1,1,1" size="16"/>
1701 <output>
1702 <port id="0" precision="FP16">
1703 <dim>8</dim>
1704 <dim>1</dim>
1705 <dim>1</dim>
1706 <dim>1</dim>
1707 </port>
1708 </output>
1709 </layer>
1710 <layer id="211" name="bottleneck1_1/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
1711 <data auto_broadcast="numpy"/>
1712 <input>
1713 <port id="0">
1714 <dim>8</dim>
1715 <dim>1</dim>
1716 <dim>3</dim>
1717 <dim>3</dim>
1718 </port>
1719 <port id="1">
1720 <dim>8</dim>
1721 <dim>1</dim>
1722 <dim>1</dim>
1723 <dim>1</dim>
1724 </port>
1725 </input>
1726 <output>
1727 <port id="2" precision="FP16">
1728 <dim>8</dim>
1729 <dim>1</dim>
1730 <dim>3</dim>
1731 <dim>3</dim>
1732 </port>
1733 </output>
1734 </layer>
1735 <layer id="212" name="16855" type="Reshape" version="opset1">
1736 <data special_zero="true"/>
1737 <input>
1738 <port id="0">
1739 <dim>8</dim>
1740 <dim>1</dim>
1741 <dim>3</dim>
1742 <dim>3</dim>
1743 </port>
1744 <port id="1">
1745 <dim>5</dim>
1746 </port>
1747 </input>
1748 <output>
1749 <port id="2" precision="FP16">
1750 <dim>8</dim>
1751 <dim>1</dim>
1752 <dim>1</dim>
1753 <dim>3</dim>
1754 <dim>3</dim>
1755 </port>
1756 </output>
1757 </layer>
1758 <layer id="213" name="bottleneck1_1/inner/dw1/conv" type="GroupConvolution" version="opset1">
1759 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
1760 <input>
1761 <port id="0">
1762 <dim>1</dim>
1763 <dim>8</dim>
1764 <dim>160</dim>
1765 <dim>272</dim>
1766 </port>
1767 <port id="1">
1768 <dim>8</dim>
1769 <dim>1</dim>
1770 <dim>1</dim>
1771 <dim>3</dim>
1772 <dim>3</dim>
1773 </port>
1774 </input>
1775 <output>
1776 <port id="2" precision="FP16">
1777 <dim>1</dim>
1778 <dim>8</dim>
1779 <dim>160</dim>
1780 <dim>272</dim>
1781 </port>
1782 </output>
1783 </layer>
1784 <layer id="214" name="data_add_236652367076419749" type="Const" version="opset1">
1785 <data element_type="f16" offset="1724" shape="1,8,1,1" size="16"/>
1786 <output>
1787 <port id="0" precision="FP16">
1788 <dim>1</dim>
1789 <dim>8</dim>
1790 <dim>1</dim>
1791 <dim>1</dim>
1792 </port>
1793 </output>
1794 </layer>
1795 <layer id="215" name="bottleneck1_1/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
1796 <data auto_broadcast="numpy"/>
1797 <input>
1798 <port id="0">
1799 <dim>1</dim>
1800 <dim>8</dim>
1801 <dim>160</dim>
1802 <dim>272</dim>
1803 </port>
1804 <port id="1">
1805 <dim>1</dim>
1806 <dim>8</dim>
1807 <dim>1</dim>
1808 <dim>1</dim>
1809 </port>
1810 </input>
1811 <output>
1812 <port id="2" names="bottleneck1_1/inner/dw1/conv" precision="FP16">
1813 <dim>1</dim>
1814 <dim>8</dim>
1815 <dim>160</dim>
1816 <dim>272</dim>
1817 </port>
1818 </output>
1819 </layer>
1820 <layer id="216" name="bottleneck1_1/inner/dw1/fn/weights3100440070766" type="Const" version="opset1">
1821 <data element_type="f32" offset="1576" shape="1" size="4"/>
1822 <output>
1823 <port id="0" precision="FP32">
1824 <dim>1</dim>
1825 </port>
1826 </output>
1827 </layer>
1828 <layer id="217" name="bottleneck1_1/inner/dw1/fn" type="PReLU" version="opset1">
1829 <input>
1830 <port id="0">
1831 <dim>1</dim>
1832 <dim>8</dim>
1833 <dim>160</dim>
1834 <dim>272</dim>
1835 </port>
1836 <port id="1">
1837 <dim>1</dim>
1838 </port>
1839 </input>
1840 <output>
1841 <port id="2" names="bottleneck1_1/inner/dw1/conv" precision="FP16">
1842 <dim>1</dim>
1843 <dim>8</dim>
1844 <dim>160</dim>
1845 <dim>272</dim>
1846 </port>
1847 </output>
1848 </layer>
1849 <layer id="218" name="bottleneck1_1/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
1850 <data auto_broadcast="numpy" levels="256"/>
1851 <input>
1852 <port id="0">
1853 <dim>1</dim>
1854 <dim>8</dim>
1855 <dim>160</dim>
1856 <dim>272</dim>
1857 </port>
1858 <port id="1"/>
1859 <port id="2"/>
1860 <port id="3"/>
1861 <port id="4"/>
1862 </input>
1863 <output>
1864 <port id="5" precision="FP16">
1865 <dim>1</dim>
1866 <dim>8</dim>
1867 <dim>160</dim>
1868 <dim>272</dim>
1869 </port>
1870 </output>
1871 </layer>
1872 <layer id="219" name="bottleneck1_1/dim_inc/bn/mean/Fused_Mul__copy76810088/quantized1280822266" type="Const" version="opset1">
1873 <data element_type="i8" offset="1740" shape="32,8,1,1" size="256"/>
1874 <output>
1875 <port id="0" precision="I8">
1876 <dim>32</dim>
1877 <dim>8</dim>
1878 <dim>1</dim>
1879 <dim>1</dim>
1880 </port>
1881 </output>
1882 </layer>
1883 <layer id="220" name="bottleneck1_1/dim_inc/bn/mean/Fused_Mul__copy76810088/quantized/to_f16" type="Convert" version="opset1">
1884 <data destination_type="f16"/>
1885 <input>
1886 <port id="0">
1887 <dim>32</dim>
1888 <dim>8</dim>
1889 <dim>1</dim>
1890 <dim>1</dim>
1891 </port>
1892 </input>
1893 <output>
1894 <port id="1" precision="FP16">
1895 <dim>32</dim>
1896 <dim>8</dim>
1897 <dim>1</dim>
1898 <dim>1</dim>
1899 </port>
1900 </output>
1901 </layer>
1902 <layer id="221" name="bottleneck1_1/dim_inc/conv/fq_weights_1/zero_point1282122905" type="Const" version="opset1">
1903 <data element_type="f16" offset="1996" shape="32,1,1,1" size="64"/>
1904 <output>
1905 <port id="0" precision="FP16">
1906 <dim>32</dim>
1907 <dim>1</dim>
1908 <dim>1</dim>
1909 <dim>1</dim>
1910 </port>
1911 </output>
1912 </layer>
1913 <layer id="222" name="bottleneck1_1/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
1914 <data auto_broadcast="numpy"/>
1915 <input>
1916 <port id="0">
1917 <dim>32</dim>
1918 <dim>8</dim>
1919 <dim>1</dim>
1920 <dim>1</dim>
1921 </port>
1922 <port id="1">
1923 <dim>32</dim>
1924 <dim>1</dim>
1925 <dim>1</dim>
1926 <dim>1</dim>
1927 </port>
1928 </input>
1929 <output>
1930 <port id="2" precision="FP16">
1931 <dim>32</dim>
1932 <dim>8</dim>
1933 <dim>1</dim>
1934 <dim>1</dim>
1935 </port>
1936 </output>
1937 </layer>
1938 <layer id="223" name="bottleneck1_1/dim_inc/conv/fq_weights_1/scale1281622482" type="Const" version="opset1">
1939 <data element_type="f16" offset="2060" shape="32,1,1,1" size="64"/>
1940 <output>
1941 <port id="0" precision="FP16">
1942 <dim>32</dim>
1943 <dim>1</dim>
1944 <dim>1</dim>
1945 <dim>1</dim>
1946 </port>
1947 </output>
1948 </layer>
1949 <layer id="224" name="bottleneck1_1/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
1950 <data auto_broadcast="numpy"/>
1951 <input>
1952 <port id="0">
1953 <dim>32</dim>
1954 <dim>8</dim>
1955 <dim>1</dim>
1956 <dim>1</dim>
1957 </port>
1958 <port id="1">
1959 <dim>32</dim>
1960 <dim>1</dim>
1961 <dim>1</dim>
1962 <dim>1</dim>
1963 </port>
1964 </input>
1965 <output>
1966 <port id="2" precision="FP16">
1967 <dim>32</dim>
1968 <dim>8</dim>
1969 <dim>1</dim>
1970 <dim>1</dim>
1971 </port>
1972 </output>
1973 </layer>
1974 <layer id="225" name="bottleneck1_1/dim_inc/conv" type="Convolution" version="opset1">
1975 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
1976 <input>
1977 <port id="0">
1978 <dim>1</dim>
1979 <dim>8</dim>
1980 <dim>160</dim>
1981 <dim>272</dim>
1982 </port>
1983 <port id="1">
1984 <dim>32</dim>
1985 <dim>8</dim>
1986 <dim>1</dim>
1987 <dim>1</dim>
1988 </port>
1989 </input>
1990 <output>
1991 <port id="2" precision="FP16">
1992 <dim>1</dim>
1993 <dim>32</dim>
1994 <dim>160</dim>
1995 <dim>272</dim>
1996 </port>
1997 </output>
1998 </layer>
1999 <layer id="226" name="data_add_236732367877021738" type="Const" version="opset1">
2000 <data element_type="f16" offset="2124" shape="1,32,1,1" size="64"/>
2001 <output>
2002 <port id="0" precision="FP16">
2003 <dim>1</dim>
2004 <dim>32</dim>
2005 <dim>1</dim>
2006 <dim>1</dim>
2007 </port>
2008 </output>
2009 </layer>
2010 <layer id="227" name="bottleneck1_1/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
2011 <data auto_broadcast="numpy"/>
2012 <input>
2013 <port id="0">
2014 <dim>1</dim>
2015 <dim>32</dim>
2016 <dim>160</dim>
2017 <dim>272</dim>
2018 </port>
2019 <port id="1">
2020 <dim>1</dim>
2021 <dim>32</dim>
2022 <dim>1</dim>
2023 <dim>1</dim>
2024 </port>
2025 </input>
2026 <output>
2027 <port id="2" names="bottleneck1_1/dim_inc/conv" precision="FP16">
2028 <dim>1</dim>
2029 <dim>32</dim>
2030 <dim>160</dim>
2031 <dim>272</dim>
2032 </port>
2033 </output>
2034 </layer>
2035 <layer id="228" name="bottleneck1_1/add/fq_input_1" type="FakeQuantize" version="opset1">
2036 <data auto_broadcast="numpy" levels="256"/>
2037 <input>
2038 <port id="0">
2039 <dim>1</dim>
2040 <dim>32</dim>
2041 <dim>160</dim>
2042 <dim>272</dim>
2043 </port>
2044 <port id="1"/>
2045 <port id="2"/>
2046 <port id="3"/>
2047 <port id="4"/>
2048 </input>
2049 <output>
2050 <port id="5" precision="FP16">
2051 <dim>1</dim>
2052 <dim>32</dim>
2053 <dim>160</dim>
2054 <dim>272</dim>
2055 </port>
2056 </output>
2057 </layer>
2058 <layer id="229" name="bottleneck1_1/add" type="Add" version="opset1">
2059 <data auto_broadcast="numpy"/>
2060 <input>
2061 <port id="0">
2062 <dim>1</dim>
2063 <dim>32</dim>
2064 <dim>160</dim>
2065 <dim>272</dim>
2066 </port>
2067 <port id="1">
2068 <dim>1</dim>
2069 <dim>32</dim>
2070 <dim>160</dim>
2071 <dim>272</dim>
2072 </port>
2073 </input>
2074 <output>
2075 <port id="2" names="bottleneck1_1/add" precision="FP16">
2076 <dim>1</dim>
2077 <dim>32</dim>
2078 <dim>160</dim>
2079 <dim>272</dim>
2080 </port>
2081 </output>
2082 </layer>
2083 <layer id="230" name="bottleneck1_1/fn/weights3085240283773" type="Const" version="opset1">
2084 <data element_type="f32" offset="1576" shape="1" size="4"/>
2085 <output>
2086 <port id="0" precision="FP32">
2087 <dim>1</dim>
2088 </port>
2089 </output>
2090 </layer>
2091 <layer id="231" name="bottleneck1_1/fn" type="PReLU" version="opset1">
2092 <input>
2093 <port id="0">
2094 <dim>1</dim>
2095 <dim>32</dim>
2096 <dim>160</dim>
2097 <dim>272</dim>
2098 </port>
2099 <port id="1">
2100 <dim>1</dim>
2101 </port>
2102 </input>
2103 <output>
2104 <port id="2" names="bottleneck1_1/add" precision="FP16">
2105 <dim>1</dim>
2106 <dim>32</dim>
2107 <dim>160</dim>
2108 <dim>272</dim>
2109 </port>
2110 </output>
2111 </layer>
2112 <layer id="232" name="bottleneck1_2/add/fq_input_0" type="FakeQuantize" version="opset1">
2113 <data auto_broadcast="numpy" levels="256"/>
2114 <input>
2115 <port id="0">
2116 <dim>1</dim>
2117 <dim>32</dim>
2118 <dim>160</dim>
2119 <dim>272</dim>
2120 </port>
2121 <port id="1"/>
2122 <port id="2"/>
2123 <port id="3"/>
2124 <port id="4"/>
2125 </input>
2126 <output>
2127 <port id="5" precision="FP16">
2128 <dim>1</dim>
2129 <dim>32</dim>
2130 <dim>160</dim>
2131 <dim>272</dim>
2132 </port>
2133 </output>
2134 </layer>
2135 <layer id="233" name="2834283820706" type="Const" version="opset1">
2136 <data element_type="f16" offset="2188" shape="" size="2"/>
2137 <output>
2138 <port id="0" precision="FP16"/>
2139 </output>
2140 </layer>
2141 <layer id="234" name="2835283922026" type="Const" version="opset1">
2142 <data element_type="f16" offset="2190" shape="" size="2"/>
2143 <output>
2144 <port id="0" precision="FP16"/>
2145 </output>
2146 </layer>
2147 <layer id="235" name="2836284020475" type="Const" version="opset1">
2148 <data element_type="f16" offset="2188" shape="" size="2"/>
2149 <output>
2150 <port id="0" precision="FP16"/>
2151 </output>
2152 </layer>
2153 <layer id="236" name="2837284120844" type="Const" version="opset1">
2154 <data element_type="f16" offset="2190" shape="" size="2"/>
2155 <output>
2156 <port id="0" precision="FP16"/>
2157 </output>
2158 </layer>
2159 <layer id="237" name="3604360820832" type="Const" version="opset1">
2160 <data element_type="f16" offset="2192" shape="" size="2"/>
2161 <output>
2162 <port id="0" precision="FP16"/>
2163 </output>
2164 </layer>
2165 <layer id="238" name="3605360921774" type="Const" version="opset1">
2166 <data element_type="f16" offset="2194" shape="" size="2"/>
2167 <output>
2168 <port id="0" precision="FP16"/>
2169 </output>
2170 </layer>
2171 <layer id="239" name="3606361021057" type="Const" version="opset1">
2172 <data element_type="f16" offset="2192" shape="" size="2"/>
2173 <output>
2174 <port id="0" precision="FP16"/>
2175 </output>
2176 </layer>
2177 <layer id="240" name="3607361121939" type="Const" version="opset1">
2178 <data element_type="f16" offset="2194" shape="" size="2"/>
2179 <output>
2180 <port id="0" precision="FP16"/>
2181 </output>
2182 </layer>
2183 <layer id="241" name="4324432819458" type="Const" version="opset1">
2184 <data element_type="f16" offset="2196" shape="1,8,1,1" size="16"/>
2185 <output>
2186 <port id="0" precision="FP16">
2187 <dim>1</dim>
2188 <dim>8</dim>
2189 <dim>1</dim>
2190 <dim>1</dim>
2191 </port>
2192 </output>
2193 </layer>
2194 <layer id="242" name="4325432919383" type="Const" version="opset1">
2195 <data element_type="f16" offset="2212" shape="1,8,1,1" size="16"/>
2196 <output>
2197 <port id="0" precision="FP16">
2198 <dim>1</dim>
2199 <dim>8</dim>
2200 <dim>1</dim>
2201 <dim>1</dim>
2202 </port>
2203 </output>
2204 </layer>
2205 <layer id="243" name="4326433019566" type="Const" version="opset1">
2206 <data element_type="f16" offset="2196" shape="1,8,1,1" size="16"/>
2207 <output>
2208 <port id="0" precision="FP16">
2209 <dim>1</dim>
2210 <dim>8</dim>
2211 <dim>1</dim>
2212 <dim>1</dim>
2213 </port>
2214 </output>
2215 </layer>
2216 <layer id="244" name="4327433121792" type="Const" version="opset1">
2217 <data element_type="f16" offset="2212" shape="1,8,1,1" size="16"/>
2218 <output>
2219 <port id="0" precision="FP16">
2220 <dim>1</dim>
2221 <dim>8</dim>
2222 <dim>1</dim>
2223 <dim>1</dim>
2224 </port>
2225 </output>
2226 </layer>
2227 <layer id="245" name="bottleneck1_2/dim_red/bn/mean/Fused_Mul__copy77510091/quantized1273622236" type="Const" version="opset1">
2228 <data element_type="i8" offset="2228" shape="8,32,1,1" size="256"/>
2229 <output>
2230 <port id="0" precision="I8">
2231 <dim>8</dim>
2232 <dim>32</dim>
2233 <dim>1</dim>
2234 <dim>1</dim>
2235 </port>
2236 </output>
2237 </layer>
2238 <layer id="246" name="bottleneck1_2/dim_red/bn/mean/Fused_Mul__copy77510091/quantized/to_f16" type="Convert" version="opset1">
2239 <data destination_type="f16"/>
2240 <input>
2241 <port id="0">
2242 <dim>8</dim>
2243 <dim>32</dim>
2244 <dim>1</dim>
2245 <dim>1</dim>
2246 </port>
2247 </input>
2248 <output>
2249 <port id="1" precision="FP16">
2250 <dim>8</dim>
2251 <dim>32</dim>
2252 <dim>1</dim>
2253 <dim>1</dim>
2254 </port>
2255 </output>
2256 </layer>
2257 <layer id="247" name="bottleneck1_2/dim_red/conv/fq_weights_1/zero_point1274922707" type="Const" version="opset1">
2258 <data element_type="f16" offset="2484" shape="8,1,1,1" size="16"/>
2259 <output>
2260 <port id="0" precision="FP16">
2261 <dim>8</dim>
2262 <dim>1</dim>
2263 <dim>1</dim>
2264 <dim>1</dim>
2265 </port>
2266 </output>
2267 </layer>
2268 <layer id="248" name="bottleneck1_2/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
2269 <data auto_broadcast="numpy"/>
2270 <input>
2271 <port id="0">
2272 <dim>8</dim>
2273 <dim>32</dim>
2274 <dim>1</dim>
2275 <dim>1</dim>
2276 </port>
2277 <port id="1">
2278 <dim>8</dim>
2279 <dim>1</dim>
2280 <dim>1</dim>
2281 <dim>1</dim>
2282 </port>
2283 </input>
2284 <output>
2285 <port id="2" precision="FP16">
2286 <dim>8</dim>
2287 <dim>32</dim>
2288 <dim>1</dim>
2289 <dim>1</dim>
2290 </port>
2291 </output>
2292 </layer>
2293 <layer id="249" name="bottleneck1_2/dim_red/conv/fq_weights_1/scale1274420130" type="Const" version="opset1">
2294 <data element_type="f16" offset="2500" shape="8,1,1,1" size="16"/>
2295 <output>
2296 <port id="0" precision="FP16">
2297 <dim>8</dim>
2298 <dim>1</dim>
2299 <dim>1</dim>
2300 <dim>1</dim>
2301 </port>
2302 </output>
2303 </layer>
2304 <layer id="250" name="bottleneck1_2/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
2305 <data auto_broadcast="numpy"/>
2306 <input>
2307 <port id="0">
2308 <dim>8</dim>
2309 <dim>32</dim>
2310 <dim>1</dim>
2311 <dim>1</dim>
2312 </port>
2313 <port id="1">
2314 <dim>8</dim>
2315 <dim>1</dim>
2316 <dim>1</dim>
2317 <dim>1</dim>
2318 </port>
2319 </input>
2320 <output>
2321 <port id="2" precision="FP16">
2322 <dim>8</dim>
2323 <dim>32</dim>
2324 <dim>1</dim>
2325 <dim>1</dim>
2326 </port>
2327 </output>
2328 </layer>
2329 <layer id="251" name="bottleneck1_2/dim_red/conv" type="Convolution" version="opset1">
2330 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
2331 <input>
2332 <port id="0">
2333 <dim>1</dim>
2334 <dim>32</dim>
2335 <dim>160</dim>
2336 <dim>272</dim>
2337 </port>
2338 <port id="1">
2339 <dim>8</dim>
2340 <dim>32</dim>
2341 <dim>1</dim>
2342 <dim>1</dim>
2343 </port>
2344 </input>
2345 <output>
2346 <port id="2" precision="FP16">
2347 <dim>1</dim>
2348 <dim>8</dim>
2349 <dim>160</dim>
2350 <dim>272</dim>
2351 </port>
2352 </output>
2353 </layer>
2354 <layer id="252" name="data_add_236812368677719602" type="Const" version="opset1">
2355 <data element_type="f16" offset="2516" shape="1,8,1,1" size="16"/>
2356 <output>
2357 <port id="0" precision="FP16">
2358 <dim>1</dim>
2359 <dim>8</dim>
2360 <dim>1</dim>
2361 <dim>1</dim>
2362 </port>
2363 </output>
2364 </layer>
2365 <layer id="253" name="bottleneck1_2/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
2366 <data auto_broadcast="numpy"/>
2367 <input>
2368 <port id="0">
2369 <dim>1</dim>
2370 <dim>8</dim>
2371 <dim>160</dim>
2372 <dim>272</dim>
2373 </port>
2374 <port id="1">
2375 <dim>1</dim>
2376 <dim>8</dim>
2377 <dim>1</dim>
2378 <dim>1</dim>
2379 </port>
2380 </input>
2381 <output>
2382 <port id="2" names="bottleneck1_2/dim_red/conv" precision="FP16">
2383 <dim>1</dim>
2384 <dim>8</dim>
2385 <dim>160</dim>
2386 <dim>272</dim>
2387 </port>
2388 </output>
2389 </layer>
2390 <layer id="254" name="bottleneck1_2/dim_red/fn/weights3083240265779" type="Const" version="opset1">
2391 <data element_type="f32" offset="1576" shape="1" size="4"/>
2392 <output>
2393 <port id="0" precision="FP32">
2394 <dim>1</dim>
2395 </port>
2396 </output>
2397 </layer>
2398 <layer id="255" name="bottleneck1_2/dim_red/fn" type="PReLU" version="opset1">
2399 <input>
2400 <port id="0">
2401 <dim>1</dim>
2402 <dim>8</dim>
2403 <dim>160</dim>
2404 <dim>272</dim>
2405 </port>
2406 <port id="1">
2407 <dim>1</dim>
2408 </port>
2409 </input>
2410 <output>
2411 <port id="2" names="bottleneck1_2/dim_red/conv" precision="FP16">
2412 <dim>1</dim>
2413 <dim>8</dim>
2414 <dim>160</dim>
2415 <dim>272</dim>
2416 </port>
2417 </output>
2418 </layer>
2419 <layer id="256" name="bottleneck1_2/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
2420 <data auto_broadcast="numpy" levels="256"/>
2421 <input>
2422 <port id="0">
2423 <dim>1</dim>
2424 <dim>8</dim>
2425 <dim>160</dim>
2426 <dim>272</dim>
2427 </port>
2428 <port id="1">
2429 <dim>1</dim>
2430 <dim>8</dim>
2431 <dim>1</dim>
2432 <dim>1</dim>
2433 </port>
2434 <port id="2">
2435 <dim>1</dim>
2436 <dim>8</dim>
2437 <dim>1</dim>
2438 <dim>1</dim>
2439 </port>
2440 <port id="3">
2441 <dim>1</dim>
2442 <dim>8</dim>
2443 <dim>1</dim>
2444 <dim>1</dim>
2445 </port>
2446 <port id="4">
2447 <dim>1</dim>
2448 <dim>8</dim>
2449 <dim>1</dim>
2450 <dim>1</dim>
2451 </port>
2452 </input>
2453 <output>
2454 <port id="5" precision="FP16">
2455 <dim>1</dim>
2456 <dim>8</dim>
2457 <dim>160</dim>
2458 <dim>272</dim>
2459 </port>
2460 </output>
2461 </layer>
2462 <layer id="257" name="16887/value1688920700" type="Const" version="opset1">
2463 <data element_type="i64" offset="1580" shape="5" size="40"/>
2464 <output>
2465 <port id="0" precision="I64">
2466 <dim>5</dim>
2467 </port>
2468 </output>
2469 </layer>
2470 <layer id="258" name="bottleneck1_2/inner/dw1/bn/mean/Fused_Mul__copy78110094/quantized1196822821" type="Const" version="opset1">
2471 <data element_type="i8" offset="2532" shape="8,1,3,3" size="72"/>
2472 <output>
2473 <port id="0" precision="I8">
2474 <dim>8</dim>
2475 <dim>1</dim>
2476 <dim>3</dim>
2477 <dim>3</dim>
2478 </port>
2479 </output>
2480 </layer>
2481 <layer id="259" name="bottleneck1_2/inner/dw1/bn/mean/Fused_Mul__copy78110094/quantized/to_f16" type="Convert" version="opset1">
2482 <data destination_type="f16"/>
2483 <input>
2484 <port id="0">
2485 <dim>8</dim>
2486 <dim>1</dim>
2487 <dim>3</dim>
2488 <dim>3</dim>
2489 </port>
2490 </input>
2491 <output>
2492 <port id="1" precision="FP16">
2493 <dim>8</dim>
2494 <dim>1</dim>
2495 <dim>3</dim>
2496 <dim>3</dim>
2497 </port>
2498 </output>
2499 </layer>
2500 <layer id="260" name="bottleneck1_2/inner/dw1/conv/fq_weights_1/zero_point1198120871" type="Const" version="opset1">
2501 <data element_type="f16" offset="2604" shape="8,1,1,1" size="16"/>
2502 <output>
2503 <port id="0" precision="FP16">
2504 <dim>8</dim>
2505 <dim>1</dim>
2506 <dim>1</dim>
2507 <dim>1</dim>
2508 </port>
2509 </output>
2510 </layer>
2511 <layer id="261" name="bottleneck1_2/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
2512 <data auto_broadcast="numpy"/>
2513 <input>
2514 <port id="0">
2515 <dim>8</dim>
2516 <dim>1</dim>
2517 <dim>3</dim>
2518 <dim>3</dim>
2519 </port>
2520 <port id="1">
2521 <dim>8</dim>
2522 <dim>1</dim>
2523 <dim>1</dim>
2524 <dim>1</dim>
2525 </port>
2526 </input>
2527 <output>
2528 <port id="2" precision="FP16">
2529 <dim>8</dim>
2530 <dim>1</dim>
2531 <dim>3</dim>
2532 <dim>3</dim>
2533 </port>
2534 </output>
2535 </layer>
2536 <layer id="262" name="bottleneck1_2/inner/dw1/conv/fq_weights_1/scale1197621684" type="Const" version="opset1">
2537 <data element_type="f16" offset="2620" shape="8,1,1,1" size="16"/>
2538 <output>
2539 <port id="0" precision="FP16">
2540 <dim>8</dim>
2541 <dim>1</dim>
2542 <dim>1</dim>
2543 <dim>1</dim>
2544 </port>
2545 </output>
2546 </layer>
2547 <layer id="263" name="bottleneck1_2/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
2548 <data auto_broadcast="numpy"/>
2549 <input>
2550 <port id="0">
2551 <dim>8</dim>
2552 <dim>1</dim>
2553 <dim>3</dim>
2554 <dim>3</dim>
2555 </port>
2556 <port id="1">
2557 <dim>8</dim>
2558 <dim>1</dim>
2559 <dim>1</dim>
2560 <dim>1</dim>
2561 </port>
2562 </input>
2563 <output>
2564 <port id="2" precision="FP16">
2565 <dim>8</dim>
2566 <dim>1</dim>
2567 <dim>3</dim>
2568 <dim>3</dim>
2569 </port>
2570 </output>
2571 </layer>
2572 <layer id="264" name="16887" type="Reshape" version="opset1">
2573 <data special_zero="true"/>
2574 <input>
2575 <port id="0">
2576 <dim>8</dim>
2577 <dim>1</dim>
2578 <dim>3</dim>
2579 <dim>3</dim>
2580 </port>
2581 <port id="1">
2582 <dim>5</dim>
2583 </port>
2584 </input>
2585 <output>
2586 <port id="2" precision="FP16">
2587 <dim>8</dim>
2588 <dim>1</dim>
2589 <dim>1</dim>
2590 <dim>3</dim>
2591 <dim>3</dim>
2592 </port>
2593 </output>
2594 </layer>
2595 <layer id="265" name="bottleneck1_2/inner/dw1/conv" type="GroupConvolution" version="opset1">
2596 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
2597 <input>
2598 <port id="0">
2599 <dim>1</dim>
2600 <dim>8</dim>
2601 <dim>160</dim>
2602 <dim>272</dim>
2603 </port>
2604 <port id="1">
2605 <dim>8</dim>
2606 <dim>1</dim>
2607 <dim>1</dim>
2608 <dim>3</dim>
2609 <dim>3</dim>
2610 </port>
2611 </input>
2612 <output>
2613 <port id="2" precision="FP16">
2614 <dim>1</dim>
2615 <dim>8</dim>
2616 <dim>160</dim>
2617 <dim>272</dim>
2618 </port>
2619 </output>
2620 </layer>
2621 <layer id="266" name="data_add_236892369478320910" type="Const" version="opset1">
2622 <data element_type="f16" offset="2636" shape="1,8,1,1" size="16"/>
2623 <output>
2624 <port id="0" precision="FP16">
2625 <dim>1</dim>
2626 <dim>8</dim>
2627 <dim>1</dim>
2628 <dim>1</dim>
2629 </port>
2630 </output>
2631 </layer>
2632 <layer id="267" name="bottleneck1_2/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
2633 <data auto_broadcast="numpy"/>
2634 <input>
2635 <port id="0">
2636 <dim>1</dim>
2637 <dim>8</dim>
2638 <dim>160</dim>
2639 <dim>272</dim>
2640 </port>
2641 <port id="1">
2642 <dim>1</dim>
2643 <dim>8</dim>
2644 <dim>1</dim>
2645 <dim>1</dim>
2646 </port>
2647 </input>
2648 <output>
2649 <port id="2" names="bottleneck1_2/inner/dw1/conv" precision="FP16">
2650 <dim>1</dim>
2651 <dim>8</dim>
2652 <dim>160</dim>
2653 <dim>272</dim>
2654 </port>
2655 </output>
2656 </layer>
2657 <layer id="268" name="bottleneck1_2/inner/dw1/fn/weights3082440295785" type="Const" version="opset1">
2658 <data element_type="f32" offset="1576" shape="1" size="4"/>
2659 <output>
2660 <port id="0" precision="FP32">
2661 <dim>1</dim>
2662 </port>
2663 </output>
2664 </layer>
2665 <layer id="269" name="bottleneck1_2/inner/dw1/fn" type="PReLU" version="opset1">
2666 <input>
2667 <port id="0">
2668 <dim>1</dim>
2669 <dim>8</dim>
2670 <dim>160</dim>
2671 <dim>272</dim>
2672 </port>
2673 <port id="1">
2674 <dim>1</dim>
2675 </port>
2676 </input>
2677 <output>
2678 <port id="2" names="bottleneck1_2/inner/dw1/conv" precision="FP16">
2679 <dim>1</dim>
2680 <dim>8</dim>
2681 <dim>160</dim>
2682 <dim>272</dim>
2683 </port>
2684 </output>
2685 </layer>
2686 <layer id="270" name="bottleneck1_2/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
2687 <data auto_broadcast="numpy" levels="256"/>
2688 <input>
2689 <port id="0">
2690 <dim>1</dim>
2691 <dim>8</dim>
2692 <dim>160</dim>
2693 <dim>272</dim>
2694 </port>
2695 <port id="1"/>
2696 <port id="2"/>
2697 <port id="3"/>
2698 <port id="4"/>
2699 </input>
2700 <output>
2701 <port id="5" precision="FP16">
2702 <dim>1</dim>
2703 <dim>8</dim>
2704 <dim>160</dim>
2705 <dim>272</dim>
2706 </port>
2707 </output>
2708 </layer>
2709 <layer id="271" name="bottleneck1_2/dim_inc/bn/mean/Fused_Mul__copy78710097/quantized1285621807" type="Const" version="opset1">
2710 <data element_type="i8" offset="2652" shape="32,8,1,1" size="256"/>
2711 <output>
2712 <port id="0" precision="I8">
2713 <dim>32</dim>
2714 <dim>8</dim>
2715 <dim>1</dim>
2716 <dim>1</dim>
2717 </port>
2718 </output>
2719 </layer>
2720 <layer id="272" name="bottleneck1_2/dim_inc/bn/mean/Fused_Mul__copy78710097/quantized/to_f16" type="Convert" version="opset1">
2721 <data destination_type="f16"/>
2722 <input>
2723 <port id="0">
2724 <dim>32</dim>
2725 <dim>8</dim>
2726 <dim>1</dim>
2727 <dim>1</dim>
2728 </port>
2729 </input>
2730 <output>
2731 <port id="1" precision="FP16">
2732 <dim>32</dim>
2733 <dim>8</dim>
2734 <dim>1</dim>
2735 <dim>1</dim>
2736 </port>
2737 </output>
2738 </layer>
2739 <layer id="273" name="bottleneck1_2/dim_inc/conv/fq_weights_1/zero_point1286920469" type="Const" version="opset1">
2740 <data element_type="f16" offset="2908" shape="32,1,1,1" size="64"/>
2741 <output>
2742 <port id="0" precision="FP16">
2743 <dim>32</dim>
2744 <dim>1</dim>
2745 <dim>1</dim>
2746 <dim>1</dim>
2747 </port>
2748 </output>
2749 </layer>
2750 <layer id="274" name="bottleneck1_2/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
2751 <data auto_broadcast="numpy"/>
2752 <input>
2753 <port id="0">
2754 <dim>32</dim>
2755 <dim>8</dim>
2756 <dim>1</dim>
2757 <dim>1</dim>
2758 </port>
2759 <port id="1">
2760 <dim>32</dim>
2761 <dim>1</dim>
2762 <dim>1</dim>
2763 <dim>1</dim>
2764 </port>
2765 </input>
2766 <output>
2767 <port id="2" precision="FP16">
2768 <dim>32</dim>
2769 <dim>8</dim>
2770 <dim>1</dim>
2771 <dim>1</dim>
2772 </port>
2773 </output>
2774 </layer>
2775 <layer id="275" name="bottleneck1_2/dim_inc/conv/fq_weights_1/scale1286419986" type="Const" version="opset1">
2776 <data element_type="f16" offset="2972" shape="32,1,1,1" size="64"/>
2777 <output>
2778 <port id="0" precision="FP16">
2779 <dim>32</dim>
2780 <dim>1</dim>
2781 <dim>1</dim>
2782 <dim>1</dim>
2783 </port>
2784 </output>
2785 </layer>
2786 <layer id="276" name="bottleneck1_2/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
2787 <data auto_broadcast="numpy"/>
2788 <input>
2789 <port id="0">
2790 <dim>32</dim>
2791 <dim>8</dim>
2792 <dim>1</dim>
2793 <dim>1</dim>
2794 </port>
2795 <port id="1">
2796 <dim>32</dim>
2797 <dim>1</dim>
2798 <dim>1</dim>
2799 <dim>1</dim>
2800 </port>
2801 </input>
2802 <output>
2803 <port id="2" precision="FP16">
2804 <dim>32</dim>
2805 <dim>8</dim>
2806 <dim>1</dim>
2807 <dim>1</dim>
2808 </port>
2809 </output>
2810 </layer>
2811 <layer id="277" name="bottleneck1_2/dim_inc/conv" type="Convolution" version="opset1">
2812 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
2813 <input>
2814 <port id="0">
2815 <dim>1</dim>
2816 <dim>8</dim>
2817 <dim>160</dim>
2818 <dim>272</dim>
2819 </port>
2820 <port id="1">
2821 <dim>32</dim>
2822 <dim>8</dim>
2823 <dim>1</dim>
2824 <dim>1</dim>
2825 </port>
2826 </input>
2827 <output>
2828 <port id="2" precision="FP16">
2829 <dim>1</dim>
2830 <dim>32</dim>
2831 <dim>160</dim>
2832 <dim>272</dim>
2833 </port>
2834 </output>
2835 </layer>
2836 <layer id="278" name="data_add_236972370278919596" type="Const" version="opset1">
2837 <data element_type="f16" offset="3036" shape="1,32,1,1" size="64"/>
2838 <output>
2839 <port id="0" precision="FP16">
2840 <dim>1</dim>
2841 <dim>32</dim>
2842 <dim>1</dim>
2843 <dim>1</dim>
2844 </port>
2845 </output>
2846 </layer>
2847 <layer id="279" name="bottleneck1_2/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
2848 <data auto_broadcast="numpy"/>
2849 <input>
2850 <port id="0">
2851 <dim>1</dim>
2852 <dim>32</dim>
2853 <dim>160</dim>
2854 <dim>272</dim>
2855 </port>
2856 <port id="1">
2857 <dim>1</dim>
2858 <dim>32</dim>
2859 <dim>1</dim>
2860 <dim>1</dim>
2861 </port>
2862 </input>
2863 <output>
2864 <port id="2" names="bottleneck1_2/dim_inc/conv" precision="FP16">
2865 <dim>1</dim>
2866 <dim>32</dim>
2867 <dim>160</dim>
2868 <dim>272</dim>
2869 </port>
2870 </output>
2871 </layer>
2872 <layer id="280" name="bottleneck1_2/add/fq_input_1" type="FakeQuantize" version="opset1">
2873 <data auto_broadcast="numpy" levels="256"/>
2874 <input>
2875 <port id="0">
2876 <dim>1</dim>
2877 <dim>32</dim>
2878 <dim>160</dim>
2879 <dim>272</dim>
2880 </port>
2881 <port id="1"/>
2882 <port id="2"/>
2883 <port id="3"/>
2884 <port id="4"/>
2885 </input>
2886 <output>
2887 <port id="5" precision="FP16">
2888 <dim>1</dim>
2889 <dim>32</dim>
2890 <dim>160</dim>
2891 <dim>272</dim>
2892 </port>
2893 </output>
2894 </layer>
2895 <layer id="281" name="bottleneck1_2/add" type="Add" version="opset1">
2896 <data auto_broadcast="numpy"/>
2897 <input>
2898 <port id="0">
2899 <dim>1</dim>
2900 <dim>32</dim>
2901 <dim>160</dim>
2902 <dim>272</dim>
2903 </port>
2904 <port id="1">
2905 <dim>1</dim>
2906 <dim>32</dim>
2907 <dim>160</dim>
2908 <dim>272</dim>
2909 </port>
2910 </input>
2911 <output>
2912 <port id="2" names="bottleneck1_2/add" precision="FP16">
2913 <dim>1</dim>
2914 <dim>32</dim>
2915 <dim>160</dim>
2916 <dim>272</dim>
2917 </port>
2918 </output>
2919 </layer>
2920 <layer id="282" name="bottleneck1_2/fn/weights3106840700792" type="Const" version="opset1">
2921 <data element_type="f32" offset="1576" shape="1" size="4"/>
2922 <output>
2923 <port id="0" precision="FP32">
2924 <dim>1</dim>
2925 </port>
2926 </output>
2927 </layer>
2928 <layer id="283" name="bottleneck1_2/fn" type="PReLU" version="opset1">
2929 <input>
2930 <port id="0">
2931 <dim>1</dim>
2932 <dim>32</dim>
2933 <dim>160</dim>
2934 <dim>272</dim>
2935 </port>
2936 <port id="1">
2937 <dim>1</dim>
2938 </port>
2939 </input>
2940 <output>
2941 <port id="2" names="bottleneck1_2/add" precision="FP16">
2942 <dim>1</dim>
2943 <dim>32</dim>
2944 <dim>160</dim>
2945 <dim>272</dim>
2946 </port>
2947 </output>
2948 </layer>
2949 <layer id="284" name="bottleneck1_3/add/fq_input_0" type="FakeQuantize" version="opset1">
2950 <data auto_broadcast="numpy" levels="256"/>
2951 <input>
2952 <port id="0">
2953 <dim>1</dim>
2954 <dim>32</dim>
2955 <dim>160</dim>
2956 <dim>272</dim>
2957 </port>
2958 <port id="1"/>
2959 <port id="2"/>
2960 <port id="3"/>
2961 <port id="4"/>
2962 </input>
2963 <output>
2964 <port id="5" precision="FP16">
2965 <dim>1</dim>
2966 <dim>32</dim>
2967 <dim>160</dim>
2968 <dim>272</dim>
2969 </port>
2970 </output>
2971 </layer>
2972 <layer id="285" name="4774477819857" type="Const" version="opset1">
2973 <data element_type="f16" offset="3100" shape="" size="2"/>
2974 <output>
2975 <port id="0" precision="FP16"/>
2976 </output>
2977 </layer>
2978 <layer id="286" name="4775477922080" type="Const" version="opset1">
2979 <data element_type="f16" offset="3102" shape="" size="2"/>
2980 <output>
2981 <port id="0" precision="FP16"/>
2982 </output>
2983 </layer>
2984 <layer id="287" name="4776478019659" type="Const" version="opset1">
2985 <data element_type="f16" offset="3100" shape="" size="2"/>
2986 <output>
2987 <port id="0" precision="FP16"/>
2988 </output>
2989 </layer>
2990 <layer id="288" name="4777478120052" type="Const" version="opset1">
2991 <data element_type="f16" offset="3102" shape="" size="2"/>
2992 <output>
2993 <port id="0" precision="FP16"/>
2994 </output>
2995 </layer>
2996 <layer id="289" name="2784278821840" type="Const" version="opset1">
2997 <data element_type="f16" offset="3104" shape="" size="2"/>
2998 <output>
2999 <port id="0" precision="FP16"/>
3000 </output>
3001 </layer>
3002 <layer id="290" name="2785278921474" type="Const" version="opset1">
3003 <data element_type="f16" offset="3106" shape="" size="2"/>
3004 <output>
3005 <port id="0" precision="FP16"/>
3006 </output>
3007 </layer>
3008 <layer id="291" name="2786279021399" type="Const" version="opset1">
3009 <data element_type="f16" offset="3104" shape="" size="2"/>
3010 <output>
3011 <port id="0" precision="FP16"/>
3012 </output>
3013 </layer>
3014 <layer id="292" name="2787279122146" type="Const" version="opset1">
3015 <data element_type="f16" offset="3106" shape="" size="2"/>
3016 <output>
3017 <port id="0" precision="FP16"/>
3018 </output>
3019 </layer>
3020 <layer id="293" name="2864286822065" type="Const" version="opset1">
3021 <data element_type="f16" offset="3108" shape="1,8,1,1" size="16"/>
3022 <output>
3023 <port id="0" precision="FP16">
3024 <dim>1</dim>
3025 <dim>8</dim>
3026 <dim>1</dim>
3027 <dim>1</dim>
3028 </port>
3029 </output>
3030 </layer>
3031 <layer id="294" name="2865286922521" type="Const" version="opset1">
3032 <data element_type="f16" offset="3124" shape="1,8,1,1" size="16"/>
3033 <output>
3034 <port id="0" precision="FP16">
3035 <dim>1</dim>
3036 <dim>8</dim>
3037 <dim>1</dim>
3038 <dim>1</dim>
3039 </port>
3040 </output>
3041 </layer>
3042 <layer id="295" name="2866287022917" type="Const" version="opset1">
3043 <data element_type="f16" offset="3108" shape="1,8,1,1" size="16"/>
3044 <output>
3045 <port id="0" precision="FP16">
3046 <dim>1</dim>
3047 <dim>8</dim>
3048 <dim>1</dim>
3049 <dim>1</dim>
3050 </port>
3051 </output>
3052 </layer>
3053 <layer id="296" name="2867287122137" type="Const" version="opset1">
3054 <data element_type="f16" offset="3124" shape="1,8,1,1" size="16"/>
3055 <output>
3056 <port id="0" precision="FP16">
3057 <dim>1</dim>
3058 <dim>8</dim>
3059 <dim>1</dim>
3060 <dim>1</dim>
3061 </port>
3062 </output>
3063 </layer>
3064 <layer id="297" name="bottleneck1_3/dim_red/bn/mean/Fused_Mul__copy79410100/quantized1420021693" type="Const" version="opset1">
3065 <data element_type="i8" offset="3140" shape="8,32,1,1" size="256"/>
3066 <output>
3067 <port id="0" precision="I8">
3068 <dim>8</dim>
3069 <dim>32</dim>
3070 <dim>1</dim>
3071 <dim>1</dim>
3072 </port>
3073 </output>
3074 </layer>
3075 <layer id="298" name="bottleneck1_3/dim_red/bn/mean/Fused_Mul__copy79410100/quantized/to_f16" type="Convert" version="opset1">
3076 <data destination_type="f16"/>
3077 <input>
3078 <port id="0">
3079 <dim>8</dim>
3080 <dim>32</dim>
3081 <dim>1</dim>
3082 <dim>1</dim>
3083 </port>
3084 </input>
3085 <output>
3086 <port id="1" precision="FP16">
3087 <dim>8</dim>
3088 <dim>32</dim>
3089 <dim>1</dim>
3090 <dim>1</dim>
3091 </port>
3092 </output>
3093 </layer>
3094 <layer id="299" name="bottleneck1_3/dim_red/conv/fq_weights_1/zero_point1421320478" type="Const" version="opset1">
3095 <data element_type="f16" offset="3396" shape="8,1,1,1" size="16"/>
3096 <output>
3097 <port id="0" precision="FP16">
3098 <dim>8</dim>
3099 <dim>1</dim>
3100 <dim>1</dim>
3101 <dim>1</dim>
3102 </port>
3103 </output>
3104 </layer>
3105 <layer id="300" name="bottleneck1_3/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
3106 <data auto_broadcast="numpy"/>
3107 <input>
3108 <port id="0">
3109 <dim>8</dim>
3110 <dim>32</dim>
3111 <dim>1</dim>
3112 <dim>1</dim>
3113 </port>
3114 <port id="1">
3115 <dim>8</dim>
3116 <dim>1</dim>
3117 <dim>1</dim>
3118 <dim>1</dim>
3119 </port>
3120 </input>
3121 <output>
3122 <port id="2" precision="FP16">
3123 <dim>8</dim>
3124 <dim>32</dim>
3125 <dim>1</dim>
3126 <dim>1</dim>
3127 </port>
3128 </output>
3129 </layer>
3130 <layer id="301" name="bottleneck1_3/dim_red/conv/fq_weights_1/scale1420819449" type="Const" version="opset1">
3131 <data element_type="f16" offset="3412" shape="8,1,1,1" size="16"/>
3132 <output>
3133 <port id="0" precision="FP16">
3134 <dim>8</dim>
3135 <dim>1</dim>
3136 <dim>1</dim>
3137 <dim>1</dim>
3138 </port>
3139 </output>
3140 </layer>
3141 <layer id="302" name="bottleneck1_3/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
3142 <data auto_broadcast="numpy"/>
3143 <input>
3144 <port id="0">
3145 <dim>8</dim>
3146 <dim>32</dim>
3147 <dim>1</dim>
3148 <dim>1</dim>
3149 </port>
3150 <port id="1">
3151 <dim>8</dim>
3152 <dim>1</dim>
3153 <dim>1</dim>
3154 <dim>1</dim>
3155 </port>
3156 </input>
3157 <output>
3158 <port id="2" precision="FP16">
3159 <dim>8</dim>
3160 <dim>32</dim>
3161 <dim>1</dim>
3162 <dim>1</dim>
3163 </port>
3164 </output>
3165 </layer>
3166 <layer id="303" name="bottleneck1_3/dim_red/conv" type="Convolution" version="opset1">
3167 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
3168 <input>
3169 <port id="0">
3170 <dim>1</dim>
3171 <dim>32</dim>
3172 <dim>160</dim>
3173 <dim>272</dim>
3174 </port>
3175 <port id="1">
3176 <dim>8</dim>
3177 <dim>32</dim>
3178 <dim>1</dim>
3179 <dim>1</dim>
3180 </port>
3181 </input>
3182 <output>
3183 <port id="2" precision="FP16">
3184 <dim>1</dim>
3185 <dim>8</dim>
3186 <dim>160</dim>
3187 <dim>272</dim>
3188 </port>
3189 </output>
3190 </layer>
3191 <layer id="304" name="data_add_237052371079621483" type="Const" version="opset1">
3192 <data element_type="f16" offset="3428" shape="1,8,1,1" size="16"/>
3193 <output>
3194 <port id="0" precision="FP16">
3195 <dim>1</dim>
3196 <dim>8</dim>
3197 <dim>1</dim>
3198 <dim>1</dim>
3199 </port>
3200 </output>
3201 </layer>
3202 <layer id="305" name="bottleneck1_3/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
3203 <data auto_broadcast="numpy"/>
3204 <input>
3205 <port id="0">
3206 <dim>1</dim>
3207 <dim>8</dim>
3208 <dim>160</dim>
3209 <dim>272</dim>
3210 </port>
3211 <port id="1">
3212 <dim>1</dim>
3213 <dim>8</dim>
3214 <dim>1</dim>
3215 <dim>1</dim>
3216 </port>
3217 </input>
3218 <output>
3219 <port id="2" names="bottleneck1_3/dim_red/conv" precision="FP16">
3220 <dim>1</dim>
3221 <dim>8</dim>
3222 <dim>160</dim>
3223 <dim>272</dim>
3224 </port>
3225 </output>
3226 </layer>
3227 <layer id="306" name="bottleneck1_3/dim_red/fn/weights3116439665798" type="Const" version="opset1">
3228 <data element_type="f32" offset="1576" shape="1" size="4"/>
3229 <output>
3230 <port id="0" precision="FP32">
3231 <dim>1</dim>
3232 </port>
3233 </output>
3234 </layer>
3235 <layer id="307" name="bottleneck1_3/dim_red/fn" type="PReLU" version="opset1">
3236 <input>
3237 <port id="0">
3238 <dim>1</dim>
3239 <dim>8</dim>
3240 <dim>160</dim>
3241 <dim>272</dim>
3242 </port>
3243 <port id="1">
3244 <dim>1</dim>
3245 </port>
3246 </input>
3247 <output>
3248 <port id="2" names="bottleneck1_3/dim_red/conv" precision="FP16">
3249 <dim>1</dim>
3250 <dim>8</dim>
3251 <dim>160</dim>
3252 <dim>272</dim>
3253 </port>
3254 </output>
3255 </layer>
3256 <layer id="308" name="bottleneck1_3/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
3257 <data auto_broadcast="numpy" levels="256"/>
3258 <input>
3259 <port id="0">
3260 <dim>1</dim>
3261 <dim>8</dim>
3262 <dim>160</dim>
3263 <dim>272</dim>
3264 </port>
3265 <port id="1">
3266 <dim>1</dim>
3267 <dim>8</dim>
3268 <dim>1</dim>
3269 <dim>1</dim>
3270 </port>
3271 <port id="2">
3272 <dim>1</dim>
3273 <dim>8</dim>
3274 <dim>1</dim>
3275 <dim>1</dim>
3276 </port>
3277 <port id="3">
3278 <dim>1</dim>
3279 <dim>8</dim>
3280 <dim>1</dim>
3281 <dim>1</dim>
3282 </port>
3283 <port id="4">
3284 <dim>1</dim>
3285 <dim>8</dim>
3286 <dim>1</dim>
3287 <dim>1</dim>
3288 </port>
3289 </input>
3290 <output>
3291 <port id="5" precision="FP16">
3292 <dim>1</dim>
3293 <dim>8</dim>
3294 <dim>160</dim>
3295 <dim>272</dim>
3296 </port>
3297 </output>
3298 </layer>
3299 <layer id="309" name="16807/value1680921342" type="Const" version="opset1">
3300 <data element_type="i64" offset="1580" shape="5" size="40"/>
3301 <output>
3302 <port id="0" precision="I64">
3303 <dim>5</dim>
3304 </port>
3305 </output>
3306 </layer>
3307 <layer id="310" name="bottleneck1_3/inner/dw1/bn/mean/Fused_Mul__copy80010103/quantized1386420673" type="Const" version="opset1">
3308 <data element_type="i8" offset="3444" shape="8,1,3,3" size="72"/>
3309 <output>
3310 <port id="0" precision="I8">
3311 <dim>8</dim>
3312 <dim>1</dim>
3313 <dim>3</dim>
3314 <dim>3</dim>
3315 </port>
3316 </output>
3317 </layer>
3318 <layer id="311" name="bottleneck1_3/inner/dw1/bn/mean/Fused_Mul__copy80010103/quantized/to_f16" type="Convert" version="opset1">
3319 <data destination_type="f16"/>
3320 <input>
3321 <port id="0">
3322 <dim>8</dim>
3323 <dim>1</dim>
3324 <dim>3</dim>
3325 <dim>3</dim>
3326 </port>
3327 </input>
3328 <output>
3329 <port id="1" precision="FP16">
3330 <dim>8</dim>
3331 <dim>1</dim>
3332 <dim>3</dim>
3333 <dim>3</dim>
3334 </port>
3335 </output>
3336 </layer>
3337 <layer id="312" name="bottleneck1_3/inner/dw1/conv/fq_weights_1/zero_point1387720220" type="Const" version="opset1">
3338 <data element_type="f16" offset="3516" shape="8,1,1,1" size="16"/>
3339 <output>
3340 <port id="0" precision="FP16">
3341 <dim>8</dim>
3342 <dim>1</dim>
3343 <dim>1</dim>
3344 <dim>1</dim>
3345 </port>
3346 </output>
3347 </layer>
3348 <layer id="313" name="bottleneck1_3/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
3349 <data auto_broadcast="numpy"/>
3350 <input>
3351 <port id="0">
3352 <dim>8</dim>
3353 <dim>1</dim>
3354 <dim>3</dim>
3355 <dim>3</dim>
3356 </port>
3357 <port id="1">
3358 <dim>8</dim>
3359 <dim>1</dim>
3360 <dim>1</dim>
3361 <dim>1</dim>
3362 </port>
3363 </input>
3364 <output>
3365 <port id="2" precision="FP16">
3366 <dim>8</dim>
3367 <dim>1</dim>
3368 <dim>3</dim>
3369 <dim>3</dim>
3370 </port>
3371 </output>
3372 </layer>
3373 <layer id="314" name="bottleneck1_3/inner/dw1/conv/fq_weights_1/scale1387221195" type="Const" version="opset1">
3374 <data element_type="f16" offset="3532" shape="8,1,1,1" size="16"/>
3375 <output>
3376 <port id="0" precision="FP16">
3377 <dim>8</dim>
3378 <dim>1</dim>
3379 <dim>1</dim>
3380 <dim>1</dim>
3381 </port>
3382 </output>
3383 </layer>
3384 <layer id="315" name="bottleneck1_3/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
3385 <data auto_broadcast="numpy"/>
3386 <input>
3387 <port id="0">
3388 <dim>8</dim>
3389 <dim>1</dim>
3390 <dim>3</dim>
3391 <dim>3</dim>
3392 </port>
3393 <port id="1">
3394 <dim>8</dim>
3395 <dim>1</dim>
3396 <dim>1</dim>
3397 <dim>1</dim>
3398 </port>
3399 </input>
3400 <output>
3401 <port id="2" precision="FP16">
3402 <dim>8</dim>
3403 <dim>1</dim>
3404 <dim>3</dim>
3405 <dim>3</dim>
3406 </port>
3407 </output>
3408 </layer>
3409 <layer id="316" name="16807" type="Reshape" version="opset1">
3410 <data special_zero="true"/>
3411 <input>
3412 <port id="0">
3413 <dim>8</dim>
3414 <dim>1</dim>
3415 <dim>3</dim>
3416 <dim>3</dim>
3417 </port>
3418 <port id="1">
3419 <dim>5</dim>
3420 </port>
3421 </input>
3422 <output>
3423 <port id="2" precision="FP16">
3424 <dim>8</dim>
3425 <dim>1</dim>
3426 <dim>1</dim>
3427 <dim>3</dim>
3428 <dim>3</dim>
3429 </port>
3430 </output>
3431 </layer>
3432 <layer id="317" name="bottleneck1_3/inner/dw1/conv" type="GroupConvolution" version="opset1">
3433 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
3434 <input>
3435 <port id="0">
3436 <dim>1</dim>
3437 <dim>8</dim>
3438 <dim>160</dim>
3439 <dim>272</dim>
3440 </port>
3441 <port id="1">
3442 <dim>8</dim>
3443 <dim>1</dim>
3444 <dim>1</dim>
3445 <dim>3</dim>
3446 <dim>3</dim>
3447 </port>
3448 </input>
3449 <output>
3450 <port id="2" precision="FP16">
3451 <dim>1</dim>
3452 <dim>8</dim>
3453 <dim>160</dim>
3454 <dim>272</dim>
3455 </port>
3456 </output>
3457 </layer>
3458 <layer id="318" name="data_add_237132371880222077" type="Const" version="opset1">
3459 <data element_type="f16" offset="3548" shape="1,8,1,1" size="16"/>
3460 <output>
3461 <port id="0" precision="FP16">
3462 <dim>1</dim>
3463 <dim>8</dim>
3464 <dim>1</dim>
3465 <dim>1</dim>
3466 </port>
3467 </output>
3468 </layer>
3469 <layer id="319" name="bottleneck1_3/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
3470 <data auto_broadcast="numpy"/>
3471 <input>
3472 <port id="0">
3473 <dim>1</dim>
3474 <dim>8</dim>
3475 <dim>160</dim>
3476 <dim>272</dim>
3477 </port>
3478 <port id="1">
3479 <dim>1</dim>
3480 <dim>8</dim>
3481 <dim>1</dim>
3482 <dim>1</dim>
3483 </port>
3484 </input>
3485 <output>
3486 <port id="2" names="bottleneck1_3/inner/dw1/conv" precision="FP16">
3487 <dim>1</dim>
3488 <dim>8</dim>
3489 <dim>160</dim>
3490 <dim>272</dim>
3491 </port>
3492 </output>
3493 </layer>
3494 <layer id="320" name="bottleneck1_3/inner/dw1/fn/weights3087239692804" type="Const" version="opset1">
3495 <data element_type="f32" offset="1576" shape="1" size="4"/>
3496 <output>
3497 <port id="0" precision="FP32">
3498 <dim>1</dim>
3499 </port>
3500 </output>
3501 </layer>
3502 <layer id="321" name="bottleneck1_3/inner/dw1/fn" type="PReLU" version="opset1">
3503 <input>
3504 <port id="0">
3505 <dim>1</dim>
3506 <dim>8</dim>
3507 <dim>160</dim>
3508 <dim>272</dim>
3509 </port>
3510 <port id="1">
3511 <dim>1</dim>
3512 </port>
3513 </input>
3514 <output>
3515 <port id="2" names="bottleneck1_3/inner/dw1/conv" precision="FP16">
3516 <dim>1</dim>
3517 <dim>8</dim>
3518 <dim>160</dim>
3519 <dim>272</dim>
3520 </port>
3521 </output>
3522 </layer>
3523 <layer id="322" name="bottleneck1_3/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
3524 <data auto_broadcast="numpy" levels="256"/>
3525 <input>
3526 <port id="0">
3527 <dim>1</dim>
3528 <dim>8</dim>
3529 <dim>160</dim>
3530 <dim>272</dim>
3531 </port>
3532 <port id="1"/>
3533 <port id="2"/>
3534 <port id="3"/>
3535 <port id="4"/>
3536 </input>
3537 <output>
3538 <port id="5" precision="FP16">
3539 <dim>1</dim>
3540 <dim>8</dim>
3541 <dim>160</dim>
3542 <dim>272</dim>
3543 </port>
3544 </output>
3545 </layer>
3546 <layer id="323" name="bottleneck1_3/dim_inc/bn/mean/Fused_Mul__copy80610106/quantized1312021546" type="Const" version="opset1">
3547 <data element_type="i8" offset="3564" shape="32,8,1,1" size="256"/>
3548 <output>
3549 <port id="0" precision="I8">
3550 <dim>32</dim>
3551 <dim>8</dim>
3552 <dim>1</dim>
3553 <dim>1</dim>
3554 </port>
3555 </output>
3556 </layer>
3557 <layer id="324" name="bottleneck1_3/dim_inc/bn/mean/Fused_Mul__copy80610106/quantized/to_f16" type="Convert" version="opset1">
3558 <data destination_type="f16"/>
3559 <input>
3560 <port id="0">
3561 <dim>32</dim>
3562 <dim>8</dim>
3563 <dim>1</dim>
3564 <dim>1</dim>
3565 </port>
3566 </input>
3567 <output>
3568 <port id="1" precision="FP16">
3569 <dim>32</dim>
3570 <dim>8</dim>
3571 <dim>1</dim>
3572 <dim>1</dim>
3573 </port>
3574 </output>
3575 </layer>
3576 <layer id="325" name="bottleneck1_3/dim_inc/conv/fq_weights_1/zero_point1313321846" type="Const" version="opset1">
3577 <data element_type="f16" offset="3820" shape="32,1,1,1" size="64"/>
3578 <output>
3579 <port id="0" precision="FP16">
3580 <dim>32</dim>
3581 <dim>1</dim>
3582 <dim>1</dim>
3583 <dim>1</dim>
3584 </port>
3585 </output>
3586 </layer>
3587 <layer id="326" name="bottleneck1_3/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
3588 <data auto_broadcast="numpy"/>
3589 <input>
3590 <port id="0">
3591 <dim>32</dim>
3592 <dim>8</dim>
3593 <dim>1</dim>
3594 <dim>1</dim>
3595 </port>
3596 <port id="1">
3597 <dim>32</dim>
3598 <dim>1</dim>
3599 <dim>1</dim>
3600 <dim>1</dim>
3601 </port>
3602 </input>
3603 <output>
3604 <port id="2" precision="FP16">
3605 <dim>32</dim>
3606 <dim>8</dim>
3607 <dim>1</dim>
3608 <dim>1</dim>
3609 </port>
3610 </output>
3611 </layer>
3612 <layer id="327" name="bottleneck1_3/dim_inc/conv/fq_weights_1/scale1312822020" type="Const" version="opset1">
3613 <data element_type="f16" offset="3884" shape="32,1,1,1" size="64"/>
3614 <output>
3615 <port id="0" precision="FP16">
3616 <dim>32</dim>
3617 <dim>1</dim>
3618 <dim>1</dim>
3619 <dim>1</dim>
3620 </port>
3621 </output>
3622 </layer>
3623 <layer id="328" name="bottleneck1_3/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
3624 <data auto_broadcast="numpy"/>
3625 <input>
3626 <port id="0">
3627 <dim>32</dim>
3628 <dim>8</dim>
3629 <dim>1</dim>
3630 <dim>1</dim>
3631 </port>
3632 <port id="1">
3633 <dim>32</dim>
3634 <dim>1</dim>
3635 <dim>1</dim>
3636 <dim>1</dim>
3637 </port>
3638 </input>
3639 <output>
3640 <port id="2" precision="FP16">
3641 <dim>32</dim>
3642 <dim>8</dim>
3643 <dim>1</dim>
3644 <dim>1</dim>
3645 </port>
3646 </output>
3647 </layer>
3648 <layer id="329" name="bottleneck1_3/dim_inc/conv" type="Convolution" version="opset1">
3649 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
3650 <input>
3651 <port id="0">
3652 <dim>1</dim>
3653 <dim>8</dim>
3654 <dim>160</dim>
3655 <dim>272</dim>
3656 </port>
3657 <port id="1">
3658 <dim>32</dim>
3659 <dim>8</dim>
3660 <dim>1</dim>
3661 <dim>1</dim>
3662 </port>
3663 </input>
3664 <output>
3665 <port id="2" precision="FP16">
3666 <dim>1</dim>
3667 <dim>32</dim>
3668 <dim>160</dim>
3669 <dim>272</dim>
3670 </port>
3671 </output>
3672 </layer>
3673 <layer id="330" name="data_add_237212372680822044" type="Const" version="opset1">
3674 <data element_type="f16" offset="3948" shape="1,32,1,1" size="64"/>
3675 <output>
3676 <port id="0" precision="FP16">
3677 <dim>1</dim>
3678 <dim>32</dim>
3679 <dim>1</dim>
3680 <dim>1</dim>
3681 </port>
3682 </output>
3683 </layer>
3684 <layer id="331" name="bottleneck1_3/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
3685 <data auto_broadcast="numpy"/>
3686 <input>
3687 <port id="0">
3688 <dim>1</dim>
3689 <dim>32</dim>
3690 <dim>160</dim>
3691 <dim>272</dim>
3692 </port>
3693 <port id="1">
3694 <dim>1</dim>
3695 <dim>32</dim>
3696 <dim>1</dim>
3697 <dim>1</dim>
3698 </port>
3699 </input>
3700 <output>
3701 <port id="2" names="bottleneck1_3/dim_inc/conv" precision="FP16">
3702 <dim>1</dim>
3703 <dim>32</dim>
3704 <dim>160</dim>
3705 <dim>272</dim>
3706 </port>
3707 </output>
3708 </layer>
3709 <layer id="332" name="bottleneck1_3/add/fq_input_1" type="FakeQuantize" version="opset1">
3710 <data auto_broadcast="numpy" levels="256"/>
3711 <input>
3712 <port id="0">
3713 <dim>1</dim>
3714 <dim>32</dim>
3715 <dim>160</dim>
3716 <dim>272</dim>
3717 </port>
3718 <port id="1"/>
3719 <port id="2"/>
3720 <port id="3"/>
3721 <port id="4"/>
3722 </input>
3723 <output>
3724 <port id="5" precision="FP16">
3725 <dim>1</dim>
3726 <dim>32</dim>
3727 <dim>160</dim>
3728 <dim>272</dim>
3729 </port>
3730 </output>
3731 </layer>
3732 <layer id="333" name="bottleneck1_3/add" type="Add" version="opset1">
3733 <data auto_broadcast="numpy"/>
3734 <input>
3735 <port id="0">
3736 <dim>1</dim>
3737 <dim>32</dim>
3738 <dim>160</dim>
3739 <dim>272</dim>
3740 </port>
3741 <port id="1">
3742 <dim>1</dim>
3743 <dim>32</dim>
3744 <dim>160</dim>
3745 <dim>272</dim>
3746 </port>
3747 </input>
3748 <output>
3749 <port id="2" names="bottleneck1_3/add" precision="FP16">
3750 <dim>1</dim>
3751 <dim>32</dim>
3752 <dim>160</dim>
3753 <dim>272</dim>
3754 </port>
3755 </output>
3756 </layer>
3757 <layer id="334" name="bottleneck1_3/fn/weights3115639653811" type="Const" version="opset1">
3758 <data element_type="f32" offset="1576" shape="1" size="4"/>
3759 <output>
3760 <port id="0" precision="FP32">
3761 <dim>1</dim>
3762 </port>
3763 </output>
3764 </layer>
3765 <layer id="335" name="bottleneck1_3/fn" type="PReLU" version="opset1">
3766 <input>
3767 <port id="0">
3768 <dim>1</dim>
3769 <dim>32</dim>
3770 <dim>160</dim>
3771 <dim>272</dim>
3772 </port>
3773 <port id="1">
3774 <dim>1</dim>
3775 </port>
3776 </input>
3777 <output>
3778 <port id="2" names="bottleneck1_3/add" precision="FP16">
3779 <dim>1</dim>
3780 <dim>32</dim>
3781 <dim>160</dim>
3782 <dim>272</dim>
3783 </port>
3784 </output>
3785 </layer>
3786 <layer id="336" name="bottleneck1_4/add/fq_input_0" type="FakeQuantize" version="opset1">
3787 <data auto_broadcast="numpy" levels="256"/>
3788 <input>
3789 <port id="0">
3790 <dim>1</dim>
3791 <dim>32</dim>
3792 <dim>160</dim>
3793 <dim>272</dim>
3794 </port>
3795 <port id="1"/>
3796 <port id="2"/>
3797 <port id="3"/>
3798 <port id="4"/>
3799 </input>
3800 <output>
3801 <port id="5" precision="FP16">
3802 <dim>1</dim>
3803 <dim>32</dim>
3804 <dim>160</dim>
3805 <dim>272</dim>
3806 </port>
3807 </output>
3808 </layer>
3809 <layer id="337" name="4014401819608" type="Const" version="opset1">
3810 <data element_type="f16" offset="4012" shape="" size="2"/>
3811 <output>
3812 <port id="0" precision="FP16"/>
3813 </output>
3814 </layer>
3815 <layer id="338" name="4015401921417" type="Const" version="opset1">
3816 <data element_type="f16" offset="4014" shape="" size="2"/>
3817 <output>
3818 <port id="0" precision="FP16"/>
3819 </output>
3820 </layer>
3821 <layer id="339" name="4016402019419" type="Const" version="opset1">
3822 <data element_type="f16" offset="4012" shape="" size="2"/>
3823 <output>
3824 <port id="0" precision="FP16"/>
3825 </output>
3826 </layer>
3827 <layer id="340" name="4017402122053" type="Const" version="opset1">
3828 <data element_type="f16" offset="4014" shape="" size="2"/>
3829 <output>
3830 <port id="0" precision="FP16"/>
3831 </output>
3832 </layer>
3833 <layer id="341" name="5004500820823" type="Const" version="opset1">
3834 <data element_type="f16" offset="4016" shape="" size="2"/>
3835 <output>
3836 <port id="0" precision="FP16"/>
3837 </output>
3838 </layer>
3839 <layer id="342" name="5005500920274" type="Const" version="opset1">
3840 <data element_type="f16" offset="4018" shape="" size="2"/>
3841 <output>
3842 <port id="0" precision="FP16"/>
3843 </output>
3844 </layer>
3845 <layer id="343" name="5006501021309" type="Const" version="opset1">
3846 <data element_type="f16" offset="4016" shape="" size="2"/>
3847 <output>
3848 <port id="0" precision="FP16"/>
3849 </output>
3850 </layer>
3851 <layer id="344" name="5007501121627" type="Const" version="opset1">
3852 <data element_type="f16" offset="4018" shape="" size="2"/>
3853 <output>
3854 <port id="0" precision="FP16"/>
3855 </output>
3856 </layer>
3857 <layer id="345" name="3124312820454" type="Const" version="opset1">
3858 <data element_type="f16" offset="4020" shape="1,8,1,1" size="16"/>
3859 <output>
3860 <port id="0" precision="FP16">
3861 <dim>1</dim>
3862 <dim>8</dim>
3863 <dim>1</dim>
3864 <dim>1</dim>
3865 </port>
3866 </output>
3867 </layer>
3868 <layer id="346" name="3125312921702" type="Const" version="opset1">
3869 <data element_type="f16" offset="4036" shape="1,8,1,1" size="16"/>
3870 <output>
3871 <port id="0" precision="FP16">
3872 <dim>1</dim>
3873 <dim>8</dim>
3874 <dim>1</dim>
3875 <dim>1</dim>
3876 </port>
3877 </output>
3878 </layer>
3879 <layer id="347" name="3126313020085" type="Const" version="opset1">
3880 <data element_type="f16" offset="4020" shape="1,8,1,1" size="16"/>
3881 <output>
3882 <port id="0" precision="FP16">
3883 <dim>1</dim>
3884 <dim>8</dim>
3885 <dim>1</dim>
3886 <dim>1</dim>
3887 </port>
3888 </output>
3889 </layer>
3890 <layer id="348" name="3127313119653" type="Const" version="opset1">
3891 <data element_type="f16" offset="4036" shape="1,8,1,1" size="16"/>
3892 <output>
3893 <port id="0" precision="FP16">
3894 <dim>1</dim>
3895 <dim>8</dim>
3896 <dim>1</dim>
3897 <dim>1</dim>
3898 </port>
3899 </output>
3900 </layer>
3901 <layer id="349" name="bottleneck1_4/dim_red/bn/mean/Fused_Mul__copy81310109/quantized1278420061" type="Const" version="opset1">
3902 <data element_type="i8" offset="4052" shape="8,32,1,1" size="256"/>
3903 <output>
3904 <port id="0" precision="I8">
3905 <dim>8</dim>
3906 <dim>32</dim>
3907 <dim>1</dim>
3908 <dim>1</dim>
3909 </port>
3910 </output>
3911 </layer>
3912 <layer id="350" name="bottleneck1_4/dim_red/bn/mean/Fused_Mul__copy81310109/quantized/to_f16" type="Convert" version="opset1">
3913 <data destination_type="f16"/>
3914 <input>
3915 <port id="0">
3916 <dim>8</dim>
3917 <dim>32</dim>
3918 <dim>1</dim>
3919 <dim>1</dim>
3920 </port>
3921 </input>
3922 <output>
3923 <port id="1" precision="FP16">
3924 <dim>8</dim>
3925 <dim>32</dim>
3926 <dim>1</dim>
3927 <dim>1</dim>
3928 </port>
3929 </output>
3930 </layer>
3931 <layer id="351" name="bottleneck1_4/dim_red/conv/fq_weights_1/zero_point1279722296" type="Const" version="opset1">
3932 <data element_type="f16" offset="4308" shape="8,1,1,1" size="16"/>
3933 <output>
3934 <port id="0" precision="FP16">
3935 <dim>8</dim>
3936 <dim>1</dim>
3937 <dim>1</dim>
3938 <dim>1</dim>
3939 </port>
3940 </output>
3941 </layer>
3942 <layer id="352" name="bottleneck1_4/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
3943 <data auto_broadcast="numpy"/>
3944 <input>
3945 <port id="0">
3946 <dim>8</dim>
3947 <dim>32</dim>
3948 <dim>1</dim>
3949 <dim>1</dim>
3950 </port>
3951 <port id="1">
3952 <dim>8</dim>
3953 <dim>1</dim>
3954 <dim>1</dim>
3955 <dim>1</dim>
3956 </port>
3957 </input>
3958 <output>
3959 <port id="2" precision="FP16">
3960 <dim>8</dim>
3961 <dim>32</dim>
3962 <dim>1</dim>
3963 <dim>1</dim>
3964 </port>
3965 </output>
3966 </layer>
3967 <layer id="353" name="bottleneck1_4/dim_red/conv/fq_weights_1/scale1279221831" type="Const" version="opset1">
3968 <data element_type="f16" offset="4324" shape="8,1,1,1" size="16"/>
3969 <output>
3970 <port id="0" precision="FP16">
3971 <dim>8</dim>
3972 <dim>1</dim>
3973 <dim>1</dim>
3974 <dim>1</dim>
3975 </port>
3976 </output>
3977 </layer>
3978 <layer id="354" name="bottleneck1_4/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
3979 <data auto_broadcast="numpy"/>
3980 <input>
3981 <port id="0">
3982 <dim>8</dim>
3983 <dim>32</dim>
3984 <dim>1</dim>
3985 <dim>1</dim>
3986 </port>
3987 <port id="1">
3988 <dim>8</dim>
3989 <dim>1</dim>
3990 <dim>1</dim>
3991 <dim>1</dim>
3992 </port>
3993 </input>
3994 <output>
3995 <port id="2" precision="FP16">
3996 <dim>8</dim>
3997 <dim>32</dim>
3998 <dim>1</dim>
3999 <dim>1</dim>
4000 </port>
4001 </output>
4002 </layer>
4003 <layer id="355" name="bottleneck1_4/dim_red/conv" type="Convolution" version="opset1">
4004 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
4005 <input>
4006 <port id="0">
4007 <dim>1</dim>
4008 <dim>32</dim>
4009 <dim>160</dim>
4010 <dim>272</dim>
4011 </port>
4012 <port id="1">
4013 <dim>8</dim>
4014 <dim>32</dim>
4015 <dim>1</dim>
4016 <dim>1</dim>
4017 </port>
4018 </input>
4019 <output>
4020 <port id="2" precision="FP16">
4021 <dim>1</dim>
4022 <dim>8</dim>
4023 <dim>160</dim>
4024 <dim>272</dim>
4025 </port>
4026 </output>
4027 </layer>
4028 <layer id="356" name="data_add_237292373481520922" type="Const" version="opset1">
4029 <data element_type="f16" offset="4340" shape="1,8,1,1" size="16"/>
4030 <output>
4031 <port id="0" precision="FP16">
4032 <dim>1</dim>
4033 <dim>8</dim>
4034 <dim>1</dim>
4035 <dim>1</dim>
4036 </port>
4037 </output>
4038 </layer>
4039 <layer id="357" name="bottleneck1_4/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
4040 <data auto_broadcast="numpy"/>
4041 <input>
4042 <port id="0">
4043 <dim>1</dim>
4044 <dim>8</dim>
4045 <dim>160</dim>
4046 <dim>272</dim>
4047 </port>
4048 <port id="1">
4049 <dim>1</dim>
4050 <dim>8</dim>
4051 <dim>1</dim>
4052 <dim>1</dim>
4053 </port>
4054 </input>
4055 <output>
4056 <port id="2" names="bottleneck1_4/dim_red/conv" precision="FP16">
4057 <dim>1</dim>
4058 <dim>8</dim>
4059 <dim>160</dim>
4060 <dim>272</dim>
4061 </port>
4062 </output>
4063 </layer>
4064 <layer id="358" name="bottleneck1_4/dim_red/fn/weights3094840595817" type="Const" version="opset1">
4065 <data element_type="f32" offset="1576" shape="1" size="4"/>
4066 <output>
4067 <port id="0" precision="FP32">
4068 <dim>1</dim>
4069 </port>
4070 </output>
4071 </layer>
4072 <layer id="359" name="bottleneck1_4/dim_red/fn" type="PReLU" version="opset1">
4073 <input>
4074 <port id="0">
4075 <dim>1</dim>
4076 <dim>8</dim>
4077 <dim>160</dim>
4078 <dim>272</dim>
4079 </port>
4080 <port id="1">
4081 <dim>1</dim>
4082 </port>
4083 </input>
4084 <output>
4085 <port id="2" names="bottleneck1_4/dim_red/conv" precision="FP16">
4086 <dim>1</dim>
4087 <dim>8</dim>
4088 <dim>160</dim>
4089 <dim>272</dim>
4090 </port>
4091 </output>
4092 </layer>
4093 <layer id="360" name="bottleneck1_4/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
4094 <data auto_broadcast="numpy" levels="256"/>
4095 <input>
4096 <port id="0">
4097 <dim>1</dim>
4098 <dim>8</dim>
4099 <dim>160</dim>
4100 <dim>272</dim>
4101 </port>
4102 <port id="1">
4103 <dim>1</dim>
4104 <dim>8</dim>
4105 <dim>1</dim>
4106 <dim>1</dim>
4107 </port>
4108 <port id="2">
4109 <dim>1</dim>
4110 <dim>8</dim>
4111 <dim>1</dim>
4112 <dim>1</dim>
4113 </port>
4114 <port id="3">
4115 <dim>1</dim>
4116 <dim>8</dim>
4117 <dim>1</dim>
4118 <dim>1</dim>
4119 </port>
4120 <port id="4">
4121 <dim>1</dim>
4122 <dim>8</dim>
4123 <dim>1</dim>
4124 <dim>1</dim>
4125 </port>
4126 </input>
4127 <output>
4128 <port id="5" precision="FP16">
4129 <dim>1</dim>
4130 <dim>8</dim>
4131 <dim>160</dim>
4132 <dim>272</dim>
4133 </port>
4134 </output>
4135 </layer>
4136 <layer id="361" name="16815/value1681722659" type="Const" version="opset1">
4137 <data element_type="i64" offset="1580" shape="5" size="40"/>
4138 <output>
4139 <port id="0" precision="I64">
4140 <dim>5</dim>
4141 </port>
4142 </output>
4143 </layer>
4144 <layer id="362" name="bottleneck1_4/inner/dw1/bn/mean/Fused_Mul__copy81910112/quantized1350421549" type="Const" version="opset1">
4145 <data element_type="i8" offset="4356" shape="8,1,3,3" size="72"/>
4146 <output>
4147 <port id="0" precision="I8">
4148 <dim>8</dim>
4149 <dim>1</dim>
4150 <dim>3</dim>
4151 <dim>3</dim>
4152 </port>
4153 </output>
4154 </layer>
4155 <layer id="363" name="bottleneck1_4/inner/dw1/bn/mean/Fused_Mul__copy81910112/quantized/to_f16" type="Convert" version="opset1">
4156 <data destination_type="f16"/>
4157 <input>
4158 <port id="0">
4159 <dim>8</dim>
4160 <dim>1</dim>
4161 <dim>3</dim>
4162 <dim>3</dim>
4163 </port>
4164 </input>
4165 <output>
4166 <port id="1" precision="FP16">
4167 <dim>8</dim>
4168 <dim>1</dim>
4169 <dim>3</dim>
4170 <dim>3</dim>
4171 </port>
4172 </output>
4173 </layer>
4174 <layer id="364" name="bottleneck1_4/inner/dw1/conv/fq_weights_1/zero_point1351721285" type="Const" version="opset1">
4175 <data element_type="f16" offset="4428" shape="8,1,1,1" size="16"/>
4176 <output>
4177 <port id="0" precision="FP16">
4178 <dim>8</dim>
4179 <dim>1</dim>
4180 <dim>1</dim>
4181 <dim>1</dim>
4182 </port>
4183 </output>
4184 </layer>
4185 <layer id="365" name="bottleneck1_4/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
4186 <data auto_broadcast="numpy"/>
4187 <input>
4188 <port id="0">
4189 <dim>8</dim>
4190 <dim>1</dim>
4191 <dim>3</dim>
4192 <dim>3</dim>
4193 </port>
4194 <port id="1">
4195 <dim>8</dim>
4196 <dim>1</dim>
4197 <dim>1</dim>
4198 <dim>1</dim>
4199 </port>
4200 </input>
4201 <output>
4202 <port id="2" precision="FP16">
4203 <dim>8</dim>
4204 <dim>1</dim>
4205 <dim>3</dim>
4206 <dim>3</dim>
4207 </port>
4208 </output>
4209 </layer>
4210 <layer id="366" name="bottleneck1_4/inner/dw1/conv/fq_weights_1/scale1351221033" type="Const" version="opset1">
4211 <data element_type="f16" offset="4444" shape="8,1,1,1" size="16"/>
4212 <output>
4213 <port id="0" precision="FP16">
4214 <dim>8</dim>
4215 <dim>1</dim>
4216 <dim>1</dim>
4217 <dim>1</dim>
4218 </port>
4219 </output>
4220 </layer>
4221 <layer id="367" name="bottleneck1_4/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
4222 <data auto_broadcast="numpy"/>
4223 <input>
4224 <port id="0">
4225 <dim>8</dim>
4226 <dim>1</dim>
4227 <dim>3</dim>
4228 <dim>3</dim>
4229 </port>
4230 <port id="1">
4231 <dim>8</dim>
4232 <dim>1</dim>
4233 <dim>1</dim>
4234 <dim>1</dim>
4235 </port>
4236 </input>
4237 <output>
4238 <port id="2" precision="FP16">
4239 <dim>8</dim>
4240 <dim>1</dim>
4241 <dim>3</dim>
4242 <dim>3</dim>
4243 </port>
4244 </output>
4245 </layer>
4246 <layer id="368" name="16815" type="Reshape" version="opset1">
4247 <data special_zero="true"/>
4248 <input>
4249 <port id="0">
4250 <dim>8</dim>
4251 <dim>1</dim>
4252 <dim>3</dim>
4253 <dim>3</dim>
4254 </port>
4255 <port id="1">
4256 <dim>5</dim>
4257 </port>
4258 </input>
4259 <output>
4260 <port id="2" precision="FP16">
4261 <dim>8</dim>
4262 <dim>1</dim>
4263 <dim>1</dim>
4264 <dim>3</dim>
4265 <dim>3</dim>
4266 </port>
4267 </output>
4268 </layer>
4269 <layer id="369" name="bottleneck1_4/inner/dw1/conv" type="GroupConvolution" version="opset1">
4270 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
4271 <input>
4272 <port id="0">
4273 <dim>1</dim>
4274 <dim>8</dim>
4275 <dim>160</dim>
4276 <dim>272</dim>
4277 </port>
4278 <port id="1">
4279 <dim>8</dim>
4280 <dim>1</dim>
4281 <dim>1</dim>
4282 <dim>3</dim>
4283 <dim>3</dim>
4284 </port>
4285 </input>
4286 <output>
4287 <port id="2" precision="FP16">
4288 <dim>1</dim>
4289 <dim>8</dim>
4290 <dim>160</dim>
4291 <dim>272</dim>
4292 </port>
4293 </output>
4294 </layer>
4295 <layer id="370" name="data_add_237372374282122335" type="Const" version="opset1">
4296 <data element_type="f16" offset="4460" shape="1,8,1,1" size="16"/>
4297 <output>
4298 <port id="0" precision="FP16">
4299 <dim>1</dim>
4300 <dim>8</dim>
4301 <dim>1</dim>
4302 <dim>1</dim>
4303 </port>
4304 </output>
4305 </layer>
4306 <layer id="371" name="bottleneck1_4/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
4307 <data auto_broadcast="numpy"/>
4308 <input>
4309 <port id="0">
4310 <dim>1</dim>
4311 <dim>8</dim>
4312 <dim>160</dim>
4313 <dim>272</dim>
4314 </port>
4315 <port id="1">
4316 <dim>1</dim>
4317 <dim>8</dim>
4318 <dim>1</dim>
4319 <dim>1</dim>
4320 </port>
4321 </input>
4322 <output>
4323 <port id="2" names="bottleneck1_4/inner/dw1/conv" precision="FP16">
4324 <dim>1</dim>
4325 <dim>8</dim>
4326 <dim>160</dim>
4327 <dim>272</dim>
4328 </port>
4329 </output>
4330 </layer>
4331 <layer id="372" name="bottleneck1_4/inner/dw1/fn/weights3104439704823" type="Const" version="opset1">
4332 <data element_type="f32" offset="1576" shape="1" size="4"/>
4333 <output>
4334 <port id="0" precision="FP32">
4335 <dim>1</dim>
4336 </port>
4337 </output>
4338 </layer>
4339 <layer id="373" name="bottleneck1_4/inner/dw1/fn" type="PReLU" version="opset1">
4340 <input>
4341 <port id="0">
4342 <dim>1</dim>
4343 <dim>8</dim>
4344 <dim>160</dim>
4345 <dim>272</dim>
4346 </port>
4347 <port id="1">
4348 <dim>1</dim>
4349 </port>
4350 </input>
4351 <output>
4352 <port id="2" names="bottleneck1_4/inner/dw1/conv" precision="FP16">
4353 <dim>1</dim>
4354 <dim>8</dim>
4355 <dim>160</dim>
4356 <dim>272</dim>
4357 </port>
4358 </output>
4359 </layer>
4360 <layer id="374" name="bottleneck1_4/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
4361 <data auto_broadcast="numpy" levels="256"/>
4362 <input>
4363 <port id="0">
4364 <dim>1</dim>
4365 <dim>8</dim>
4366 <dim>160</dim>
4367 <dim>272</dim>
4368 </port>
4369 <port id="1"/>
4370 <port id="2"/>
4371 <port id="3"/>
4372 <port id="4"/>
4373 </input>
4374 <output>
4375 <port id="5" precision="FP16">
4376 <dim>1</dim>
4377 <dim>8</dim>
4378 <dim>160</dim>
4379 <dim>272</dim>
4380 </port>
4381 </output>
4382 </layer>
4383 <layer id="375" name="bottleneck1_4/dim_inc/bn/mean/Fused_Mul__copy82510115/quantized1232820436" type="Const" version="opset1">
4384 <data element_type="i8" offset="4476" shape="32,8,1,1" size="256"/>
4385 <output>
4386 <port id="0" precision="I8">
4387 <dim>32</dim>
4388 <dim>8</dim>
4389 <dim>1</dim>
4390 <dim>1</dim>
4391 </port>
4392 </output>
4393 </layer>
4394 <layer id="376" name="bottleneck1_4/dim_inc/bn/mean/Fused_Mul__copy82510115/quantized/to_f16" type="Convert" version="opset1">
4395 <data destination_type="f16"/>
4396 <input>
4397 <port id="0">
4398 <dim>32</dim>
4399 <dim>8</dim>
4400 <dim>1</dim>
4401 <dim>1</dim>
4402 </port>
4403 </input>
4404 <output>
4405 <port id="1" precision="FP16">
4406 <dim>32</dim>
4407 <dim>8</dim>
4408 <dim>1</dim>
4409 <dim>1</dim>
4410 </port>
4411 </output>
4412 </layer>
4413 <layer id="377" name="bottleneck1_4/dim_inc/conv/fq_weights_1/zero_point1234119761" type="Const" version="opset1">
4414 <data element_type="f16" offset="4732" shape="32,1,1,1" size="64"/>
4415 <output>
4416 <port id="0" precision="FP16">
4417 <dim>32</dim>
4418 <dim>1</dim>
4419 <dim>1</dim>
4420 <dim>1</dim>
4421 </port>
4422 </output>
4423 </layer>
4424 <layer id="378" name="bottleneck1_4/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
4425 <data auto_broadcast="numpy"/>
4426 <input>
4427 <port id="0">
4428 <dim>32</dim>
4429 <dim>8</dim>
4430 <dim>1</dim>
4431 <dim>1</dim>
4432 </port>
4433 <port id="1">
4434 <dim>32</dim>
4435 <dim>1</dim>
4436 <dim>1</dim>
4437 <dim>1</dim>
4438 </port>
4439 </input>
4440 <output>
4441 <port id="2" precision="FP16">
4442 <dim>32</dim>
4443 <dim>8</dim>
4444 <dim>1</dim>
4445 <dim>1</dim>
4446 </port>
4447 </output>
4448 </layer>
4449 <layer id="379" name="bottleneck1_4/dim_inc/conv/fq_weights_1/scale1233622032" type="Const" version="opset1">
4450 <data element_type="f16" offset="4796" shape="32,1,1,1" size="64"/>
4451 <output>
4452 <port id="0" precision="FP16">
4453 <dim>32</dim>
4454 <dim>1</dim>
4455 <dim>1</dim>
4456 <dim>1</dim>
4457 </port>
4458 </output>
4459 </layer>
4460 <layer id="380" name="bottleneck1_4/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
4461 <data auto_broadcast="numpy"/>
4462 <input>
4463 <port id="0">
4464 <dim>32</dim>
4465 <dim>8</dim>
4466 <dim>1</dim>
4467 <dim>1</dim>
4468 </port>
4469 <port id="1">
4470 <dim>32</dim>
4471 <dim>1</dim>
4472 <dim>1</dim>
4473 <dim>1</dim>
4474 </port>
4475 </input>
4476 <output>
4477 <port id="2" precision="FP16">
4478 <dim>32</dim>
4479 <dim>8</dim>
4480 <dim>1</dim>
4481 <dim>1</dim>
4482 </port>
4483 </output>
4484 </layer>
4485 <layer id="381" name="bottleneck1_4/dim_inc/conv" type="Convolution" version="opset1">
4486 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
4487 <input>
4488 <port id="0">
4489 <dim>1</dim>
4490 <dim>8</dim>
4491 <dim>160</dim>
4492 <dim>272</dim>
4493 </port>
4494 <port id="1">
4495 <dim>32</dim>
4496 <dim>8</dim>
4497 <dim>1</dim>
4498 <dim>1</dim>
4499 </port>
4500 </input>
4501 <output>
4502 <port id="2" precision="FP16">
4503 <dim>1</dim>
4504 <dim>32</dim>
4505 <dim>160</dim>
4506 <dim>272</dim>
4507 </port>
4508 </output>
4509 </layer>
4510 <layer id="382" name="data_add_237452375082720163" type="Const" version="opset1">
4511 <data element_type="f16" offset="4860" shape="1,32,1,1" size="64"/>
4512 <output>
4513 <port id="0" precision="FP16">
4514 <dim>1</dim>
4515 <dim>32</dim>
4516 <dim>1</dim>
4517 <dim>1</dim>
4518 </port>
4519 </output>
4520 </layer>
4521 <layer id="383" name="bottleneck1_4/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
4522 <data auto_broadcast="numpy"/>
4523 <input>
4524 <port id="0">
4525 <dim>1</dim>
4526 <dim>32</dim>
4527 <dim>160</dim>
4528 <dim>272</dim>
4529 </port>
4530 <port id="1">
4531 <dim>1</dim>
4532 <dim>32</dim>
4533 <dim>1</dim>
4534 <dim>1</dim>
4535 </port>
4536 </input>
4537 <output>
4538 <port id="2" names="bottleneck1_4/dim_inc/conv" precision="FP16">
4539 <dim>1</dim>
4540 <dim>32</dim>
4541 <dim>160</dim>
4542 <dim>272</dim>
4543 </port>
4544 </output>
4545 </layer>
4546 <layer id="384" name="bottleneck1_4/add/fq_input_1" type="FakeQuantize" version="opset1">
4547 <data auto_broadcast="numpy" levels="256"/>
4548 <input>
4549 <port id="0">
4550 <dim>1</dim>
4551 <dim>32</dim>
4552 <dim>160</dim>
4553 <dim>272</dim>
4554 </port>
4555 <port id="1"/>
4556 <port id="2"/>
4557 <port id="3"/>
4558 <port id="4"/>
4559 </input>
4560 <output>
4561 <port id="5" precision="FP16">
4562 <dim>1</dim>
4563 <dim>32</dim>
4564 <dim>160</dim>
4565 <dim>272</dim>
4566 </port>
4567 </output>
4568 </layer>
4569 <layer id="385" name="bottleneck1_4/add" type="Add" version="opset1">
4570 <data auto_broadcast="numpy"/>
4571 <input>
4572 <port id="0">
4573 <dim>1</dim>
4574 <dim>32</dim>
4575 <dim>160</dim>
4576 <dim>272</dim>
4577 </port>
4578 <port id="1">
4579 <dim>1</dim>
4580 <dim>32</dim>
4581 <dim>160</dim>
4582 <dim>272</dim>
4583 </port>
4584 </input>
4585 <output>
4586 <port id="2" names="bottleneck1_4/add" precision="FP16">
4587 <dim>1</dim>
4588 <dim>32</dim>
4589 <dim>160</dim>
4590 <dim>272</dim>
4591 </port>
4592 </output>
4593 </layer>
4594 <layer id="386" name="bottleneck1_4/fn/weights3103640535830" type="Const" version="opset1">
4595 <data element_type="f32" offset="1576" shape="1" size="4"/>
4596 <output>
4597 <port id="0" precision="FP32">
4598 <dim>1</dim>
4599 </port>
4600 </output>
4601 </layer>
4602 <layer id="387" name="bottleneck1_4/fn" type="PReLU" version="opset1">
4603 <input>
4604 <port id="0">
4605 <dim>1</dim>
4606 <dim>32</dim>
4607 <dim>160</dim>
4608 <dim>272</dim>
4609 </port>
4610 <port id="1">
4611 <dim>1</dim>
4612 </port>
4613 </input>
4614 <output>
4615 <port id="2" names="bottleneck1_4/add" precision="FP16">
4616 <dim>1</dim>
4617 <dim>32</dim>
4618 <dim>160</dim>
4619 <dim>272</dim>
4620 </port>
4621 </output>
4622 </layer>
4623 <layer id="388" name="bottleneck2_0/dim_red/conv/fq_input_0" type="FakeQuantize" version="opset1">
4624 <data auto_broadcast="numpy" levels="256"/>
4625 <input>
4626 <port id="0">
4627 <dim>1</dim>
4628 <dim>32</dim>
4629 <dim>160</dim>
4630 <dim>272</dim>
4631 </port>
4632 <port id="1"/>
4633 <port id="2"/>
4634 <port id="3"/>
4635 <port id="4"/>
4636 </input>
4637 <output>
4638 <port id="5" precision="FP16">
4639 <dim>1</dim>
4640 <dim>32</dim>
4641 <dim>160</dim>
4642 <dim>272</dim>
4643 </port>
4644 </output>
4645 </layer>
4646 <layer id="389" name="bottleneck2_0/skip/pooling" type="MaxPool" version="opset1">
4647 <data auto_pad="explicit" kernel="2,2" pads_begin="0,0" pads_end="0,0" rounding_type="ceil" strides="2,2"/>
4648 <input>
4649 <port id="0">
4650 <dim>1</dim>
4651 <dim>32</dim>
4652 <dim>160</dim>
4653 <dim>272</dim>
4654 </port>
4655 </input>
4656 <output>
4657 <port id="1" names="bottleneck2_0/skip/pooling" precision="FP16">
4658 <dim>1</dim>
4659 <dim>32</dim>
4660 <dim>80</dim>
4661 <dim>136</dim>
4662 </port>
4663 </output>
4664 </layer>
4665 <layer id="390" name="bottleneck2_0/skip/bn/mean/Fused_Mul__copy83310118/quantized1230422248" type="Const" version="opset1">
4666 <data element_type="i8" offset="4924" shape="64,32,1,1" size="2048"/>
4667 <output>
4668 <port id="0" precision="I8">
4669 <dim>64</dim>
4670 <dim>32</dim>
4671 <dim>1</dim>
4672 <dim>1</dim>
4673 </port>
4674 </output>
4675 </layer>
4676 <layer id="391" name="bottleneck2_0/skip/bn/mean/Fused_Mul__copy83310118/quantized/to_f16" type="Convert" version="opset1">
4677 <data destination_type="f16"/>
4678 <input>
4679 <port id="0">
4680 <dim>64</dim>
4681 <dim>32</dim>
4682 <dim>1</dim>
4683 <dim>1</dim>
4684 </port>
4685 </input>
4686 <output>
4687 <port id="1" precision="FP16">
4688 <dim>64</dim>
4689 <dim>32</dim>
4690 <dim>1</dim>
4691 <dim>1</dim>
4692 </port>
4693 </output>
4694 </layer>
4695 <layer id="392" name="bottleneck2_0/skip/conv/fq_weights_1/zero_point1231721753" type="Const" version="opset1">
4696 <data element_type="f16" offset="6972" shape="64,1,1,1" size="128"/>
4697 <output>
4698 <port id="0" precision="FP16">
4699 <dim>64</dim>
4700 <dim>1</dim>
4701 <dim>1</dim>
4702 <dim>1</dim>
4703 </port>
4704 </output>
4705 </layer>
4706 <layer id="393" name="bottleneck2_0/skip/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
4707 <data auto_broadcast="numpy"/>
4708 <input>
4709 <port id="0">
4710 <dim>64</dim>
4711 <dim>32</dim>
4712 <dim>1</dim>
4713 <dim>1</dim>
4714 </port>
4715 <port id="1">
4716 <dim>64</dim>
4717 <dim>1</dim>
4718 <dim>1</dim>
4719 <dim>1</dim>
4720 </port>
4721 </input>
4722 <output>
4723 <port id="2" precision="FP16">
4724 <dim>64</dim>
4725 <dim>32</dim>
4726 <dim>1</dim>
4727 <dim>1</dim>
4728 </port>
4729 </output>
4730 </layer>
4731 <layer id="394" name="bottleneck2_0/skip/conv/fq_weights_1/scale1231221261" type="Const" version="opset1">
4732 <data element_type="f16" offset="7100" shape="64,1,1,1" size="128"/>
4733 <output>
4734 <port id="0" precision="FP16">
4735 <dim>64</dim>
4736 <dim>1</dim>
4737 <dim>1</dim>
4738 <dim>1</dim>
4739 </port>
4740 </output>
4741 </layer>
4742 <layer id="395" name="bottleneck2_0/skip/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
4743 <data auto_broadcast="numpy"/>
4744 <input>
4745 <port id="0">
4746 <dim>64</dim>
4747 <dim>32</dim>
4748 <dim>1</dim>
4749 <dim>1</dim>
4750 </port>
4751 <port id="1">
4752 <dim>64</dim>
4753 <dim>1</dim>
4754 <dim>1</dim>
4755 <dim>1</dim>
4756 </port>
4757 </input>
4758 <output>
4759 <port id="2" precision="FP16">
4760 <dim>64</dim>
4761 <dim>32</dim>
4762 <dim>1</dim>
4763 <dim>1</dim>
4764 </port>
4765 </output>
4766 </layer>
4767 <layer id="396" name="bottleneck2_0/skip/conv" type="Convolution" version="opset1">
4768 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
4769 <input>
4770 <port id="0">
4771 <dim>1</dim>
4772 <dim>32</dim>
4773 <dim>80</dim>
4774 <dim>136</dim>
4775 </port>
4776 <port id="1">
4777 <dim>64</dim>
4778 <dim>32</dim>
4779 <dim>1</dim>
4780 <dim>1</dim>
4781 </port>
4782 </input>
4783 <output>
4784 <port id="2" precision="FP16">
4785 <dim>1</dim>
4786 <dim>64</dim>
4787 <dim>80</dim>
4788 <dim>136</dim>
4789 </port>
4790 </output>
4791 </layer>
4792 <layer id="397" name="data_add_237532375883522425" type="Const" version="opset1">
4793 <data element_type="f16" offset="7228" shape="1,64,1,1" size="128"/>
4794 <output>
4795 <port id="0" precision="FP16">
4796 <dim>1</dim>
4797 <dim>64</dim>
4798 <dim>1</dim>
4799 <dim>1</dim>
4800 </port>
4801 </output>
4802 </layer>
4803 <layer id="398" name="bottleneck2_0/skip/bn/variance/Fused_Add_" type="Add" version="opset1">
4804 <data auto_broadcast="numpy"/>
4805 <input>
4806 <port id="0">
4807 <dim>1</dim>
4808 <dim>64</dim>
4809 <dim>80</dim>
4810 <dim>136</dim>
4811 </port>
4812 <port id="1">
4813 <dim>1</dim>
4814 <dim>64</dim>
4815 <dim>1</dim>
4816 <dim>1</dim>
4817 </port>
4818 </input>
4819 <output>
4820 <port id="2" names="bottleneck2_0/skip/conv" precision="FP16">
4821 <dim>1</dim>
4822 <dim>64</dim>
4823 <dim>80</dim>
4824 <dim>136</dim>
4825 </port>
4826 </output>
4827 </layer>
4828 <layer id="399" name="bottleneck2_0/add/fq_input_0" type="FakeQuantize" version="opset1">
4829 <data auto_broadcast="numpy" levels="256"/>
4830 <input>
4831 <port id="0">
4832 <dim>1</dim>
4833 <dim>64</dim>
4834 <dim>80</dim>
4835 <dim>136</dim>
4836 </port>
4837 <port id="1"/>
4838 <port id="2"/>
4839 <port id="3"/>
4840 <port id="4"/>
4841 </input>
4842 <output>
4843 <port id="5" precision="FP16">
4844 <dim>1</dim>
4845 <dim>64</dim>
4846 <dim>80</dim>
4847 <dim>136</dim>
4848 </port>
4849 </output>
4850 </layer>
4851 <layer id="400" name="2734273822632" type="Const" version="opset1">
4852 <data element_type="f16" offset="7356" shape="" size="2"/>
4853 <output>
4854 <port id="0" precision="FP16"/>
4855 </output>
4856 </layer>
4857 <layer id="401" name="2735273921387" type="Const" version="opset1">
4858 <data element_type="f16" offset="7358" shape="" size="2"/>
4859 <output>
4860 <port id="0" precision="FP16"/>
4861 </output>
4862 </layer>
4863 <layer id="402" name="2736274020394" type="Const" version="opset1">
4864 <data element_type="f16" offset="7356" shape="" size="2"/>
4865 <output>
4866 <port id="0" precision="FP16"/>
4867 </output>
4868 </layer>
4869 <layer id="403" name="2737274121651" type="Const" version="opset1">
4870 <data element_type="f16" offset="7358" shape="" size="2"/>
4871 <output>
4872 <port id="0" precision="FP16"/>
4873 </output>
4874 </layer>
4875 <layer id="404" name="5244524821498" type="Const" version="opset1">
4876 <data element_type="f16" offset="7360" shape="" size="2"/>
4877 <output>
4878 <port id="0" precision="FP16"/>
4879 </output>
4880 </layer>
4881 <layer id="405" name="5245524919710" type="Const" version="opset1">
4882 <data element_type="f16" offset="7362" shape="" size="2"/>
4883 <output>
4884 <port id="0" precision="FP16"/>
4885 </output>
4886 </layer>
4887 <layer id="406" name="5246525019878" type="Const" version="opset1">
4888 <data element_type="f16" offset="7360" shape="" size="2"/>
4889 <output>
4890 <port id="0" precision="FP16"/>
4891 </output>
4892 </layer>
4893 <layer id="407" name="5247525121837" type="Const" version="opset1">
4894 <data element_type="f16" offset="7362" shape="" size="2"/>
4895 <output>
4896 <port id="0" precision="FP16"/>
4897 </output>
4898 </layer>
4899 <layer id="408" name="2564256820559" type="Const" version="opset1">
4900 <data element_type="f16" offset="7364" shape="1,16,1,1" size="32"/>
4901 <output>
4902 <port id="0" precision="FP16">
4903 <dim>1</dim>
4904 <dim>16</dim>
4905 <dim>1</dim>
4906 <dim>1</dim>
4907 </port>
4908 </output>
4909 </layer>
4910 <layer id="409" name="2565256922602" type="Const" version="opset1">
4911 <data element_type="f16" offset="7396" shape="1,16,1,1" size="32"/>
4912 <output>
4913 <port id="0" precision="FP16">
4914 <dim>1</dim>
4915 <dim>16</dim>
4916 <dim>1</dim>
4917 <dim>1</dim>
4918 </port>
4919 </output>
4920 </layer>
4921 <layer id="410" name="2566257020511" type="Const" version="opset1">
4922 <data element_type="f16" offset="7364" shape="1,16,1,1" size="32"/>
4923 <output>
4924 <port id="0" precision="FP16">
4925 <dim>1</dim>
4926 <dim>16</dim>
4927 <dim>1</dim>
4928 <dim>1</dim>
4929 </port>
4930 </output>
4931 </layer>
4932 <layer id="411" name="2567257122908" type="Const" version="opset1">
4933 <data element_type="f16" offset="7396" shape="1,16,1,1" size="32"/>
4934 <output>
4935 <port id="0" precision="FP16">
4936 <dim>1</dim>
4937 <dim>16</dim>
4938 <dim>1</dim>
4939 <dim>1</dim>
4940 </port>
4941 </output>
4942 </layer>
4943 <layer id="412" name="bottleneck2_0/dim_red/bn/mean/Fused_Mul__copy83710120/quantized1213621963" type="Const" version="opset1">
4944 <data element_type="i8" offset="7428" shape="16,32,1,1" size="512"/>
4945 <output>
4946 <port id="0" precision="I8">
4947 <dim>16</dim>
4948 <dim>32</dim>
4949 <dim>1</dim>
4950 <dim>1</dim>
4951 </port>
4952 </output>
4953 </layer>
4954 <layer id="413" name="bottleneck2_0/dim_red/bn/mean/Fused_Mul__copy83710120/quantized/to_f16" type="Convert" version="opset1">
4955 <data destination_type="f16"/>
4956 <input>
4957 <port id="0">
4958 <dim>16</dim>
4959 <dim>32</dim>
4960 <dim>1</dim>
4961 <dim>1</dim>
4962 </port>
4963 </input>
4964 <output>
4965 <port id="1" precision="FP16">
4966 <dim>16</dim>
4967 <dim>32</dim>
4968 <dim>1</dim>
4969 <dim>1</dim>
4970 </port>
4971 </output>
4972 </layer>
4973 <layer id="414" name="bottleneck2_0/dim_red/conv/fq_weights_1/zero_point1214921390" type="Const" version="opset1">
4974 <data element_type="f16" offset="7940" shape="16,1,1,1" size="32"/>
4975 <output>
4976 <port id="0" precision="FP16">
4977 <dim>16</dim>
4978 <dim>1</dim>
4979 <dim>1</dim>
4980 <dim>1</dim>
4981 </port>
4982 </output>
4983 </layer>
4984 <layer id="415" name="bottleneck2_0/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
4985 <data auto_broadcast="numpy"/>
4986 <input>
4987 <port id="0">
4988 <dim>16</dim>
4989 <dim>32</dim>
4990 <dim>1</dim>
4991 <dim>1</dim>
4992 </port>
4993 <port id="1">
4994 <dim>16</dim>
4995 <dim>1</dim>
4996 <dim>1</dim>
4997 <dim>1</dim>
4998 </port>
4999 </input>
5000 <output>
5001 <port id="2" precision="FP16">
5002 <dim>16</dim>
5003 <dim>32</dim>
5004 <dim>1</dim>
5005 <dim>1</dim>
5006 </port>
5007 </output>
5008 </layer>
5009 <layer id="416" name="bottleneck2_0/dim_red/conv/fq_weights_1/scale1214419473" type="Const" version="opset1">
5010 <data element_type="f16" offset="7972" shape="16,1,1,1" size="32"/>
5011 <output>
5012 <port id="0" precision="FP16">
5013 <dim>16</dim>
5014 <dim>1</dim>
5015 <dim>1</dim>
5016 <dim>1</dim>
5017 </port>
5018 </output>
5019 </layer>
5020 <layer id="417" name="bottleneck2_0/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
5021 <data auto_broadcast="numpy"/>
5022 <input>
5023 <port id="0">
5024 <dim>16</dim>
5025 <dim>32</dim>
5026 <dim>1</dim>
5027 <dim>1</dim>
5028 </port>
5029 <port id="1">
5030 <dim>16</dim>
5031 <dim>1</dim>
5032 <dim>1</dim>
5033 <dim>1</dim>
5034 </port>
5035 </input>
5036 <output>
5037 <port id="2" precision="FP16">
5038 <dim>16</dim>
5039 <dim>32</dim>
5040 <dim>1</dim>
5041 <dim>1</dim>
5042 </port>
5043 </output>
5044 </layer>
5045 <layer id="418" name="bottleneck2_0/dim_red/conv" type="Convolution" version="opset1">
5046 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
5047 <input>
5048 <port id="0">
5049 <dim>1</dim>
5050 <dim>32</dim>
5051 <dim>160</dim>
5052 <dim>272</dim>
5053 </port>
5054 <port id="1">
5055 <dim>16</dim>
5056 <dim>32</dim>
5057 <dim>1</dim>
5058 <dim>1</dim>
5059 </port>
5060 </input>
5061 <output>
5062 <port id="2" precision="FP16">
5063 <dim>1</dim>
5064 <dim>16</dim>
5065 <dim>160</dim>
5066 <dim>272</dim>
5067 </port>
5068 </output>
5069 </layer>
5070 <layer id="419" name="data_add_237612376683920037" type="Const" version="opset1">
5071 <data element_type="f16" offset="8004" shape="1,16,1,1" size="32"/>
5072 <output>
5073 <port id="0" precision="FP16">
5074 <dim>1</dim>
5075 <dim>16</dim>
5076 <dim>1</dim>
5077 <dim>1</dim>
5078 </port>
5079 </output>
5080 </layer>
5081 <layer id="420" name="bottleneck2_0/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
5082 <data auto_broadcast="numpy"/>
5083 <input>
5084 <port id="0">
5085 <dim>1</dim>
5086 <dim>16</dim>
5087 <dim>160</dim>
5088 <dim>272</dim>
5089 </port>
5090 <port id="1">
5091 <dim>1</dim>
5092 <dim>16</dim>
5093 <dim>1</dim>
5094 <dim>1</dim>
5095 </port>
5096 </input>
5097 <output>
5098 <port id="2" names="bottleneck2_0/dim_red/conv" precision="FP16">
5099 <dim>1</dim>
5100 <dim>16</dim>
5101 <dim>160</dim>
5102 <dim>272</dim>
5103 </port>
5104 </output>
5105 </layer>
5106 <layer id="421" name="bottleneck2_0/dim_red/fn/weights3109240148841" type="Const" version="opset1">
5107 <data element_type="f32" offset="1576" shape="1" size="4"/>
5108 <output>
5109 <port id="0" precision="FP32">
5110 <dim>1</dim>
5111 </port>
5112 </output>
5113 </layer>
5114 <layer id="422" name="bottleneck2_0/dim_red/fn" type="PReLU" version="opset1">
5115 <input>
5116 <port id="0">
5117 <dim>1</dim>
5118 <dim>16</dim>
5119 <dim>160</dim>
5120 <dim>272</dim>
5121 </port>
5122 <port id="1">
5123 <dim>1</dim>
5124 </port>
5125 </input>
5126 <output>
5127 <port id="2" names="bottleneck2_0/dim_red/conv" precision="FP16">
5128 <dim>1</dim>
5129 <dim>16</dim>
5130 <dim>160</dim>
5131 <dim>272</dim>
5132 </port>
5133 </output>
5134 </layer>
5135 <layer id="423" name="bottleneck2_0/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
5136 <data auto_broadcast="numpy" levels="256"/>
5137 <input>
5138 <port id="0">
5139 <dim>1</dim>
5140 <dim>16</dim>
5141 <dim>160</dim>
5142 <dim>272</dim>
5143 </port>
5144 <port id="1">
5145 <dim>1</dim>
5146 <dim>16</dim>
5147 <dim>1</dim>
5148 <dim>1</dim>
5149 </port>
5150 <port id="2">
5151 <dim>1</dim>
5152 <dim>16</dim>
5153 <dim>1</dim>
5154 <dim>1</dim>
5155 </port>
5156 <port id="3">
5157 <dim>1</dim>
5158 <dim>16</dim>
5159 <dim>1</dim>
5160 <dim>1</dim>
5161 </port>
5162 <port id="4">
5163 <dim>1</dim>
5164 <dim>16</dim>
5165 <dim>1</dim>
5166 <dim>1</dim>
5167 </port>
5168 </input>
5169 <output>
5170 <port id="5" precision="FP16">
5171 <dim>1</dim>
5172 <dim>16</dim>
5173 <dim>160</dim>
5174 <dim>272</dim>
5175 </port>
5176 </output>
5177 </layer>
5178 <layer id="424" name="16799/value1680122737" type="Const" version="opset1">
5179 <data element_type="i64" offset="8036" shape="5" size="40"/>
5180 <output>
5181 <port id="0" precision="I64">
5182 <dim>5</dim>
5183 </port>
5184 </output>
5185 </layer>
5186 <layer id="425" name="bottleneck2_0/inner/dw1/bn/mean/Fused_Mul__copy84310123/quantized1314419509" type="Const" version="opset1">
5187 <data element_type="i8" offset="8076" shape="16,1,3,3" size="144"/>
5188 <output>
5189 <port id="0" precision="I8">
5190 <dim>16</dim>
5191 <dim>1</dim>
5192 <dim>3</dim>
5193 <dim>3</dim>
5194 </port>
5195 </output>
5196 </layer>
5197 <layer id="426" name="bottleneck2_0/inner/dw1/bn/mean/Fused_Mul__copy84310123/quantized/to_f16" type="Convert" version="opset1">
5198 <data destination_type="f16"/>
5199 <input>
5200 <port id="0">
5201 <dim>16</dim>
5202 <dim>1</dim>
5203 <dim>3</dim>
5204 <dim>3</dim>
5205 </port>
5206 </input>
5207 <output>
5208 <port id="1" precision="FP16">
5209 <dim>16</dim>
5210 <dim>1</dim>
5211 <dim>3</dim>
5212 <dim>3</dim>
5213 </port>
5214 </output>
5215 </layer>
5216 <layer id="427" name="bottleneck2_0/inner/dw1/conv/fq_weights_1/zero_point1315720619" type="Const" version="opset1">
5217 <data element_type="f16" offset="8220" shape="16,1,1,1" size="32"/>
5218 <output>
5219 <port id="0" precision="FP16">
5220 <dim>16</dim>
5221 <dim>1</dim>
5222 <dim>1</dim>
5223 <dim>1</dim>
5224 </port>
5225 </output>
5226 </layer>
5227 <layer id="428" name="bottleneck2_0/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
5228 <data auto_broadcast="numpy"/>
5229 <input>
5230 <port id="0">
5231 <dim>16</dim>
5232 <dim>1</dim>
5233 <dim>3</dim>
5234 <dim>3</dim>
5235 </port>
5236 <port id="1">
5237 <dim>16</dim>
5238 <dim>1</dim>
5239 <dim>1</dim>
5240 <dim>1</dim>
5241 </port>
5242 </input>
5243 <output>
5244 <port id="2" precision="FP16">
5245 <dim>16</dim>
5246 <dim>1</dim>
5247 <dim>3</dim>
5248 <dim>3</dim>
5249 </port>
5250 </output>
5251 </layer>
5252 <layer id="429" name="bottleneck2_0/inner/dw1/conv/fq_weights_1/scale1315221030" type="Const" version="opset1">
5253 <data element_type="f16" offset="8252" shape="16,1,1,1" size="32"/>
5254 <output>
5255 <port id="0" precision="FP16">
5256 <dim>16</dim>
5257 <dim>1</dim>
5258 <dim>1</dim>
5259 <dim>1</dim>
5260 </port>
5261 </output>
5262 </layer>
5263 <layer id="430" name="bottleneck2_0/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
5264 <data auto_broadcast="numpy"/>
5265 <input>
5266 <port id="0">
5267 <dim>16</dim>
5268 <dim>1</dim>
5269 <dim>3</dim>
5270 <dim>3</dim>
5271 </port>
5272 <port id="1">
5273 <dim>16</dim>
5274 <dim>1</dim>
5275 <dim>1</dim>
5276 <dim>1</dim>
5277 </port>
5278 </input>
5279 <output>
5280 <port id="2" precision="FP16">
5281 <dim>16</dim>
5282 <dim>1</dim>
5283 <dim>3</dim>
5284 <dim>3</dim>
5285 </port>
5286 </output>
5287 </layer>
5288 <layer id="431" name="16799" type="Reshape" version="opset1">
5289 <data special_zero="true"/>
5290 <input>
5291 <port id="0">
5292 <dim>16</dim>
5293 <dim>1</dim>
5294 <dim>3</dim>
5295 <dim>3</dim>
5296 </port>
5297 <port id="1">
5298 <dim>5</dim>
5299 </port>
5300 </input>
5301 <output>
5302 <port id="2" precision="FP16">
5303 <dim>16</dim>
5304 <dim>1</dim>
5305 <dim>1</dim>
5306 <dim>3</dim>
5307 <dim>3</dim>
5308 </port>
5309 </output>
5310 </layer>
5311 <layer id="432" name="bottleneck2_0/inner/dw1/conv" type="GroupConvolution" version="opset1">
5312 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="2,2"/>
5313 <input>
5314 <port id="0">
5315 <dim>1</dim>
5316 <dim>16</dim>
5317 <dim>160</dim>
5318 <dim>272</dim>
5319 </port>
5320 <port id="1">
5321 <dim>16</dim>
5322 <dim>1</dim>
5323 <dim>1</dim>
5324 <dim>3</dim>
5325 <dim>3</dim>
5326 </port>
5327 </input>
5328 <output>
5329 <port id="2" precision="FP16">
5330 <dim>1</dim>
5331 <dim>16</dim>
5332 <dim>80</dim>
5333 <dim>136</dim>
5334 </port>
5335 </output>
5336 </layer>
5337 <layer id="433" name="data_add_237692377484521879" type="Const" version="opset1">
5338 <data element_type="f16" offset="8284" shape="1,16,1,1" size="32"/>
5339 <output>
5340 <port id="0" precision="FP16">
5341 <dim>1</dim>
5342 <dim>16</dim>
5343 <dim>1</dim>
5344 <dim>1</dim>
5345 </port>
5346 </output>
5347 </layer>
5348 <layer id="434" name="bottleneck2_0/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
5349 <data auto_broadcast="numpy"/>
5350 <input>
5351 <port id="0">
5352 <dim>1</dim>
5353 <dim>16</dim>
5354 <dim>80</dim>
5355 <dim>136</dim>
5356 </port>
5357 <port id="1">
5358 <dim>1</dim>
5359 <dim>16</dim>
5360 <dim>1</dim>
5361 <dim>1</dim>
5362 </port>
5363 </input>
5364 <output>
5365 <port id="2" names="bottleneck2_0/inner/dw1/conv" precision="FP16">
5366 <dim>1</dim>
5367 <dim>16</dim>
5368 <dim>80</dim>
5369 <dim>136</dim>
5370 </port>
5371 </output>
5372 </layer>
5373 <layer id="435" name="bottleneck2_0/inner/dw1/fn/weights3088440433847" type="Const" version="opset1">
5374 <data element_type="f32" offset="1576" shape="1" size="4"/>
5375 <output>
5376 <port id="0" precision="FP32">
5377 <dim>1</dim>
5378 </port>
5379 </output>
5380 </layer>
5381 <layer id="436" name="bottleneck2_0/inner/dw1/fn" type="PReLU" version="opset1">
5382 <input>
5383 <port id="0">
5384 <dim>1</dim>
5385 <dim>16</dim>
5386 <dim>80</dim>
5387 <dim>136</dim>
5388 </port>
5389 <port id="1">
5390 <dim>1</dim>
5391 </port>
5392 </input>
5393 <output>
5394 <port id="2" names="bottleneck2_0/inner/dw1/conv" precision="FP16">
5395 <dim>1</dim>
5396 <dim>16</dim>
5397 <dim>80</dim>
5398 <dim>136</dim>
5399 </port>
5400 </output>
5401 </layer>
5402 <layer id="437" name="bottleneck2_0/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
5403 <data auto_broadcast="numpy" levels="256"/>
5404 <input>
5405 <port id="0">
5406 <dim>1</dim>
5407 <dim>16</dim>
5408 <dim>80</dim>
5409 <dim>136</dim>
5410 </port>
5411 <port id="1"/>
5412 <port id="2"/>
5413 <port id="3"/>
5414 <port id="4"/>
5415 </input>
5416 <output>
5417 <port id="5" precision="FP16">
5418 <dim>1</dim>
5419 <dim>16</dim>
5420 <dim>80</dim>
5421 <dim>136</dim>
5422 </port>
5423 </output>
5424 </layer>
5425 <layer id="438" name="bottleneck2_0/dim_inc/bn/mean/Fused_Mul__copy84910126/quantized1172822140" type="Const" version="opset1">
5426 <data element_type="i8" offset="8316" shape="64,16,1,1" size="1024"/>
5427 <output>
5428 <port id="0" precision="I8">
5429 <dim>64</dim>
5430 <dim>16</dim>
5431 <dim>1</dim>
5432 <dim>1</dim>
5433 </port>
5434 </output>
5435 </layer>
5436 <layer id="439" name="bottleneck2_0/dim_inc/bn/mean/Fused_Mul__copy84910126/quantized/to_f16" type="Convert" version="opset1">
5437 <data destination_type="f16"/>
5438 <input>
5439 <port id="0">
5440 <dim>64</dim>
5441 <dim>16</dim>
5442 <dim>1</dim>
5443 <dim>1</dim>
5444 </port>
5445 </input>
5446 <output>
5447 <port id="1" precision="FP16">
5448 <dim>64</dim>
5449 <dim>16</dim>
5450 <dim>1</dim>
5451 <dim>1</dim>
5452 </port>
5453 </output>
5454 </layer>
5455 <layer id="440" name="bottleneck2_0/dim_inc/conv/fq_weights_1/zero_point1174120334" type="Const" version="opset1">
5456 <data element_type="f16" offset="9340" shape="64,1,1,1" size="128"/>
5457 <output>
5458 <port id="0" precision="FP16">
5459 <dim>64</dim>
5460 <dim>1</dim>
5461 <dim>1</dim>
5462 <dim>1</dim>
5463 </port>
5464 </output>
5465 </layer>
5466 <layer id="441" name="bottleneck2_0/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
5467 <data auto_broadcast="numpy"/>
5468 <input>
5469 <port id="0">
5470 <dim>64</dim>
5471 <dim>16</dim>
5472 <dim>1</dim>
5473 <dim>1</dim>
5474 </port>
5475 <port id="1">
5476 <dim>64</dim>
5477 <dim>1</dim>
5478 <dim>1</dim>
5479 <dim>1</dim>
5480 </port>
5481 </input>
5482 <output>
5483 <port id="2" precision="FP16">
5484 <dim>64</dim>
5485 <dim>16</dim>
5486 <dim>1</dim>
5487 <dim>1</dim>
5488 </port>
5489 </output>
5490 </layer>
5491 <layer id="442" name="bottleneck2_0/dim_inc/conv/fq_weights_1/scale1173620184" type="Const" version="opset1">
5492 <data element_type="f16" offset="9468" shape="64,1,1,1" size="128"/>
5493 <output>
5494 <port id="0" precision="FP16">
5495 <dim>64</dim>
5496 <dim>1</dim>
5497 <dim>1</dim>
5498 <dim>1</dim>
5499 </port>
5500 </output>
5501 </layer>
5502 <layer id="443" name="bottleneck2_0/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
5503 <data auto_broadcast="numpy"/>
5504 <input>
5505 <port id="0">
5506 <dim>64</dim>
5507 <dim>16</dim>
5508 <dim>1</dim>
5509 <dim>1</dim>
5510 </port>
5511 <port id="1">
5512 <dim>64</dim>
5513 <dim>1</dim>
5514 <dim>1</dim>
5515 <dim>1</dim>
5516 </port>
5517 </input>
5518 <output>
5519 <port id="2" precision="FP16">
5520 <dim>64</dim>
5521 <dim>16</dim>
5522 <dim>1</dim>
5523 <dim>1</dim>
5524 </port>
5525 </output>
5526 </layer>
5527 <layer id="444" name="bottleneck2_0/dim_inc/conv" type="Convolution" version="opset1">
5528 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
5529 <input>
5530 <port id="0">
5531 <dim>1</dim>
5532 <dim>16</dim>
5533 <dim>80</dim>
5534 <dim>136</dim>
5535 </port>
5536 <port id="1">
5537 <dim>64</dim>
5538 <dim>16</dim>
5539 <dim>1</dim>
5540 <dim>1</dim>
5541 </port>
5542 </input>
5543 <output>
5544 <port id="2" precision="FP16">
5545 <dim>1</dim>
5546 <dim>64</dim>
5547 <dim>80</dim>
5548 <dim>136</dim>
5549 </port>
5550 </output>
5551 </layer>
5552 <layer id="445" name="data_add_237772378285120616" type="Const" version="opset1">
5553 <data element_type="f16" offset="9596" shape="1,64,1,1" size="128"/>
5554 <output>
5555 <port id="0" precision="FP16">
5556 <dim>1</dim>
5557 <dim>64</dim>
5558 <dim>1</dim>
5559 <dim>1</dim>
5560 </port>
5561 </output>
5562 </layer>
5563 <layer id="446" name="bottleneck2_0/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
5564 <data auto_broadcast="numpy"/>
5565 <input>
5566 <port id="0">
5567 <dim>1</dim>
5568 <dim>64</dim>
5569 <dim>80</dim>
5570 <dim>136</dim>
5571 </port>
5572 <port id="1">
5573 <dim>1</dim>
5574 <dim>64</dim>
5575 <dim>1</dim>
5576 <dim>1</dim>
5577 </port>
5578 </input>
5579 <output>
5580 <port id="2" names="bottleneck2_0/dim_inc/conv" precision="FP16">
5581 <dim>1</dim>
5582 <dim>64</dim>
5583 <dim>80</dim>
5584 <dim>136</dim>
5585 </port>
5586 </output>
5587 </layer>
5588 <layer id="447" name="bottleneck2_0/add/fq_input_1" type="FakeQuantize" version="opset1">
5589 <data auto_broadcast="numpy" levels="256"/>
5590 <input>
5591 <port id="0">
5592 <dim>1</dim>
5593 <dim>64</dim>
5594 <dim>80</dim>
5595 <dim>136</dim>
5596 </port>
5597 <port id="1"/>
5598 <port id="2"/>
5599 <port id="3"/>
5600 <port id="4"/>
5601 </input>
5602 <output>
5603 <port id="5" precision="FP16">
5604 <dim>1</dim>
5605 <dim>64</dim>
5606 <dim>80</dim>
5607 <dim>136</dim>
5608 </port>
5609 </output>
5610 </layer>
5611 <layer id="448" name="bottleneck2_0/add" type="Add" version="opset1">
5612 <data auto_broadcast="numpy"/>
5613 <input>
5614 <port id="0">
5615 <dim>1</dim>
5616 <dim>64</dim>
5617 <dim>80</dim>
5618 <dim>136</dim>
5619 </port>
5620 <port id="1">
5621 <dim>1</dim>
5622 <dim>64</dim>
5623 <dim>80</dim>
5624 <dim>136</dim>
5625 </port>
5626 </input>
5627 <output>
5628 <port id="2" names="bottleneck2_0/add" precision="FP16">
5629 <dim>1</dim>
5630 <dim>64</dim>
5631 <dim>80</dim>
5632 <dim>136</dim>
5633 </port>
5634 </output>
5635 </layer>
5636 <layer id="449" name="bottleneck2_0/fn/weights3105639815854" type="Const" version="opset1">
5637 <data element_type="f32" offset="1576" shape="1" size="4"/>
5638 <output>
5639 <port id="0" precision="FP32">
5640 <dim>1</dim>
5641 </port>
5642 </output>
5643 </layer>
5644 <layer id="450" name="bottleneck2_0/fn" type="PReLU" version="opset1">
5645 <input>
5646 <port id="0">
5647 <dim>1</dim>
5648 <dim>64</dim>
5649 <dim>80</dim>
5650 <dim>136</dim>
5651 </port>
5652 <port id="1">
5653 <dim>1</dim>
5654 </port>
5655 </input>
5656 <output>
5657 <port id="2" names="bottleneck2_0/add" precision="FP16">
5658 <dim>1</dim>
5659 <dim>64</dim>
5660 <dim>80</dim>
5661 <dim>136</dim>
5662 </port>
5663 </output>
5664 </layer>
5665 <layer id="451" name="bottleneck2_1/add/fq_input_0" type="FakeQuantize" version="opset1">
5666 <data auto_broadcast="numpy" levels="256"/>
5667 <input>
5668 <port id="0">
5669 <dim>1</dim>
5670 <dim>64</dim>
5671 <dim>80</dim>
5672 <dim>136</dim>
5673 </port>
5674 <port id="1"/>
5675 <port id="2"/>
5676 <port id="3"/>
5677 <port id="4"/>
5678 </input>
5679 <output>
5680 <port id="5" precision="FP16">
5681 <dim>1</dim>
5682 <dim>64</dim>
5683 <dim>80</dim>
5684 <dim>136</dim>
5685 </port>
5686 </output>
5687 </layer>
5688 <layer id="452" name="5314531820286" type="Const" version="opset1">
5689 <data element_type="f16" offset="9724" shape="" size="2"/>
5690 <output>
5691 <port id="0" precision="FP16"/>
5692 </output>
5693 </layer>
5694 <layer id="453" name="5315531919620" type="Const" version="opset1">
5695 <data element_type="f16" offset="9726" shape="" size="2"/>
5696 <output>
5697 <port id="0" precision="FP16"/>
5698 </output>
5699 </layer>
5700 <layer id="454" name="5316532021444" type="Const" version="opset1">
5701 <data element_type="f16" offset="9724" shape="" size="2"/>
5702 <output>
5703 <port id="0" precision="FP16"/>
5704 </output>
5705 </layer>
5706 <layer id="455" name="5317532122701" type="Const" version="opset1">
5707 <data element_type="f16" offset="9726" shape="" size="2"/>
5708 <output>
5709 <port id="0" precision="FP16"/>
5710 </output>
5711 </layer>
5712 <layer id="456" name="5424542822068" type="Const" version="opset1">
5713 <data element_type="f16" offset="9728" shape="" size="2"/>
5714 <output>
5715 <port id="0" precision="FP16"/>
5716 </output>
5717 </layer>
5718 <layer id="457" name="5425542920760" type="Const" version="opset1">
5719 <data element_type="f16" offset="9730" shape="" size="2"/>
5720 <output>
5721 <port id="0" precision="FP16"/>
5722 </output>
5723 </layer>
5724 <layer id="458" name="5426543021957" type="Const" version="opset1">
5725 <data element_type="f16" offset="9728" shape="" size="2"/>
5726 <output>
5727 <port id="0" precision="FP16"/>
5728 </output>
5729 </layer>
5730 <layer id="459" name="5427543119944" type="Const" version="opset1">
5731 <data element_type="f16" offset="9730" shape="" size="2"/>
5732 <output>
5733 <port id="0" precision="FP16"/>
5734 </output>
5735 </layer>
5736 <layer id="460" name="3864386820688" type="Const" version="opset1">
5737 <data element_type="f16" offset="9732" shape="1,16,1,1" size="32"/>
5738 <output>
5739 <port id="0" precision="FP16">
5740 <dim>1</dim>
5741 <dim>16</dim>
5742 <dim>1</dim>
5743 <dim>1</dim>
5744 </port>
5745 </output>
5746 </layer>
5747 <layer id="461" name="3865386919656" type="Const" version="opset1">
5748 <data element_type="f16" offset="9764" shape="1,16,1,1" size="32"/>
5749 <output>
5750 <port id="0" precision="FP16">
5751 <dim>1</dim>
5752 <dim>16</dim>
5753 <dim>1</dim>
5754 <dim>1</dim>
5755 </port>
5756 </output>
5757 </layer>
5758 <layer id="462" name="3866387022311" type="Const" version="opset1">
5759 <data element_type="f16" offset="9732" shape="1,16,1,1" size="32"/>
5760 <output>
5761 <port id="0" precision="FP16">
5762 <dim>1</dim>
5763 <dim>16</dim>
5764 <dim>1</dim>
5765 <dim>1</dim>
5766 </port>
5767 </output>
5768 </layer>
5769 <layer id="463" name="3867387121090" type="Const" version="opset1">
5770 <data element_type="f16" offset="9764" shape="1,16,1,1" size="32"/>
5771 <output>
5772 <port id="0" precision="FP16">
5773 <dim>1</dim>
5774 <dim>16</dim>
5775 <dim>1</dim>
5776 <dim>1</dim>
5777 </port>
5778 </output>
5779 </layer>
5780 <layer id="464" name="bottleneck2_1/dim_red/bn/mean/Fused_Mul__copy85610129/quantized1324022005" type="Const" version="opset1">
5781 <data element_type="i8" offset="9796" shape="16,64,1,1" size="1024"/>
5782 <output>
5783 <port id="0" precision="I8">
5784 <dim>16</dim>
5785 <dim>64</dim>
5786 <dim>1</dim>
5787 <dim>1</dim>
5788 </port>
5789 </output>
5790 </layer>
5791 <layer id="465" name="bottleneck2_1/dim_red/bn/mean/Fused_Mul__copy85610129/quantized/to_f16" type="Convert" version="opset1">
5792 <data destination_type="f16"/>
5793 <input>
5794 <port id="0">
5795 <dim>16</dim>
5796 <dim>64</dim>
5797 <dim>1</dim>
5798 <dim>1</dim>
5799 </port>
5800 </input>
5801 <output>
5802 <port id="1" precision="FP16">
5803 <dim>16</dim>
5804 <dim>64</dim>
5805 <dim>1</dim>
5806 <dim>1</dim>
5807 </port>
5808 </output>
5809 </layer>
5810 <layer id="466" name="bottleneck2_1/dim_red/conv/fq_weights_1/zero_point1325321120" type="Const" version="opset1">
5811 <data element_type="f16" offset="10820" shape="16,1,1,1" size="32"/>
5812 <output>
5813 <port id="0" precision="FP16">
5814 <dim>16</dim>
5815 <dim>1</dim>
5816 <dim>1</dim>
5817 <dim>1</dim>
5818 </port>
5819 </output>
5820 </layer>
5821 <layer id="467" name="bottleneck2_1/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
5822 <data auto_broadcast="numpy"/>
5823 <input>
5824 <port id="0">
5825 <dim>16</dim>
5826 <dim>64</dim>
5827 <dim>1</dim>
5828 <dim>1</dim>
5829 </port>
5830 <port id="1">
5831 <dim>16</dim>
5832 <dim>1</dim>
5833 <dim>1</dim>
5834 <dim>1</dim>
5835 </port>
5836 </input>
5837 <output>
5838 <port id="2" precision="FP16">
5839 <dim>16</dim>
5840 <dim>64</dim>
5841 <dim>1</dim>
5842 <dim>1</dim>
5843 </port>
5844 </output>
5845 </layer>
5846 <layer id="468" name="bottleneck2_1/dim_red/conv/fq_weights_1/scale1324822167" type="Const" version="opset1">
5847 <data element_type="f16" offset="10852" shape="16,1,1,1" size="32"/>
5848 <output>
5849 <port id="0" precision="FP16">
5850 <dim>16</dim>
5851 <dim>1</dim>
5852 <dim>1</dim>
5853 <dim>1</dim>
5854 </port>
5855 </output>
5856 </layer>
5857 <layer id="469" name="bottleneck2_1/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
5858 <data auto_broadcast="numpy"/>
5859 <input>
5860 <port id="0">
5861 <dim>16</dim>
5862 <dim>64</dim>
5863 <dim>1</dim>
5864 <dim>1</dim>
5865 </port>
5866 <port id="1">
5867 <dim>16</dim>
5868 <dim>1</dim>
5869 <dim>1</dim>
5870 <dim>1</dim>
5871 </port>
5872 </input>
5873 <output>
5874 <port id="2" precision="FP16">
5875 <dim>16</dim>
5876 <dim>64</dim>
5877 <dim>1</dim>
5878 <dim>1</dim>
5879 </port>
5880 </output>
5881 </layer>
5882 <layer id="470" name="bottleneck2_1/dim_red/conv" type="Convolution" version="opset1">
5883 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
5884 <input>
5885 <port id="0">
5886 <dim>1</dim>
5887 <dim>64</dim>
5888 <dim>80</dim>
5889 <dim>136</dim>
5890 </port>
5891 <port id="1">
5892 <dim>16</dim>
5893 <dim>64</dim>
5894 <dim>1</dim>
5895 <dim>1</dim>
5896 </port>
5897 </input>
5898 <output>
5899 <port id="2" precision="FP16">
5900 <dim>1</dim>
5901 <dim>16</dim>
5902 <dim>80</dim>
5903 <dim>136</dim>
5904 </port>
5905 </output>
5906 </layer>
5907 <layer id="471" name="data_add_237852379085821495" type="Const" version="opset1">
5908 <data element_type="f16" offset="10884" shape="1,16,1,1" size="32"/>
5909 <output>
5910 <port id="0" precision="FP16">
5911 <dim>1</dim>
5912 <dim>16</dim>
5913 <dim>1</dim>
5914 <dim>1</dim>
5915 </port>
5916 </output>
5917 </layer>
5918 <layer id="472" name="bottleneck2_1/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
5919 <data auto_broadcast="numpy"/>
5920 <input>
5921 <port id="0">
5922 <dim>1</dim>
5923 <dim>16</dim>
5924 <dim>80</dim>
5925 <dim>136</dim>
5926 </port>
5927 <port id="1">
5928 <dim>1</dim>
5929 <dim>16</dim>
5930 <dim>1</dim>
5931 <dim>1</dim>
5932 </port>
5933 </input>
5934 <output>
5935 <port id="2" names="bottleneck2_1/dim_red/conv" precision="FP16">
5936 <dim>1</dim>
5937 <dim>16</dim>
5938 <dim>80</dim>
5939 <dim>136</dim>
5940 </port>
5941 </output>
5942 </layer>
5943 <layer id="473" name="bottleneck2_1/dim_red/fn/weights3104840109860" type="Const" version="opset1">
5944 <data element_type="f32" offset="1576" shape="1" size="4"/>
5945 <output>
5946 <port id="0" precision="FP32">
5947 <dim>1</dim>
5948 </port>
5949 </output>
5950 </layer>
5951 <layer id="474" name="bottleneck2_1/dim_red/fn" type="PReLU" version="opset1">
5952 <input>
5953 <port id="0">
5954 <dim>1</dim>
5955 <dim>16</dim>
5956 <dim>80</dim>
5957 <dim>136</dim>
5958 </port>
5959 <port id="1">
5960 <dim>1</dim>
5961 </port>
5962 </input>
5963 <output>
5964 <port id="2" names="bottleneck2_1/dim_red/conv" precision="FP16">
5965 <dim>1</dim>
5966 <dim>16</dim>
5967 <dim>80</dim>
5968 <dim>136</dim>
5969 </port>
5970 </output>
5971 </layer>
5972 <layer id="475" name="bottleneck2_1/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
5973 <data auto_broadcast="numpy" levels="256"/>
5974 <input>
5975 <port id="0">
5976 <dim>1</dim>
5977 <dim>16</dim>
5978 <dim>80</dim>
5979 <dim>136</dim>
5980 </port>
5981 <port id="1">
5982 <dim>1</dim>
5983 <dim>16</dim>
5984 <dim>1</dim>
5985 <dim>1</dim>
5986 </port>
5987 <port id="2">
5988 <dim>1</dim>
5989 <dim>16</dim>
5990 <dim>1</dim>
5991 <dim>1</dim>
5992 </port>
5993 <port id="3">
5994 <dim>1</dim>
5995 <dim>16</dim>
5996 <dim>1</dim>
5997 <dim>1</dim>
5998 </port>
5999 <port id="4">
6000 <dim>1</dim>
6001 <dim>16</dim>
6002 <dim>1</dim>
6003 <dim>1</dim>
6004 </port>
6005 </input>
6006 <output>
6007 <port id="5" precision="FP16">
6008 <dim>1</dim>
6009 <dim>16</dim>
6010 <dim>80</dim>
6011 <dim>136</dim>
6012 </port>
6013 </output>
6014 </layer>
6015 <layer id="476" name="16859/value1686121339" type="Const" version="opset1">
6016 <data element_type="i64" offset="8036" shape="5" size="40"/>
6017 <output>
6018 <port id="0" precision="I64">
6019 <dim>5</dim>
6020 </port>
6021 </output>
6022 </layer>
6023 <layer id="477" name="bottleneck2_1/inner/dw1/bn/mean/Fused_Mul__copy86210132/quantized1391221243" type="Const" version="opset1">
6024 <data element_type="i8" offset="10916" shape="16,1,3,3" size="144"/>
6025 <output>
6026 <port id="0" precision="I8">
6027 <dim>16</dim>
6028 <dim>1</dim>
6029 <dim>3</dim>
6030 <dim>3</dim>
6031 </port>
6032 </output>
6033 </layer>
6034 <layer id="478" name="bottleneck2_1/inner/dw1/bn/mean/Fused_Mul__copy86210132/quantized/to_f16" type="Convert" version="opset1">
6035 <data destination_type="f16"/>
6036 <input>
6037 <port id="0">
6038 <dim>16</dim>
6039 <dim>1</dim>
6040 <dim>3</dim>
6041 <dim>3</dim>
6042 </port>
6043 </input>
6044 <output>
6045 <port id="1" precision="FP16">
6046 <dim>16</dim>
6047 <dim>1</dim>
6048 <dim>3</dim>
6049 <dim>3</dim>
6050 </port>
6051 </output>
6052 </layer>
6053 <layer id="479" name="bottleneck2_1/inner/dw1/conv/fq_weights_1/zero_point1392520535" type="Const" version="opset1">
6054 <data element_type="f16" offset="11060" shape="16,1,1,1" size="32"/>
6055 <output>
6056 <port id="0" precision="FP16">
6057 <dim>16</dim>
6058 <dim>1</dim>
6059 <dim>1</dim>
6060 <dim>1</dim>
6061 </port>
6062 </output>
6063 </layer>
6064 <layer id="480" name="bottleneck2_1/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
6065 <data auto_broadcast="numpy"/>
6066 <input>
6067 <port id="0">
6068 <dim>16</dim>
6069 <dim>1</dim>
6070 <dim>3</dim>
6071 <dim>3</dim>
6072 </port>
6073 <port id="1">
6074 <dim>16</dim>
6075 <dim>1</dim>
6076 <dim>1</dim>
6077 <dim>1</dim>
6078 </port>
6079 </input>
6080 <output>
6081 <port id="2" precision="FP16">
6082 <dim>16</dim>
6083 <dim>1</dim>
6084 <dim>3</dim>
6085 <dim>3</dim>
6086 </port>
6087 </output>
6088 </layer>
6089 <layer id="481" name="bottleneck2_1/inner/dw1/conv/fq_weights_1/scale1392021039" type="Const" version="opset1">
6090 <data element_type="f16" offset="11092" shape="16,1,1,1" size="32"/>
6091 <output>
6092 <port id="0" precision="FP16">
6093 <dim>16</dim>
6094 <dim>1</dim>
6095 <dim>1</dim>
6096 <dim>1</dim>
6097 </port>
6098 </output>
6099 </layer>
6100 <layer id="482" name="bottleneck2_1/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
6101 <data auto_broadcast="numpy"/>
6102 <input>
6103 <port id="0">
6104 <dim>16</dim>
6105 <dim>1</dim>
6106 <dim>3</dim>
6107 <dim>3</dim>
6108 </port>
6109 <port id="1">
6110 <dim>16</dim>
6111 <dim>1</dim>
6112 <dim>1</dim>
6113 <dim>1</dim>
6114 </port>
6115 </input>
6116 <output>
6117 <port id="2" precision="FP16">
6118 <dim>16</dim>
6119 <dim>1</dim>
6120 <dim>3</dim>
6121 <dim>3</dim>
6122 </port>
6123 </output>
6124 </layer>
6125 <layer id="483" name="16859" type="Reshape" version="opset1">
6126 <data special_zero="true"/>
6127 <input>
6128 <port id="0">
6129 <dim>16</dim>
6130 <dim>1</dim>
6131 <dim>3</dim>
6132 <dim>3</dim>
6133 </port>
6134 <port id="1">
6135 <dim>5</dim>
6136 </port>
6137 </input>
6138 <output>
6139 <port id="2" precision="FP16">
6140 <dim>16</dim>
6141 <dim>1</dim>
6142 <dim>1</dim>
6143 <dim>3</dim>
6144 <dim>3</dim>
6145 </port>
6146 </output>
6147 </layer>
6148 <layer id="484" name="bottleneck2_1/inner/dw1/conv" type="GroupConvolution" version="opset1">
6149 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
6150 <input>
6151 <port id="0">
6152 <dim>1</dim>
6153 <dim>16</dim>
6154 <dim>80</dim>
6155 <dim>136</dim>
6156 </port>
6157 <port id="1">
6158 <dim>16</dim>
6159 <dim>1</dim>
6160 <dim>1</dim>
6161 <dim>3</dim>
6162 <dim>3</dim>
6163 </port>
6164 </input>
6165 <output>
6166 <port id="2" precision="FP16">
6167 <dim>1</dim>
6168 <dim>16</dim>
6169 <dim>80</dim>
6170 <dim>136</dim>
6171 </port>
6172 </output>
6173 </layer>
6174 <layer id="485" name="data_add_237932379886422692" type="Const" version="opset1">
6175 <data element_type="f16" offset="11124" shape="1,16,1,1" size="32"/>
6176 <output>
6177 <port id="0" precision="FP16">
6178 <dim>1</dim>
6179 <dim>16</dim>
6180 <dim>1</dim>
6181 <dim>1</dim>
6182 </port>
6183 </output>
6184 </layer>
6185 <layer id="486" name="bottleneck2_1/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
6186 <data auto_broadcast="numpy"/>
6187 <input>
6188 <port id="0">
6189 <dim>1</dim>
6190 <dim>16</dim>
6191 <dim>80</dim>
6192 <dim>136</dim>
6193 </port>
6194 <port id="1">
6195 <dim>1</dim>
6196 <dim>16</dim>
6197 <dim>1</dim>
6198 <dim>1</dim>
6199 </port>
6200 </input>
6201 <output>
6202 <port id="2" names="bottleneck2_1/inner/dw1/conv" precision="FP16">
6203 <dim>1</dim>
6204 <dim>16</dim>
6205 <dim>80</dim>
6206 <dim>136</dim>
6207 </port>
6208 </output>
6209 </layer>
6210 <layer id="487" name="bottleneck2_1/inner/dw1/fn/weights3086040310866" type="Const" version="opset1">
6211 <data element_type="f32" offset="1576" shape="1" size="4"/>
6212 <output>
6213 <port id="0" precision="FP32">
6214 <dim>1</dim>
6215 </port>
6216 </output>
6217 </layer>
6218 <layer id="488" name="bottleneck2_1/inner/dw1/fn" type="PReLU" version="opset1">
6219 <input>
6220 <port id="0">
6221 <dim>1</dim>
6222 <dim>16</dim>
6223 <dim>80</dim>
6224 <dim>136</dim>
6225 </port>
6226 <port id="1">
6227 <dim>1</dim>
6228 </port>
6229 </input>
6230 <output>
6231 <port id="2" names="bottleneck2_1/inner/dw1/conv" precision="FP16">
6232 <dim>1</dim>
6233 <dim>16</dim>
6234 <dim>80</dim>
6235 <dim>136</dim>
6236 </port>
6237 </output>
6238 </layer>
6239 <layer id="489" name="bottleneck2_1/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
6240 <data auto_broadcast="numpy" levels="256"/>
6241 <input>
6242 <port id="0">
6243 <dim>1</dim>
6244 <dim>16</dim>
6245 <dim>80</dim>
6246 <dim>136</dim>
6247 </port>
6248 <port id="1"/>
6249 <port id="2"/>
6250 <port id="3"/>
6251 <port id="4"/>
6252 </input>
6253 <output>
6254 <port id="5" precision="FP16">
6255 <dim>1</dim>
6256 <dim>16</dim>
6257 <dim>80</dim>
6258 <dim>136</dim>
6259 </port>
6260 </output>
6261 </layer>
6262 <layer id="490" name="bottleneck2_1/dim_inc/bn/mean/Fused_Mul__copy86810135/quantized1360021273" type="Const" version="opset1">
6263 <data element_type="i8" offset="11156" shape="64,16,1,1" size="1024"/>
6264 <output>
6265 <port id="0" precision="I8">
6266 <dim>64</dim>
6267 <dim>16</dim>
6268 <dim>1</dim>
6269 <dim>1</dim>
6270 </port>
6271 </output>
6272 </layer>
6273 <layer id="491" name="bottleneck2_1/dim_inc/bn/mean/Fused_Mul__copy86810135/quantized/to_f16" type="Convert" version="opset1">
6274 <data destination_type="f16"/>
6275 <input>
6276 <port id="0">
6277 <dim>64</dim>
6278 <dim>16</dim>
6279 <dim>1</dim>
6280 <dim>1</dim>
6281 </port>
6282 </input>
6283 <output>
6284 <port id="1" precision="FP16">
6285 <dim>64</dim>
6286 <dim>16</dim>
6287 <dim>1</dim>
6288 <dim>1</dim>
6289 </port>
6290 </output>
6291 </layer>
6292 <layer id="492" name="bottleneck2_1/dim_inc/conv/fq_weights_1/zero_point1361322104" type="Const" version="opset1">
6293 <data element_type="f16" offset="12180" shape="64,1,1,1" size="128"/>
6294 <output>
6295 <port id="0" precision="FP16">
6296 <dim>64</dim>
6297 <dim>1</dim>
6298 <dim>1</dim>
6299 <dim>1</dim>
6300 </port>
6301 </output>
6302 </layer>
6303 <layer id="493" name="bottleneck2_1/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
6304 <data auto_broadcast="numpy"/>
6305 <input>
6306 <port id="0">
6307 <dim>64</dim>
6308 <dim>16</dim>
6309 <dim>1</dim>
6310 <dim>1</dim>
6311 </port>
6312 <port id="1">
6313 <dim>64</dim>
6314 <dim>1</dim>
6315 <dim>1</dim>
6316 <dim>1</dim>
6317 </port>
6318 </input>
6319 <output>
6320 <port id="2" precision="FP16">
6321 <dim>64</dim>
6322 <dim>16</dim>
6323 <dim>1</dim>
6324 <dim>1</dim>
6325 </port>
6326 </output>
6327 </layer>
6328 <layer id="494" name="bottleneck2_1/dim_inc/conv/fq_weights_1/scale1360821006" type="Const" version="opset1">
6329 <data element_type="f16" offset="12308" shape="64,1,1,1" size="128"/>
6330 <output>
6331 <port id="0" precision="FP16">
6332 <dim>64</dim>
6333 <dim>1</dim>
6334 <dim>1</dim>
6335 <dim>1</dim>
6336 </port>
6337 </output>
6338 </layer>
6339 <layer id="495" name="bottleneck2_1/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
6340 <data auto_broadcast="numpy"/>
6341 <input>
6342 <port id="0">
6343 <dim>64</dim>
6344 <dim>16</dim>
6345 <dim>1</dim>
6346 <dim>1</dim>
6347 </port>
6348 <port id="1">
6349 <dim>64</dim>
6350 <dim>1</dim>
6351 <dim>1</dim>
6352 <dim>1</dim>
6353 </port>
6354 </input>
6355 <output>
6356 <port id="2" precision="FP16">
6357 <dim>64</dim>
6358 <dim>16</dim>
6359 <dim>1</dim>
6360 <dim>1</dim>
6361 </port>
6362 </output>
6363 </layer>
6364 <layer id="496" name="bottleneck2_1/dim_inc/conv" type="Convolution" version="opset1">
6365 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
6366 <input>
6367 <port id="0">
6368 <dim>1</dim>
6369 <dim>16</dim>
6370 <dim>80</dim>
6371 <dim>136</dim>
6372 </port>
6373 <port id="1">
6374 <dim>64</dim>
6375 <dim>16</dim>
6376 <dim>1</dim>
6377 <dim>1</dim>
6378 </port>
6379 </input>
6380 <output>
6381 <port id="2" precision="FP16">
6382 <dim>1</dim>
6383 <dim>64</dim>
6384 <dim>80</dim>
6385 <dim>136</dim>
6386 </port>
6387 </output>
6388 </layer>
6389 <layer id="497" name="data_add_238012380687019767" type="Const" version="opset1">
6390 <data element_type="f16" offset="12436" shape="1,64,1,1" size="128"/>
6391 <output>
6392 <port id="0" precision="FP16">
6393 <dim>1</dim>
6394 <dim>64</dim>
6395 <dim>1</dim>
6396 <dim>1</dim>
6397 </port>
6398 </output>
6399 </layer>
6400 <layer id="498" name="bottleneck2_1/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
6401 <data auto_broadcast="numpy"/>
6402 <input>
6403 <port id="0">
6404 <dim>1</dim>
6405 <dim>64</dim>
6406 <dim>80</dim>
6407 <dim>136</dim>
6408 </port>
6409 <port id="1">
6410 <dim>1</dim>
6411 <dim>64</dim>
6412 <dim>1</dim>
6413 <dim>1</dim>
6414 </port>
6415 </input>
6416 <output>
6417 <port id="2" names="bottleneck2_1/dim_inc/conv" precision="FP16">
6418 <dim>1</dim>
6419 <dim>64</dim>
6420 <dim>80</dim>
6421 <dim>136</dim>
6422 </port>
6423 </output>
6424 </layer>
6425 <layer id="499" name="bottleneck2_1/add/fq_input_1" type="FakeQuantize" version="opset1">
6426 <data auto_broadcast="numpy" levels="256"/>
6427 <input>
6428 <port id="0">
6429 <dim>1</dim>
6430 <dim>64</dim>
6431 <dim>80</dim>
6432 <dim>136</dim>
6433 </port>
6434 <port id="1"/>
6435 <port id="2"/>
6436 <port id="3"/>
6437 <port id="4"/>
6438 </input>
6439 <output>
6440 <port id="5" precision="FP16">
6441 <dim>1</dim>
6442 <dim>64</dim>
6443 <dim>80</dim>
6444 <dim>136</dim>
6445 </port>
6446 </output>
6447 </layer>
6448 <layer id="500" name="bottleneck2_1/add" type="Add" version="opset1">
6449 <data auto_broadcast="numpy"/>
6450 <input>
6451 <port id="0">
6452 <dim>1</dim>
6453 <dim>64</dim>
6454 <dim>80</dim>
6455 <dim>136</dim>
6456 </port>
6457 <port id="1">
6458 <dim>1</dim>
6459 <dim>64</dim>
6460 <dim>80</dim>
6461 <dim>136</dim>
6462 </port>
6463 </input>
6464 <output>
6465 <port id="2" names="bottleneck2_1/add" precision="FP16">
6466 <dim>1</dim>
6467 <dim>64</dim>
6468 <dim>80</dim>
6469 <dim>136</dim>
6470 </port>
6471 </output>
6472 </layer>
6473 <layer id="501" name="bottleneck2_1/fn/weights3117640334873" type="Const" version="opset1">
6474 <data element_type="f32" offset="1576" shape="1" size="4"/>
6475 <output>
6476 <port id="0" precision="FP32">
6477 <dim>1</dim>
6478 </port>
6479 </output>
6480 </layer>
6481 <layer id="502" name="bottleneck2_1/fn" type="PReLU" version="opset1">
6482 <input>
6483 <port id="0">
6484 <dim>1</dim>
6485 <dim>64</dim>
6486 <dim>80</dim>
6487 <dim>136</dim>
6488 </port>
6489 <port id="1">
6490 <dim>1</dim>
6491 </port>
6492 </input>
6493 <output>
6494 <port id="2" names="bottleneck2_1/add" precision="FP16">
6495 <dim>1</dim>
6496 <dim>64</dim>
6497 <dim>80</dim>
6498 <dim>136</dim>
6499 </port>
6500 </output>
6501 </layer>
6502 <layer id="503" name="bottleneck2_2/add/fq_input_0" type="FakeQuantize" version="opset1">
6503 <data auto_broadcast="numpy" levels="256"/>
6504 <input>
6505 <port id="0">
6506 <dim>1</dim>
6507 <dim>64</dim>
6508 <dim>80</dim>
6509 <dim>136</dim>
6510 </port>
6511 <port id="1"/>
6512 <port id="2"/>
6513 <port id="3"/>
6514 <port id="4"/>
6515 </input>
6516 <output>
6517 <port id="5" precision="FP16">
6518 <dim>1</dim>
6519 <dim>64</dim>
6520 <dim>80</dim>
6521 <dim>136</dim>
6522 </port>
6523 </output>
6524 </layer>
6525 <layer id="504" name="4974497821771" type="Const" version="opset1">
6526 <data element_type="f16" offset="12564" shape="" size="2"/>
6527 <output>
6528 <port id="0" precision="FP16"/>
6529 </output>
6530 </layer>
6531 <layer id="505" name="4975497920907" type="Const" version="opset1">
6532 <data element_type="f16" offset="12566" shape="" size="2"/>
6533 <output>
6534 <port id="0" precision="FP16"/>
6535 </output>
6536 </layer>
6537 <layer id="506" name="4976498020433" type="Const" version="opset1">
6538 <data element_type="f16" offset="12564" shape="" size="2"/>
6539 <output>
6540 <port id="0" precision="FP16"/>
6541 </output>
6542 </layer>
6543 <layer id="507" name="4977498120295" type="Const" version="opset1">
6544 <data element_type="f16" offset="12566" shape="" size="2"/>
6545 <output>
6546 <port id="0" precision="FP16"/>
6547 </output>
6548 </layer>
6549 <layer id="508" name="3244324821267" type="Const" version="opset1">
6550 <data element_type="f16" offset="12568" shape="" size="2"/>
6551 <output>
6552 <port id="0" precision="FP16"/>
6553 </output>
6554 </layer>
6555 <layer id="509" name="3245324920004" type="Const" version="opset1">
6556 <data element_type="f16" offset="12570" shape="" size="2"/>
6557 <output>
6558 <port id="0" precision="FP16"/>
6559 </output>
6560 </layer>
6561 <layer id="510" name="3246325021405" type="Const" version="opset1">
6562 <data element_type="f16" offset="12568" shape="" size="2"/>
6563 <output>
6564 <port id="0" precision="FP16"/>
6565 </output>
6566 </layer>
6567 <layer id="511" name="3247325121666" type="Const" version="opset1">
6568 <data element_type="f16" offset="12570" shape="" size="2"/>
6569 <output>
6570 <port id="0" precision="FP16"/>
6571 </output>
6572 </layer>
6573 <layer id="512" name="4724472819536" type="Const" version="opset1">
6574 <data element_type="f16" offset="12572" shape="1,16,1,1" size="32"/>
6575 <output>
6576 <port id="0" precision="FP16">
6577 <dim>1</dim>
6578 <dim>16</dim>
6579 <dim>1</dim>
6580 <dim>1</dim>
6581 </port>
6582 </output>
6583 </layer>
6584 <layer id="513" name="4725472919467" type="Const" version="opset1">
6585 <data element_type="f16" offset="12604" shape="1,16,1,1" size="32"/>
6586 <output>
6587 <port id="0" precision="FP16">
6588 <dim>1</dim>
6589 <dim>16</dim>
6590 <dim>1</dim>
6591 <dim>1</dim>
6592 </port>
6593 </output>
6594 </layer>
6595 <layer id="514" name="4726473022512" type="Const" version="opset1">
6596 <data element_type="f16" offset="12572" shape="1,16,1,1" size="32"/>
6597 <output>
6598 <port id="0" precision="FP16">
6599 <dim>1</dim>
6600 <dim>16</dim>
6601 <dim>1</dim>
6602 <dim>1</dim>
6603 </port>
6604 </output>
6605 </layer>
6606 <layer id="515" name="4727473120211" type="Const" version="opset1">
6607 <data element_type="f16" offset="12604" shape="1,16,1,1" size="32"/>
6608 <output>
6609 <port id="0" precision="FP16">
6610 <dim>1</dim>
6611 <dim>16</dim>
6612 <dim>1</dim>
6613 <dim>1</dim>
6614 </port>
6615 </output>
6616 </layer>
6617 <layer id="516" name="bottleneck2_2/dim_red/bn/mean/Fused_Mul__copy87510138/quantized1410422479" type="Const" version="opset1">
6618 <data element_type="i8" offset="12636" shape="16,64,1,1" size="1024"/>
6619 <output>
6620 <port id="0" precision="I8">
6621 <dim>16</dim>
6622 <dim>64</dim>
6623 <dim>1</dim>
6624 <dim>1</dim>
6625 </port>
6626 </output>
6627 </layer>
6628 <layer id="517" name="bottleneck2_2/dim_red/bn/mean/Fused_Mul__copy87510138/quantized/to_f16" type="Convert" version="opset1">
6629 <data destination_type="f16"/>
6630 <input>
6631 <port id="0">
6632 <dim>16</dim>
6633 <dim>64</dim>
6634 <dim>1</dim>
6635 <dim>1</dim>
6636 </port>
6637 </input>
6638 <output>
6639 <port id="1" precision="FP16">
6640 <dim>16</dim>
6641 <dim>64</dim>
6642 <dim>1</dim>
6643 <dim>1</dim>
6644 </port>
6645 </output>
6646 </layer>
6647 <layer id="518" name="bottleneck2_2/dim_red/conv/fq_weights_1/zero_point1411722911" type="Const" version="opset1">
6648 <data element_type="f16" offset="13660" shape="16,1,1,1" size="32"/>
6649 <output>
6650 <port id="0" precision="FP16">
6651 <dim>16</dim>
6652 <dim>1</dim>
6653 <dim>1</dim>
6654 <dim>1</dim>
6655 </port>
6656 </output>
6657 </layer>
6658 <layer id="519" name="bottleneck2_2/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
6659 <data auto_broadcast="numpy"/>
6660 <input>
6661 <port id="0">
6662 <dim>16</dim>
6663 <dim>64</dim>
6664 <dim>1</dim>
6665 <dim>1</dim>
6666 </port>
6667 <port id="1">
6668 <dim>16</dim>
6669 <dim>1</dim>
6670 <dim>1</dim>
6671 <dim>1</dim>
6672 </port>
6673 </input>
6674 <output>
6675 <port id="2" precision="FP16">
6676 <dim>16</dim>
6677 <dim>64</dim>
6678 <dim>1</dim>
6679 <dim>1</dim>
6680 </port>
6681 </output>
6682 </layer>
6683 <layer id="520" name="bottleneck2_2/dim_red/conv/fq_weights_1/scale1411222305" type="Const" version="opset1">
6684 <data element_type="f16" offset="13692" shape="16,1,1,1" size="32"/>
6685 <output>
6686 <port id="0" precision="FP16">
6687 <dim>16</dim>
6688 <dim>1</dim>
6689 <dim>1</dim>
6690 <dim>1</dim>
6691 </port>
6692 </output>
6693 </layer>
6694 <layer id="521" name="bottleneck2_2/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
6695 <data auto_broadcast="numpy"/>
6696 <input>
6697 <port id="0">
6698 <dim>16</dim>
6699 <dim>64</dim>
6700 <dim>1</dim>
6701 <dim>1</dim>
6702 </port>
6703 <port id="1">
6704 <dim>16</dim>
6705 <dim>1</dim>
6706 <dim>1</dim>
6707 <dim>1</dim>
6708 </port>
6709 </input>
6710 <output>
6711 <port id="2" precision="FP16">
6712 <dim>16</dim>
6713 <dim>64</dim>
6714 <dim>1</dim>
6715 <dim>1</dim>
6716 </port>
6717 </output>
6718 </layer>
6719 <layer id="522" name="bottleneck2_2/dim_red/conv" type="Convolution" version="opset1">
6720 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
6721 <input>
6722 <port id="0">
6723 <dim>1</dim>
6724 <dim>64</dim>
6725 <dim>80</dim>
6726 <dim>136</dim>
6727 </port>
6728 <port id="1">
6729 <dim>16</dim>
6730 <dim>64</dim>
6731 <dim>1</dim>
6732 <dim>1</dim>
6733 </port>
6734 </input>
6735 <output>
6736 <port id="2" precision="FP16">
6737 <dim>1</dim>
6738 <dim>16</dim>
6739 <dim>80</dim>
6740 <dim>136</dim>
6741 </port>
6742 </output>
6743 </layer>
6744 <layer id="523" name="data_add_238092381487722668" type="Const" version="opset1">
6745 <data element_type="f16" offset="13724" shape="1,16,1,1" size="32"/>
6746 <output>
6747 <port id="0" precision="FP16">
6748 <dim>1</dim>
6749 <dim>16</dim>
6750 <dim>1</dim>
6751 <dim>1</dim>
6752 </port>
6753 </output>
6754 </layer>
6755 <layer id="524" name="bottleneck2_2/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
6756 <data auto_broadcast="numpy"/>
6757 <input>
6758 <port id="0">
6759 <dim>1</dim>
6760 <dim>16</dim>
6761 <dim>80</dim>
6762 <dim>136</dim>
6763 </port>
6764 <port id="1">
6765 <dim>1</dim>
6766 <dim>16</dim>
6767 <dim>1</dim>
6768 <dim>1</dim>
6769 </port>
6770 </input>
6771 <output>
6772 <port id="2" names="bottleneck2_2/dim_red/conv" precision="FP16">
6773 <dim>1</dim>
6774 <dim>16</dim>
6775 <dim>80</dim>
6776 <dim>136</dim>
6777 </port>
6778 </output>
6779 </layer>
6780 <layer id="525" name="bottleneck2_2/dim_red/fn/weights3090840547879" type="Const" version="opset1">
6781 <data element_type="f32" offset="1576" shape="1" size="4"/>
6782 <output>
6783 <port id="0" precision="FP32">
6784 <dim>1</dim>
6785 </port>
6786 </output>
6787 </layer>
6788 <layer id="526" name="bottleneck2_2/dim_red/fn" type="PReLU" version="opset1">
6789 <input>
6790 <port id="0">
6791 <dim>1</dim>
6792 <dim>16</dim>
6793 <dim>80</dim>
6794 <dim>136</dim>
6795 </port>
6796 <port id="1">
6797 <dim>1</dim>
6798 </port>
6799 </input>
6800 <output>
6801 <port id="2" names="bottleneck2_2/dim_red/conv" precision="FP16">
6802 <dim>1</dim>
6803 <dim>16</dim>
6804 <dim>80</dim>
6805 <dim>136</dim>
6806 </port>
6807 </output>
6808 </layer>
6809 <layer id="527" name="bottleneck2_2/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
6810 <data auto_broadcast="numpy" levels="256"/>
6811 <input>
6812 <port id="0">
6813 <dim>1</dim>
6814 <dim>16</dim>
6815 <dim>80</dim>
6816 <dim>136</dim>
6817 </port>
6818 <port id="1">
6819 <dim>1</dim>
6820 <dim>16</dim>
6821 <dim>1</dim>
6822 <dim>1</dim>
6823 </port>
6824 <port id="2">
6825 <dim>1</dim>
6826 <dim>16</dim>
6827 <dim>1</dim>
6828 <dim>1</dim>
6829 </port>
6830 <port id="3">
6831 <dim>1</dim>
6832 <dim>16</dim>
6833 <dim>1</dim>
6834 <dim>1</dim>
6835 </port>
6836 <port id="4">
6837 <dim>1</dim>
6838 <dim>16</dim>
6839 <dim>1</dim>
6840 <dim>1</dim>
6841 </port>
6842 </input>
6843 <output>
6844 <port id="5" precision="FP16">
6845 <dim>1</dim>
6846 <dim>16</dim>
6847 <dim>80</dim>
6848 <dim>136</dim>
6849 </port>
6850 </output>
6851 </layer>
6852 <layer id="528" name="16895/value1689719797" type="Const" version="opset1">
6853 <data element_type="i64" offset="8036" shape="5" size="40"/>
6854 <output>
6855 <port id="0" precision="I64">
6856 <dim>5</dim>
6857 </port>
6858 </output>
6859 </layer>
6860 <layer id="529" name="bottleneck2_2/inner/dw1/bn/mean/Fused_Mul__copy88110141/quantized1158420547" type="Const" version="opset1">
6861 <data element_type="i8" offset="13756" shape="16,1,3,3" size="144"/>
6862 <output>
6863 <port id="0" precision="I8">
6864 <dim>16</dim>
6865 <dim>1</dim>
6866 <dim>3</dim>
6867 <dim>3</dim>
6868 </port>
6869 </output>
6870 </layer>
6871 <layer id="530" name="bottleneck2_2/inner/dw1/bn/mean/Fused_Mul__copy88110141/quantized/to_f16" type="Convert" version="opset1">
6872 <data destination_type="f16"/>
6873 <input>
6874 <port id="0">
6875 <dim>16</dim>
6876 <dim>1</dim>
6877 <dim>3</dim>
6878 <dim>3</dim>
6879 </port>
6880 </input>
6881 <output>
6882 <port id="1" precision="FP16">
6883 <dim>16</dim>
6884 <dim>1</dim>
6885 <dim>3</dim>
6886 <dim>3</dim>
6887 </port>
6888 </output>
6889 </layer>
6890 <layer id="531" name="bottleneck2_2/inner/dw1/conv/fq_weights_1/zero_point1159721558" type="Const" version="opset1">
6891 <data element_type="f16" offset="13900" shape="16,1,1,1" size="32"/>
6892 <output>
6893 <port id="0" precision="FP16">
6894 <dim>16</dim>
6895 <dim>1</dim>
6896 <dim>1</dim>
6897 <dim>1</dim>
6898 </port>
6899 </output>
6900 </layer>
6901 <layer id="532" name="bottleneck2_2/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
6902 <data auto_broadcast="numpy"/>
6903 <input>
6904 <port id="0">
6905 <dim>16</dim>
6906 <dim>1</dim>
6907 <dim>3</dim>
6908 <dim>3</dim>
6909 </port>
6910 <port id="1">
6911 <dim>16</dim>
6912 <dim>1</dim>
6913 <dim>1</dim>
6914 <dim>1</dim>
6915 </port>
6916 </input>
6917 <output>
6918 <port id="2" precision="FP16">
6919 <dim>16</dim>
6920 <dim>1</dim>
6921 <dim>3</dim>
6922 <dim>3</dim>
6923 </port>
6924 </output>
6925 </layer>
6926 <layer id="533" name="bottleneck2_2/inner/dw1/conv/fq_weights_1/scale1159219389" type="Const" version="opset1">
6927 <data element_type="f16" offset="13932" shape="16,1,1,1" size="32"/>
6928 <output>
6929 <port id="0" precision="FP16">
6930 <dim>16</dim>
6931 <dim>1</dim>
6932 <dim>1</dim>
6933 <dim>1</dim>
6934 </port>
6935 </output>
6936 </layer>
6937 <layer id="534" name="bottleneck2_2/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
6938 <data auto_broadcast="numpy"/>
6939 <input>
6940 <port id="0">
6941 <dim>16</dim>
6942 <dim>1</dim>
6943 <dim>3</dim>
6944 <dim>3</dim>
6945 </port>
6946 <port id="1">
6947 <dim>16</dim>
6948 <dim>1</dim>
6949 <dim>1</dim>
6950 <dim>1</dim>
6951 </port>
6952 </input>
6953 <output>
6954 <port id="2" precision="FP16">
6955 <dim>16</dim>
6956 <dim>1</dim>
6957 <dim>3</dim>
6958 <dim>3</dim>
6959 </port>
6960 </output>
6961 </layer>
6962 <layer id="535" name="16895" type="Reshape" version="opset1">
6963 <data special_zero="true"/>
6964 <input>
6965 <port id="0">
6966 <dim>16</dim>
6967 <dim>1</dim>
6968 <dim>3</dim>
6969 <dim>3</dim>
6970 </port>
6971 <port id="1">
6972 <dim>5</dim>
6973 </port>
6974 </input>
6975 <output>
6976 <port id="2" precision="FP16">
6977 <dim>16</dim>
6978 <dim>1</dim>
6979 <dim>1</dim>
6980 <dim>3</dim>
6981 <dim>3</dim>
6982 </port>
6983 </output>
6984 </layer>
6985 <layer id="536" name="bottleneck2_2/inner/dw1/conv" type="GroupConvolution" version="opset1">
6986 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
6987 <input>
6988 <port id="0">
6989 <dim>1</dim>
6990 <dim>16</dim>
6991 <dim>80</dim>
6992 <dim>136</dim>
6993 </port>
6994 <port id="1">
6995 <dim>16</dim>
6996 <dim>1</dim>
6997 <dim>1</dim>
6998 <dim>3</dim>
6999 <dim>3</dim>
7000 </port>
7001 </input>
7002 <output>
7003 <port id="2" precision="FP16">
7004 <dim>1</dim>
7005 <dim>16</dim>
7006 <dim>80</dim>
7007 <dim>136</dim>
7008 </port>
7009 </output>
7010 </layer>
7011 <layer id="537" name="data_add_238172382288322365" type="Const" version="opset1">
7012 <data element_type="f16" offset="13964" shape="1,16,1,1" size="32"/>
7013 <output>
7014 <port id="0" precision="FP16">
7015 <dim>1</dim>
7016 <dim>16</dim>
7017 <dim>1</dim>
7018 <dim>1</dim>
7019 </port>
7020 </output>
7021 </layer>
7022 <layer id="538" name="bottleneck2_2/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
7023 <data auto_broadcast="numpy"/>
7024 <input>
7025 <port id="0">
7026 <dim>1</dim>
7027 <dim>16</dim>
7028 <dim>80</dim>
7029 <dim>136</dim>
7030 </port>
7031 <port id="1">
7032 <dim>1</dim>
7033 <dim>16</dim>
7034 <dim>1</dim>
7035 <dim>1</dim>
7036 </port>
7037 </input>
7038 <output>
7039 <port id="2" names="bottleneck2_2/inner/dw1/conv" precision="FP16">
7040 <dim>1</dim>
7041 <dim>16</dim>
7042 <dim>80</dim>
7043 <dim>136</dim>
7044 </port>
7045 </output>
7046 </layer>
7047 <layer id="539" name="bottleneck2_2/inner/dw1/fn/weights3118440412885" type="Const" version="opset1">
7048 <data element_type="f32" offset="1576" shape="1" size="4"/>
7049 <output>
7050 <port id="0" precision="FP32">
7051 <dim>1</dim>
7052 </port>
7053 </output>
7054 </layer>
7055 <layer id="540" name="bottleneck2_2/inner/dw1/fn" type="PReLU" version="opset1">
7056 <input>
7057 <port id="0">
7058 <dim>1</dim>
7059 <dim>16</dim>
7060 <dim>80</dim>
7061 <dim>136</dim>
7062 </port>
7063 <port id="1">
7064 <dim>1</dim>
7065 </port>
7066 </input>
7067 <output>
7068 <port id="2" names="bottleneck2_2/inner/dw1/conv" precision="FP16">
7069 <dim>1</dim>
7070 <dim>16</dim>
7071 <dim>80</dim>
7072 <dim>136</dim>
7073 </port>
7074 </output>
7075 </layer>
7076 <layer id="541" name="bottleneck2_2/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
7077 <data auto_broadcast="numpy" levels="256"/>
7078 <input>
7079 <port id="0">
7080 <dim>1</dim>
7081 <dim>16</dim>
7082 <dim>80</dim>
7083 <dim>136</dim>
7084 </port>
7085 <port id="1"/>
7086 <port id="2"/>
7087 <port id="3"/>
7088 <port id="4"/>
7089 </input>
7090 <output>
7091 <port id="5" precision="FP16">
7092 <dim>1</dim>
7093 <dim>16</dim>
7094 <dim>80</dim>
7095 <dim>136</dim>
7096 </port>
7097 </output>
7098 </layer>
7099 <layer id="542" name="bottleneck2_2/dim_inc/bn/mean/Fused_Mul__copy88710144/quantized1206419590" type="Const" version="opset1">
7100 <data element_type="i8" offset="13996" shape="64,16,1,1" size="1024"/>
7101 <output>
7102 <port id="0" precision="I8">
7103 <dim>64</dim>
7104 <dim>16</dim>
7105 <dim>1</dim>
7106 <dim>1</dim>
7107 </port>
7108 </output>
7109 </layer>
7110 <layer id="543" name="bottleneck2_2/dim_inc/bn/mean/Fused_Mul__copy88710144/quantized/to_f16" type="Convert" version="opset1">
7111 <data destination_type="f16"/>
7112 <input>
7113 <port id="0">
7114 <dim>64</dim>
7115 <dim>16</dim>
7116 <dim>1</dim>
7117 <dim>1</dim>
7118 </port>
7119 </input>
7120 <output>
7121 <port id="1" precision="FP16">
7122 <dim>64</dim>
7123 <dim>16</dim>
7124 <dim>1</dim>
7125 <dim>1</dim>
7126 </port>
7127 </output>
7128 </layer>
7129 <layer id="544" name="bottleneck2_2/dim_inc/conv/fq_weights_1/zero_point1207720709" type="Const" version="opset1">
7130 <data element_type="f16" offset="15020" shape="64,1,1,1" size="128"/>
7131 <output>
7132 <port id="0" precision="FP16">
7133 <dim>64</dim>
7134 <dim>1</dim>
7135 <dim>1</dim>
7136 <dim>1</dim>
7137 </port>
7138 </output>
7139 </layer>
7140 <layer id="545" name="bottleneck2_2/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
7141 <data auto_broadcast="numpy"/>
7142 <input>
7143 <port id="0">
7144 <dim>64</dim>
7145 <dim>16</dim>
7146 <dim>1</dim>
7147 <dim>1</dim>
7148 </port>
7149 <port id="1">
7150 <dim>64</dim>
7151 <dim>1</dim>
7152 <dim>1</dim>
7153 <dim>1</dim>
7154 </port>
7155 </input>
7156 <output>
7157 <port id="2" precision="FP16">
7158 <dim>64</dim>
7159 <dim>16</dim>
7160 <dim>1</dim>
7161 <dim>1</dim>
7162 </port>
7163 </output>
7164 </layer>
7165 <layer id="546" name="bottleneck2_2/dim_inc/conv/fq_weights_1/scale1207219722" type="Const" version="opset1">
7166 <data element_type="f16" offset="15148" shape="64,1,1,1" size="128"/>
7167 <output>
7168 <port id="0" precision="FP16">
7169 <dim>64</dim>
7170 <dim>1</dim>
7171 <dim>1</dim>
7172 <dim>1</dim>
7173 </port>
7174 </output>
7175 </layer>
7176 <layer id="547" name="bottleneck2_2/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
7177 <data auto_broadcast="numpy"/>
7178 <input>
7179 <port id="0">
7180 <dim>64</dim>
7181 <dim>16</dim>
7182 <dim>1</dim>
7183 <dim>1</dim>
7184 </port>
7185 <port id="1">
7186 <dim>64</dim>
7187 <dim>1</dim>
7188 <dim>1</dim>
7189 <dim>1</dim>
7190 </port>
7191 </input>
7192 <output>
7193 <port id="2" precision="FP16">
7194 <dim>64</dim>
7195 <dim>16</dim>
7196 <dim>1</dim>
7197 <dim>1</dim>
7198 </port>
7199 </output>
7200 </layer>
7201 <layer id="548" name="bottleneck2_2/dim_inc/conv" type="Convolution" version="opset1">
7202 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
7203 <input>
7204 <port id="0">
7205 <dim>1</dim>
7206 <dim>16</dim>
7207 <dim>80</dim>
7208 <dim>136</dim>
7209 </port>
7210 <port id="1">
7211 <dim>64</dim>
7212 <dim>16</dim>
7213 <dim>1</dim>
7214 <dim>1</dim>
7215 </port>
7216 </input>
7217 <output>
7218 <port id="2" precision="FP16">
7219 <dim>1</dim>
7220 <dim>64</dim>
7221 <dim>80</dim>
7222 <dim>136</dim>
7223 </port>
7224 </output>
7225 </layer>
7226 <layer id="549" name="data_add_238252383088922611" type="Const" version="opset1">
7227 <data element_type="f16" offset="15276" shape="1,64,1,1" size="128"/>
7228 <output>
7229 <port id="0" precision="FP16">
7230 <dim>1</dim>
7231 <dim>64</dim>
7232 <dim>1</dim>
7233 <dim>1</dim>
7234 </port>
7235 </output>
7236 </layer>
7237 <layer id="550" name="bottleneck2_2/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
7238 <data auto_broadcast="numpy"/>
7239 <input>
7240 <port id="0">
7241 <dim>1</dim>
7242 <dim>64</dim>
7243 <dim>80</dim>
7244 <dim>136</dim>
7245 </port>
7246 <port id="1">
7247 <dim>1</dim>
7248 <dim>64</dim>
7249 <dim>1</dim>
7250 <dim>1</dim>
7251 </port>
7252 </input>
7253 <output>
7254 <port id="2" names="bottleneck2_2/dim_inc/conv" precision="FP16">
7255 <dim>1</dim>
7256 <dim>64</dim>
7257 <dim>80</dim>
7258 <dim>136</dim>
7259 </port>
7260 </output>
7261 </layer>
7262 <layer id="551" name="bottleneck2_2/add/fq_input_1" type="FakeQuantize" version="opset1">
7263 <data auto_broadcast="numpy" levels="256"/>
7264 <input>
7265 <port id="0">
7266 <dim>1</dim>
7267 <dim>64</dim>
7268 <dim>80</dim>
7269 <dim>136</dim>
7270 </port>
7271 <port id="1"/>
7272 <port id="2"/>
7273 <port id="3"/>
7274 <port id="4"/>
7275 </input>
7276 <output>
7277 <port id="5" precision="FP16">
7278 <dim>1</dim>
7279 <dim>64</dim>
7280 <dim>80</dim>
7281 <dim>136</dim>
7282 </port>
7283 </output>
7284 </layer>
7285 <layer id="552" name="bottleneck2_2/add" type="Add" version="opset1">
7286 <data auto_broadcast="numpy"/>
7287 <input>
7288 <port id="0">
7289 <dim>1</dim>
7290 <dim>64</dim>
7291 <dim>80</dim>
7292 <dim>136</dim>
7293 </port>
7294 <port id="1">
7295 <dim>1</dim>
7296 <dim>64</dim>
7297 <dim>80</dim>
7298 <dim>136</dim>
7299 </port>
7300 </input>
7301 <output>
7302 <port id="2" names="bottleneck2_2/add" precision="FP16">
7303 <dim>1</dim>
7304 <dim>64</dim>
7305 <dim>80</dim>
7306 <dim>136</dim>
7307 </port>
7308 </output>
7309 </layer>
7310 <layer id="553" name="bottleneck2_2/fn/weights3106040232892" type="Const" version="opset1">
7311 <data element_type="f32" offset="1576" shape="1" size="4"/>
7312 <output>
7313 <port id="0" precision="FP32">
7314 <dim>1</dim>
7315 </port>
7316 </output>
7317 </layer>
7318 <layer id="554" name="bottleneck2_2/fn" type="PReLU" version="opset1">
7319 <input>
7320 <port id="0">
7321 <dim>1</dim>
7322 <dim>64</dim>
7323 <dim>80</dim>
7324 <dim>136</dim>
7325 </port>
7326 <port id="1">
7327 <dim>1</dim>
7328 </port>
7329 </input>
7330 <output>
7331 <port id="2" names="bottleneck2_2/add" precision="FP16">
7332 <dim>1</dim>
7333 <dim>64</dim>
7334 <dim>80</dim>
7335 <dim>136</dim>
7336 </port>
7337 </output>
7338 </layer>
7339 <layer id="555" name="bottleneck2_3/add/fq_input_0" type="FakeQuantize" version="opset1">
7340 <data auto_broadcast="numpy" levels="256"/>
7341 <input>
7342 <port id="0">
7343 <dim>1</dim>
7344 <dim>64</dim>
7345 <dim>80</dim>
7346 <dim>136</dim>
7347 </port>
7348 <port id="1"/>
7349 <port id="2"/>
7350 <port id="3"/>
7351 <port id="4"/>
7352 </input>
7353 <output>
7354 <port id="5" precision="FP16">
7355 <dim>1</dim>
7356 <dim>64</dim>
7357 <dim>80</dim>
7358 <dim>136</dim>
7359 </port>
7360 </output>
7361 </layer>
7362 <layer id="556" name="5414541820298" type="Const" version="opset1">
7363 <data element_type="f16" offset="15404" shape="" size="2"/>
7364 <output>
7365 <port id="0" precision="FP16"/>
7366 </output>
7367 </layer>
7368 <layer id="557" name="5415541922815" type="Const" version="opset1">
7369 <data element_type="f16" offset="15406" shape="" size="2"/>
7370 <output>
7371 <port id="0" precision="FP16"/>
7372 </output>
7373 </layer>
7374 <layer id="558" name="5416542021462" type="Const" version="opset1">
7375 <data element_type="f16" offset="15404" shape="" size="2"/>
7376 <output>
7377 <port id="0" precision="FP16"/>
7378 </output>
7379 </layer>
7380 <layer id="559" name="5417542122590" type="Const" version="opset1">
7381 <data element_type="f16" offset="15406" shape="" size="2"/>
7382 <output>
7383 <port id="0" precision="FP16"/>
7384 </output>
7385 </layer>
7386 <layer id="560" name="4564456822848" type="Const" version="opset1">
7387 <data element_type="f16" offset="15408" shape="" size="2"/>
7388 <output>
7389 <port id="0" precision="FP16"/>
7390 </output>
7391 </layer>
7392 <layer id="561" name="4565456920025" type="Const" version="opset1">
7393 <data element_type="f16" offset="15410" shape="" size="2"/>
7394 <output>
7395 <port id="0" precision="FP16"/>
7396 </output>
7397 </layer>
7398 <layer id="562" name="4566457020655" type="Const" version="opset1">
7399 <data element_type="f16" offset="15408" shape="" size="2"/>
7400 <output>
7401 <port id="0" precision="FP16"/>
7402 </output>
7403 </layer>
7404 <layer id="563" name="4567457121222" type="Const" version="opset1">
7405 <data element_type="f16" offset="15410" shape="" size="2"/>
7406 <output>
7407 <port id="0" precision="FP16"/>
7408 </output>
7409 </layer>
7410 <layer id="564" name="4104410820580" type="Const" version="opset1">
7411 <data element_type="f16" offset="15412" shape="1,16,1,1" size="32"/>
7412 <output>
7413 <port id="0" precision="FP16">
7414 <dim>1</dim>
7415 <dim>16</dim>
7416 <dim>1</dim>
7417 <dim>1</dim>
7418 </port>
7419 </output>
7420 </layer>
7421 <layer id="565" name="4105410921249" type="Const" version="opset1">
7422 <data element_type="f16" offset="15444" shape="1,16,1,1" size="32"/>
7423 <output>
7424 <port id="0" precision="FP16">
7425 <dim>1</dim>
7426 <dim>16</dim>
7427 <dim>1</dim>
7428 <dim>1</dim>
7429 </port>
7430 </output>
7431 </layer>
7432 <layer id="566" name="4106411021675" type="Const" version="opset1">
7433 <data element_type="f16" offset="15412" shape="1,16,1,1" size="32"/>
7434 <output>
7435 <port id="0" precision="FP16">
7436 <dim>1</dim>
7437 <dim>16</dim>
7438 <dim>1</dim>
7439 <dim>1</dim>
7440 </port>
7441 </output>
7442 </layer>
7443 <layer id="567" name="4107411121321" type="Const" version="opset1">
7444 <data element_type="f16" offset="15444" shape="1,16,1,1" size="32"/>
7445 <output>
7446 <port id="0" precision="FP16">
7447 <dim>1</dim>
7448 <dim>16</dim>
7449 <dim>1</dim>
7450 <dim>1</dim>
7451 </port>
7452 </output>
7453 </layer>
7454 <layer id="568" name="bottleneck2_3/dim_red/bn/mean/Fused_Mul__copy89410147/quantized1177619557" type="Const" version="opset1">
7455 <data element_type="i8" offset="15476" shape="16,64,1,1" size="1024"/>
7456 <output>
7457 <port id="0" precision="I8">
7458 <dim>16</dim>
7459 <dim>64</dim>
7460 <dim>1</dim>
7461 <dim>1</dim>
7462 </port>
7463 </output>
7464 </layer>
7465 <layer id="569" name="bottleneck2_3/dim_red/bn/mean/Fused_Mul__copy89410147/quantized/to_f16" type="Convert" version="opset1">
7466 <data destination_type="f16"/>
7467 <input>
7468 <port id="0">
7469 <dim>16</dim>
7470 <dim>64</dim>
7471 <dim>1</dim>
7472 <dim>1</dim>
7473 </port>
7474 </input>
7475 <output>
7476 <port id="1" precision="FP16">
7477 <dim>16</dim>
7478 <dim>64</dim>
7479 <dim>1</dim>
7480 <dim>1</dim>
7481 </port>
7482 </output>
7483 </layer>
7484 <layer id="570" name="bottleneck2_3/dim_red/conv/fq_weights_1/zero_point1178920415" type="Const" version="opset1">
7485 <data element_type="f16" offset="16500" shape="16,1,1,1" size="32"/>
7486 <output>
7487 <port id="0" precision="FP16">
7488 <dim>16</dim>
7489 <dim>1</dim>
7490 <dim>1</dim>
7491 <dim>1</dim>
7492 </port>
7493 </output>
7494 </layer>
7495 <layer id="571" name="bottleneck2_3/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
7496 <data auto_broadcast="numpy"/>
7497 <input>
7498 <port id="0">
7499 <dim>16</dim>
7500 <dim>64</dim>
7501 <dim>1</dim>
7502 <dim>1</dim>
7503 </port>
7504 <port id="1">
7505 <dim>16</dim>
7506 <dim>1</dim>
7507 <dim>1</dim>
7508 <dim>1</dim>
7509 </port>
7510 </input>
7511 <output>
7512 <port id="2" precision="FP16">
7513 <dim>16</dim>
7514 <dim>64</dim>
7515 <dim>1</dim>
7516 <dim>1</dim>
7517 </port>
7518 </output>
7519 </layer>
7520 <layer id="572" name="bottleneck2_3/dim_red/conv/fq_weights_1/scale1178419392" type="Const" version="opset1">
7521 <data element_type="f16" offset="16532" shape="16,1,1,1" size="32"/>
7522 <output>
7523 <port id="0" precision="FP16">
7524 <dim>16</dim>
7525 <dim>1</dim>
7526 <dim>1</dim>
7527 <dim>1</dim>
7528 </port>
7529 </output>
7530 </layer>
7531 <layer id="573" name="bottleneck2_3/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
7532 <data auto_broadcast="numpy"/>
7533 <input>
7534 <port id="0">
7535 <dim>16</dim>
7536 <dim>64</dim>
7537 <dim>1</dim>
7538 <dim>1</dim>
7539 </port>
7540 <port id="1">
7541 <dim>16</dim>
7542 <dim>1</dim>
7543 <dim>1</dim>
7544 <dim>1</dim>
7545 </port>
7546 </input>
7547 <output>
7548 <port id="2" precision="FP16">
7549 <dim>16</dim>
7550 <dim>64</dim>
7551 <dim>1</dim>
7552 <dim>1</dim>
7553 </port>
7554 </output>
7555 </layer>
7556 <layer id="574" name="bottleneck2_3/dim_red/conv" type="Convolution" version="opset1">
7557 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
7558 <input>
7559 <port id="0">
7560 <dim>1</dim>
7561 <dim>64</dim>
7562 <dim>80</dim>
7563 <dim>136</dim>
7564 </port>
7565 <port id="1">
7566 <dim>16</dim>
7567 <dim>64</dim>
7568 <dim>1</dim>
7569 <dim>1</dim>
7570 </port>
7571 </input>
7572 <output>
7573 <port id="2" precision="FP16">
7574 <dim>1</dim>
7575 <dim>16</dim>
7576 <dim>80</dim>
7577 <dim>136</dim>
7578 </port>
7579 </output>
7580 </layer>
7581 <layer id="575" name="data_add_238332383889619500" type="Const" version="opset1">
7582 <data element_type="f16" offset="16564" shape="1,16,1,1" size="32"/>
7583 <output>
7584 <port id="0" precision="FP16">
7585 <dim>1</dim>
7586 <dim>16</dim>
7587 <dim>1</dim>
7588 <dim>1</dim>
7589 </port>
7590 </output>
7591 </layer>
7592 <layer id="576" name="bottleneck2_3/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
7593 <data auto_broadcast="numpy"/>
7594 <input>
7595 <port id="0">
7596 <dim>1</dim>
7597 <dim>16</dim>
7598 <dim>80</dim>
7599 <dim>136</dim>
7600 </port>
7601 <port id="1">
7602 <dim>1</dim>
7603 <dim>16</dim>
7604 <dim>1</dim>
7605 <dim>1</dim>
7606 </port>
7607 </input>
7608 <output>
7609 <port id="2" names="bottleneck2_3/dim_red/conv" precision="FP16">
7610 <dim>1</dim>
7611 <dim>16</dim>
7612 <dim>80</dim>
7613 <dim>136</dim>
7614 </port>
7615 </output>
7616 </layer>
7617 <layer id="577" name="bottleneck2_3/dim_red/fn/weights3109640157898" type="Const" version="opset1">
7618 <data element_type="f32" offset="1576" shape="1" size="4"/>
7619 <output>
7620 <port id="0" precision="FP32">
7621 <dim>1</dim>
7622 </port>
7623 </output>
7624 </layer>
7625 <layer id="578" name="bottleneck2_3/dim_red/fn" type="PReLU" version="opset1">
7626 <input>
7627 <port id="0">
7628 <dim>1</dim>
7629 <dim>16</dim>
7630 <dim>80</dim>
7631 <dim>136</dim>
7632 </port>
7633 <port id="1">
7634 <dim>1</dim>
7635 </port>
7636 </input>
7637 <output>
7638 <port id="2" names="bottleneck2_3/dim_red/conv" precision="FP16">
7639 <dim>1</dim>
7640 <dim>16</dim>
7641 <dim>80</dim>
7642 <dim>136</dim>
7643 </port>
7644 </output>
7645 </layer>
7646 <layer id="579" name="bottleneck2_3/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
7647 <data auto_broadcast="numpy" levels="256"/>
7648 <input>
7649 <port id="0">
7650 <dim>1</dim>
7651 <dim>16</dim>
7652 <dim>80</dim>
7653 <dim>136</dim>
7654 </port>
7655 <port id="1">
7656 <dim>1</dim>
7657 <dim>16</dim>
7658 <dim>1</dim>
7659 <dim>1</dim>
7660 </port>
7661 <port id="2">
7662 <dim>1</dim>
7663 <dim>16</dim>
7664 <dim>1</dim>
7665 <dim>1</dim>
7666 </port>
7667 <port id="3">
7668 <dim>1</dim>
7669 <dim>16</dim>
7670 <dim>1</dim>
7671 <dim>1</dim>
7672 </port>
7673 <port id="4">
7674 <dim>1</dim>
7675 <dim>16</dim>
7676 <dim>1</dim>
7677 <dim>1</dim>
7678 </port>
7679 </input>
7680 <output>
7681 <port id="5" precision="FP16">
7682 <dim>1</dim>
7683 <dim>16</dim>
7684 <dim>80</dim>
7685 <dim>136</dim>
7686 </port>
7687 </output>
7688 </layer>
7689 <layer id="580" name="16871/value1687322308" type="Const" version="opset1">
7690 <data element_type="i64" offset="8036" shape="5" size="40"/>
7691 <output>
7692 <port id="0" precision="I64">
7693 <dim>5</dim>
7694 </port>
7695 </output>
7696 </layer>
7697 <layer id="581" name="bottleneck2_3/inner/dw1/bn/mean/Fused_Mul__copy90010150/quantized1208821009" type="Const" version="opset1">
7698 <data element_type="i8" offset="16596" shape="16,1,3,3" size="144"/>
7699 <output>
7700 <port id="0" precision="I8">
7701 <dim>16</dim>
7702 <dim>1</dim>
7703 <dim>3</dim>
7704 <dim>3</dim>
7705 </port>
7706 </output>
7707 </layer>
7708 <layer id="582" name="bottleneck2_3/inner/dw1/bn/mean/Fused_Mul__copy90010150/quantized/to_f16" type="Convert" version="opset1">
7709 <data destination_type="f16"/>
7710 <input>
7711 <port id="0">
7712 <dim>16</dim>
7713 <dim>1</dim>
7714 <dim>3</dim>
7715 <dim>3</dim>
7716 </port>
7717 </input>
7718 <output>
7719 <port id="1" precision="FP16">
7720 <dim>16</dim>
7721 <dim>1</dim>
7722 <dim>3</dim>
7723 <dim>3</dim>
7724 </port>
7725 </output>
7726 </layer>
7727 <layer id="583" name="bottleneck2_3/inner/dw1/conv/fq_weights_1/zero_point1210121105" type="Const" version="opset1">
7728 <data element_type="f16" offset="16740" shape="16,1,1,1" size="32"/>
7729 <output>
7730 <port id="0" precision="FP16">
7731 <dim>16</dim>
7732 <dim>1</dim>
7733 <dim>1</dim>
7734 <dim>1</dim>
7735 </port>
7736 </output>
7737 </layer>
7738 <layer id="584" name="bottleneck2_3/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
7739 <data auto_broadcast="numpy"/>
7740 <input>
7741 <port id="0">
7742 <dim>16</dim>
7743 <dim>1</dim>
7744 <dim>3</dim>
7745 <dim>3</dim>
7746 </port>
7747 <port id="1">
7748 <dim>16</dim>
7749 <dim>1</dim>
7750 <dim>1</dim>
7751 <dim>1</dim>
7752 </port>
7753 </input>
7754 <output>
7755 <port id="2" precision="FP16">
7756 <dim>16</dim>
7757 <dim>1</dim>
7758 <dim>3</dim>
7759 <dim>3</dim>
7760 </port>
7761 </output>
7762 </layer>
7763 <layer id="585" name="bottleneck2_3/inner/dw1/conv/fq_weights_1/scale1209619950" type="Const" version="opset1">
7764 <data element_type="f16" offset="16772" shape="16,1,1,1" size="32"/>
7765 <output>
7766 <port id="0" precision="FP16">
7767 <dim>16</dim>
7768 <dim>1</dim>
7769 <dim>1</dim>
7770 <dim>1</dim>
7771 </port>
7772 </output>
7773 </layer>
7774 <layer id="586" name="bottleneck2_3/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
7775 <data auto_broadcast="numpy"/>
7776 <input>
7777 <port id="0">
7778 <dim>16</dim>
7779 <dim>1</dim>
7780 <dim>3</dim>
7781 <dim>3</dim>
7782 </port>
7783 <port id="1">
7784 <dim>16</dim>
7785 <dim>1</dim>
7786 <dim>1</dim>
7787 <dim>1</dim>
7788 </port>
7789 </input>
7790 <output>
7791 <port id="2" precision="FP16">
7792 <dim>16</dim>
7793 <dim>1</dim>
7794 <dim>3</dim>
7795 <dim>3</dim>
7796 </port>
7797 </output>
7798 </layer>
7799 <layer id="587" name="16871" type="Reshape" version="opset1">
7800 <data special_zero="true"/>
7801 <input>
7802 <port id="0">
7803 <dim>16</dim>
7804 <dim>1</dim>
7805 <dim>3</dim>
7806 <dim>3</dim>
7807 </port>
7808 <port id="1">
7809 <dim>5</dim>
7810 </port>
7811 </input>
7812 <output>
7813 <port id="2" precision="FP16">
7814 <dim>16</dim>
7815 <dim>1</dim>
7816 <dim>1</dim>
7817 <dim>3</dim>
7818 <dim>3</dim>
7819 </port>
7820 </output>
7821 </layer>
7822 <layer id="588" name="bottleneck2_3/inner/dw1/conv" type="GroupConvolution" version="opset1">
7823 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
7824 <input>
7825 <port id="0">
7826 <dim>1</dim>
7827 <dim>16</dim>
7828 <dim>80</dim>
7829 <dim>136</dim>
7830 </port>
7831 <port id="1">
7832 <dim>16</dim>
7833 <dim>1</dim>
7834 <dim>1</dim>
7835 <dim>3</dim>
7836 <dim>3</dim>
7837 </port>
7838 </input>
7839 <output>
7840 <port id="2" precision="FP16">
7841 <dim>1</dim>
7842 <dim>16</dim>
7843 <dim>80</dim>
7844 <dim>136</dim>
7845 </port>
7846 </output>
7847 </layer>
7848 <layer id="589" name="data_add_238412384690221054" type="Const" version="opset1">
7849 <data element_type="f16" offset="16804" shape="1,16,1,1" size="32"/>
7850 <output>
7851 <port id="0" precision="FP16">
7852 <dim>1</dim>
7853 <dim>16</dim>
7854 <dim>1</dim>
7855 <dim>1</dim>
7856 </port>
7857 </output>
7858 </layer>
7859 <layer id="590" name="bottleneck2_3/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
7860 <data auto_broadcast="numpy"/>
7861 <input>
7862 <port id="0">
7863 <dim>1</dim>
7864 <dim>16</dim>
7865 <dim>80</dim>
7866 <dim>136</dim>
7867 </port>
7868 <port id="1">
7869 <dim>1</dim>
7870 <dim>16</dim>
7871 <dim>1</dim>
7872 <dim>1</dim>
7873 </port>
7874 </input>
7875 <output>
7876 <port id="2" names="bottleneck2_3/inner/dw1/conv" precision="FP16">
7877 <dim>1</dim>
7878 <dim>16</dim>
7879 <dim>80</dim>
7880 <dim>136</dim>
7881 </port>
7882 </output>
7883 </layer>
7884 <layer id="591" name="bottleneck2_3/inner/dw1/fn/weights3097239851904" type="Const" version="opset1">
7885 <data element_type="f32" offset="1576" shape="1" size="4"/>
7886 <output>
7887 <port id="0" precision="FP32">
7888 <dim>1</dim>
7889 </port>
7890 </output>
7891 </layer>
7892 <layer id="592" name="bottleneck2_3/inner/dw1/fn" type="PReLU" version="opset1">
7893 <input>
7894 <port id="0">
7895 <dim>1</dim>
7896 <dim>16</dim>
7897 <dim>80</dim>
7898 <dim>136</dim>
7899 </port>
7900 <port id="1">
7901 <dim>1</dim>
7902 </port>
7903 </input>
7904 <output>
7905 <port id="2" names="bottleneck2_3/inner/dw1/conv" precision="FP16">
7906 <dim>1</dim>
7907 <dim>16</dim>
7908 <dim>80</dim>
7909 <dim>136</dim>
7910 </port>
7911 </output>
7912 </layer>
7913 <layer id="593" name="bottleneck2_3/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
7914 <data auto_broadcast="numpy" levels="256"/>
7915 <input>
7916 <port id="0">
7917 <dim>1</dim>
7918 <dim>16</dim>
7919 <dim>80</dim>
7920 <dim>136</dim>
7921 </port>
7922 <port id="1"/>
7923 <port id="2"/>
7924 <port id="3"/>
7925 <port id="4"/>
7926 </input>
7927 <output>
7928 <port id="5" precision="FP16">
7929 <dim>1</dim>
7930 <dim>16</dim>
7931 <dim>80</dim>
7932 <dim>136</dim>
7933 </port>
7934 </output>
7935 </layer>
7936 <layer id="594" name="bottleneck2_3/dim_inc/bn/mean/Fused_Mul__copy90610153/quantized1408019776" type="Const" version="opset1">
7937 <data element_type="i8" offset="16836" shape="64,16,1,1" size="1024"/>
7938 <output>
7939 <port id="0" precision="I8">
7940 <dim>64</dim>
7941 <dim>16</dim>
7942 <dim>1</dim>
7943 <dim>1</dim>
7944 </port>
7945 </output>
7946 </layer>
7947 <layer id="595" name="bottleneck2_3/dim_inc/bn/mean/Fused_Mul__copy90610153/quantized/to_f16" type="Convert" version="opset1">
7948 <data destination_type="f16"/>
7949 <input>
7950 <port id="0">
7951 <dim>64</dim>
7952 <dim>16</dim>
7953 <dim>1</dim>
7954 <dim>1</dim>
7955 </port>
7956 </input>
7957 <output>
7958 <port id="1" precision="FP16">
7959 <dim>64</dim>
7960 <dim>16</dim>
7961 <dim>1</dim>
7962 <dim>1</dim>
7963 </port>
7964 </output>
7965 </layer>
7966 <layer id="596" name="bottleneck2_3/dim_inc/conv/fq_weights_1/zero_point1409320313" type="Const" version="opset1">
7967 <data element_type="f16" offset="17860" shape="64,1,1,1" size="128"/>
7968 <output>
7969 <port id="0" precision="FP16">
7970 <dim>64</dim>
7971 <dim>1</dim>
7972 <dim>1</dim>
7973 <dim>1</dim>
7974 </port>
7975 </output>
7976 </layer>
7977 <layer id="597" name="bottleneck2_3/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
7978 <data auto_broadcast="numpy"/>
7979 <input>
7980 <port id="0">
7981 <dim>64</dim>
7982 <dim>16</dim>
7983 <dim>1</dim>
7984 <dim>1</dim>
7985 </port>
7986 <port id="1">
7987 <dim>64</dim>
7988 <dim>1</dim>
7989 <dim>1</dim>
7990 <dim>1</dim>
7991 </port>
7992 </input>
7993 <output>
7994 <port id="2" precision="FP16">
7995 <dim>64</dim>
7996 <dim>16</dim>
7997 <dim>1</dim>
7998 <dim>1</dim>
7999 </port>
8000 </output>
8001 </layer>
8002 <layer id="598" name="bottleneck2_3/dim_inc/conv/fq_weights_1/scale1408822134" type="Const" version="opset1">
8003 <data element_type="f16" offset="17988" shape="64,1,1,1" size="128"/>
8004 <output>
8005 <port id="0" precision="FP16">
8006 <dim>64</dim>
8007 <dim>1</dim>
8008 <dim>1</dim>
8009 <dim>1</dim>
8010 </port>
8011 </output>
8012 </layer>
8013 <layer id="599" name="bottleneck2_3/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
8014 <data auto_broadcast="numpy"/>
8015 <input>
8016 <port id="0">
8017 <dim>64</dim>
8018 <dim>16</dim>
8019 <dim>1</dim>
8020 <dim>1</dim>
8021 </port>
8022 <port id="1">
8023 <dim>64</dim>
8024 <dim>1</dim>
8025 <dim>1</dim>
8026 <dim>1</dim>
8027 </port>
8028 </input>
8029 <output>
8030 <port id="2" precision="FP16">
8031 <dim>64</dim>
8032 <dim>16</dim>
8033 <dim>1</dim>
8034 <dim>1</dim>
8035 </port>
8036 </output>
8037 </layer>
8038 <layer id="600" name="bottleneck2_3/dim_inc/conv" type="Convolution" version="opset1">
8039 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
8040 <input>
8041 <port id="0">
8042 <dim>1</dim>
8043 <dim>16</dim>
8044 <dim>80</dim>
8045 <dim>136</dim>
8046 </port>
8047 <port id="1">
8048 <dim>64</dim>
8049 <dim>16</dim>
8050 <dim>1</dim>
8051 <dim>1</dim>
8052 </port>
8053 </input>
8054 <output>
8055 <port id="2" precision="FP16">
8056 <dim>1</dim>
8057 <dim>64</dim>
8058 <dim>80</dim>
8059 <dim>136</dim>
8060 </port>
8061 </output>
8062 </layer>
8063 <layer id="601" name="data_add_238492385490819800" type="Const" version="opset1">
8064 <data element_type="f16" offset="18116" shape="1,64,1,1" size="128"/>
8065 <output>
8066 <port id="0" precision="FP16">
8067 <dim>1</dim>
8068 <dim>64</dim>
8069 <dim>1</dim>
8070 <dim>1</dim>
8071 </port>
8072 </output>
8073 </layer>
8074 <layer id="602" name="bottleneck2_3/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
8075 <data auto_broadcast="numpy"/>
8076 <input>
8077 <port id="0">
8078 <dim>1</dim>
8079 <dim>64</dim>
8080 <dim>80</dim>
8081 <dim>136</dim>
8082 </port>
8083 <port id="1">
8084 <dim>1</dim>
8085 <dim>64</dim>
8086 <dim>1</dim>
8087 <dim>1</dim>
8088 </port>
8089 </input>
8090 <output>
8091 <port id="2" names="bottleneck2_3/dim_inc/conv" precision="FP16">
8092 <dim>1</dim>
8093 <dim>64</dim>
8094 <dim>80</dim>
8095 <dim>136</dim>
8096 </port>
8097 </output>
8098 </layer>
8099 <layer id="603" name="bottleneck2_3/add/fq_input_1" type="FakeQuantize" version="opset1">
8100 <data auto_broadcast="numpy" levels="256"/>
8101 <input>
8102 <port id="0">
8103 <dim>1</dim>
8104 <dim>64</dim>
8105 <dim>80</dim>
8106 <dim>136</dim>
8107 </port>
8108 <port id="1"/>
8109 <port id="2"/>
8110 <port id="3"/>
8111 <port id="4"/>
8112 </input>
8113 <output>
8114 <port id="5" precision="FP16">
8115 <dim>1</dim>
8116 <dim>64</dim>
8117 <dim>80</dim>
8118 <dim>136</dim>
8119 </port>
8120 </output>
8121 </layer>
8122 <layer id="604" name="bottleneck2_3/add" type="Add" version="opset1">
8123 <data auto_broadcast="numpy"/>
8124 <input>
8125 <port id="0">
8126 <dim>1</dim>
8127 <dim>64</dim>
8128 <dim>80</dim>
8129 <dim>136</dim>
8130 </port>
8131 <port id="1">
8132 <dim>1</dim>
8133 <dim>64</dim>
8134 <dim>80</dim>
8135 <dim>136</dim>
8136 </port>
8137 </input>
8138 <output>
8139 <port id="2" names="bottleneck2_3/add" precision="FP16">
8140 <dim>1</dim>
8141 <dim>64</dim>
8142 <dim>80</dim>
8143 <dim>136</dim>
8144 </port>
8145 </output>
8146 </layer>
8147 <layer id="605" name="bottleneck2_3/fn/weights3089239845911" type="Const" version="opset1">
8148 <data element_type="f32" offset="1576" shape="1" size="4"/>
8149 <output>
8150 <port id="0" precision="FP32">
8151 <dim>1</dim>
8152 </port>
8153 </output>
8154 </layer>
8155 <layer id="606" name="bottleneck2_3/fn" type="PReLU" version="opset1">
8156 <input>
8157 <port id="0">
8158 <dim>1</dim>
8159 <dim>64</dim>
8160 <dim>80</dim>
8161 <dim>136</dim>
8162 </port>
8163 <port id="1">
8164 <dim>1</dim>
8165 </port>
8166 </input>
8167 <output>
8168 <port id="2" names="bottleneck2_3/add" precision="FP16">
8169 <dim>1</dim>
8170 <dim>64</dim>
8171 <dim>80</dim>
8172 <dim>136</dim>
8173 </port>
8174 </output>
8175 </layer>
8176 <layer id="607" name="bottleneck2_4/add/fq_input_0" type="FakeQuantize" version="opset1">
8177 <data auto_broadcast="numpy" levels="256"/>
8178 <input>
8179 <port id="0">
8180 <dim>1</dim>
8181 <dim>64</dim>
8182 <dim>80</dim>
8183 <dim>136</dim>
8184 </port>
8185 <port id="1"/>
8186 <port id="2"/>
8187 <port id="3"/>
8188 <port id="4"/>
8189 </input>
8190 <output>
8191 <port id="5" precision="FP16">
8192 <dim>1</dim>
8193 <dim>64</dim>
8194 <dim>80</dim>
8195 <dim>136</dim>
8196 </port>
8197 </output>
8198 </layer>
8199 <layer id="608" name="5034503822635" type="Const" version="opset1">
8200 <data element_type="f16" offset="18244" shape="" size="2"/>
8201 <output>
8202 <port id="0" precision="FP16"/>
8203 </output>
8204 </layer>
8205 <layer id="609" name="5035503922557" type="Const" version="opset1">
8206 <data element_type="f16" offset="18246" shape="" size="2"/>
8207 <output>
8208 <port id="0" precision="FP16"/>
8209 </output>
8210 </layer>
8211 <layer id="610" name="5036504020106" type="Const" version="opset1">
8212 <data element_type="f16" offset="18244" shape="" size="2"/>
8213 <output>
8214 <port id="0" precision="FP16"/>
8215 </output>
8216 </layer>
8217 <layer id="611" name="5037504119368" type="Const" version="opset1">
8218 <data element_type="f16" offset="18246" shape="" size="2"/>
8219 <output>
8220 <port id="0" precision="FP16"/>
8221 </output>
8222 </layer>
8223 <layer id="612" name="3224322820490" type="Const" version="opset1">
8224 <data element_type="f16" offset="18248" shape="" size="2"/>
8225 <output>
8226 <port id="0" precision="FP16"/>
8227 </output>
8228 </layer>
8229 <layer id="613" name="3225322921099" type="Const" version="opset1">
8230 <data element_type="f16" offset="18250" shape="" size="2"/>
8231 <output>
8232 <port id="0" precision="FP16"/>
8233 </output>
8234 </layer>
8235 <layer id="614" name="3226323021711" type="Const" version="opset1">
8236 <data element_type="f16" offset="18248" shape="" size="2"/>
8237 <output>
8238 <port id="0" precision="FP16"/>
8239 </output>
8240 </layer>
8241 <layer id="615" name="3227323121966" type="Const" version="opset1">
8242 <data element_type="f16" offset="18250" shape="" size="2"/>
8243 <output>
8244 <port id="0" precision="FP16"/>
8245 </output>
8246 </layer>
8247 <layer id="616" name="2644264820301" type="Const" version="opset1">
8248 <data element_type="f16" offset="18252" shape="1,16,1,1" size="32"/>
8249 <output>
8250 <port id="0" precision="FP16">
8251 <dim>1</dim>
8252 <dim>16</dim>
8253 <dim>1</dim>
8254 <dim>1</dim>
8255 </port>
8256 </output>
8257 </layer>
8258 <layer id="617" name="2645264919755" type="Const" version="opset1">
8259 <data element_type="f16" offset="18284" shape="1,16,1,1" size="32"/>
8260 <output>
8261 <port id="0" precision="FP16">
8262 <dim>1</dim>
8263 <dim>16</dim>
8264 <dim>1</dim>
8265 <dim>1</dim>
8266 </port>
8267 </output>
8268 </layer>
8269 <layer id="618" name="2646265021318" type="Const" version="opset1">
8270 <data element_type="f16" offset="18252" shape="1,16,1,1" size="32"/>
8271 <output>
8272 <port id="0" precision="FP16">
8273 <dim>1</dim>
8274 <dim>16</dim>
8275 <dim>1</dim>
8276 <dim>1</dim>
8277 </port>
8278 </output>
8279 </layer>
8280 <layer id="619" name="2647265121384" type="Const" version="opset1">
8281 <data element_type="f16" offset="18284" shape="1,16,1,1" size="32"/>
8282 <output>
8283 <port id="0" precision="FP16">
8284 <dim>1</dim>
8285 <dim>16</dim>
8286 <dim>1</dim>
8287 <dim>1</dim>
8288 </port>
8289 </output>
8290 </layer>
8291 <layer id="620" name="bottleneck2_4/dim_red/bn/mean/Fused_Mul__copy91310156/quantized1398421159" type="Const" version="opset1">
8292 <data element_type="i8" offset="18316" shape="16,64,1,1" size="1024"/>
8293 <output>
8294 <port id="0" precision="I8">
8295 <dim>16</dim>
8296 <dim>64</dim>
8297 <dim>1</dim>
8298 <dim>1</dim>
8299 </port>
8300 </output>
8301 </layer>
8302 <layer id="621" name="bottleneck2_4/dim_red/bn/mean/Fused_Mul__copy91310156/quantized/to_f16" type="Convert" version="opset1">
8303 <data destination_type="f16"/>
8304 <input>
8305 <port id="0">
8306 <dim>16</dim>
8307 <dim>64</dim>
8308 <dim>1</dim>
8309 <dim>1</dim>
8310 </port>
8311 </input>
8312 <output>
8313 <port id="1" precision="FP16">
8314 <dim>16</dim>
8315 <dim>64</dim>
8316 <dim>1</dim>
8317 <dim>1</dim>
8318 </port>
8319 </output>
8320 </layer>
8321 <layer id="622" name="bottleneck2_4/dim_red/conv/fq_weights_1/zero_point1399721360" type="Const" version="opset1">
8322 <data element_type="f16" offset="19340" shape="16,1,1,1" size="32"/>
8323 <output>
8324 <port id="0" precision="FP16">
8325 <dim>16</dim>
8326 <dim>1</dim>
8327 <dim>1</dim>
8328 <dim>1</dim>
8329 </port>
8330 </output>
8331 </layer>
8332 <layer id="623" name="bottleneck2_4/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
8333 <data auto_broadcast="numpy"/>
8334 <input>
8335 <port id="0">
8336 <dim>16</dim>
8337 <dim>64</dim>
8338 <dim>1</dim>
8339 <dim>1</dim>
8340 </port>
8341 <port id="1">
8342 <dim>16</dim>
8343 <dim>1</dim>
8344 <dim>1</dim>
8345 <dim>1</dim>
8346 </port>
8347 </input>
8348 <output>
8349 <port id="2" precision="FP16">
8350 <dim>16</dim>
8351 <dim>64</dim>
8352 <dim>1</dim>
8353 <dim>1</dim>
8354 </port>
8355 </output>
8356 </layer>
8357 <layer id="624" name="bottleneck2_4/dim_red/conv/fq_weights_1/scale1399220787" type="Const" version="opset1">
8358 <data element_type="f16" offset="19372" shape="16,1,1,1" size="32"/>
8359 <output>
8360 <port id="0" precision="FP16">
8361 <dim>16</dim>
8362 <dim>1</dim>
8363 <dim>1</dim>
8364 <dim>1</dim>
8365 </port>
8366 </output>
8367 </layer>
8368 <layer id="625" name="bottleneck2_4/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
8369 <data auto_broadcast="numpy"/>
8370 <input>
8371 <port id="0">
8372 <dim>16</dim>
8373 <dim>64</dim>
8374 <dim>1</dim>
8375 <dim>1</dim>
8376 </port>
8377 <port id="1">
8378 <dim>16</dim>
8379 <dim>1</dim>
8380 <dim>1</dim>
8381 <dim>1</dim>
8382 </port>
8383 </input>
8384 <output>
8385 <port id="2" precision="FP16">
8386 <dim>16</dim>
8387 <dim>64</dim>
8388 <dim>1</dim>
8389 <dim>1</dim>
8390 </port>
8391 </output>
8392 </layer>
8393 <layer id="626" name="bottleneck2_4/dim_red/conv" type="Convolution" version="opset1">
8394 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
8395 <input>
8396 <port id="0">
8397 <dim>1</dim>
8398 <dim>64</dim>
8399 <dim>80</dim>
8400 <dim>136</dim>
8401 </port>
8402 <port id="1">
8403 <dim>16</dim>
8404 <dim>64</dim>
8405 <dim>1</dim>
8406 <dim>1</dim>
8407 </port>
8408 </input>
8409 <output>
8410 <port id="2" precision="FP16">
8411 <dim>1</dim>
8412 <dim>16</dim>
8413 <dim>80</dim>
8414 <dim>136</dim>
8415 </port>
8416 </output>
8417 </layer>
8418 <layer id="627" name="data_add_238572386291522089" type="Const" version="opset1">
8419 <data element_type="f16" offset="19404" shape="1,16,1,1" size="32"/>
8420 <output>
8421 <port id="0" precision="FP16">
8422 <dim>1</dim>
8423 <dim>16</dim>
8424 <dim>1</dim>
8425 <dim>1</dim>
8426 </port>
8427 </output>
8428 </layer>
8429 <layer id="628" name="bottleneck2_4/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
8430 <data auto_broadcast="numpy"/>
8431 <input>
8432 <port id="0">
8433 <dim>1</dim>
8434 <dim>16</dim>
8435 <dim>80</dim>
8436 <dim>136</dim>
8437 </port>
8438 <port id="1">
8439 <dim>1</dim>
8440 <dim>16</dim>
8441 <dim>1</dim>
8442 <dim>1</dim>
8443 </port>
8444 </input>
8445 <output>
8446 <port id="2" names="bottleneck2_4/dim_red/conv" precision="FP16">
8447 <dim>1</dim>
8448 <dim>16</dim>
8449 <dim>80</dim>
8450 <dim>136</dim>
8451 </port>
8452 </output>
8453 </layer>
8454 <layer id="629" name="bottleneck2_4/dim_red/fn/weights3114440556917" type="Const" version="opset1">
8455 <data element_type="f32" offset="1576" shape="1" size="4"/>
8456 <output>
8457 <port id="0" precision="FP32">
8458 <dim>1</dim>
8459 </port>
8460 </output>
8461 </layer>
8462 <layer id="630" name="bottleneck2_4/dim_red/fn" type="PReLU" version="opset1">
8463 <input>
8464 <port id="0">
8465 <dim>1</dim>
8466 <dim>16</dim>
8467 <dim>80</dim>
8468 <dim>136</dim>
8469 </port>
8470 <port id="1">
8471 <dim>1</dim>
8472 </port>
8473 </input>
8474 <output>
8475 <port id="2" names="bottleneck2_4/dim_red/conv" precision="FP16">
8476 <dim>1</dim>
8477 <dim>16</dim>
8478 <dim>80</dim>
8479 <dim>136</dim>
8480 </port>
8481 </output>
8482 </layer>
8483 <layer id="631" name="bottleneck2_4/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
8484 <data auto_broadcast="numpy" levels="256"/>
8485 <input>
8486 <port id="0">
8487 <dim>1</dim>
8488 <dim>16</dim>
8489 <dim>80</dim>
8490 <dim>136</dim>
8491 </port>
8492 <port id="1">
8493 <dim>1</dim>
8494 <dim>16</dim>
8495 <dim>1</dim>
8496 <dim>1</dim>
8497 </port>
8498 <port id="2">
8499 <dim>1</dim>
8500 <dim>16</dim>
8501 <dim>1</dim>
8502 <dim>1</dim>
8503 </port>
8504 <port id="3">
8505 <dim>1</dim>
8506 <dim>16</dim>
8507 <dim>1</dim>
8508 <dim>1</dim>
8509 </port>
8510 <port id="4">
8511 <dim>1</dim>
8512 <dim>16</dim>
8513 <dim>1</dim>
8514 <dim>1</dim>
8515 </port>
8516 </input>
8517 <output>
8518 <port id="5" precision="FP16">
8519 <dim>1</dim>
8520 <dim>16</dim>
8521 <dim>80</dim>
8522 <dim>136</dim>
8523 </port>
8524 </output>
8525 </layer>
8526 <layer id="632" name="16803/value1680520367" type="Const" version="opset1">
8527 <data element_type="i64" offset="8036" shape="5" size="40"/>
8528 <output>
8529 <port id="0" precision="I64">
8530 <dim>5</dim>
8531 </port>
8532 </output>
8533 </layer>
8534 <layer id="633" name="bottleneck2_4/inner/dw1/bn/mean/Fused_Mul__copy91910159/quantized1163222761" type="Const" version="opset1">
8535 <data element_type="i8" offset="19436" shape="16,1,3,3" size="144"/>
8536 <output>
8537 <port id="0" precision="I8">
8538 <dim>16</dim>
8539 <dim>1</dim>
8540 <dim>3</dim>
8541 <dim>3</dim>
8542 </port>
8543 </output>
8544 </layer>
8545 <layer id="634" name="bottleneck2_4/inner/dw1/bn/mean/Fused_Mul__copy91910159/quantized/to_f16" type="Convert" version="opset1">
8546 <data destination_type="f16"/>
8547 <input>
8548 <port id="0">
8549 <dim>16</dim>
8550 <dim>1</dim>
8551 <dim>3</dim>
8552 <dim>3</dim>
8553 </port>
8554 </input>
8555 <output>
8556 <port id="1" precision="FP16">
8557 <dim>16</dim>
8558 <dim>1</dim>
8559 <dim>3</dim>
8560 <dim>3</dim>
8561 </port>
8562 </output>
8563 </layer>
8564 <layer id="635" name="bottleneck2_4/inner/dw1/conv/fq_weights_1/zero_point1164522191" type="Const" version="opset1">
8565 <data element_type="f16" offset="19580" shape="16,1,1,1" size="32"/>
8566 <output>
8567 <port id="0" precision="FP16">
8568 <dim>16</dim>
8569 <dim>1</dim>
8570 <dim>1</dim>
8571 <dim>1</dim>
8572 </port>
8573 </output>
8574 </layer>
8575 <layer id="636" name="bottleneck2_4/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
8576 <data auto_broadcast="numpy"/>
8577 <input>
8578 <port id="0">
8579 <dim>16</dim>
8580 <dim>1</dim>
8581 <dim>3</dim>
8582 <dim>3</dim>
8583 </port>
8584 <port id="1">
8585 <dim>16</dim>
8586 <dim>1</dim>
8587 <dim>1</dim>
8588 <dim>1</dim>
8589 </port>
8590 </input>
8591 <output>
8592 <port id="2" precision="FP16">
8593 <dim>16</dim>
8594 <dim>1</dim>
8595 <dim>3</dim>
8596 <dim>3</dim>
8597 </port>
8598 </output>
8599 </layer>
8600 <layer id="637" name="bottleneck2_4/inner/dw1/conv/fq_weights_1/scale1164021003" type="Const" version="opset1">
8601 <data element_type="f16" offset="19612" shape="16,1,1,1" size="32"/>
8602 <output>
8603 <port id="0" precision="FP16">
8604 <dim>16</dim>
8605 <dim>1</dim>
8606 <dim>1</dim>
8607 <dim>1</dim>
8608 </port>
8609 </output>
8610 </layer>
8611 <layer id="638" name="bottleneck2_4/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
8612 <data auto_broadcast="numpy"/>
8613 <input>
8614 <port id="0">
8615 <dim>16</dim>
8616 <dim>1</dim>
8617 <dim>3</dim>
8618 <dim>3</dim>
8619 </port>
8620 <port id="1">
8621 <dim>16</dim>
8622 <dim>1</dim>
8623 <dim>1</dim>
8624 <dim>1</dim>
8625 </port>
8626 </input>
8627 <output>
8628 <port id="2" precision="FP16">
8629 <dim>16</dim>
8630 <dim>1</dim>
8631 <dim>3</dim>
8632 <dim>3</dim>
8633 </port>
8634 </output>
8635 </layer>
8636 <layer id="639" name="16803" type="Reshape" version="opset1">
8637 <data special_zero="true"/>
8638 <input>
8639 <port id="0">
8640 <dim>16</dim>
8641 <dim>1</dim>
8642 <dim>3</dim>
8643 <dim>3</dim>
8644 </port>
8645 <port id="1">
8646 <dim>5</dim>
8647 </port>
8648 </input>
8649 <output>
8650 <port id="2" precision="FP16">
8651 <dim>16</dim>
8652 <dim>1</dim>
8653 <dim>1</dim>
8654 <dim>3</dim>
8655 <dim>3</dim>
8656 </port>
8657 </output>
8658 </layer>
8659 <layer id="640" name="bottleneck2_4/inner/dw1/conv" type="GroupConvolution" version="opset1">
8660 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
8661 <input>
8662 <port id="0">
8663 <dim>1</dim>
8664 <dim>16</dim>
8665 <dim>80</dim>
8666 <dim>136</dim>
8667 </port>
8668 <port id="1">
8669 <dim>16</dim>
8670 <dim>1</dim>
8671 <dim>1</dim>
8672 <dim>3</dim>
8673 <dim>3</dim>
8674 </port>
8675 </input>
8676 <output>
8677 <port id="2" precision="FP16">
8678 <dim>1</dim>
8679 <dim>16</dim>
8680 <dim>80</dim>
8681 <dim>136</dim>
8682 </port>
8683 </output>
8684 </layer>
8685 <layer id="641" name="data_add_238652387092120916" type="Const" version="opset1">
8686 <data element_type="f16" offset="19644" shape="1,16,1,1" size="32"/>
8687 <output>
8688 <port id="0" precision="FP16">
8689 <dim>1</dim>
8690 <dim>16</dim>
8691 <dim>1</dim>
8692 <dim>1</dim>
8693 </port>
8694 </output>
8695 </layer>
8696 <layer id="642" name="bottleneck2_4/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
8697 <data auto_broadcast="numpy"/>
8698 <input>
8699 <port id="0">
8700 <dim>1</dim>
8701 <dim>16</dim>
8702 <dim>80</dim>
8703 <dim>136</dim>
8704 </port>
8705 <port id="1">
8706 <dim>1</dim>
8707 <dim>16</dim>
8708 <dim>1</dim>
8709 <dim>1</dim>
8710 </port>
8711 </input>
8712 <output>
8713 <port id="2" names="bottleneck2_4/inner/dw1/conv" precision="FP16">
8714 <dim>1</dim>
8715 <dim>16</dim>
8716 <dim>80</dim>
8717 <dim>136</dim>
8718 </port>
8719 </output>
8720 </layer>
8721 <layer id="643" name="bottleneck2_4/inner/dw1/fn/weights3110039950923" type="Const" version="opset1">
8722 <data element_type="f32" offset="1576" shape="1" size="4"/>
8723 <output>
8724 <port id="0" precision="FP32">
8725 <dim>1</dim>
8726 </port>
8727 </output>
8728 </layer>
8729 <layer id="644" name="bottleneck2_4/inner/dw1/fn" type="PReLU" version="opset1">
8730 <input>
8731 <port id="0">
8732 <dim>1</dim>
8733 <dim>16</dim>
8734 <dim>80</dim>
8735 <dim>136</dim>
8736 </port>
8737 <port id="1">
8738 <dim>1</dim>
8739 </port>
8740 </input>
8741 <output>
8742 <port id="2" names="bottleneck2_4/inner/dw1/conv" precision="FP16">
8743 <dim>1</dim>
8744 <dim>16</dim>
8745 <dim>80</dim>
8746 <dim>136</dim>
8747 </port>
8748 </output>
8749 </layer>
8750 <layer id="645" name="bottleneck2_4/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
8751 <data auto_broadcast="numpy" levels="256"/>
8752 <input>
8753 <port id="0">
8754 <dim>1</dim>
8755 <dim>16</dim>
8756 <dim>80</dim>
8757 <dim>136</dim>
8758 </port>
8759 <port id="1"/>
8760 <port id="2"/>
8761 <port id="3"/>
8762 <port id="4"/>
8763 </input>
8764 <output>
8765 <port id="5" precision="FP16">
8766 <dim>1</dim>
8767 <dim>16</dim>
8768 <dim>80</dim>
8769 <dim>136</dim>
8770 </port>
8771 </output>
8772 </layer>
8773 <layer id="646" name="bottleneck2_4/dim_inc/bn/mean/Fused_Mul__copy92510162/quantized1189622095" type="Const" version="opset1">
8774 <data element_type="i8" offset="19676" shape="64,16,1,1" size="1024"/>
8775 <output>
8776 <port id="0" precision="I8">
8777 <dim>64</dim>
8778 <dim>16</dim>
8779 <dim>1</dim>
8780 <dim>1</dim>
8781 </port>
8782 </output>
8783 </layer>
8784 <layer id="647" name="bottleneck2_4/dim_inc/bn/mean/Fused_Mul__copy92510162/quantized/to_f16" type="Convert" version="opset1">
8785 <data destination_type="f16"/>
8786 <input>
8787 <port id="0">
8788 <dim>64</dim>
8789 <dim>16</dim>
8790 <dim>1</dim>
8791 <dim>1</dim>
8792 </port>
8793 </input>
8794 <output>
8795 <port id="1" precision="FP16">
8796 <dim>64</dim>
8797 <dim>16</dim>
8798 <dim>1</dim>
8799 <dim>1</dim>
8800 </port>
8801 </output>
8802 </layer>
8803 <layer id="648" name="bottleneck2_4/dim_inc/conv/fq_weights_1/zero_point1190922224" type="Const" version="opset1">
8804 <data element_type="f16" offset="20700" shape="64,1,1,1" size="128"/>
8805 <output>
8806 <port id="0" precision="FP16">
8807 <dim>64</dim>
8808 <dim>1</dim>
8809 <dim>1</dim>
8810 <dim>1</dim>
8811 </port>
8812 </output>
8813 </layer>
8814 <layer id="649" name="bottleneck2_4/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
8815 <data auto_broadcast="numpy"/>
8816 <input>
8817 <port id="0">
8818 <dim>64</dim>
8819 <dim>16</dim>
8820 <dim>1</dim>
8821 <dim>1</dim>
8822 </port>
8823 <port id="1">
8824 <dim>64</dim>
8825 <dim>1</dim>
8826 <dim>1</dim>
8827 <dim>1</dim>
8828 </port>
8829 </input>
8830 <output>
8831 <port id="2" precision="FP16">
8832 <dim>64</dim>
8833 <dim>16</dim>
8834 <dim>1</dim>
8835 <dim>1</dim>
8836 </port>
8837 </output>
8838 </layer>
8839 <layer id="650" name="bottleneck2_4/dim_inc/conv/fq_weights_1/scale1190421480" type="Const" version="opset1">
8840 <data element_type="f16" offset="20828" shape="64,1,1,1" size="128"/>
8841 <output>
8842 <port id="0" precision="FP16">
8843 <dim>64</dim>
8844 <dim>1</dim>
8845 <dim>1</dim>
8846 <dim>1</dim>
8847 </port>
8848 </output>
8849 </layer>
8850 <layer id="651" name="bottleneck2_4/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
8851 <data auto_broadcast="numpy"/>
8852 <input>
8853 <port id="0">
8854 <dim>64</dim>
8855 <dim>16</dim>
8856 <dim>1</dim>
8857 <dim>1</dim>
8858 </port>
8859 <port id="1">
8860 <dim>64</dim>
8861 <dim>1</dim>
8862 <dim>1</dim>
8863 <dim>1</dim>
8864 </port>
8865 </input>
8866 <output>
8867 <port id="2" precision="FP16">
8868 <dim>64</dim>
8869 <dim>16</dim>
8870 <dim>1</dim>
8871 <dim>1</dim>
8872 </port>
8873 </output>
8874 </layer>
8875 <layer id="652" name="bottleneck2_4/dim_inc/conv" type="Convolution" version="opset1">
8876 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
8877 <input>
8878 <port id="0">
8879 <dim>1</dim>
8880 <dim>16</dim>
8881 <dim>80</dim>
8882 <dim>136</dim>
8883 </port>
8884 <port id="1">
8885 <dim>64</dim>
8886 <dim>16</dim>
8887 <dim>1</dim>
8888 <dim>1</dim>
8889 </port>
8890 </input>
8891 <output>
8892 <port id="2" precision="FP16">
8893 <dim>1</dim>
8894 <dim>64</dim>
8895 <dim>80</dim>
8896 <dim>136</dim>
8897 </port>
8898 </output>
8899 </layer>
8900 <layer id="653" name="data_add_238732387892720712" type="Const" version="opset1">
8901 <data element_type="f16" offset="20956" shape="1,64,1,1" size="128"/>
8902 <output>
8903 <port id="0" precision="FP16">
8904 <dim>1</dim>
8905 <dim>64</dim>
8906 <dim>1</dim>
8907 <dim>1</dim>
8908 </port>
8909 </output>
8910 </layer>
8911 <layer id="654" name="bottleneck2_4/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
8912 <data auto_broadcast="numpy"/>
8913 <input>
8914 <port id="0">
8915 <dim>1</dim>
8916 <dim>64</dim>
8917 <dim>80</dim>
8918 <dim>136</dim>
8919 </port>
8920 <port id="1">
8921 <dim>1</dim>
8922 <dim>64</dim>
8923 <dim>1</dim>
8924 <dim>1</dim>
8925 </port>
8926 </input>
8927 <output>
8928 <port id="2" names="bottleneck2_4/dim_inc/conv" precision="FP16">
8929 <dim>1</dim>
8930 <dim>64</dim>
8931 <dim>80</dim>
8932 <dim>136</dim>
8933 </port>
8934 </output>
8935 </layer>
8936 <layer id="655" name="bottleneck2_4/add/fq_input_1" type="FakeQuantize" version="opset1">
8937 <data auto_broadcast="numpy" levels="256"/>
8938 <input>
8939 <port id="0">
8940 <dim>1</dim>
8941 <dim>64</dim>
8942 <dim>80</dim>
8943 <dim>136</dim>
8944 </port>
8945 <port id="1"/>
8946 <port id="2"/>
8947 <port id="3"/>
8948 <port id="4"/>
8949 </input>
8950 <output>
8951 <port id="5" precision="FP16">
8952 <dim>1</dim>
8953 <dim>64</dim>
8954 <dim>80</dim>
8955 <dim>136</dim>
8956 </port>
8957 </output>
8958 </layer>
8959 <layer id="656" name="bottleneck2_4/add" type="Add" version="opset1">
8960 <data auto_broadcast="numpy"/>
8961 <input>
8962 <port id="0">
8963 <dim>1</dim>
8964 <dim>64</dim>
8965 <dim>80</dim>
8966 <dim>136</dim>
8967 </port>
8968 <port id="1">
8969 <dim>1</dim>
8970 <dim>64</dim>
8971 <dim>80</dim>
8972 <dim>136</dim>
8973 </port>
8974 </input>
8975 <output>
8976 <port id="2" names="bottleneck2_4/add" precision="FP16">
8977 <dim>1</dim>
8978 <dim>64</dim>
8979 <dim>80</dim>
8980 <dim>136</dim>
8981 </port>
8982 </output>
8983 </layer>
8984 <layer id="657" name="bottleneck2_4/fn/weights3084440526930" type="Const" version="opset1">
8985 <data element_type="f32" offset="1576" shape="1" size="4"/>
8986 <output>
8987 <port id="0" precision="FP32">
8988 <dim>1</dim>
8989 </port>
8990 </output>
8991 </layer>
8992 <layer id="658" name="bottleneck2_4/fn" type="PReLU" version="opset1">
8993 <input>
8994 <port id="0">
8995 <dim>1</dim>
8996 <dim>64</dim>
8997 <dim>80</dim>
8998 <dim>136</dim>
8999 </port>
9000 <port id="1">
9001 <dim>1</dim>
9002 </port>
9003 </input>
9004 <output>
9005 <port id="2" names="bottleneck2_4/add" precision="FP16">
9006 <dim>1</dim>
9007 <dim>64</dim>
9008 <dim>80</dim>
9009 <dim>136</dim>
9010 </port>
9011 </output>
9012 </layer>
9013 <layer id="659" name="bottleneck2_5/add/fq_input_0" type="FakeQuantize" version="opset1">
9014 <data auto_broadcast="numpy" levels="256"/>
9015 <input>
9016 <port id="0">
9017 <dim>1</dim>
9018 <dim>64</dim>
9019 <dim>80</dim>
9020 <dim>136</dim>
9021 </port>
9022 <port id="1"/>
9023 <port id="2"/>
9024 <port id="3"/>
9025 <port id="4"/>
9026 </input>
9027 <output>
9028 <port id="5" precision="FP16">
9029 <dim>1</dim>
9030 <dim>64</dim>
9031 <dim>80</dim>
9032 <dim>136</dim>
9033 </port>
9034 </output>
9035 </layer>
9036 <layer id="660" name="5054505821225" type="Const" version="opset1">
9037 <data element_type="f16" offset="21084" shape="" size="2"/>
9038 <output>
9039 <port id="0" precision="FP16"/>
9040 </output>
9041 </layer>
9042 <layer id="661" name="5055505919782" type="Const" version="opset1">
9043 <data element_type="f16" offset="21086" shape="" size="2"/>
9044 <output>
9045 <port id="0" precision="FP16"/>
9046 </output>
9047 </layer>
9048 <layer id="662" name="5056506020196" type="Const" version="opset1">
9049 <data element_type="f16" offset="21084" shape="" size="2"/>
9050 <output>
9051 <port id="0" precision="FP16"/>
9052 </output>
9053 </layer>
9054 <layer id="663" name="5057506122386" type="Const" version="opset1">
9055 <data element_type="f16" offset="21086" shape="" size="2"/>
9056 <output>
9057 <port id="0" precision="FP16"/>
9058 </output>
9059 </layer>
9060 <layer id="664" name="3204320822938" type="Const" version="opset1">
9061 <data element_type="f16" offset="21088" shape="" size="2"/>
9062 <output>
9063 <port id="0" precision="FP16"/>
9064 </output>
9065 </layer>
9066 <layer id="665" name="3205320920421" type="Const" version="opset1">
9067 <data element_type="f16" offset="21090" shape="" size="2"/>
9068 <output>
9069 <port id="0" precision="FP16"/>
9070 </output>
9071 </layer>
9072 <layer id="666" name="3206321020400" type="Const" version="opset1">
9073 <data element_type="f16" offset="21088" shape="" size="2"/>
9074 <output>
9075 <port id="0" precision="FP16"/>
9076 </output>
9077 </layer>
9078 <layer id="667" name="3207321119365" type="Const" version="opset1">
9079 <data element_type="f16" offset="21090" shape="" size="2"/>
9080 <output>
9081 <port id="0" precision="FP16"/>
9082 </output>
9083 </layer>
9084 <layer id="668" name="3064306821147" type="Const" version="opset1">
9085 <data element_type="f16" offset="21092" shape="1,16,1,1" size="32"/>
9086 <output>
9087 <port id="0" precision="FP16">
9088 <dim>1</dim>
9089 <dim>16</dim>
9090 <dim>1</dim>
9091 <dim>1</dim>
9092 </port>
9093 </output>
9094 </layer>
9095 <layer id="669" name="3065306920277" type="Const" version="opset1">
9096 <data element_type="f16" offset="21124" shape="1,16,1,1" size="32"/>
9097 <output>
9098 <port id="0" precision="FP16">
9099 <dim>1</dim>
9100 <dim>16</dim>
9101 <dim>1</dim>
9102 <dim>1</dim>
9103 </port>
9104 </output>
9105 </layer>
9106 <layer id="670" name="3066307020850" type="Const" version="opset1">
9107 <data element_type="f16" offset="21092" shape="1,16,1,1" size="32"/>
9108 <output>
9109 <port id="0" precision="FP16">
9110 <dim>1</dim>
9111 <dim>16</dim>
9112 <dim>1</dim>
9113 <dim>1</dim>
9114 </port>
9115 </output>
9116 </layer>
9117 <layer id="671" name="3067307120520" type="Const" version="opset1">
9118 <data element_type="f16" offset="21124" shape="1,16,1,1" size="32"/>
9119 <output>
9120 <port id="0" precision="FP16">
9121 <dim>1</dim>
9122 <dim>16</dim>
9123 <dim>1</dim>
9124 <dim>1</dim>
9125 </port>
9126 </output>
9127 </layer>
9128 <layer id="672" name="bottleneck2_5/dim_red/bn/mean/Fused_Mul__copy93210165/quantized1367219947" type="Const" version="opset1">
9129 <data element_type="i8" offset="21156" shape="16,64,1,1" size="1024"/>
9130 <output>
9131 <port id="0" precision="I8">
9132 <dim>16</dim>
9133 <dim>64</dim>
9134 <dim>1</dim>
9135 <dim>1</dim>
9136 </port>
9137 </output>
9138 </layer>
9139 <layer id="673" name="bottleneck2_5/dim_red/bn/mean/Fused_Mul__copy93210165/quantized/to_f16" type="Convert" version="opset1">
9140 <data destination_type="f16"/>
9141 <input>
9142 <port id="0">
9143 <dim>16</dim>
9144 <dim>64</dim>
9145 <dim>1</dim>
9146 <dim>1</dim>
9147 </port>
9148 </input>
9149 <output>
9150 <port id="1" precision="FP16">
9151 <dim>16</dim>
9152 <dim>64</dim>
9153 <dim>1</dim>
9154 <dim>1</dim>
9155 </port>
9156 </output>
9157 </layer>
9158 <layer id="674" name="bottleneck2_5/dim_red/conv/fq_weights_1/zero_point1368521183" type="Const" version="opset1">
9159 <data element_type="f16" offset="22180" shape="16,1,1,1" size="32"/>
9160 <output>
9161 <port id="0" precision="FP16">
9162 <dim>16</dim>
9163 <dim>1</dim>
9164 <dim>1</dim>
9165 <dim>1</dim>
9166 </port>
9167 </output>
9168 </layer>
9169 <layer id="675" name="bottleneck2_5/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
9170 <data auto_broadcast="numpy"/>
9171 <input>
9172 <port id="0">
9173 <dim>16</dim>
9174 <dim>64</dim>
9175 <dim>1</dim>
9176 <dim>1</dim>
9177 </port>
9178 <port id="1">
9179 <dim>16</dim>
9180 <dim>1</dim>
9181 <dim>1</dim>
9182 <dim>1</dim>
9183 </port>
9184 </input>
9185 <output>
9186 <port id="2" precision="FP16">
9187 <dim>16</dim>
9188 <dim>64</dim>
9189 <dim>1</dim>
9190 <dim>1</dim>
9191 </port>
9192 </output>
9193 </layer>
9194 <layer id="676" name="bottleneck2_5/dim_red/conv/fq_weights_1/scale1368022131" type="Const" version="opset1">
9195 <data element_type="f16" offset="22212" shape="16,1,1,1" size="32"/>
9196 <output>
9197 <port id="0" precision="FP16">
9198 <dim>16</dim>
9199 <dim>1</dim>
9200 <dim>1</dim>
9201 <dim>1</dim>
9202 </port>
9203 </output>
9204 </layer>
9205 <layer id="677" name="bottleneck2_5/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
9206 <data auto_broadcast="numpy"/>
9207 <input>
9208 <port id="0">
9209 <dim>16</dim>
9210 <dim>64</dim>
9211 <dim>1</dim>
9212 <dim>1</dim>
9213 </port>
9214 <port id="1">
9215 <dim>16</dim>
9216 <dim>1</dim>
9217 <dim>1</dim>
9218 <dim>1</dim>
9219 </port>
9220 </input>
9221 <output>
9222 <port id="2" precision="FP16">
9223 <dim>16</dim>
9224 <dim>64</dim>
9225 <dim>1</dim>
9226 <dim>1</dim>
9227 </port>
9228 </output>
9229 </layer>
9230 <layer id="678" name="bottleneck2_5/dim_red/conv" type="Convolution" version="opset1">
9231 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
9232 <input>
9233 <port id="0">
9234 <dim>1</dim>
9235 <dim>64</dim>
9236 <dim>80</dim>
9237 <dim>136</dim>
9238 </port>
9239 <port id="1">
9240 <dim>16</dim>
9241 <dim>64</dim>
9242 <dim>1</dim>
9243 <dim>1</dim>
9244 </port>
9245 </input>
9246 <output>
9247 <port id="2" precision="FP16">
9248 <dim>1</dim>
9249 <dim>16</dim>
9250 <dim>80</dim>
9251 <dim>136</dim>
9252 </port>
9253 </output>
9254 </layer>
9255 <layer id="679" name="data_add_238812388693420631" type="Const" version="opset1">
9256 <data element_type="f16" offset="22244" shape="1,16,1,1" size="32"/>
9257 <output>
9258 <port id="0" precision="FP16">
9259 <dim>1</dim>
9260 <dim>16</dim>
9261 <dim>1</dim>
9262 <dim>1</dim>
9263 </port>
9264 </output>
9265 </layer>
9266 <layer id="680" name="bottleneck2_5/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
9267 <data auto_broadcast="numpy"/>
9268 <input>
9269 <port id="0">
9270 <dim>1</dim>
9271 <dim>16</dim>
9272 <dim>80</dim>
9273 <dim>136</dim>
9274 </port>
9275 <port id="1">
9276 <dim>1</dim>
9277 <dim>16</dim>
9278 <dim>1</dim>
9279 <dim>1</dim>
9280 </port>
9281 </input>
9282 <output>
9283 <port id="2" names="bottleneck2_5/dim_red/conv" precision="FP16">
9284 <dim>1</dim>
9285 <dim>16</dim>
9286 <dim>80</dim>
9287 <dim>136</dim>
9288 </port>
9289 </output>
9290 </layer>
9291 <layer id="681" name="bottleneck2_5/dim_red/fn/weights3114840367936" type="Const" version="opset1">
9292 <data element_type="f32" offset="1576" shape="1" size="4"/>
9293 <output>
9294 <port id="0" precision="FP32">
9295 <dim>1</dim>
9296 </port>
9297 </output>
9298 </layer>
9299 <layer id="682" name="bottleneck2_5/dim_red/fn" type="PReLU" version="opset1">
9300 <input>
9301 <port id="0">
9302 <dim>1</dim>
9303 <dim>16</dim>
9304 <dim>80</dim>
9305 <dim>136</dim>
9306 </port>
9307 <port id="1">
9308 <dim>1</dim>
9309 </port>
9310 </input>
9311 <output>
9312 <port id="2" names="bottleneck2_5/dim_red/conv" precision="FP16">
9313 <dim>1</dim>
9314 <dim>16</dim>
9315 <dim>80</dim>
9316 <dim>136</dim>
9317 </port>
9318 </output>
9319 </layer>
9320 <layer id="683" name="bottleneck2_5/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
9321 <data auto_broadcast="numpy" levels="256"/>
9322 <input>
9323 <port id="0">
9324 <dim>1</dim>
9325 <dim>16</dim>
9326 <dim>80</dim>
9327 <dim>136</dim>
9328 </port>
9329 <port id="1">
9330 <dim>1</dim>
9331 <dim>16</dim>
9332 <dim>1</dim>
9333 <dim>1</dim>
9334 </port>
9335 <port id="2">
9336 <dim>1</dim>
9337 <dim>16</dim>
9338 <dim>1</dim>
9339 <dim>1</dim>
9340 </port>
9341 <port id="3">
9342 <dim>1</dim>
9343 <dim>16</dim>
9344 <dim>1</dim>
9345 <dim>1</dim>
9346 </port>
9347 <port id="4">
9348 <dim>1</dim>
9349 <dim>16</dim>
9350 <dim>1</dim>
9351 <dim>1</dim>
9352 </port>
9353 </input>
9354 <output>
9355 <port id="5" precision="FP16">
9356 <dim>1</dim>
9357 <dim>16</dim>
9358 <dim>80</dim>
9359 <dim>136</dim>
9360 </port>
9361 </output>
9362 </layer>
9363 <layer id="684" name="16811/value1681320502" type="Const" version="opset1">
9364 <data element_type="i64" offset="8036" shape="5" size="40"/>
9365 <output>
9366 <port id="0" precision="I64">
9367 <dim>5</dim>
9368 </port>
9369 </output>
9370 </layer>
9371 <layer id="685" name="bottleneck2_5/inner/dw1/bn/mean/Fused_Mul__copy93810168/quantized1348021606" type="Const" version="opset1">
9372 <data element_type="i8" offset="22276" shape="16,1,3,3" size="144"/>
9373 <output>
9374 <port id="0" precision="I8">
9375 <dim>16</dim>
9376 <dim>1</dim>
9377 <dim>3</dim>
9378 <dim>3</dim>
9379 </port>
9380 </output>
9381 </layer>
9382 <layer id="686" name="bottleneck2_5/inner/dw1/bn/mean/Fused_Mul__copy93810168/quantized/to_f16" type="Convert" version="opset1">
9383 <data destination_type="f16"/>
9384 <input>
9385 <port id="0">
9386 <dim>16</dim>
9387 <dim>1</dim>
9388 <dim>3</dim>
9389 <dim>3</dim>
9390 </port>
9391 </input>
9392 <output>
9393 <port id="1" precision="FP16">
9394 <dim>16</dim>
9395 <dim>1</dim>
9396 <dim>3</dim>
9397 <dim>3</dim>
9398 </port>
9399 </output>
9400 </layer>
9401 <layer id="687" name="bottleneck2_5/inner/dw1/conv/fq_weights_1/zero_point1349320136" type="Const" version="opset1">
9402 <data element_type="f16" offset="22420" shape="16,1,1,1" size="32"/>
9403 <output>
9404 <port id="0" precision="FP16">
9405 <dim>16</dim>
9406 <dim>1</dim>
9407 <dim>1</dim>
9408 <dim>1</dim>
9409 </port>
9410 </output>
9411 </layer>
9412 <layer id="688" name="bottleneck2_5/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
9413 <data auto_broadcast="numpy"/>
9414 <input>
9415 <port id="0">
9416 <dim>16</dim>
9417 <dim>1</dim>
9418 <dim>3</dim>
9419 <dim>3</dim>
9420 </port>
9421 <port id="1">
9422 <dim>16</dim>
9423 <dim>1</dim>
9424 <dim>1</dim>
9425 <dim>1</dim>
9426 </port>
9427 </input>
9428 <output>
9429 <port id="2" precision="FP16">
9430 <dim>16</dim>
9431 <dim>1</dim>
9432 <dim>3</dim>
9433 <dim>3</dim>
9434 </port>
9435 </output>
9436 </layer>
9437 <layer id="689" name="bottleneck2_5/inner/dw1/conv/fq_weights_1/scale1348820358" type="Const" version="opset1">
9438 <data element_type="f16" offset="22452" shape="16,1,1,1" size="32"/>
9439 <output>
9440 <port id="0" precision="FP16">
9441 <dim>16</dim>
9442 <dim>1</dim>
9443 <dim>1</dim>
9444 <dim>1</dim>
9445 </port>
9446 </output>
9447 </layer>
9448 <layer id="690" name="bottleneck2_5/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
9449 <data auto_broadcast="numpy"/>
9450 <input>
9451 <port id="0">
9452 <dim>16</dim>
9453 <dim>1</dim>
9454 <dim>3</dim>
9455 <dim>3</dim>
9456 </port>
9457 <port id="1">
9458 <dim>16</dim>
9459 <dim>1</dim>
9460 <dim>1</dim>
9461 <dim>1</dim>
9462 </port>
9463 </input>
9464 <output>
9465 <port id="2" precision="FP16">
9466 <dim>16</dim>
9467 <dim>1</dim>
9468 <dim>3</dim>
9469 <dim>3</dim>
9470 </port>
9471 </output>
9472 </layer>
9473 <layer id="691" name="16811" type="Reshape" version="opset1">
9474 <data special_zero="true"/>
9475 <input>
9476 <port id="0">
9477 <dim>16</dim>
9478 <dim>1</dim>
9479 <dim>3</dim>
9480 <dim>3</dim>
9481 </port>
9482 <port id="1">
9483 <dim>5</dim>
9484 </port>
9485 </input>
9486 <output>
9487 <port id="2" precision="FP16">
9488 <dim>16</dim>
9489 <dim>1</dim>
9490 <dim>1</dim>
9491 <dim>3</dim>
9492 <dim>3</dim>
9493 </port>
9494 </output>
9495 </layer>
9496 <layer id="692" name="bottleneck2_5/inner/dw1/conv" type="GroupConvolution" version="opset1">
9497 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
9498 <input>
9499 <port id="0">
9500 <dim>1</dim>
9501 <dim>16</dim>
9502 <dim>80</dim>
9503 <dim>136</dim>
9504 </port>
9505 <port id="1">
9506 <dim>16</dim>
9507 <dim>1</dim>
9508 <dim>1</dim>
9509 <dim>3</dim>
9510 <dim>3</dim>
9511 </port>
9512 </input>
9513 <output>
9514 <port id="2" precision="FP16">
9515 <dim>1</dim>
9516 <dim>16</dim>
9517 <dim>80</dim>
9518 <dim>136</dim>
9519 </port>
9520 </output>
9521 </layer>
9522 <layer id="693" name="data_add_238892389494020859" type="Const" version="opset1">
9523 <data element_type="f16" offset="22484" shape="1,16,1,1" size="32"/>
9524 <output>
9525 <port id="0" precision="FP16">
9526 <dim>1</dim>
9527 <dim>16</dim>
9528 <dim>1</dim>
9529 <dim>1</dim>
9530 </port>
9531 </output>
9532 </layer>
9533 <layer id="694" name="bottleneck2_5/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
9534 <data auto_broadcast="numpy"/>
9535 <input>
9536 <port id="0">
9537 <dim>1</dim>
9538 <dim>16</dim>
9539 <dim>80</dim>
9540 <dim>136</dim>
9541 </port>
9542 <port id="1">
9543 <dim>1</dim>
9544 <dim>16</dim>
9545 <dim>1</dim>
9546 <dim>1</dim>
9547 </port>
9548 </input>
9549 <output>
9550 <port id="2" names="bottleneck2_5/inner/dw1/conv" precision="FP16">
9551 <dim>1</dim>
9552 <dim>16</dim>
9553 <dim>80</dim>
9554 <dim>136</dim>
9555 </port>
9556 </output>
9557 </layer>
9558 <layer id="695" name="bottleneck2_5/inner/dw1/fn/weights3078440256942" type="Const" version="opset1">
9559 <data element_type="f32" offset="1576" shape="1" size="4"/>
9560 <output>
9561 <port id="0" precision="FP32">
9562 <dim>1</dim>
9563 </port>
9564 </output>
9565 </layer>
9566 <layer id="696" name="bottleneck2_5/inner/dw1/fn" type="PReLU" version="opset1">
9567 <input>
9568 <port id="0">
9569 <dim>1</dim>
9570 <dim>16</dim>
9571 <dim>80</dim>
9572 <dim>136</dim>
9573 </port>
9574 <port id="1">
9575 <dim>1</dim>
9576 </port>
9577 </input>
9578 <output>
9579 <port id="2" names="bottleneck2_5/inner/dw1/conv" precision="FP16">
9580 <dim>1</dim>
9581 <dim>16</dim>
9582 <dim>80</dim>
9583 <dim>136</dim>
9584 </port>
9585 </output>
9586 </layer>
9587 <layer id="697" name="bottleneck2_5/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
9588 <data auto_broadcast="numpy" levels="256"/>
9589 <input>
9590 <port id="0">
9591 <dim>1</dim>
9592 <dim>16</dim>
9593 <dim>80</dim>
9594 <dim>136</dim>
9595 </port>
9596 <port id="1"/>
9597 <port id="2"/>
9598 <port id="3"/>
9599 <port id="4"/>
9600 </input>
9601 <output>
9602 <port id="5" precision="FP16">
9603 <dim>1</dim>
9604 <dim>16</dim>
9605 <dim>80</dim>
9606 <dim>136</dim>
9607 </port>
9608 </output>
9609 </layer>
9610 <layer id="698" name="bottleneck2_5/dim_inc/bn/mean/Fused_Mul__copy94410171/quantized1290419635" type="Const" version="opset1">
9611 <data element_type="i8" offset="22516" shape="64,16,1,1" size="1024"/>
9612 <output>
9613 <port id="0" precision="I8">
9614 <dim>64</dim>
9615 <dim>16</dim>
9616 <dim>1</dim>
9617 <dim>1</dim>
9618 </port>
9619 </output>
9620 </layer>
9621 <layer id="699" name="bottleneck2_5/dim_inc/bn/mean/Fused_Mul__copy94410171/quantized/to_f16" type="Convert" version="opset1">
9622 <data destination_type="f16"/>
9623 <input>
9624 <port id="0">
9625 <dim>64</dim>
9626 <dim>16</dim>
9627 <dim>1</dim>
9628 <dim>1</dim>
9629 </port>
9630 </input>
9631 <output>
9632 <port id="1" precision="FP16">
9633 <dim>64</dim>
9634 <dim>16</dim>
9635 <dim>1</dim>
9636 <dim>1</dim>
9637 </port>
9638 </output>
9639 </layer>
9640 <layer id="700" name="bottleneck2_5/dim_inc/conv/fq_weights_1/zero_point1291720784" type="Const" version="opset1">
9641 <data element_type="f16" offset="23540" shape="64,1,1,1" size="128"/>
9642 <output>
9643 <port id="0" precision="FP16">
9644 <dim>64</dim>
9645 <dim>1</dim>
9646 <dim>1</dim>
9647 <dim>1</dim>
9648 </port>
9649 </output>
9650 </layer>
9651 <layer id="701" name="bottleneck2_5/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
9652 <data auto_broadcast="numpy"/>
9653 <input>
9654 <port id="0">
9655 <dim>64</dim>
9656 <dim>16</dim>
9657 <dim>1</dim>
9658 <dim>1</dim>
9659 </port>
9660 <port id="1">
9661 <dim>64</dim>
9662 <dim>1</dim>
9663 <dim>1</dim>
9664 <dim>1</dim>
9665 </port>
9666 </input>
9667 <output>
9668 <port id="2" precision="FP16">
9669 <dim>64</dim>
9670 <dim>16</dim>
9671 <dim>1</dim>
9672 <dim>1</dim>
9673 </port>
9674 </output>
9675 </layer>
9676 <layer id="702" name="bottleneck2_5/dim_inc/conv/fq_weights_1/scale1291222455" type="Const" version="opset1">
9677 <data element_type="f16" offset="23668" shape="64,1,1,1" size="128"/>
9678 <output>
9679 <port id="0" precision="FP16">
9680 <dim>64</dim>
9681 <dim>1</dim>
9682 <dim>1</dim>
9683 <dim>1</dim>
9684 </port>
9685 </output>
9686 </layer>
9687 <layer id="703" name="bottleneck2_5/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
9688 <data auto_broadcast="numpy"/>
9689 <input>
9690 <port id="0">
9691 <dim>64</dim>
9692 <dim>16</dim>
9693 <dim>1</dim>
9694 <dim>1</dim>
9695 </port>
9696 <port id="1">
9697 <dim>64</dim>
9698 <dim>1</dim>
9699 <dim>1</dim>
9700 <dim>1</dim>
9701 </port>
9702 </input>
9703 <output>
9704 <port id="2" precision="FP16">
9705 <dim>64</dim>
9706 <dim>16</dim>
9707 <dim>1</dim>
9708 <dim>1</dim>
9709 </port>
9710 </output>
9711 </layer>
9712 <layer id="704" name="bottleneck2_5/dim_inc/conv" type="Convolution" version="opset1">
9713 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
9714 <input>
9715 <port id="0">
9716 <dim>1</dim>
9717 <dim>16</dim>
9718 <dim>80</dim>
9719 <dim>136</dim>
9720 </port>
9721 <port id="1">
9722 <dim>64</dim>
9723 <dim>16</dim>
9724 <dim>1</dim>
9725 <dim>1</dim>
9726 </port>
9727 </input>
9728 <output>
9729 <port id="2" precision="FP16">
9730 <dim>1</dim>
9731 <dim>64</dim>
9732 <dim>80</dim>
9733 <dim>136</dim>
9734 </port>
9735 </output>
9736 </layer>
9737 <layer id="705" name="data_add_238972390294621426" type="Const" version="opset1">
9738 <data element_type="f16" offset="23796" shape="1,64,1,1" size="128"/>
9739 <output>
9740 <port id="0" precision="FP16">
9741 <dim>1</dim>
9742 <dim>64</dim>
9743 <dim>1</dim>
9744 <dim>1</dim>
9745 </port>
9746 </output>
9747 </layer>
9748 <layer id="706" name="bottleneck2_5/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
9749 <data auto_broadcast="numpy"/>
9750 <input>
9751 <port id="0">
9752 <dim>1</dim>
9753 <dim>64</dim>
9754 <dim>80</dim>
9755 <dim>136</dim>
9756 </port>
9757 <port id="1">
9758 <dim>1</dim>
9759 <dim>64</dim>
9760 <dim>1</dim>
9761 <dim>1</dim>
9762 </port>
9763 </input>
9764 <output>
9765 <port id="2" names="bottleneck2_5/dim_inc/conv" precision="FP16">
9766 <dim>1</dim>
9767 <dim>64</dim>
9768 <dim>80</dim>
9769 <dim>136</dim>
9770 </port>
9771 </output>
9772 </layer>
9773 <layer id="707" name="bottleneck2_5/add/fq_input_1" type="FakeQuantize" version="opset1">
9774 <data auto_broadcast="numpy" levels="256"/>
9775 <input>
9776 <port id="0">
9777 <dim>1</dim>
9778 <dim>64</dim>
9779 <dim>80</dim>
9780 <dim>136</dim>
9781 </port>
9782 <port id="1"/>
9783 <port id="2"/>
9784 <port id="3"/>
9785 <port id="4"/>
9786 </input>
9787 <output>
9788 <port id="5" precision="FP16">
9789 <dim>1</dim>
9790 <dim>64</dim>
9791 <dim>80</dim>
9792 <dim>136</dim>
9793 </port>
9794 </output>
9795 </layer>
9796 <layer id="708" name="bottleneck2_5/add" type="Add" version="opset1">
9797 <data auto_broadcast="numpy"/>
9798 <input>
9799 <port id="0">
9800 <dim>1</dim>
9801 <dim>64</dim>
9802 <dim>80</dim>
9803 <dim>136</dim>
9804 </port>
9805 <port id="1">
9806 <dim>1</dim>
9807 <dim>64</dim>
9808 <dim>80</dim>
9809 <dim>136</dim>
9810 </port>
9811 </input>
9812 <output>
9813 <port id="2" names="bottleneck2_5/add" precision="FP16">
9814 <dim>1</dim>
9815 <dim>64</dim>
9816 <dim>80</dim>
9817 <dim>136</dim>
9818 </port>
9819 </output>
9820 </layer>
9821 <layer id="709" name="bottleneck2_5/fn/weights3085640373949" type="Const" version="opset1">
9822 <data element_type="f32" offset="1576" shape="1" size="4"/>
9823 <output>
9824 <port id="0" precision="FP32">
9825 <dim>1</dim>
9826 </port>
9827 </output>
9828 </layer>
9829 <layer id="710" name="bottleneck2_5/fn" type="PReLU" version="opset1">
9830 <input>
9831 <port id="0">
9832 <dim>1</dim>
9833 <dim>64</dim>
9834 <dim>80</dim>
9835 <dim>136</dim>
9836 </port>
9837 <port id="1">
9838 <dim>1</dim>
9839 </port>
9840 </input>
9841 <output>
9842 <port id="2" names="bottleneck2_5/add" precision="FP16">
9843 <dim>1</dim>
9844 <dim>64</dim>
9845 <dim>80</dim>
9846 <dim>136</dim>
9847 </port>
9848 </output>
9849 </layer>
9850 <layer id="711" name="bottleneck2_6/add/fq_input_0" type="FakeQuantize" version="opset1">
9851 <data auto_broadcast="numpy" levels="256"/>
9852 <input>
9853 <port id="0">
9854 <dim>1</dim>
9855 <dim>64</dim>
9856 <dim>80</dim>
9857 <dim>136</dim>
9858 </port>
9859 <port id="1"/>
9860 <port id="2"/>
9861 <port id="3"/>
9862 <port id="4"/>
9863 </input>
9864 <output>
9865 <port id="5" precision="FP16">
9866 <dim>1</dim>
9867 <dim>64</dim>
9868 <dim>80</dim>
9869 <dim>136</dim>
9870 </port>
9871 </output>
9872 </layer>
9873 <layer id="712" name="2914291819626" type="Const" version="opset1">
9874 <data element_type="f16" offset="23924" shape="" size="2"/>
9875 <output>
9876 <port id="0" precision="FP16"/>
9877 </output>
9878 </layer>
9879 <layer id="713" name="2915291920778" type="Const" version="opset1">
9880 <data element_type="f16" offset="23926" shape="" size="2"/>
9881 <output>
9882 <port id="0" precision="FP16"/>
9883 </output>
9884 </layer>
9885 <layer id="714" name="2916292022956" type="Const" version="opset1">
9886 <data element_type="f16" offset="23924" shape="" size="2"/>
9887 <output>
9888 <port id="0" precision="FP16"/>
9889 </output>
9890 </layer>
9891 <layer id="715" name="2917292121741" type="Const" version="opset1">
9892 <data element_type="f16" offset="23926" shape="" size="2"/>
9893 <output>
9894 <port id="0" precision="FP16"/>
9895 </output>
9896 </layer>
9897 <layer id="716" name="3984398821168" type="Const" version="opset1">
9898 <data element_type="f16" offset="23928" shape="" size="2"/>
9899 <output>
9900 <port id="0" precision="FP16"/>
9901 </output>
9902 </layer>
9903 <layer id="717" name="3985398919848" type="Const" version="opset1">
9904 <data element_type="f16" offset="23930" shape="" size="2"/>
9905 <output>
9906 <port id="0" precision="FP16"/>
9907 </output>
9908 </layer>
9909 <layer id="718" name="3986399020754" type="Const" version="opset1">
9910 <data element_type="f16" offset="23928" shape="" size="2"/>
9911 <output>
9912 <port id="0" precision="FP16"/>
9913 </output>
9914 </layer>
9915 <layer id="719" name="3987399122671" type="Const" version="opset1">
9916 <data element_type="f16" offset="23930" shape="" size="2"/>
9917 <output>
9918 <port id="0" precision="FP16"/>
9919 </output>
9920 </layer>
9921 <layer id="720" name="5464546820265" type="Const" version="opset1">
9922 <data element_type="f16" offset="23932" shape="1,16,1,1" size="32"/>
9923 <output>
9924 <port id="0" precision="FP16">
9925 <dim>1</dim>
9926 <dim>16</dim>
9927 <dim>1</dim>
9928 <dim>1</dim>
9929 </port>
9930 </output>
9931 </layer>
9932 <layer id="721" name="5465546919521" type="Const" version="opset1">
9933 <data element_type="f16" offset="23964" shape="1,16,1,1" size="32"/>
9934 <output>
9935 <port id="0" precision="FP16">
9936 <dim>1</dim>
9937 <dim>16</dim>
9938 <dim>1</dim>
9939 <dim>1</dim>
9940 </port>
9941 </output>
9942 </layer>
9943 <layer id="722" name="5466547021918" type="Const" version="opset1">
9944 <data element_type="f16" offset="23932" shape="1,16,1,1" size="32"/>
9945 <output>
9946 <port id="0" precision="FP16">
9947 <dim>1</dim>
9948 <dim>16</dim>
9949 <dim>1</dim>
9950 <dim>1</dim>
9951 </port>
9952 </output>
9953 </layer>
9954 <layer id="723" name="5467547121174" type="Const" version="opset1">
9955 <data element_type="f16" offset="23964" shape="1,16,1,1" size="32"/>
9956 <output>
9957 <port id="0" precision="FP16">
9958 <dim>1</dim>
9959 <dim>16</dim>
9960 <dim>1</dim>
9961 <dim>1</dim>
9962 </port>
9963 </output>
9964 </layer>
9965 <layer id="724" name="bottleneck2_6/dim_red/bn/mean/Fused_Mul__copy95110174/quantized1264019545" type="Const" version="opset1">
9966 <data element_type="i8" offset="23996" shape="16,64,1,1" size="1024"/>
9967 <output>
9968 <port id="0" precision="I8">
9969 <dim>16</dim>
9970 <dim>64</dim>
9971 <dim>1</dim>
9972 <dim>1</dim>
9973 </port>
9974 </output>
9975 </layer>
9976 <layer id="725" name="bottleneck2_6/dim_red/bn/mean/Fused_Mul__copy95110174/quantized/to_f16" type="Convert" version="opset1">
9977 <data destination_type="f16"/>
9978 <input>
9979 <port id="0">
9980 <dim>16</dim>
9981 <dim>64</dim>
9982 <dim>1</dim>
9983 <dim>1</dim>
9984 </port>
9985 </input>
9986 <output>
9987 <port id="1" precision="FP16">
9988 <dim>16</dim>
9989 <dim>64</dim>
9990 <dim>1</dim>
9991 <dim>1</dim>
9992 </port>
9993 </output>
9994 </layer>
9995 <layer id="726" name="bottleneck2_6/dim_red/conv/fq_weights_1/zero_point1265322986" type="Const" version="opset1">
9996 <data element_type="f16" offset="25020" shape="16,1,1,1" size="32"/>
9997 <output>
9998 <port id="0" precision="FP16">
9999 <dim>16</dim>
10000 <dim>1</dim>
10001 <dim>1</dim>
10002 <dim>1</dim>
10003 </port>
10004 </output>
10005 </layer>
10006 <layer id="727" name="bottleneck2_6/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
10007 <data auto_broadcast="numpy"/>
10008 <input>
10009 <port id="0">
10010 <dim>16</dim>
10011 <dim>64</dim>
10012 <dim>1</dim>
10013 <dim>1</dim>
10014 </port>
10015 <port id="1">
10016 <dim>16</dim>
10017 <dim>1</dim>
10018 <dim>1</dim>
10019 <dim>1</dim>
10020 </port>
10021 </input>
10022 <output>
10023 <port id="2" precision="FP16">
10024 <dim>16</dim>
10025 <dim>64</dim>
10026 <dim>1</dim>
10027 <dim>1</dim>
10028 </port>
10029 </output>
10030 </layer>
10031 <layer id="728" name="bottleneck2_6/dim_red/conv/fq_weights_1/scale1264821084" type="Const" version="opset1">
10032 <data element_type="f16" offset="25052" shape="16,1,1,1" size="32"/>
10033 <output>
10034 <port id="0" precision="FP16">
10035 <dim>16</dim>
10036 <dim>1</dim>
10037 <dim>1</dim>
10038 <dim>1</dim>
10039 </port>
10040 </output>
10041 </layer>
10042 <layer id="729" name="bottleneck2_6/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
10043 <data auto_broadcast="numpy"/>
10044 <input>
10045 <port id="0">
10046 <dim>16</dim>
10047 <dim>64</dim>
10048 <dim>1</dim>
10049 <dim>1</dim>
10050 </port>
10051 <port id="1">
10052 <dim>16</dim>
10053 <dim>1</dim>
10054 <dim>1</dim>
10055 <dim>1</dim>
10056 </port>
10057 </input>
10058 <output>
10059 <port id="2" precision="FP16">
10060 <dim>16</dim>
10061 <dim>64</dim>
10062 <dim>1</dim>
10063 <dim>1</dim>
10064 </port>
10065 </output>
10066 </layer>
10067 <layer id="730" name="bottleneck2_6/dim_red/conv" type="Convolution" version="opset1">
10068 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
10069 <input>
10070 <port id="0">
10071 <dim>1</dim>
10072 <dim>64</dim>
10073 <dim>80</dim>
10074 <dim>136</dim>
10075 </port>
10076 <port id="1">
10077 <dim>16</dim>
10078 <dim>64</dim>
10079 <dim>1</dim>
10080 <dim>1</dim>
10081 </port>
10082 </input>
10083 <output>
10084 <port id="2" precision="FP16">
10085 <dim>1</dim>
10086 <dim>16</dim>
10087 <dim>80</dim>
10088 <dim>136</dim>
10089 </port>
10090 </output>
10091 </layer>
10092 <layer id="731" name="data_add_239052391095321834" type="Const" version="opset1">
10093 <data element_type="f16" offset="25084" shape="1,16,1,1" size="32"/>
10094 <output>
10095 <port id="0" precision="FP16">
10096 <dim>1</dim>
10097 <dim>16</dim>
10098 <dim>1</dim>
10099 <dim>1</dim>
10100 </port>
10101 </output>
10102 </layer>
10103 <layer id="732" name="bottleneck2_6/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
10104 <data auto_broadcast="numpy"/>
10105 <input>
10106 <port id="0">
10107 <dim>1</dim>
10108 <dim>16</dim>
10109 <dim>80</dim>
10110 <dim>136</dim>
10111 </port>
10112 <port id="1">
10113 <dim>1</dim>
10114 <dim>16</dim>
10115 <dim>1</dim>
10116 <dim>1</dim>
10117 </port>
10118 </input>
10119 <output>
10120 <port id="2" names="bottleneck2_6/dim_red/conv" precision="FP16">
10121 <dim>1</dim>
10122 <dim>16</dim>
10123 <dim>80</dim>
10124 <dim>136</dim>
10125 </port>
10126 </output>
10127 </layer>
10128 <layer id="733" name="bottleneck2_6/dim_red/fn/weights3083640613955" type="Const" version="opset1">
10129 <data element_type="f32" offset="1576" shape="1" size="4"/>
10130 <output>
10131 <port id="0" precision="FP32">
10132 <dim>1</dim>
10133 </port>
10134 </output>
10135 </layer>
10136 <layer id="734" name="bottleneck2_6/dim_red/fn" type="PReLU" version="opset1">
10137 <input>
10138 <port id="0">
10139 <dim>1</dim>
10140 <dim>16</dim>
10141 <dim>80</dim>
10142 <dim>136</dim>
10143 </port>
10144 <port id="1">
10145 <dim>1</dim>
10146 </port>
10147 </input>
10148 <output>
10149 <port id="2" names="bottleneck2_6/dim_red/conv" precision="FP16">
10150 <dim>1</dim>
10151 <dim>16</dim>
10152 <dim>80</dim>
10153 <dim>136</dim>
10154 </port>
10155 </output>
10156 </layer>
10157 <layer id="735" name="bottleneck2_6/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
10158 <data auto_broadcast="numpy" levels="256"/>
10159 <input>
10160 <port id="0">
10161 <dim>1</dim>
10162 <dim>16</dim>
10163 <dim>80</dim>
10164 <dim>136</dim>
10165 </port>
10166 <port id="1">
10167 <dim>1</dim>
10168 <dim>16</dim>
10169 <dim>1</dim>
10170 <dim>1</dim>
10171 </port>
10172 <port id="2">
10173 <dim>1</dim>
10174 <dim>16</dim>
10175 <dim>1</dim>
10176 <dim>1</dim>
10177 </port>
10178 <port id="3">
10179 <dim>1</dim>
10180 <dim>16</dim>
10181 <dim>1</dim>
10182 <dim>1</dim>
10183 </port>
10184 <port id="4">
10185 <dim>1</dim>
10186 <dim>16</dim>
10187 <dim>1</dim>
10188 <dim>1</dim>
10189 </port>
10190 </input>
10191 <output>
10192 <port id="5" precision="FP16">
10193 <dim>1</dim>
10194 <dim>16</dim>
10195 <dim>80</dim>
10196 <dim>136</dim>
10197 </port>
10198 </output>
10199 </layer>
10200 <layer id="736" name="16931/value1693322713" type="Const" version="opset1">
10201 <data element_type="i64" offset="8036" shape="5" size="40"/>
10202 <output>
10203 <port id="0" precision="I64">
10204 <dim>5</dim>
10205 </port>
10206 </output>
10207 </layer>
10208 <layer id="737" name="bottleneck2_6/inner/dw1/bn/mean/Fused_Mul__copy95710177/quantized1388819623" type="Const" version="opset1">
10209 <data element_type="i8" offset="25116" shape="16,1,3,3" size="144"/>
10210 <output>
10211 <port id="0" precision="I8">
10212 <dim>16</dim>
10213 <dim>1</dim>
10214 <dim>3</dim>
10215 <dim>3</dim>
10216 </port>
10217 </output>
10218 </layer>
10219 <layer id="738" name="bottleneck2_6/inner/dw1/bn/mean/Fused_Mul__copy95710177/quantized/to_f16" type="Convert" version="opset1">
10220 <data destination_type="f16"/>
10221 <input>
10222 <port id="0">
10223 <dim>16</dim>
10224 <dim>1</dim>
10225 <dim>3</dim>
10226 <dim>3</dim>
10227 </port>
10228 </input>
10229 <output>
10230 <port id="1" precision="FP16">
10231 <dim>16</dim>
10232 <dim>1</dim>
10233 <dim>3</dim>
10234 <dim>3</dim>
10235 </port>
10236 </output>
10237 </layer>
10238 <layer id="739" name="bottleneck2_6/inner/dw1/conv/fq_weights_1/zero_point1390121903" type="Const" version="opset1">
10239 <data element_type="f16" offset="25260" shape="16,1,1,1" size="32"/>
10240 <output>
10241 <port id="0" precision="FP16">
10242 <dim>16</dim>
10243 <dim>1</dim>
10244 <dim>1</dim>
10245 <dim>1</dim>
10246 </port>
10247 </output>
10248 </layer>
10249 <layer id="740" name="bottleneck2_6/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
10250 <data auto_broadcast="numpy"/>
10251 <input>
10252 <port id="0">
10253 <dim>16</dim>
10254 <dim>1</dim>
10255 <dim>3</dim>
10256 <dim>3</dim>
10257 </port>
10258 <port id="1">
10259 <dim>16</dim>
10260 <dim>1</dim>
10261 <dim>1</dim>
10262 <dim>1</dim>
10263 </port>
10264 </input>
10265 <output>
10266 <port id="2" precision="FP16">
10267 <dim>16</dim>
10268 <dim>1</dim>
10269 <dim>3</dim>
10270 <dim>3</dim>
10271 </port>
10272 </output>
10273 </layer>
10274 <layer id="741" name="bottleneck2_6/inner/dw1/conv/fq_weights_1/scale1389619968" type="Const" version="opset1">
10275 <data element_type="f16" offset="25292" shape="16,1,1,1" size="32"/>
10276 <output>
10277 <port id="0" precision="FP16">
10278 <dim>16</dim>
10279 <dim>1</dim>
10280 <dim>1</dim>
10281 <dim>1</dim>
10282 </port>
10283 </output>
10284 </layer>
10285 <layer id="742" name="bottleneck2_6/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
10286 <data auto_broadcast="numpy"/>
10287 <input>
10288 <port id="0">
10289 <dim>16</dim>
10290 <dim>1</dim>
10291 <dim>3</dim>
10292 <dim>3</dim>
10293 </port>
10294 <port id="1">
10295 <dim>16</dim>
10296 <dim>1</dim>
10297 <dim>1</dim>
10298 <dim>1</dim>
10299 </port>
10300 </input>
10301 <output>
10302 <port id="2" precision="FP16">
10303 <dim>16</dim>
10304 <dim>1</dim>
10305 <dim>3</dim>
10306 <dim>3</dim>
10307 </port>
10308 </output>
10309 </layer>
10310 <layer id="743" name="16931" type="Reshape" version="opset1">
10311 <data special_zero="true"/>
10312 <input>
10313 <port id="0">
10314 <dim>16</dim>
10315 <dim>1</dim>
10316 <dim>3</dim>
10317 <dim>3</dim>
10318 </port>
10319 <port id="1">
10320 <dim>5</dim>
10321 </port>
10322 </input>
10323 <output>
10324 <port id="2" precision="FP16">
10325 <dim>16</dim>
10326 <dim>1</dim>
10327 <dim>1</dim>
10328 <dim>3</dim>
10329 <dim>3</dim>
10330 </port>
10331 </output>
10332 </layer>
10333 <layer id="744" name="bottleneck2_6/inner/dw1/conv" type="GroupConvolution" version="opset1">
10334 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
10335 <input>
10336 <port id="0">
10337 <dim>1</dim>
10338 <dim>16</dim>
10339 <dim>80</dim>
10340 <dim>136</dim>
10341 </port>
10342 <port id="1">
10343 <dim>16</dim>
10344 <dim>1</dim>
10345 <dim>1</dim>
10346 <dim>3</dim>
10347 <dim>3</dim>
10348 </port>
10349 </input>
10350 <output>
10351 <port id="2" precision="FP16">
10352 <dim>1</dim>
10353 <dim>16</dim>
10354 <dim>80</dim>
10355 <dim>136</dim>
10356 </port>
10357 </output>
10358 </layer>
10359 <layer id="745" name="data_add_239132391895919890" type="Const" version="opset1">
10360 <data element_type="f16" offset="25324" shape="1,16,1,1" size="32"/>
10361 <output>
10362 <port id="0" precision="FP16">
10363 <dim>1</dim>
10364 <dim>16</dim>
10365 <dim>1</dim>
10366 <dim>1</dim>
10367 </port>
10368 </output>
10369 </layer>
10370 <layer id="746" name="bottleneck2_6/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
10371 <data auto_broadcast="numpy"/>
10372 <input>
10373 <port id="0">
10374 <dim>1</dim>
10375 <dim>16</dim>
10376 <dim>80</dim>
10377 <dim>136</dim>
10378 </port>
10379 <port id="1">
10380 <dim>1</dim>
10381 <dim>16</dim>
10382 <dim>1</dim>
10383 <dim>1</dim>
10384 </port>
10385 </input>
10386 <output>
10387 <port id="2" names="bottleneck2_6/inner/dw1/conv" precision="FP16">
10388 <dim>1</dim>
10389 <dim>16</dim>
10390 <dim>80</dim>
10391 <dim>136</dim>
10392 </port>
10393 </output>
10394 </layer>
10395 <layer id="747" name="bottleneck2_6/inner/dw1/fn/weights3101239827961" type="Const" version="opset1">
10396 <data element_type="f32" offset="1576" shape="1" size="4"/>
10397 <output>
10398 <port id="0" precision="FP32">
10399 <dim>1</dim>
10400 </port>
10401 </output>
10402 </layer>
10403 <layer id="748" name="bottleneck2_6/inner/dw1/fn" type="PReLU" version="opset1">
10404 <input>
10405 <port id="0">
10406 <dim>1</dim>
10407 <dim>16</dim>
10408 <dim>80</dim>
10409 <dim>136</dim>
10410 </port>
10411 <port id="1">
10412 <dim>1</dim>
10413 </port>
10414 </input>
10415 <output>
10416 <port id="2" names="bottleneck2_6/inner/dw1/conv" precision="FP16">
10417 <dim>1</dim>
10418 <dim>16</dim>
10419 <dim>80</dim>
10420 <dim>136</dim>
10421 </port>
10422 </output>
10423 </layer>
10424 <layer id="749" name="bottleneck2_6/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
10425 <data auto_broadcast="numpy" levels="256"/>
10426 <input>
10427 <port id="0">
10428 <dim>1</dim>
10429 <dim>16</dim>
10430 <dim>80</dim>
10431 <dim>136</dim>
10432 </port>
10433 <port id="1"/>
10434 <port id="2"/>
10435 <port id="3"/>
10436 <port id="4"/>
10437 </input>
10438 <output>
10439 <port id="5" precision="FP16">
10440 <dim>1</dim>
10441 <dim>16</dim>
10442 <dim>80</dim>
10443 <dim>136</dim>
10444 </port>
10445 </output>
10446 </layer>
10447 <layer id="750" name="bottleneck2_6/dim_inc/bn/mean/Fused_Mul__copy96310180/quantized1309619398" type="Const" version="opset1">
10448 <data element_type="i8" offset="25356" shape="64,16,1,1" size="1024"/>
10449 <output>
10450 <port id="0" precision="I8">
10451 <dim>64</dim>
10452 <dim>16</dim>
10453 <dim>1</dim>
10454 <dim>1</dim>
10455 </port>
10456 </output>
10457 </layer>
10458 <layer id="751" name="bottleneck2_6/dim_inc/bn/mean/Fused_Mul__copy96310180/quantized/to_f16" type="Convert" version="opset1">
10459 <data destination_type="f16"/>
10460 <input>
10461 <port id="0">
10462 <dim>64</dim>
10463 <dim>16</dim>
10464 <dim>1</dim>
10465 <dim>1</dim>
10466 </port>
10467 </input>
10468 <output>
10469 <port id="1" precision="FP16">
10470 <dim>64</dim>
10471 <dim>16</dim>
10472 <dim>1</dim>
10473 <dim>1</dim>
10474 </port>
10475 </output>
10476 </layer>
10477 <layer id="752" name="bottleneck2_6/dim_inc/conv/fq_weights_1/zero_point1310921357" type="Const" version="opset1">
10478 <data element_type="f16" offset="26380" shape="64,1,1,1" size="128"/>
10479 <output>
10480 <port id="0" precision="FP16">
10481 <dim>64</dim>
10482 <dim>1</dim>
10483 <dim>1</dim>
10484 <dim>1</dim>
10485 </port>
10486 </output>
10487 </layer>
10488 <layer id="753" name="bottleneck2_6/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
10489 <data auto_broadcast="numpy"/>
10490 <input>
10491 <port id="0">
10492 <dim>64</dim>
10493 <dim>16</dim>
10494 <dim>1</dim>
10495 <dim>1</dim>
10496 </port>
10497 <port id="1">
10498 <dim>64</dim>
10499 <dim>1</dim>
10500 <dim>1</dim>
10501 <dim>1</dim>
10502 </port>
10503 </input>
10504 <output>
10505 <port id="2" precision="FP16">
10506 <dim>64</dim>
10507 <dim>16</dim>
10508 <dim>1</dim>
10509 <dim>1</dim>
10510 </port>
10511 </output>
10512 </layer>
10513 <layer id="754" name="bottleneck2_6/dim_inc/conv/fq_weights_1/scale1310421198" type="Const" version="opset1">
10514 <data element_type="f16" offset="26508" shape="64,1,1,1" size="128"/>
10515 <output>
10516 <port id="0" precision="FP16">
10517 <dim>64</dim>
10518 <dim>1</dim>
10519 <dim>1</dim>
10520 <dim>1</dim>
10521 </port>
10522 </output>
10523 </layer>
10524 <layer id="755" name="bottleneck2_6/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
10525 <data auto_broadcast="numpy"/>
10526 <input>
10527 <port id="0">
10528 <dim>64</dim>
10529 <dim>16</dim>
10530 <dim>1</dim>
10531 <dim>1</dim>
10532 </port>
10533 <port id="1">
10534 <dim>64</dim>
10535 <dim>1</dim>
10536 <dim>1</dim>
10537 <dim>1</dim>
10538 </port>
10539 </input>
10540 <output>
10541 <port id="2" precision="FP16">
10542 <dim>64</dim>
10543 <dim>16</dim>
10544 <dim>1</dim>
10545 <dim>1</dim>
10546 </port>
10547 </output>
10548 </layer>
10549 <layer id="756" name="bottleneck2_6/dim_inc/conv" type="Convolution" version="opset1">
10550 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
10551 <input>
10552 <port id="0">
10553 <dim>1</dim>
10554 <dim>16</dim>
10555 <dim>80</dim>
10556 <dim>136</dim>
10557 </port>
10558 <port id="1">
10559 <dim>64</dim>
10560 <dim>16</dim>
10561 <dim>1</dim>
10562 <dim>1</dim>
10563 </port>
10564 </input>
10565 <output>
10566 <port id="2" precision="FP16">
10567 <dim>1</dim>
10568 <dim>64</dim>
10569 <dim>80</dim>
10570 <dim>136</dim>
10571 </port>
10572 </output>
10573 </layer>
10574 <layer id="757" name="data_add_239212392696522050" type="Const" version="opset1">
10575 <data element_type="f16" offset="26636" shape="1,64,1,1" size="128"/>
10576 <output>
10577 <port id="0" precision="FP16">
10578 <dim>1</dim>
10579 <dim>64</dim>
10580 <dim>1</dim>
10581 <dim>1</dim>
10582 </port>
10583 </output>
10584 </layer>
10585 <layer id="758" name="bottleneck2_6/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
10586 <data auto_broadcast="numpy"/>
10587 <input>
10588 <port id="0">
10589 <dim>1</dim>
10590 <dim>64</dim>
10591 <dim>80</dim>
10592 <dim>136</dim>
10593 </port>
10594 <port id="1">
10595 <dim>1</dim>
10596 <dim>64</dim>
10597 <dim>1</dim>
10598 <dim>1</dim>
10599 </port>
10600 </input>
10601 <output>
10602 <port id="2" names="bottleneck2_6/dim_inc/conv" precision="FP16">
10603 <dim>1</dim>
10604 <dim>64</dim>
10605 <dim>80</dim>
10606 <dim>136</dim>
10607 </port>
10608 </output>
10609 </layer>
10610 <layer id="759" name="bottleneck2_6/add/fq_input_1" type="FakeQuantize" version="opset1">
10611 <data auto_broadcast="numpy" levels="256"/>
10612 <input>
10613 <port id="0">
10614 <dim>1</dim>
10615 <dim>64</dim>
10616 <dim>80</dim>
10617 <dim>136</dim>
10618 </port>
10619 <port id="1"/>
10620 <port id="2"/>
10621 <port id="3"/>
10622 <port id="4"/>
10623 </input>
10624 <output>
10625 <port id="5" precision="FP16">
10626 <dim>1</dim>
10627 <dim>64</dim>
10628 <dim>80</dim>
10629 <dim>136</dim>
10630 </port>
10631 </output>
10632 </layer>
10633 <layer id="760" name="bottleneck2_6/add" type="Add" version="opset1">
10634 <data auto_broadcast="numpy"/>
10635 <input>
10636 <port id="0">
10637 <dim>1</dim>
10638 <dim>64</dim>
10639 <dim>80</dim>
10640 <dim>136</dim>
10641 </port>
10642 <port id="1">
10643 <dim>1</dim>
10644 <dim>64</dim>
10645 <dim>80</dim>
10646 <dim>136</dim>
10647 </port>
10648 </input>
10649 <output>
10650 <port id="2" names="bottleneck2_6/add" precision="FP16">
10651 <dim>1</dim>
10652 <dim>64</dim>
10653 <dim>80</dim>
10654 <dim>136</dim>
10655 </port>
10656 </output>
10657 </layer>
10658 <layer id="761" name="bottleneck2_6/fn/weights3076040430968" type="Const" version="opset1">
10659 <data element_type="f32" offset="1576" shape="1" size="4"/>
10660 <output>
10661 <port id="0" precision="FP32">
10662 <dim>1</dim>
10663 </port>
10664 </output>
10665 </layer>
10666 <layer id="762" name="bottleneck2_6/fn" type="PReLU" version="opset1">
10667 <input>
10668 <port id="0">
10669 <dim>1</dim>
10670 <dim>64</dim>
10671 <dim>80</dim>
10672 <dim>136</dim>
10673 </port>
10674 <port id="1">
10675 <dim>1</dim>
10676 </port>
10677 </input>
10678 <output>
10679 <port id="2" names="bottleneck2_6/add" precision="FP16">
10680 <dim>1</dim>
10681 <dim>64</dim>
10682 <dim>80</dim>
10683 <dim>136</dim>
10684 </port>
10685 </output>
10686 </layer>
10687 <layer id="763" name="bottleneck2_7/add/fq_input_0" type="FakeQuantize" version="opset1">
10688 <data auto_broadcast="numpy" levels="256"/>
10689 <input>
10690 <port id="0">
10691 <dim>1</dim>
10692 <dim>64</dim>
10693 <dim>80</dim>
10694 <dim>136</dim>
10695 </port>
10696 <port id="1"/>
10697 <port id="2"/>
10698 <port id="3"/>
10699 <port id="4"/>
10700 </input>
10701 <output>
10702 <port id="5" precision="FP16">
10703 <dim>1</dim>
10704 <dim>64</dim>
10705 <dim>80</dim>
10706 <dim>136</dim>
10707 </port>
10708 </output>
10709 </layer>
10710 <layer id="764" name="4454445822257" type="Const" version="opset1">
10711 <data element_type="f16" offset="26764" shape="" size="2"/>
10712 <output>
10713 <port id="0" precision="FP16"/>
10714 </output>
10715 </layer>
10716 <layer id="765" name="4455445921093" type="Const" version="opset1">
10717 <data element_type="f16" offset="26766" shape="" size="2"/>
10718 <output>
10719 <port id="0" precision="FP16"/>
10720 </output>
10721 </layer>
10722 <layer id="766" name="4456446019599" type="Const" version="opset1">
10723 <data element_type="f16" offset="26764" shape="" size="2"/>
10724 <output>
10725 <port id="0" precision="FP16"/>
10726 </output>
10727 </layer>
10728 <layer id="767" name="4457446122842" type="Const" version="opset1">
10729 <data element_type="f16" offset="26766" shape="" size="2"/>
10730 <output>
10731 <port id="0" precision="FP16"/>
10732 </output>
10733 </layer>
10734 <layer id="768" name="5364536821585" type="Const" version="opset1">
10735 <data element_type="f16" offset="26768" shape="" size="2"/>
10736 <output>
10737 <port id="0" precision="FP16"/>
10738 </output>
10739 </layer>
10740 <layer id="769" name="5365536921744" type="Const" version="opset1">
10741 <data element_type="f16" offset="26770" shape="" size="2"/>
10742 <output>
10743 <port id="0" precision="FP16"/>
10744 </output>
10745 </layer>
10746 <layer id="770" name="5366537020397" type="Const" version="opset1">
10747 <data element_type="f16" offset="26768" shape="" size="2"/>
10748 <output>
10749 <port id="0" precision="FP16"/>
10750 </output>
10751 </layer>
10752 <layer id="771" name="5367537121087" type="Const" version="opset1">
10753 <data element_type="f16" offset="26770" shape="" size="2"/>
10754 <output>
10755 <port id="0" precision="FP16"/>
10756 </output>
10757 </layer>
10758 <layer id="772" name="4884488819671" type="Const" version="opset1">
10759 <data element_type="f16" offset="26772" shape="1,16,1,1" size="32"/>
10760 <output>
10761 <port id="0" precision="FP16">
10762 <dim>1</dim>
10763 <dim>16</dim>
10764 <dim>1</dim>
10765 <dim>1</dim>
10766 </port>
10767 </output>
10768 </layer>
10769 <layer id="773" name="4885488919494" type="Const" version="opset1">
10770 <data element_type="f16" offset="26804" shape="1,16,1,1" size="32"/>
10771 <output>
10772 <port id="0" precision="FP16">
10773 <dim>1</dim>
10774 <dim>16</dim>
10775 <dim>1</dim>
10776 <dim>1</dim>
10777 </port>
10778 </output>
10779 </layer>
10780 <layer id="774" name="4886489022326" type="Const" version="opset1">
10781 <data element_type="f16" offset="26772" shape="1,16,1,1" size="32"/>
10782 <output>
10783 <port id="0" precision="FP16">
10784 <dim>1</dim>
10785 <dim>16</dim>
10786 <dim>1</dim>
10787 <dim>1</dim>
10788 </port>
10789 </output>
10790 </layer>
10791 <layer id="775" name="4887489119860" type="Const" version="opset1">
10792 <data element_type="f16" offset="26804" shape="1,16,1,1" size="32"/>
10793 <output>
10794 <port id="0" precision="FP16">
10795 <dim>1</dim>
10796 <dim>16</dim>
10797 <dim>1</dim>
10798 <dim>1</dim>
10799 </port>
10800 </output>
10801 </layer>
10802 <layer id="776" name="bottleneck2_7/dim_red/bn/mean/Fused_Mul__copy97010183/quantized1153619740" type="Const" version="opset1">
10803 <data element_type="i8" offset="26836" shape="16,64,1,1" size="1024"/>
10804 <output>
10805 <port id="0" precision="I8">
10806 <dim>16</dim>
10807 <dim>64</dim>
10808 <dim>1</dim>
10809 <dim>1</dim>
10810 </port>
10811 </output>
10812 </layer>
10813 <layer id="777" name="bottleneck2_7/dim_red/bn/mean/Fused_Mul__copy97010183/quantized/to_f16" type="Convert" version="opset1">
10814 <data destination_type="f16"/>
10815 <input>
10816 <port id="0">
10817 <dim>16</dim>
10818 <dim>64</dim>
10819 <dim>1</dim>
10820 <dim>1</dim>
10821 </port>
10822 </input>
10823 <output>
10824 <port id="1" precision="FP16">
10825 <dim>16</dim>
10826 <dim>64</dim>
10827 <dim>1</dim>
10828 <dim>1</dim>
10829 </port>
10830 </output>
10831 </layer>
10832 <layer id="778" name="bottleneck2_7/dim_red/conv/fq_weights_1/zero_point1154922197" type="Const" version="opset1">
10833 <data element_type="f16" offset="27860" shape="16,1,1,1" size="32"/>
10834 <output>
10835 <port id="0" precision="FP16">
10836 <dim>16</dim>
10837 <dim>1</dim>
10838 <dim>1</dim>
10839 <dim>1</dim>
10840 </port>
10841 </output>
10842 </layer>
10843 <layer id="779" name="bottleneck2_7/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
10844 <data auto_broadcast="numpy"/>
10845 <input>
10846 <port id="0">
10847 <dim>16</dim>
10848 <dim>64</dim>
10849 <dim>1</dim>
10850 <dim>1</dim>
10851 </port>
10852 <port id="1">
10853 <dim>16</dim>
10854 <dim>1</dim>
10855 <dim>1</dim>
10856 <dim>1</dim>
10857 </port>
10858 </input>
10859 <output>
10860 <port id="2" precision="FP16">
10861 <dim>16</dim>
10862 <dim>64</dim>
10863 <dim>1</dim>
10864 <dim>1</dim>
10865 </port>
10866 </output>
10867 </layer>
10868 <layer id="780" name="bottleneck2_7/dim_red/conv/fq_weights_1/scale1154422755" type="Const" version="opset1">
10869 <data element_type="f16" offset="27892" shape="16,1,1,1" size="32"/>
10870 <output>
10871 <port id="0" precision="FP16">
10872 <dim>16</dim>
10873 <dim>1</dim>
10874 <dim>1</dim>
10875 <dim>1</dim>
10876 </port>
10877 </output>
10878 </layer>
10879 <layer id="781" name="bottleneck2_7/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
10880 <data auto_broadcast="numpy"/>
10881 <input>
10882 <port id="0">
10883 <dim>16</dim>
10884 <dim>64</dim>
10885 <dim>1</dim>
10886 <dim>1</dim>
10887 </port>
10888 <port id="1">
10889 <dim>16</dim>
10890 <dim>1</dim>
10891 <dim>1</dim>
10892 <dim>1</dim>
10893 </port>
10894 </input>
10895 <output>
10896 <port id="2" precision="FP16">
10897 <dim>16</dim>
10898 <dim>64</dim>
10899 <dim>1</dim>
10900 <dim>1</dim>
10901 </port>
10902 </output>
10903 </layer>
10904 <layer id="782" name="bottleneck2_7/dim_red/conv" type="Convolution" version="opset1">
10905 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
10906 <input>
10907 <port id="0">
10908 <dim>1</dim>
10909 <dim>64</dim>
10910 <dim>80</dim>
10911 <dim>136</dim>
10912 </port>
10913 <port id="1">
10914 <dim>16</dim>
10915 <dim>64</dim>
10916 <dim>1</dim>
10917 <dim>1</dim>
10918 </port>
10919 </input>
10920 <output>
10921 <port id="2" precision="FP16">
10922 <dim>1</dim>
10923 <dim>16</dim>
10924 <dim>80</dim>
10925 <dim>136</dim>
10926 </port>
10927 </output>
10928 </layer>
10929 <layer id="783" name="data_add_239292393497221297" type="Const" version="opset1">
10930 <data element_type="f16" offset="27924" shape="1,16,1,1" size="32"/>
10931 <output>
10932 <port id="0" precision="FP16">
10933 <dim>1</dim>
10934 <dim>16</dim>
10935 <dim>1</dim>
10936 <dim>1</dim>
10937 </port>
10938 </output>
10939 </layer>
10940 <layer id="784" name="bottleneck2_7/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
10941 <data auto_broadcast="numpy"/>
10942 <input>
10943 <port id="0">
10944 <dim>1</dim>
10945 <dim>16</dim>
10946 <dim>80</dim>
10947 <dim>136</dim>
10948 </port>
10949 <port id="1">
10950 <dim>1</dim>
10951 <dim>16</dim>
10952 <dim>1</dim>
10953 <dim>1</dim>
10954 </port>
10955 </input>
10956 <output>
10957 <port id="2" names="bottleneck2_7/dim_red/conv" precision="FP16">
10958 <dim>1</dim>
10959 <dim>16</dim>
10960 <dim>80</dim>
10961 <dim>136</dim>
10962 </port>
10963 </output>
10964 </layer>
10965 <layer id="785" name="bottleneck2_7/dim_red/fn/weights3110840142974" type="Const" version="opset1">
10966 <data element_type="f32" offset="1576" shape="1" size="4"/>
10967 <output>
10968 <port id="0" precision="FP32">
10969 <dim>1</dim>
10970 </port>
10971 </output>
10972 </layer>
10973 <layer id="786" name="bottleneck2_7/dim_red/fn" type="PReLU" version="opset1">
10974 <input>
10975 <port id="0">
10976 <dim>1</dim>
10977 <dim>16</dim>
10978 <dim>80</dim>
10979 <dim>136</dim>
10980 </port>
10981 <port id="1">
10982 <dim>1</dim>
10983 </port>
10984 </input>
10985 <output>
10986 <port id="2" names="bottleneck2_7/dim_red/conv" precision="FP16">
10987 <dim>1</dim>
10988 <dim>16</dim>
10989 <dim>80</dim>
10990 <dim>136</dim>
10991 </port>
10992 </output>
10993 </layer>
10994 <layer id="787" name="bottleneck2_7/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
10995 <data auto_broadcast="numpy" levels="256"/>
10996 <input>
10997 <port id="0">
10998 <dim>1</dim>
10999 <dim>16</dim>
11000 <dim>80</dim>
11001 <dim>136</dim>
11002 </port>
11003 <port id="1">
11004 <dim>1</dim>
11005 <dim>16</dim>
11006 <dim>1</dim>
11007 <dim>1</dim>
11008 </port>
11009 <port id="2">
11010 <dim>1</dim>
11011 <dim>16</dim>
11012 <dim>1</dim>
11013 <dim>1</dim>
11014 </port>
11015 <port id="3">
11016 <dim>1</dim>
11017 <dim>16</dim>
11018 <dim>1</dim>
11019 <dim>1</dim>
11020 </port>
11021 <port id="4">
11022 <dim>1</dim>
11023 <dim>16</dim>
11024 <dim>1</dim>
11025 <dim>1</dim>
11026 </port>
11027 </input>
11028 <output>
11029 <port id="5" precision="FP16">
11030 <dim>1</dim>
11031 <dim>16</dim>
11032 <dim>80</dim>
11033 <dim>136</dim>
11034 </port>
11035 </output>
11036 </layer>
11037 <layer id="788" name="16911/value1691319764" type="Const" version="opset1">
11038 <data element_type="i64" offset="8036" shape="5" size="40"/>
11039 <output>
11040 <port id="0" precision="I64">
11041 <dim>5</dim>
11042 </port>
11043 </output>
11044 </layer>
11045 <layer id="789" name="bottleneck2_7/inner/dw1/bn/mean/Fused_Mul__copy97610186/quantized1321619503" type="Const" version="opset1">
11046 <data element_type="i8" offset="27956" shape="16,1,3,3" size="144"/>
11047 <output>
11048 <port id="0" precision="I8">
11049 <dim>16</dim>
11050 <dim>1</dim>
11051 <dim>3</dim>
11052 <dim>3</dim>
11053 </port>
11054 </output>
11055 </layer>
11056 <layer id="790" name="bottleneck2_7/inner/dw1/bn/mean/Fused_Mul__copy97610186/quantized/to_f16" type="Convert" version="opset1">
11057 <data destination_type="f16"/>
11058 <input>
11059 <port id="0">
11060 <dim>16</dim>
11061 <dim>1</dim>
11062 <dim>3</dim>
11063 <dim>3</dim>
11064 </port>
11065 </input>
11066 <output>
11067 <port id="1" precision="FP16">
11068 <dim>16</dim>
11069 <dim>1</dim>
11070 <dim>3</dim>
11071 <dim>3</dim>
11072 </port>
11073 </output>
11074 </layer>
11075 <layer id="791" name="bottleneck2_7/inner/dw1/conv/fq_weights_1/zero_point1322920703" type="Const" version="opset1">
11076 <data element_type="f16" offset="28100" shape="16,1,1,1" size="32"/>
11077 <output>
11078 <port id="0" precision="FP16">
11079 <dim>16</dim>
11080 <dim>1</dim>
11081 <dim>1</dim>
11082 <dim>1</dim>
11083 </port>
11084 </output>
11085 </layer>
11086 <layer id="792" name="bottleneck2_7/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
11087 <data auto_broadcast="numpy"/>
11088 <input>
11089 <port id="0">
11090 <dim>16</dim>
11091 <dim>1</dim>
11092 <dim>3</dim>
11093 <dim>3</dim>
11094 </port>
11095 <port id="1">
11096 <dim>16</dim>
11097 <dim>1</dim>
11098 <dim>1</dim>
11099 <dim>1</dim>
11100 </port>
11101 </input>
11102 <output>
11103 <port id="2" precision="FP16">
11104 <dim>16</dim>
11105 <dim>1</dim>
11106 <dim>3</dim>
11107 <dim>3</dim>
11108 </port>
11109 </output>
11110 </layer>
11111 <layer id="793" name="bottleneck2_7/inner/dw1/conv/fq_weights_1/scale1322419530" type="Const" version="opset1">
11112 <data element_type="f16" offset="28132" shape="16,1,1,1" size="32"/>
11113 <output>
11114 <port id="0" precision="FP16">
11115 <dim>16</dim>
11116 <dim>1</dim>
11117 <dim>1</dim>
11118 <dim>1</dim>
11119 </port>
11120 </output>
11121 </layer>
11122 <layer id="794" name="bottleneck2_7/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
11123 <data auto_broadcast="numpy"/>
11124 <input>
11125 <port id="0">
11126 <dim>16</dim>
11127 <dim>1</dim>
11128 <dim>3</dim>
11129 <dim>3</dim>
11130 </port>
11131 <port id="1">
11132 <dim>16</dim>
11133 <dim>1</dim>
11134 <dim>1</dim>
11135 <dim>1</dim>
11136 </port>
11137 </input>
11138 <output>
11139 <port id="2" precision="FP16">
11140 <dim>16</dim>
11141 <dim>1</dim>
11142 <dim>3</dim>
11143 <dim>3</dim>
11144 </port>
11145 </output>
11146 </layer>
11147 <layer id="795" name="16911" type="Reshape" version="opset1">
11148 <data special_zero="true"/>
11149 <input>
11150 <port id="0">
11151 <dim>16</dim>
11152 <dim>1</dim>
11153 <dim>3</dim>
11154 <dim>3</dim>
11155 </port>
11156 <port id="1">
11157 <dim>5</dim>
11158 </port>
11159 </input>
11160 <output>
11161 <port id="2" precision="FP16">
11162 <dim>16</dim>
11163 <dim>1</dim>
11164 <dim>1</dim>
11165 <dim>3</dim>
11166 <dim>3</dim>
11167 </port>
11168 </output>
11169 </layer>
11170 <layer id="796" name="bottleneck2_7/inner/dw1/conv" type="GroupConvolution" version="opset1">
11171 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
11172 <input>
11173 <port id="0">
11174 <dim>1</dim>
11175 <dim>16</dim>
11176 <dim>80</dim>
11177 <dim>136</dim>
11178 </port>
11179 <port id="1">
11180 <dim>16</dim>
11181 <dim>1</dim>
11182 <dim>1</dim>
11183 <dim>3</dim>
11184 <dim>3</dim>
11185 </port>
11186 </input>
11187 <output>
11188 <port id="2" precision="FP16">
11189 <dim>1</dim>
11190 <dim>16</dim>
11191 <dim>80</dim>
11192 <dim>136</dim>
11193 </port>
11194 </output>
11195 </layer>
11196 <layer id="797" name="data_add_239372394297821825" type="Const" version="opset1">
11197 <data element_type="f16" offset="28164" shape="1,16,1,1" size="32"/>
11198 <output>
11199 <port id="0" precision="FP16">
11200 <dim>1</dim>
11201 <dim>16</dim>
11202 <dim>1</dim>
11203 <dim>1</dim>
11204 </port>
11205 </output>
11206 </layer>
11207 <layer id="798" name="bottleneck2_7/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
11208 <data auto_broadcast="numpy"/>
11209 <input>
11210 <port id="0">
11211 <dim>1</dim>
11212 <dim>16</dim>
11213 <dim>80</dim>
11214 <dim>136</dim>
11215 </port>
11216 <port id="1">
11217 <dim>1</dim>
11218 <dim>16</dim>
11219 <dim>1</dim>
11220 <dim>1</dim>
11221 </port>
11222 </input>
11223 <output>
11224 <port id="2" names="bottleneck2_7/inner/dw1/conv" precision="FP16">
11225 <dim>1</dim>
11226 <dim>16</dim>
11227 <dim>80</dim>
11228 <dim>136</dim>
11229 </port>
11230 </output>
11231 </layer>
11232 <layer id="799" name="bottleneck2_7/inner/dw1/fn/weights3088040439980" type="Const" version="opset1">
11233 <data element_type="f32" offset="1576" shape="1" size="4"/>
11234 <output>
11235 <port id="0" precision="FP32">
11236 <dim>1</dim>
11237 </port>
11238 </output>
11239 </layer>
11240 <layer id="800" name="bottleneck2_7/inner/dw1/fn" type="PReLU" version="opset1">
11241 <input>
11242 <port id="0">
11243 <dim>1</dim>
11244 <dim>16</dim>
11245 <dim>80</dim>
11246 <dim>136</dim>
11247 </port>
11248 <port id="1">
11249 <dim>1</dim>
11250 </port>
11251 </input>
11252 <output>
11253 <port id="2" names="bottleneck2_7/inner/dw1/conv" precision="FP16">
11254 <dim>1</dim>
11255 <dim>16</dim>
11256 <dim>80</dim>
11257 <dim>136</dim>
11258 </port>
11259 </output>
11260 </layer>
11261 <layer id="801" name="bottleneck2_7/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
11262 <data auto_broadcast="numpy" levels="256"/>
11263 <input>
11264 <port id="0">
11265 <dim>1</dim>
11266 <dim>16</dim>
11267 <dim>80</dim>
11268 <dim>136</dim>
11269 </port>
11270 <port id="1"/>
11271 <port id="2"/>
11272 <port id="3"/>
11273 <port id="4"/>
11274 </input>
11275 <output>
11276 <port id="5" precision="FP16">
11277 <dim>1</dim>
11278 <dim>16</dim>
11279 <dim>80</dim>
11280 <dim>136</dim>
11281 </port>
11282 </output>
11283 </layer>
11284 <layer id="802" name="bottleneck2_7/dim_inc/bn/mean/Fused_Mul__copy98210189/quantized1211221822" type="Const" version="opset1">
11285 <data element_type="i8" offset="28196" shape="64,16,1,1" size="1024"/>
11286 <output>
11287 <port id="0" precision="I8">
11288 <dim>64</dim>
11289 <dim>16</dim>
11290 <dim>1</dim>
11291 <dim>1</dim>
11292 </port>
11293 </output>
11294 </layer>
11295 <layer id="803" name="bottleneck2_7/dim_inc/bn/mean/Fused_Mul__copy98210189/quantized/to_f16" type="Convert" version="opset1">
11296 <data destination_type="f16"/>
11297 <input>
11298 <port id="0">
11299 <dim>64</dim>
11300 <dim>16</dim>
11301 <dim>1</dim>
11302 <dim>1</dim>
11303 </port>
11304 </input>
11305 <output>
11306 <port id="1" precision="FP16">
11307 <dim>64</dim>
11308 <dim>16</dim>
11309 <dim>1</dim>
11310 <dim>1</dim>
11311 </port>
11312 </output>
11313 </layer>
11314 <layer id="804" name="bottleneck2_7/dim_inc/conv/fq_weights_1/zero_point1212522542" type="Const" version="opset1">
11315 <data element_type="f16" offset="29220" shape="64,1,1,1" size="128"/>
11316 <output>
11317 <port id="0" precision="FP16">
11318 <dim>64</dim>
11319 <dim>1</dim>
11320 <dim>1</dim>
11321 <dim>1</dim>
11322 </port>
11323 </output>
11324 </layer>
11325 <layer id="805" name="bottleneck2_7/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
11326 <data auto_broadcast="numpy"/>
11327 <input>
11328 <port id="0">
11329 <dim>64</dim>
11330 <dim>16</dim>
11331 <dim>1</dim>
11332 <dim>1</dim>
11333 </port>
11334 <port id="1">
11335 <dim>64</dim>
11336 <dim>1</dim>
11337 <dim>1</dim>
11338 <dim>1</dim>
11339 </port>
11340 </input>
11341 <output>
11342 <port id="2" precision="FP16">
11343 <dim>64</dim>
11344 <dim>16</dim>
11345 <dim>1</dim>
11346 <dim>1</dim>
11347 </port>
11348 </output>
11349 </layer>
11350 <layer id="806" name="bottleneck2_7/dim_inc/conv/fq_weights_1/scale1212022902" type="Const" version="opset1">
11351 <data element_type="f16" offset="29348" shape="64,1,1,1" size="128"/>
11352 <output>
11353 <port id="0" precision="FP16">
11354 <dim>64</dim>
11355 <dim>1</dim>
11356 <dim>1</dim>
11357 <dim>1</dim>
11358 </port>
11359 </output>
11360 </layer>
11361 <layer id="807" name="bottleneck2_7/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
11362 <data auto_broadcast="numpy"/>
11363 <input>
11364 <port id="0">
11365 <dim>64</dim>
11366 <dim>16</dim>
11367 <dim>1</dim>
11368 <dim>1</dim>
11369 </port>
11370 <port id="1">
11371 <dim>64</dim>
11372 <dim>1</dim>
11373 <dim>1</dim>
11374 <dim>1</dim>
11375 </port>
11376 </input>
11377 <output>
11378 <port id="2" precision="FP16">
11379 <dim>64</dim>
11380 <dim>16</dim>
11381 <dim>1</dim>
11382 <dim>1</dim>
11383 </port>
11384 </output>
11385 </layer>
11386 <layer id="808" name="bottleneck2_7/dim_inc/conv" type="Convolution" version="opset1">
11387 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
11388 <input>
11389 <port id="0">
11390 <dim>1</dim>
11391 <dim>16</dim>
11392 <dim>80</dim>
11393 <dim>136</dim>
11394 </port>
11395 <port id="1">
11396 <dim>64</dim>
11397 <dim>16</dim>
11398 <dim>1</dim>
11399 <dim>1</dim>
11400 </port>
11401 </input>
11402 <output>
11403 <port id="2" precision="FP16">
11404 <dim>1</dim>
11405 <dim>64</dim>
11406 <dim>80</dim>
11407 <dim>136</dim>
11408 </port>
11409 </output>
11410 </layer>
11411 <layer id="809" name="data_add_239452395098422182" type="Const" version="opset1">
11412 <data element_type="f16" offset="29476" shape="1,64,1,1" size="128"/>
11413 <output>
11414 <port id="0" precision="FP16">
11415 <dim>1</dim>
11416 <dim>64</dim>
11417 <dim>1</dim>
11418 <dim>1</dim>
11419 </port>
11420 </output>
11421 </layer>
11422 <layer id="810" name="bottleneck2_7/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
11423 <data auto_broadcast="numpy"/>
11424 <input>
11425 <port id="0">
11426 <dim>1</dim>
11427 <dim>64</dim>
11428 <dim>80</dim>
11429 <dim>136</dim>
11430 </port>
11431 <port id="1">
11432 <dim>1</dim>
11433 <dim>64</dim>
11434 <dim>1</dim>
11435 <dim>1</dim>
11436 </port>
11437 </input>
11438 <output>
11439 <port id="2" names="bottleneck2_7/dim_inc/conv" precision="FP16">
11440 <dim>1</dim>
11441 <dim>64</dim>
11442 <dim>80</dim>
11443 <dim>136</dim>
11444 </port>
11445 </output>
11446 </layer>
11447 <layer id="811" name="bottleneck2_7/add/fq_input_1" type="FakeQuantize" version="opset1">
11448 <data auto_broadcast="numpy" levels="256"/>
11449 <input>
11450 <port id="0">
11451 <dim>1</dim>
11452 <dim>64</dim>
11453 <dim>80</dim>
11454 <dim>136</dim>
11455 </port>
11456 <port id="1"/>
11457 <port id="2"/>
11458 <port id="3"/>
11459 <port id="4"/>
11460 </input>
11461 <output>
11462 <port id="5" precision="FP16">
11463 <dim>1</dim>
11464 <dim>64</dim>
11465 <dim>80</dim>
11466 <dim>136</dim>
11467 </port>
11468 </output>
11469 </layer>
11470 <layer id="812" name="bottleneck2_7/add" type="Add" version="opset1">
11471 <data auto_broadcast="numpy"/>
11472 <input>
11473 <port id="0">
11474 <dim>1</dim>
11475 <dim>64</dim>
11476 <dim>80</dim>
11477 <dim>136</dim>
11478 </port>
11479 <port id="1">
11480 <dim>1</dim>
11481 <dim>64</dim>
11482 <dim>80</dim>
11483 <dim>136</dim>
11484 </port>
11485 </input>
11486 <output>
11487 <port id="2" names="bottleneck2_7/add" precision="FP16">
11488 <dim>1</dim>
11489 <dim>64</dim>
11490 <dim>80</dim>
11491 <dim>136</dim>
11492 </port>
11493 </output>
11494 </layer>
11495 <layer id="813" name="bottleneck2_7/fn/weights3091640094987" type="Const" version="opset1">
11496 <data element_type="f32" offset="1576" shape="1" size="4"/>
11497 <output>
11498 <port id="0" precision="FP32">
11499 <dim>1</dim>
11500 </port>
11501 </output>
11502 </layer>
11503 <layer id="814" name="bottleneck2_7/fn" type="PReLU" version="opset1">
11504 <input>
11505 <port id="0">
11506 <dim>1</dim>
11507 <dim>64</dim>
11508 <dim>80</dim>
11509 <dim>136</dim>
11510 </port>
11511 <port id="1">
11512 <dim>1</dim>
11513 </port>
11514 </input>
11515 <output>
11516 <port id="2" names="bottleneck2_7/add" precision="FP16">
11517 <dim>1</dim>
11518 <dim>64</dim>
11519 <dim>80</dim>
11520 <dim>136</dim>
11521 </port>
11522 </output>
11523 </layer>
11524 <layer id="815" name="bottleneck2_8/add/fq_input_0" type="FakeQuantize" version="opset1">
11525 <data auto_broadcast="numpy" levels="256"/>
11526 <input>
11527 <port id="0">
11528 <dim>1</dim>
11529 <dim>64</dim>
11530 <dim>80</dim>
11531 <dim>136</dim>
11532 </port>
11533 <port id="1"/>
11534 <port id="2"/>
11535 <port id="3"/>
11536 <port id="4"/>
11537 </input>
11538 <output>
11539 <port id="5" precision="FP16">
11540 <dim>1</dim>
11541 <dim>64</dim>
11542 <dim>80</dim>
11543 <dim>136</dim>
11544 </port>
11545 </output>
11546 </layer>
11547 <layer id="816" name="2754275821450" type="Const" version="opset1">
11548 <data element_type="f16" offset="29604" shape="" size="2"/>
11549 <output>
11550 <port id="0" precision="FP16"/>
11551 </output>
11552 </layer>
11553 <layer id="817" name="2755275920847" type="Const" version="opset1">
11554 <data element_type="f16" offset="29606" shape="" size="2"/>
11555 <output>
11556 <port id="0" precision="FP16"/>
11557 </output>
11558 </layer>
11559 <layer id="818" name="2756276022839" type="Const" version="opset1">
11560 <data element_type="f16" offset="29604" shape="" size="2"/>
11561 <output>
11562 <port id="0" precision="FP16"/>
11563 </output>
11564 </layer>
11565 <layer id="819" name="2757276121993" type="Const" version="opset1">
11566 <data element_type="f16" offset="29606" shape="" size="2"/>
11567 <output>
11568 <port id="0" precision="FP16"/>
11569 </output>
11570 </layer>
11571 <layer id="820" name="2544254820079" type="Const" version="opset1">
11572 <data element_type="f16" offset="29608" shape="" size="2"/>
11573 <output>
11574 <port id="0" precision="FP16"/>
11575 </output>
11576 </layer>
11577 <layer id="821" name="2545254920676" type="Const" version="opset1">
11578 <data element_type="f16" offset="29610" shape="" size="2"/>
11579 <output>
11580 <port id="0" precision="FP16"/>
11581 </output>
11582 </layer>
11583 <layer id="822" name="2546255021231" type="Const" version="opset1">
11584 <data element_type="f16" offset="29608" shape="" size="2"/>
11585 <output>
11586 <port id="0" precision="FP16"/>
11587 </output>
11588 </layer>
11589 <layer id="823" name="2547255121672" type="Const" version="opset1">
11590 <data element_type="f16" offset="29610" shape="" size="2"/>
11591 <output>
11592 <port id="0" precision="FP16"/>
11593 </output>
11594 </layer>
11595 <layer id="824" name="3904390821567" type="Const" version="opset1">
11596 <data element_type="f16" offset="29612" shape="1,16,1,1" size="32"/>
11597 <output>
11598 <port id="0" precision="FP16">
11599 <dim>1</dim>
11600 <dim>16</dim>
11601 <dim>1</dim>
11602 <dim>1</dim>
11603 </port>
11604 </output>
11605 </layer>
11606 <layer id="825" name="3905390921915" type="Const" version="opset1">
11607 <data element_type="f16" offset="29644" shape="1,16,1,1" size="32"/>
11608 <output>
11609 <port id="0" precision="FP16">
11610 <dim>1</dim>
11611 <dim>16</dim>
11612 <dim>1</dim>
11613 <dim>1</dim>
11614 </port>
11615 </output>
11616 </layer>
11617 <layer id="826" name="3906391019995" type="Const" version="opset1">
11618 <data element_type="f16" offset="29612" shape="1,16,1,1" size="32"/>
11619 <output>
11620 <port id="0" precision="FP16">
11621 <dim>1</dim>
11622 <dim>16</dim>
11623 <dim>1</dim>
11624 <dim>1</dim>
11625 </port>
11626 </output>
11627 </layer>
11628 <layer id="827" name="3907391120883" type="Const" version="opset1">
11629 <data element_type="f16" offset="29644" shape="1,16,1,1" size="32"/>
11630 <output>
11631 <port id="0" precision="FP16">
11632 <dim>1</dim>
11633 <dim>16</dim>
11634 <dim>1</dim>
11635 <dim>1</dim>
11636 </port>
11637 </output>
11638 </layer>
11639 <layer id="828" name="bottleneck2_8/dim_red/bn/mean/Fused_Mul__copy98910192/quantized1316821282" type="Const" version="opset1">
11640 <data element_type="i8" offset="29676" shape="16,64,1,1" size="1024"/>
11641 <output>
11642 <port id="0" precision="I8">
11643 <dim>16</dim>
11644 <dim>64</dim>
11645 <dim>1</dim>
11646 <dim>1</dim>
11647 </port>
11648 </output>
11649 </layer>
11650 <layer id="829" name="bottleneck2_8/dim_red/bn/mean/Fused_Mul__copy98910192/quantized/to_f16" type="Convert" version="opset1">
11651 <data destination_type="f16"/>
11652 <input>
11653 <port id="0">
11654 <dim>16</dim>
11655 <dim>64</dim>
11656 <dim>1</dim>
11657 <dim>1</dim>
11658 </port>
11659 </input>
11660 <output>
11661 <port id="1" precision="FP16">
11662 <dim>16</dim>
11663 <dim>64</dim>
11664 <dim>1</dim>
11665 <dim>1</dim>
11666 </port>
11667 </output>
11668 </layer>
11669 <layer id="830" name="bottleneck2_8/dim_red/conv/fq_weights_1/zero_point1318122794" type="Const" version="opset1">
11670 <data element_type="f16" offset="30700" shape="16,1,1,1" size="32"/>
11671 <output>
11672 <port id="0" precision="FP16">
11673 <dim>16</dim>
11674 <dim>1</dim>
11675 <dim>1</dim>
11676 <dim>1</dim>
11677 </port>
11678 </output>
11679 </layer>
11680 <layer id="831" name="bottleneck2_8/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
11681 <data auto_broadcast="numpy"/>
11682 <input>
11683 <port id="0">
11684 <dim>16</dim>
11685 <dim>64</dim>
11686 <dim>1</dim>
11687 <dim>1</dim>
11688 </port>
11689 <port id="1">
11690 <dim>16</dim>
11691 <dim>1</dim>
11692 <dim>1</dim>
11693 <dim>1</dim>
11694 </port>
11695 </input>
11696 <output>
11697 <port id="2" precision="FP16">
11698 <dim>16</dim>
11699 <dim>64</dim>
11700 <dim>1</dim>
11701 <dim>1</dim>
11702 </port>
11703 </output>
11704 </layer>
11705 <layer id="832" name="bottleneck2_8/dim_red/conv/fq_weights_1/scale1317620826" type="Const" version="opset1">
11706 <data element_type="f16" offset="30732" shape="16,1,1,1" size="32"/>
11707 <output>
11708 <port id="0" precision="FP16">
11709 <dim>16</dim>
11710 <dim>1</dim>
11711 <dim>1</dim>
11712 <dim>1</dim>
11713 </port>
11714 </output>
11715 </layer>
11716 <layer id="833" name="bottleneck2_8/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
11717 <data auto_broadcast="numpy"/>
11718 <input>
11719 <port id="0">
11720 <dim>16</dim>
11721 <dim>64</dim>
11722 <dim>1</dim>
11723 <dim>1</dim>
11724 </port>
11725 <port id="1">
11726 <dim>16</dim>
11727 <dim>1</dim>
11728 <dim>1</dim>
11729 <dim>1</dim>
11730 </port>
11731 </input>
11732 <output>
11733 <port id="2" precision="FP16">
11734 <dim>16</dim>
11735 <dim>64</dim>
11736 <dim>1</dim>
11737 <dim>1</dim>
11738 </port>
11739 </output>
11740 </layer>
11741 <layer id="834" name="bottleneck2_8/dim_red/conv" type="Convolution" version="opset1">
11742 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
11743 <input>
11744 <port id="0">
11745 <dim>1</dim>
11746 <dim>64</dim>
11747 <dim>80</dim>
11748 <dim>136</dim>
11749 </port>
11750 <port id="1">
11751 <dim>16</dim>
11752 <dim>64</dim>
11753 <dim>1</dim>
11754 <dim>1</dim>
11755 </port>
11756 </input>
11757 <output>
11758 <port id="2" precision="FP16">
11759 <dim>1</dim>
11760 <dim>16</dim>
11761 <dim>80</dim>
11762 <dim>136</dim>
11763 </port>
11764 </output>
11765 </layer>
11766 <layer id="835" name="data_add_239532395899121927" type="Const" version="opset1">
11767 <data element_type="f16" offset="30764" shape="1,16,1,1" size="32"/>
11768 <output>
11769 <port id="0" precision="FP16">
11770 <dim>1</dim>
11771 <dim>16</dim>
11772 <dim>1</dim>
11773 <dim>1</dim>
11774 </port>
11775 </output>
11776 </layer>
11777 <layer id="836" name="bottleneck2_8/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
11778 <data auto_broadcast="numpy"/>
11779 <input>
11780 <port id="0">
11781 <dim>1</dim>
11782 <dim>16</dim>
11783 <dim>80</dim>
11784 <dim>136</dim>
11785 </port>
11786 <port id="1">
11787 <dim>1</dim>
11788 <dim>16</dim>
11789 <dim>1</dim>
11790 <dim>1</dim>
11791 </port>
11792 </input>
11793 <output>
11794 <port id="2" names="bottleneck2_8/dim_red/conv" precision="FP16">
11795 <dim>1</dim>
11796 <dim>16</dim>
11797 <dim>80</dim>
11798 <dim>136</dim>
11799 </port>
11800 </output>
11801 </layer>
11802 <layer id="837" name="bottleneck2_8/dim_red/fn/weights3095240220993" type="Const" version="opset1">
11803 <data element_type="f32" offset="1576" shape="1" size="4"/>
11804 <output>
11805 <port id="0" precision="FP32">
11806 <dim>1</dim>
11807 </port>
11808 </output>
11809 </layer>
11810 <layer id="838" name="bottleneck2_8/dim_red/fn" type="PReLU" version="opset1">
11811 <input>
11812 <port id="0">
11813 <dim>1</dim>
11814 <dim>16</dim>
11815 <dim>80</dim>
11816 <dim>136</dim>
11817 </port>
11818 <port id="1">
11819 <dim>1</dim>
11820 </port>
11821 </input>
11822 <output>
11823 <port id="2" names="bottleneck2_8/dim_red/conv" precision="FP16">
11824 <dim>1</dim>
11825 <dim>16</dim>
11826 <dim>80</dim>
11827 <dim>136</dim>
11828 </port>
11829 </output>
11830 </layer>
11831 <layer id="839" name="bottleneck2_8/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
11832 <data auto_broadcast="numpy" levels="256"/>
11833 <input>
11834 <port id="0">
11835 <dim>1</dim>
11836 <dim>16</dim>
11837 <dim>80</dim>
11838 <dim>136</dim>
11839 </port>
11840 <port id="1">
11841 <dim>1</dim>
11842 <dim>16</dim>
11843 <dim>1</dim>
11844 <dim>1</dim>
11845 </port>
11846 <port id="2">
11847 <dim>1</dim>
11848 <dim>16</dim>
11849 <dim>1</dim>
11850 <dim>1</dim>
11851 </port>
11852 <port id="3">
11853 <dim>1</dim>
11854 <dim>16</dim>
11855 <dim>1</dim>
11856 <dim>1</dim>
11857 </port>
11858 <port id="4">
11859 <dim>1</dim>
11860 <dim>16</dim>
11861 <dim>1</dim>
11862 <dim>1</dim>
11863 </port>
11864 </input>
11865 <output>
11866 <port id="5" precision="FP16">
11867 <dim>1</dim>
11868 <dim>16</dim>
11869 <dim>80</dim>
11870 <dim>136</dim>
11871 </port>
11872 </output>
11873 </layer>
11874 <layer id="840" name="16863/value1686522035" type="Const" version="opset1">
11875 <data element_type="i64" offset="8036" shape="5" size="40"/>
11876 <output>
11877 <port id="0" precision="I64">
11878 <dim>5</dim>
11879 </port>
11880 </output>
11881 </layer>
11882 <layer id="841" name="bottleneck2_8/inner/dw1/bn/mean/Fused_Mul__copy99510195/quantized1247221654" type="Const" version="opset1">
11883 <data element_type="i8" offset="30796" shape="16,1,3,3" size="144"/>
11884 <output>
11885 <port id="0" precision="I8">
11886 <dim>16</dim>
11887 <dim>1</dim>
11888 <dim>3</dim>
11889 <dim>3</dim>
11890 </port>
11891 </output>
11892 </layer>
11893 <layer id="842" name="bottleneck2_8/inner/dw1/bn/mean/Fused_Mul__copy99510195/quantized/to_f16" type="Convert" version="opset1">
11894 <data destination_type="f16"/>
11895 <input>
11896 <port id="0">
11897 <dim>16</dim>
11898 <dim>1</dim>
11899 <dim>3</dim>
11900 <dim>3</dim>
11901 </port>
11902 </input>
11903 <output>
11904 <port id="1" precision="FP16">
11905 <dim>16</dim>
11906 <dim>1</dim>
11907 <dim>3</dim>
11908 <dim>3</dim>
11909 </port>
11910 </output>
11911 </layer>
11912 <layer id="843" name="bottleneck2_8/inner/dw1/conv/fq_weights_1/zero_point1248519572" type="Const" version="opset1">
11913 <data element_type="f16" offset="30940" shape="16,1,1,1" size="32"/>
11914 <output>
11915 <port id="0" precision="FP16">
11916 <dim>16</dim>
11917 <dim>1</dim>
11918 <dim>1</dim>
11919 <dim>1</dim>
11920 </port>
11921 </output>
11922 </layer>
11923 <layer id="844" name="bottleneck2_8/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
11924 <data auto_broadcast="numpy"/>
11925 <input>
11926 <port id="0">
11927 <dim>16</dim>
11928 <dim>1</dim>
11929 <dim>3</dim>
11930 <dim>3</dim>
11931 </port>
11932 <port id="1">
11933 <dim>16</dim>
11934 <dim>1</dim>
11935 <dim>1</dim>
11936 <dim>1</dim>
11937 </port>
11938 </input>
11939 <output>
11940 <port id="2" precision="FP16">
11941 <dim>16</dim>
11942 <dim>1</dim>
11943 <dim>3</dim>
11944 <dim>3</dim>
11945 </port>
11946 </output>
11947 </layer>
11948 <layer id="845" name="bottleneck2_8/inner/dw1/conv/fq_weights_1/scale1248022083" type="Const" version="opset1">
11949 <data element_type="f16" offset="30972" shape="16,1,1,1" size="32"/>
11950 <output>
11951 <port id="0" precision="FP16">
11952 <dim>16</dim>
11953 <dim>1</dim>
11954 <dim>1</dim>
11955 <dim>1</dim>
11956 </port>
11957 </output>
11958 </layer>
11959 <layer id="846" name="bottleneck2_8/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
11960 <data auto_broadcast="numpy"/>
11961 <input>
11962 <port id="0">
11963 <dim>16</dim>
11964 <dim>1</dim>
11965 <dim>3</dim>
11966 <dim>3</dim>
11967 </port>
11968 <port id="1">
11969 <dim>16</dim>
11970 <dim>1</dim>
11971 <dim>1</dim>
11972 <dim>1</dim>
11973 </port>
11974 </input>
11975 <output>
11976 <port id="2" precision="FP16">
11977 <dim>16</dim>
11978 <dim>1</dim>
11979 <dim>3</dim>
11980 <dim>3</dim>
11981 </port>
11982 </output>
11983 </layer>
11984 <layer id="847" name="16863" type="Reshape" version="opset1">
11985 <data special_zero="true"/>
11986 <input>
11987 <port id="0">
11988 <dim>16</dim>
11989 <dim>1</dim>
11990 <dim>3</dim>
11991 <dim>3</dim>
11992 </port>
11993 <port id="1">
11994 <dim>5</dim>
11995 </port>
11996 </input>
11997 <output>
11998 <port id="2" precision="FP16">
11999 <dim>16</dim>
12000 <dim>1</dim>
12001 <dim>1</dim>
12002 <dim>3</dim>
12003 <dim>3</dim>
12004 </port>
12005 </output>
12006 </layer>
12007 <layer id="848" name="bottleneck2_8/inner/dw1/conv" type="GroupConvolution" version="opset1">
12008 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
12009 <input>
12010 <port id="0">
12011 <dim>1</dim>
12012 <dim>16</dim>
12013 <dim>80</dim>
12014 <dim>136</dim>
12015 </port>
12016 <port id="1">
12017 <dim>16</dim>
12018 <dim>1</dim>
12019 <dim>1</dim>
12020 <dim>3</dim>
12021 <dim>3</dim>
12022 </port>
12023 </input>
12024 <output>
12025 <port id="2" precision="FP16">
12026 <dim>1</dim>
12027 <dim>16</dim>
12028 <dim>80</dim>
12029 <dim>136</dim>
12030 </port>
12031 </output>
12032 </layer>
12033 <layer id="849" name="data_add_239612396699719605" type="Const" version="opset1">
12034 <data element_type="f16" offset="31004" shape="1,16,1,1" size="32"/>
12035 <output>
12036 <port id="0" precision="FP16">
12037 <dim>1</dim>
12038 <dim>16</dim>
12039 <dim>1</dim>
12040 <dim>1</dim>
12041 </port>
12042 </output>
12043 </layer>
12044 <layer id="850" name="bottleneck2_8/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
12045 <data auto_broadcast="numpy"/>
12046 <input>
12047 <port id="0">
12048 <dim>1</dim>
12049 <dim>16</dim>
12050 <dim>80</dim>
12051 <dim>136</dim>
12052 </port>
12053 <port id="1">
12054 <dim>1</dim>
12055 <dim>16</dim>
12056 <dim>1</dim>
12057 <dim>1</dim>
12058 </port>
12059 </input>
12060 <output>
12061 <port id="2" names="bottleneck2_8/inner/dw1/conv" precision="FP16">
12062 <dim>1</dim>
12063 <dim>16</dim>
12064 <dim>80</dim>
12065 <dim>136</dim>
12066 </port>
12067 </output>
12068 </layer>
12069 <layer id="851" name="bottleneck2_8/inner/dw1/fn/weights3080040325999" type="Const" version="opset1">
12070 <data element_type="f32" offset="1576" shape="1" size="4"/>
12071 <output>
12072 <port id="0" precision="FP32">
12073 <dim>1</dim>
12074 </port>
12075 </output>
12076 </layer>
12077 <layer id="852" name="bottleneck2_8/inner/dw1/fn" type="PReLU" version="opset1">
12078 <input>
12079 <port id="0">
12080 <dim>1</dim>
12081 <dim>16</dim>
12082 <dim>80</dim>
12083 <dim>136</dim>
12084 </port>
12085 <port id="1">
12086 <dim>1</dim>
12087 </port>
12088 </input>
12089 <output>
12090 <port id="2" names="bottleneck2_8/inner/dw1/conv" precision="FP16">
12091 <dim>1</dim>
12092 <dim>16</dim>
12093 <dim>80</dim>
12094 <dim>136</dim>
12095 </port>
12096 </output>
12097 </layer>
12098 <layer id="853" name="bottleneck2_8/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
12099 <data auto_broadcast="numpy" levels="256"/>
12100 <input>
12101 <port id="0">
12102 <dim>1</dim>
12103 <dim>16</dim>
12104 <dim>80</dim>
12105 <dim>136</dim>
12106 </port>
12107 <port id="1"/>
12108 <port id="2"/>
12109 <port id="3"/>
12110 <port id="4"/>
12111 </input>
12112 <output>
12113 <port id="5" precision="FP16">
12114 <dim>1</dim>
12115 <dim>16</dim>
12116 <dim>80</dim>
12117 <dim>136</dim>
12118 </port>
12119 </output>
12120 </layer>
12121 <layer id="854" name="bottleneck2_8/dim_inc/bn/mean/Fused_Mul__copy100110198/quantized1355222461" type="Const" version="opset1">
12122 <data element_type="i8" offset="31036" shape="64,16,1,1" size="1024"/>
12123 <output>
12124 <port id="0" precision="I8">
12125 <dim>64</dim>
12126 <dim>16</dim>
12127 <dim>1</dim>
12128 <dim>1</dim>
12129 </port>
12130 </output>
12131 </layer>
12132 <layer id="855" name="bottleneck2_8/dim_inc/bn/mean/Fused_Mul__copy100110198/quantized/to_f16" type="Convert" version="opset1">
12133 <data destination_type="f16"/>
12134 <input>
12135 <port id="0">
12136 <dim>64</dim>
12137 <dim>16</dim>
12138 <dim>1</dim>
12139 <dim>1</dim>
12140 </port>
12141 </input>
12142 <output>
12143 <port id="1" precision="FP16">
12144 <dim>64</dim>
12145 <dim>16</dim>
12146 <dim>1</dim>
12147 <dim>1</dim>
12148 </port>
12149 </output>
12150 </layer>
12151 <layer id="856" name="bottleneck2_8/dim_inc/conv/fq_weights_1/zero_point1356520682" type="Const" version="opset1">
12152 <data element_type="f16" offset="32060" shape="64,1,1,1" size="128"/>
12153 <output>
12154 <port id="0" precision="FP16">
12155 <dim>64</dim>
12156 <dim>1</dim>
12157 <dim>1</dim>
12158 <dim>1</dim>
12159 </port>
12160 </output>
12161 </layer>
12162 <layer id="857" name="bottleneck2_8/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
12163 <data auto_broadcast="numpy"/>
12164 <input>
12165 <port id="0">
12166 <dim>64</dim>
12167 <dim>16</dim>
12168 <dim>1</dim>
12169 <dim>1</dim>
12170 </port>
12171 <port id="1">
12172 <dim>64</dim>
12173 <dim>1</dim>
12174 <dim>1</dim>
12175 <dim>1</dim>
12176 </port>
12177 </input>
12178 <output>
12179 <port id="2" precision="FP16">
12180 <dim>64</dim>
12181 <dim>16</dim>
12182 <dim>1</dim>
12183 <dim>1</dim>
12184 </port>
12185 </output>
12186 </layer>
12187 <layer id="858" name="bottleneck2_8/dim_inc/conv/fq_weights_1/scale1356022122" type="Const" version="opset1">
12188 <data element_type="f16" offset="32188" shape="64,1,1,1" size="128"/>
12189 <output>
12190 <port id="0" precision="FP16">
12191 <dim>64</dim>
12192 <dim>1</dim>
12193 <dim>1</dim>
12194 <dim>1</dim>
12195 </port>
12196 </output>
12197 </layer>
12198 <layer id="859" name="bottleneck2_8/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
12199 <data auto_broadcast="numpy"/>
12200 <input>
12201 <port id="0">
12202 <dim>64</dim>
12203 <dim>16</dim>
12204 <dim>1</dim>
12205 <dim>1</dim>
12206 </port>
12207 <port id="1">
12208 <dim>64</dim>
12209 <dim>1</dim>
12210 <dim>1</dim>
12211 <dim>1</dim>
12212 </port>
12213 </input>
12214 <output>
12215 <port id="2" precision="FP16">
12216 <dim>64</dim>
12217 <dim>16</dim>
12218 <dim>1</dim>
12219 <dim>1</dim>
12220 </port>
12221 </output>
12222 </layer>
12223 <layer id="860" name="bottleneck2_8/dim_inc/conv" type="Convolution" version="opset1">
12224 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
12225 <input>
12226 <port id="0">
12227 <dim>1</dim>
12228 <dim>16</dim>
12229 <dim>80</dim>
12230 <dim>136</dim>
12231 </port>
12232 <port id="1">
12233 <dim>64</dim>
12234 <dim>16</dim>
12235 <dim>1</dim>
12236 <dim>1</dim>
12237 </port>
12238 </input>
12239 <output>
12240 <port id="2" precision="FP16">
12241 <dim>1</dim>
12242 <dim>64</dim>
12243 <dim>80</dim>
12244 <dim>136</dim>
12245 </port>
12246 </output>
12247 </layer>
12248 <layer id="861" name="data_add_2396923974100320613" type="Const" version="opset1">
12249 <data element_type="f16" offset="32316" shape="1,64,1,1" size="128"/>
12250 <output>
12251 <port id="0" precision="FP16">
12252 <dim>1</dim>
12253 <dim>64</dim>
12254 <dim>1</dim>
12255 <dim>1</dim>
12256 </port>
12257 </output>
12258 </layer>
12259 <layer id="862" name="bottleneck2_8/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
12260 <data auto_broadcast="numpy"/>
12261 <input>
12262 <port id="0">
12263 <dim>1</dim>
12264 <dim>64</dim>
12265 <dim>80</dim>
12266 <dim>136</dim>
12267 </port>
12268 <port id="1">
12269 <dim>1</dim>
12270 <dim>64</dim>
12271 <dim>1</dim>
12272 <dim>1</dim>
12273 </port>
12274 </input>
12275 <output>
12276 <port id="2" names="bottleneck2_8/dim_inc/conv" precision="FP16">
12277 <dim>1</dim>
12278 <dim>64</dim>
12279 <dim>80</dim>
12280 <dim>136</dim>
12281 </port>
12282 </output>
12283 </layer>
12284 <layer id="863" name="bottleneck2_8/add/fq_input_1" type="FakeQuantize" version="opset1">
12285 <data auto_broadcast="numpy" levels="256"/>
12286 <input>
12287 <port id="0">
12288 <dim>1</dim>
12289 <dim>64</dim>
12290 <dim>80</dim>
12291 <dim>136</dim>
12292 </port>
12293 <port id="1"/>
12294 <port id="2"/>
12295 <port id="3"/>
12296 <port id="4"/>
12297 </input>
12298 <output>
12299 <port id="5" precision="FP16">
12300 <dim>1</dim>
12301 <dim>64</dim>
12302 <dim>80</dim>
12303 <dim>136</dim>
12304 </port>
12305 </output>
12306 </layer>
12307 <layer id="864" name="bottleneck2_8/add" type="Add" version="opset1">
12308 <data auto_broadcast="numpy"/>
12309 <input>
12310 <port id="0">
12311 <dim>1</dim>
12312 <dim>64</dim>
12313 <dim>80</dim>
12314 <dim>136</dim>
12315 </port>
12316 <port id="1">
12317 <dim>1</dim>
12318 <dim>64</dim>
12319 <dim>80</dim>
12320 <dim>136</dim>
12321 </port>
12322 </input>
12323 <output>
12324 <port id="2" names="bottleneck2_8/add" precision="FP16">
12325 <dim>1</dim>
12326 <dim>64</dim>
12327 <dim>80</dim>
12328 <dim>136</dim>
12329 </port>
12330 </output>
12331 </layer>
12332 <layer id="865" name="bottleneck2_8/fn/weights30840401601006" type="Const" version="opset1">
12333 <data element_type="f32" offset="1576" shape="1" size="4"/>
12334 <output>
12335 <port id="0" precision="FP32">
12336 <dim>1</dim>
12337 </port>
12338 </output>
12339 </layer>
12340 <layer id="866" name="bottleneck2_8/fn" type="PReLU" version="opset1">
12341 <input>
12342 <port id="0">
12343 <dim>1</dim>
12344 <dim>64</dim>
12345 <dim>80</dim>
12346 <dim>136</dim>
12347 </port>
12348 <port id="1">
12349 <dim>1</dim>
12350 </port>
12351 </input>
12352 <output>
12353 <port id="2" names="bottleneck2_8/add" precision="FP16">
12354 <dim>1</dim>
12355 <dim>64</dim>
12356 <dim>80</dim>
12357 <dim>136</dim>
12358 </port>
12359 </output>
12360 </layer>
12361 <layer id="867" name="bottleneck3_0/dim_red/conv/fq_input_0" type="FakeQuantize" version="opset1">
12362 <data auto_broadcast="numpy" levels="256"/>
12363 <input>
12364 <port id="0">
12365 <dim>1</dim>
12366 <dim>64</dim>
12367 <dim>80</dim>
12368 <dim>136</dim>
12369 </port>
12370 <port id="1"/>
12371 <port id="2"/>
12372 <port id="3"/>
12373 <port id="4"/>
12374 </input>
12375 <output>
12376 <port id="5" precision="FP16">
12377 <dim>1</dim>
12378 <dim>64</dim>
12379 <dim>80</dim>
12380 <dim>136</dim>
12381 </port>
12382 </output>
12383 </layer>
12384 <layer id="868" name="bottleneck3_0/skip/pooling" type="MaxPool" version="opset1">
12385 <data auto_pad="explicit" kernel="2,2" pads_begin="0,0" pads_end="0,0" rounding_type="ceil" strides="2,2"/>
12386 <input>
12387 <port id="0">
12388 <dim>1</dim>
12389 <dim>64</dim>
12390 <dim>80</dim>
12391 <dim>136</dim>
12392 </port>
12393 </input>
12394 <output>
12395 <port id="1" names="bottleneck3_0/skip/pooling" precision="FP16">
12396 <dim>1</dim>
12397 <dim>64</dim>
12398 <dim>40</dim>
12399 <dim>68</dim>
12400 </port>
12401 </output>
12402 </layer>
12403 <layer id="869" name="bottleneck3_0/skip/bn/mean/Fused_Mul__copy100910201/quantized1268822494" type="Const" version="opset1">
12404 <data element_type="i8" offset="32444" shape="128,64,1,1" size="8192"/>
12405 <output>
12406 <port id="0" precision="I8">
12407 <dim>128</dim>
12408 <dim>64</dim>
12409 <dim>1</dim>
12410 <dim>1</dim>
12411 </port>
12412 </output>
12413 </layer>
12414 <layer id="870" name="bottleneck3_0/skip/bn/mean/Fused_Mul__copy100910201/quantized/to_f16" type="Convert" version="opset1">
12415 <data destination_type="f16"/>
12416 <input>
12417 <port id="0">
12418 <dim>128</dim>
12419 <dim>64</dim>
12420 <dim>1</dim>
12421 <dim>1</dim>
12422 </port>
12423 </input>
12424 <output>
12425 <port id="1" precision="FP16">
12426 <dim>128</dim>
12427 <dim>64</dim>
12428 <dim>1</dim>
12429 <dim>1</dim>
12430 </port>
12431 </output>
12432 </layer>
12433 <layer id="871" name="bottleneck3_0/skip/conv/fq_weights_1/zero_point1270122716" type="Const" version="opset1">
12434 <data element_type="f16" offset="40636" shape="128,1,1,1" size="256"/>
12435 <output>
12436 <port id="0" precision="FP16">
12437 <dim>128</dim>
12438 <dim>1</dim>
12439 <dim>1</dim>
12440 <dim>1</dim>
12441 </port>
12442 </output>
12443 </layer>
12444 <layer id="872" name="bottleneck3_0/skip/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
12445 <data auto_broadcast="numpy"/>
12446 <input>
12447 <port id="0">
12448 <dim>128</dim>
12449 <dim>64</dim>
12450 <dim>1</dim>
12451 <dim>1</dim>
12452 </port>
12453 <port id="1">
12454 <dim>128</dim>
12455 <dim>1</dim>
12456 <dim>1</dim>
12457 <dim>1</dim>
12458 </port>
12459 </input>
12460 <output>
12461 <port id="2" precision="FP16">
12462 <dim>128</dim>
12463 <dim>64</dim>
12464 <dim>1</dim>
12465 <dim>1</dim>
12466 </port>
12467 </output>
12468 </layer>
12469 <layer id="873" name="bottleneck3_0/skip/conv/fq_weights_1/scale1269620070" type="Const" version="opset1">
12470 <data element_type="f16" offset="40892" shape="128,1,1,1" size="256"/>
12471 <output>
12472 <port id="0" precision="FP16">
12473 <dim>128</dim>
12474 <dim>1</dim>
12475 <dim>1</dim>
12476 <dim>1</dim>
12477 </port>
12478 </output>
12479 </layer>
12480 <layer id="874" name="bottleneck3_0/skip/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
12481 <data auto_broadcast="numpy"/>
12482 <input>
12483 <port id="0">
12484 <dim>128</dim>
12485 <dim>64</dim>
12486 <dim>1</dim>
12487 <dim>1</dim>
12488 </port>
12489 <port id="1">
12490 <dim>128</dim>
12491 <dim>1</dim>
12492 <dim>1</dim>
12493 <dim>1</dim>
12494 </port>
12495 </input>
12496 <output>
12497 <port id="2" precision="FP16">
12498 <dim>128</dim>
12499 <dim>64</dim>
12500 <dim>1</dim>
12501 <dim>1</dim>
12502 </port>
12503 </output>
12504 </layer>
12505 <layer id="875" name="bottleneck3_0/skip/conv" type="Convolution" version="opset1">
12506 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
12507 <input>
12508 <port id="0">
12509 <dim>1</dim>
12510 <dim>64</dim>
12511 <dim>40</dim>
12512 <dim>68</dim>
12513 </port>
12514 <port id="1">
12515 <dim>128</dim>
12516 <dim>64</dim>
12517 <dim>1</dim>
12518 <dim>1</dim>
12519 </port>
12520 </input>
12521 <output>
12522 <port id="2" precision="FP16">
12523 <dim>1</dim>
12524 <dim>128</dim>
12525 <dim>40</dim>
12526 <dim>68</dim>
12527 </port>
12528 </output>
12529 </layer>
12530 <layer id="876" name="data_add_2397723982101122173" type="Const" version="opset1">
12531 <data element_type="f16" offset="41148" shape="1,128,1,1" size="256"/>
12532 <output>
12533 <port id="0" precision="FP16">
12534 <dim>1</dim>
12535 <dim>128</dim>
12536 <dim>1</dim>
12537 <dim>1</dim>
12538 </port>
12539 </output>
12540 </layer>
12541 <layer id="877" name="bottleneck3_0/skip/bn/variance/Fused_Add_" type="Add" version="opset1">
12542 <data auto_broadcast="numpy"/>
12543 <input>
12544 <port id="0">
12545 <dim>1</dim>
12546 <dim>128</dim>
12547 <dim>40</dim>
12548 <dim>68</dim>
12549 </port>
12550 <port id="1">
12551 <dim>1</dim>
12552 <dim>128</dim>
12553 <dim>1</dim>
12554 <dim>1</dim>
12555 </port>
12556 </input>
12557 <output>
12558 <port id="2" names="bottleneck3_0/skip/conv" precision="FP16">
12559 <dim>1</dim>
12560 <dim>128</dim>
12561 <dim>40</dim>
12562 <dim>68</dim>
12563 </port>
12564 </output>
12565 </layer>
12566 <layer id="878" name="bottleneck3_0/add/fq_input_0" type="FakeQuantize" version="opset1">
12567 <data auto_broadcast="numpy" levels="256"/>
12568 <input>
12569 <port id="0">
12570 <dim>1</dim>
12571 <dim>128</dim>
12572 <dim>40</dim>
12573 <dim>68</dim>
12574 </port>
12575 <port id="1"/>
12576 <port id="2"/>
12577 <port id="3"/>
12578 <port id="4"/>
12579 </input>
12580 <output>
12581 <port id="5" precision="FP16">
12582 <dim>1</dim>
12583 <dim>128</dim>
12584 <dim>40</dim>
12585 <dim>68</dim>
12586 </port>
12587 </output>
12588 </layer>
12589 <layer id="879" name="4954495819584" type="Const" version="opset1">
12590 <data element_type="f16" offset="41404" shape="" size="2"/>
12591 <output>
12592 <port id="0" precision="FP16"/>
12593 </output>
12594 </layer>
12595 <layer id="880" name="4955495920154" type="Const" version="opset1">
12596 <data element_type="f16" offset="41406" shape="" size="2"/>
12597 <output>
12598 <port id="0" precision="FP16"/>
12599 </output>
12600 </layer>
12601 <layer id="881" name="4956496021789" type="Const" version="opset1">
12602 <data element_type="f16" offset="41404" shape="" size="2"/>
12603 <output>
12604 <port id="0" precision="FP16"/>
12605 </output>
12606 </layer>
12607 <layer id="882" name="4957496120331" type="Const" version="opset1">
12608 <data element_type="f16" offset="41406" shape="" size="2"/>
12609 <output>
12610 <port id="0" precision="FP16"/>
12611 </output>
12612 </layer>
12613 <layer id="883" name="3784378820817" type="Const" version="opset1">
12614 <data element_type="f16" offset="41408" shape="" size="2"/>
12615 <output>
12616 <port id="0" precision="FP16"/>
12617 </output>
12618 </layer>
12619 <layer id="884" name="3785378922965" type="Const" version="opset1">
12620 <data element_type="f16" offset="23930" shape="" size="2"/>
12621 <output>
12622 <port id="0" precision="FP16"/>
12623 </output>
12624 </layer>
12625 <layer id="885" name="3786379022008" type="Const" version="opset1">
12626 <data element_type="f16" offset="41408" shape="" size="2"/>
12627 <output>
12628 <port id="0" precision="FP16"/>
12629 </output>
12630 </layer>
12631 <layer id="886" name="3787379119818" type="Const" version="opset1">
12632 <data element_type="f16" offset="23930" shape="" size="2"/>
12633 <output>
12634 <port id="0" precision="FP16"/>
12635 </output>
12636 </layer>
12637 <layer id="887" name="3704370822284" type="Const" version="opset1">
12638 <data element_type="f16" offset="41410" shape="1,32,1,1" size="64"/>
12639 <output>
12640 <port id="0" precision="FP16">
12641 <dim>1</dim>
12642 <dim>32</dim>
12643 <dim>1</dim>
12644 <dim>1</dim>
12645 </port>
12646 </output>
12647 </layer>
12648 <layer id="888" name="3705370921024" type="Const" version="opset1">
12649 <data element_type="f16" offset="41474" shape="1,32,1,1" size="64"/>
12650 <output>
12651 <port id="0" precision="FP16">
12652 <dim>1</dim>
12653 <dim>32</dim>
12654 <dim>1</dim>
12655 <dim>1</dim>
12656 </port>
12657 </output>
12658 </layer>
12659 <layer id="889" name="3706371019812" type="Const" version="opset1">
12660 <data element_type="f16" offset="41410" shape="1,32,1,1" size="64"/>
12661 <output>
12662 <port id="0" precision="FP16">
12663 <dim>1</dim>
12664 <dim>32</dim>
12665 <dim>1</dim>
12666 <dim>1</dim>
12667 </port>
12668 </output>
12669 </layer>
12670 <layer id="890" name="3707371120835" type="Const" version="opset1">
12671 <data element_type="f16" offset="41474" shape="1,32,1,1" size="64"/>
12672 <output>
12673 <port id="0" precision="FP16">
12674 <dim>1</dim>
12675 <dim>32</dim>
12676 <dim>1</dim>
12677 <dim>1</dim>
12678 </port>
12679 </output>
12680 </layer>
12681 <layer id="891" name="bottleneck3_0/dim_red/bn/mean/Fused_Mul__copy101310203/quantized1237621900" type="Const" version="opset1">
12682 <data element_type="i8" offset="41538" shape="32,64,1,1" size="2048"/>
12683 <output>
12684 <port id="0" precision="I8">
12685 <dim>32</dim>
12686 <dim>64</dim>
12687 <dim>1</dim>
12688 <dim>1</dim>
12689 </port>
12690 </output>
12691 </layer>
12692 <layer id="892" name="bottleneck3_0/dim_red/bn/mean/Fused_Mul__copy101310203/quantized/to_f16" type="Convert" version="opset1">
12693 <data destination_type="f16"/>
12694 <input>
12695 <port id="0">
12696 <dim>32</dim>
12697 <dim>64</dim>
12698 <dim>1</dim>
12699 <dim>1</dim>
12700 </port>
12701 </input>
12702 <output>
12703 <port id="1" precision="FP16">
12704 <dim>32</dim>
12705 <dim>64</dim>
12706 <dim>1</dim>
12707 <dim>1</dim>
12708 </port>
12709 </output>
12710 </layer>
12711 <layer id="893" name="bottleneck3_0/dim_red/conv/fq_weights_1/zero_point1238919839" type="Const" version="opset1">
12712 <data element_type="f16" offset="43586" shape="32,1,1,1" size="64"/>
12713 <output>
12714 <port id="0" precision="FP16">
12715 <dim>32</dim>
12716 <dim>1</dim>
12717 <dim>1</dim>
12718 <dim>1</dim>
12719 </port>
12720 </output>
12721 </layer>
12722 <layer id="894" name="bottleneck3_0/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
12723 <data auto_broadcast="numpy"/>
12724 <input>
12725 <port id="0">
12726 <dim>32</dim>
12727 <dim>64</dim>
12728 <dim>1</dim>
12729 <dim>1</dim>
12730 </port>
12731 <port id="1">
12732 <dim>32</dim>
12733 <dim>1</dim>
12734 <dim>1</dim>
12735 <dim>1</dim>
12736 </port>
12737 </input>
12738 <output>
12739 <port id="2" precision="FP16">
12740 <dim>32</dim>
12741 <dim>64</dim>
12742 <dim>1</dim>
12743 <dim>1</dim>
12744 </port>
12745 </output>
12746 </layer>
12747 <layer id="895" name="bottleneck3_0/dim_red/conv/fq_weights_1/scale1238422143" type="Const" version="opset1">
12748 <data element_type="f16" offset="43650" shape="32,1,1,1" size="64"/>
12749 <output>
12750 <port id="0" precision="FP16">
12751 <dim>32</dim>
12752 <dim>1</dim>
12753 <dim>1</dim>
12754 <dim>1</dim>
12755 </port>
12756 </output>
12757 </layer>
12758 <layer id="896" name="bottleneck3_0/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
12759 <data auto_broadcast="numpy"/>
12760 <input>
12761 <port id="0">
12762 <dim>32</dim>
12763 <dim>64</dim>
12764 <dim>1</dim>
12765 <dim>1</dim>
12766 </port>
12767 <port id="1">
12768 <dim>32</dim>
12769 <dim>1</dim>
12770 <dim>1</dim>
12771 <dim>1</dim>
12772 </port>
12773 </input>
12774 <output>
12775 <port id="2" precision="FP16">
12776 <dim>32</dim>
12777 <dim>64</dim>
12778 <dim>1</dim>
12779 <dim>1</dim>
12780 </port>
12781 </output>
12782 </layer>
12783 <layer id="897" name="bottleneck3_0/dim_red/conv" type="Convolution" version="opset1">
12784 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
12785 <input>
12786 <port id="0">
12787 <dim>1</dim>
12788 <dim>64</dim>
12789 <dim>80</dim>
12790 <dim>136</dim>
12791 </port>
12792 <port id="1">
12793 <dim>32</dim>
12794 <dim>64</dim>
12795 <dim>1</dim>
12796 <dim>1</dim>
12797 </port>
12798 </input>
12799 <output>
12800 <port id="2" precision="FP16">
12801 <dim>1</dim>
12802 <dim>32</dim>
12803 <dim>80</dim>
12804 <dim>136</dim>
12805 </port>
12806 </output>
12807 </layer>
12808 <layer id="898" name="data_add_2398523990101520202" type="Const" version="opset1">
12809 <data element_type="f16" offset="43714" shape="1,32,1,1" size="64"/>
12810 <output>
12811 <port id="0" precision="FP16">
12812 <dim>1</dim>
12813 <dim>32</dim>
12814 <dim>1</dim>
12815 <dim>1</dim>
12816 </port>
12817 </output>
12818 </layer>
12819 <layer id="899" name="bottleneck3_0/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
12820 <data auto_broadcast="numpy"/>
12821 <input>
12822 <port id="0">
12823 <dim>1</dim>
12824 <dim>32</dim>
12825 <dim>80</dim>
12826 <dim>136</dim>
12827 </port>
12828 <port id="1">
12829 <dim>1</dim>
12830 <dim>32</dim>
12831 <dim>1</dim>
12832 <dim>1</dim>
12833 </port>
12834 </input>
12835 <output>
12836 <port id="2" names="bottleneck3_0/dim_red/conv" precision="FP16">
12837 <dim>1</dim>
12838 <dim>32</dim>
12839 <dim>80</dim>
12840 <dim>136</dim>
12841 </port>
12842 </output>
12843 </layer>
12844 <layer id="900" name="bottleneck3_0/dim_red/fn/weights30808397941017" type="Const" version="opset1">
12845 <data element_type="f32" offset="1576" shape="1" size="4"/>
12846 <output>
12847 <port id="0" precision="FP32">
12848 <dim>1</dim>
12849 </port>
12850 </output>
12851 </layer>
12852 <layer id="901" name="bottleneck3_0/dim_red/fn" type="PReLU" version="opset1">
12853 <input>
12854 <port id="0">
12855 <dim>1</dim>
12856 <dim>32</dim>
12857 <dim>80</dim>
12858 <dim>136</dim>
12859 </port>
12860 <port id="1">
12861 <dim>1</dim>
12862 </port>
12863 </input>
12864 <output>
12865 <port id="2" names="bottleneck3_0/dim_red/conv" precision="FP16">
12866 <dim>1</dim>
12867 <dim>32</dim>
12868 <dim>80</dim>
12869 <dim>136</dim>
12870 </port>
12871 </output>
12872 </layer>
12873 <layer id="902" name="bottleneck3_0/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
12874 <data auto_broadcast="numpy" levels="256"/>
12875 <input>
12876 <port id="0">
12877 <dim>1</dim>
12878 <dim>32</dim>
12879 <dim>80</dim>
12880 <dim>136</dim>
12881 </port>
12882 <port id="1">
12883 <dim>1</dim>
12884 <dim>32</dim>
12885 <dim>1</dim>
12886 <dim>1</dim>
12887 </port>
12888 <port id="2">
12889 <dim>1</dim>
12890 <dim>32</dim>
12891 <dim>1</dim>
12892 <dim>1</dim>
12893 </port>
12894 <port id="3">
12895 <dim>1</dim>
12896 <dim>32</dim>
12897 <dim>1</dim>
12898 <dim>1</dim>
12899 </port>
12900 <port id="4">
12901 <dim>1</dim>
12902 <dim>32</dim>
12903 <dim>1</dim>
12904 <dim>1</dim>
12905 </port>
12906 </input>
12907 <output>
12908 <port id="5" precision="FP16">
12909 <dim>1</dim>
12910 <dim>32</dim>
12911 <dim>80</dim>
12912 <dim>136</dim>
12913 </port>
12914 </output>
12915 </layer>
12916 <layer id="903" name="16847/value1684921366" type="Const" version="opset1">
12917 <data element_type="i64" offset="43778" shape="5" size="40"/>
12918 <output>
12919 <port id="0" precision="I64">
12920 <dim>5</dim>
12921 </port>
12922 </output>
12923 </layer>
12924 <layer id="904" name="bottleneck3_0/inner/dw1/bn/mean/Fused_Mul__copy101910206/quantized1297622350" type="Const" version="opset1">
12925 <data element_type="i8" offset="43818" shape="32,1,3,3" size="288"/>
12926 <output>
12927 <port id="0" precision="I8">
12928 <dim>32</dim>
12929 <dim>1</dim>
12930 <dim>3</dim>
12931 <dim>3</dim>
12932 </port>
12933 </output>
12934 </layer>
12935 <layer id="905" name="bottleneck3_0/inner/dw1/bn/mean/Fused_Mul__copy101910206/quantized/to_f16" type="Convert" version="opset1">
12936 <data destination_type="f16"/>
12937 <input>
12938 <port id="0">
12939 <dim>32</dim>
12940 <dim>1</dim>
12941 <dim>3</dim>
12942 <dim>3</dim>
12943 </port>
12944 </input>
12945 <output>
12946 <port id="1" precision="FP16">
12947 <dim>32</dim>
12948 <dim>1</dim>
12949 <dim>3</dim>
12950 <dim>3</dim>
12951 </port>
12952 </output>
12953 </layer>
12954 <layer id="906" name="bottleneck3_0/inner/dw1/conv/fq_weights_1/zero_point1298919377" type="Const" version="opset1">
12955 <data element_type="f16" offset="44106" shape="32,1,1,1" size="64"/>
12956 <output>
12957 <port id="0" precision="FP16">
12958 <dim>32</dim>
12959 <dim>1</dim>
12960 <dim>1</dim>
12961 <dim>1</dim>
12962 </port>
12963 </output>
12964 </layer>
12965 <layer id="907" name="bottleneck3_0/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
12966 <data auto_broadcast="numpy"/>
12967 <input>
12968 <port id="0">
12969 <dim>32</dim>
12970 <dim>1</dim>
12971 <dim>3</dim>
12972 <dim>3</dim>
12973 </port>
12974 <port id="1">
12975 <dim>32</dim>
12976 <dim>1</dim>
12977 <dim>1</dim>
12978 <dim>1</dim>
12979 </port>
12980 </input>
12981 <output>
12982 <port id="2" precision="FP16">
12983 <dim>32</dim>
12984 <dim>1</dim>
12985 <dim>3</dim>
12986 <dim>3</dim>
12987 </port>
12988 </output>
12989 </layer>
12990 <layer id="908" name="bottleneck3_0/inner/dw1/conv/fq_weights_1/scale1298421720" type="Const" version="opset1">
12991 <data element_type="f16" offset="44170" shape="32,1,1,1" size="64"/>
12992 <output>
12993 <port id="0" precision="FP16">
12994 <dim>32</dim>
12995 <dim>1</dim>
12996 <dim>1</dim>
12997 <dim>1</dim>
12998 </port>
12999 </output>
13000 </layer>
13001 <layer id="909" name="bottleneck3_0/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
13002 <data auto_broadcast="numpy"/>
13003 <input>
13004 <port id="0">
13005 <dim>32</dim>
13006 <dim>1</dim>
13007 <dim>3</dim>
13008 <dim>3</dim>
13009 </port>
13010 <port id="1">
13011 <dim>32</dim>
13012 <dim>1</dim>
13013 <dim>1</dim>
13014 <dim>1</dim>
13015 </port>
13016 </input>
13017 <output>
13018 <port id="2" precision="FP16">
13019 <dim>32</dim>
13020 <dim>1</dim>
13021 <dim>3</dim>
13022 <dim>3</dim>
13023 </port>
13024 </output>
13025 </layer>
13026 <layer id="910" name="16847" type="Reshape" version="opset1">
13027 <data special_zero="true"/>
13028 <input>
13029 <port id="0">
13030 <dim>32</dim>
13031 <dim>1</dim>
13032 <dim>3</dim>
13033 <dim>3</dim>
13034 </port>
13035 <port id="1">
13036 <dim>5</dim>
13037 </port>
13038 </input>
13039 <output>
13040 <port id="2" precision="FP16">
13041 <dim>32</dim>
13042 <dim>1</dim>
13043 <dim>1</dim>
13044 <dim>3</dim>
13045 <dim>3</dim>
13046 </port>
13047 </output>
13048 </layer>
13049 <layer id="911" name="bottleneck3_0/inner/dw1/conv" type="GroupConvolution" version="opset1">
13050 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="2,2"/>
13051 <input>
13052 <port id="0">
13053 <dim>1</dim>
13054 <dim>32</dim>
13055 <dim>80</dim>
13056 <dim>136</dim>
13057 </port>
13058 <port id="1">
13059 <dim>32</dim>
13060 <dim>1</dim>
13061 <dim>1</dim>
13062 <dim>3</dim>
13063 <dim>3</dim>
13064 </port>
13065 </input>
13066 <output>
13067 <port id="2" precision="FP16">
13068 <dim>1</dim>
13069 <dim>32</dim>
13070 <dim>40</dim>
13071 <dim>68</dim>
13072 </port>
13073 </output>
13074 </layer>
13075 <layer id="912" name="data_add_2399323998102121312" type="Const" version="opset1">
13076 <data element_type="f16" offset="44234" shape="1,32,1,1" size="64"/>
13077 <output>
13078 <port id="0" precision="FP16">
13079 <dim>1</dim>
13080 <dim>32</dim>
13081 <dim>1</dim>
13082 <dim>1</dim>
13083 </port>
13084 </output>
13085 </layer>
13086 <layer id="913" name="bottleneck3_0/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
13087 <data auto_broadcast="numpy"/>
13088 <input>
13089 <port id="0">
13090 <dim>1</dim>
13091 <dim>32</dim>
13092 <dim>40</dim>
13093 <dim>68</dim>
13094 </port>
13095 <port id="1">
13096 <dim>1</dim>
13097 <dim>32</dim>
13098 <dim>1</dim>
13099 <dim>1</dim>
13100 </port>
13101 </input>
13102 <output>
13103 <port id="2" names="bottleneck3_0/inner/dw1/conv" precision="FP16">
13104 <dim>1</dim>
13105 <dim>32</dim>
13106 <dim>40</dim>
13107 <dim>68</dim>
13108 </port>
13109 </output>
13110 </layer>
13111 <layer id="914" name="bottleneck3_0/inner/dw1/fn/weights31172399591023" type="Const" version="opset1">
13112 <data element_type="f32" offset="1576" shape="1" size="4"/>
13113 <output>
13114 <port id="0" precision="FP32">
13115 <dim>1</dim>
13116 </port>
13117 </output>
13118 </layer>
13119 <layer id="915" name="bottleneck3_0/inner/dw1/fn" type="PReLU" version="opset1">
13120 <input>
13121 <port id="0">
13122 <dim>1</dim>
13123 <dim>32</dim>
13124 <dim>40</dim>
13125 <dim>68</dim>
13126 </port>
13127 <port id="1">
13128 <dim>1</dim>
13129 </port>
13130 </input>
13131 <output>
13132 <port id="2" names="bottleneck3_0/inner/dw1/conv" precision="FP16">
13133 <dim>1</dim>
13134 <dim>32</dim>
13135 <dim>40</dim>
13136 <dim>68</dim>
13137 </port>
13138 </output>
13139 </layer>
13140 <layer id="916" name="bottleneck3_0/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
13141 <data auto_broadcast="numpy" levels="256"/>
13142 <input>
13143 <port id="0">
13144 <dim>1</dim>
13145 <dim>32</dim>
13146 <dim>40</dim>
13147 <dim>68</dim>
13148 </port>
13149 <port id="1"/>
13150 <port id="2"/>
13151 <port id="3"/>
13152 <port id="4"/>
13153 </input>
13154 <output>
13155 <port id="5" precision="FP16">
13156 <dim>1</dim>
13157 <dim>32</dim>
13158 <dim>40</dim>
13159 <dim>68</dim>
13160 </port>
13161 </output>
13162 </layer>
13163 <layer id="917" name="bottleneck3_0/dim_inc/bn/mean/Fused_Mul__copy102510209/quantized1288019926" type="Const" version="opset1">
13164 <data element_type="i8" offset="44298" shape="128,32,1,1" size="4096"/>
13165 <output>
13166 <port id="0" precision="I8">
13167 <dim>128</dim>
13168 <dim>32</dim>
13169 <dim>1</dim>
13170 <dim>1</dim>
13171 </port>
13172 </output>
13173 </layer>
13174 <layer id="918" name="bottleneck3_0/dim_inc/bn/mean/Fused_Mul__copy102510209/quantized/to_f16" type="Convert" version="opset1">
13175 <data destination_type="f16"/>
13176 <input>
13177 <port id="0">
13178 <dim>128</dim>
13179 <dim>32</dim>
13180 <dim>1</dim>
13181 <dim>1</dim>
13182 </port>
13183 </input>
13184 <output>
13185 <port id="1" precision="FP16">
13186 <dim>128</dim>
13187 <dim>32</dim>
13188 <dim>1</dim>
13189 <dim>1</dim>
13190 </port>
13191 </output>
13192 </layer>
13193 <layer id="919" name="bottleneck3_0/dim_inc/conv/fq_weights_1/zero_point1289320658" type="Const" version="opset1">
13194 <data element_type="f16" offset="48394" shape="128,1,1,1" size="256"/>
13195 <output>
13196 <port id="0" precision="FP16">
13197 <dim>128</dim>
13198 <dim>1</dim>
13199 <dim>1</dim>
13200 <dim>1</dim>
13201 </port>
13202 </output>
13203 </layer>
13204 <layer id="920" name="bottleneck3_0/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
13205 <data auto_broadcast="numpy"/>
13206 <input>
13207 <port id="0">
13208 <dim>128</dim>
13209 <dim>32</dim>
13210 <dim>1</dim>
13211 <dim>1</dim>
13212 </port>
13213 <port id="1">
13214 <dim>128</dim>
13215 <dim>1</dim>
13216 <dim>1</dim>
13217 <dim>1</dim>
13218 </port>
13219 </input>
13220 <output>
13221 <port id="2" precision="FP16">
13222 <dim>128</dim>
13223 <dim>32</dim>
13224 <dim>1</dim>
13225 <dim>1</dim>
13226 </port>
13227 </output>
13228 </layer>
13229 <layer id="921" name="bottleneck3_0/dim_inc/conv/fq_weights_1/scale1288819863" type="Const" version="opset1">
13230 <data element_type="f16" offset="48650" shape="128,1,1,1" size="256"/>
13231 <output>
13232 <port id="0" precision="FP16">
13233 <dim>128</dim>
13234 <dim>1</dim>
13235 <dim>1</dim>
13236 <dim>1</dim>
13237 </port>
13238 </output>
13239 </layer>
13240 <layer id="922" name="bottleneck3_0/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
13241 <data auto_broadcast="numpy"/>
13242 <input>
13243 <port id="0">
13244 <dim>128</dim>
13245 <dim>32</dim>
13246 <dim>1</dim>
13247 <dim>1</dim>
13248 </port>
13249 <port id="1">
13250 <dim>128</dim>
13251 <dim>1</dim>
13252 <dim>1</dim>
13253 <dim>1</dim>
13254 </port>
13255 </input>
13256 <output>
13257 <port id="2" precision="FP16">
13258 <dim>128</dim>
13259 <dim>32</dim>
13260 <dim>1</dim>
13261 <dim>1</dim>
13262 </port>
13263 </output>
13264 </layer>
13265 <layer id="923" name="bottleneck3_0/dim_inc/conv" type="Convolution" version="opset1">
13266 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
13267 <input>
13268 <port id="0">
13269 <dim>1</dim>
13270 <dim>32</dim>
13271 <dim>40</dim>
13272 <dim>68</dim>
13273 </port>
13274 <port id="1">
13275 <dim>128</dim>
13276 <dim>32</dim>
13277 <dim>1</dim>
13278 <dim>1</dim>
13279 </port>
13280 </input>
13281 <output>
13282 <port id="2" precision="FP16">
13283 <dim>1</dim>
13284 <dim>128</dim>
13285 <dim>40</dim>
13286 <dim>68</dim>
13287 </port>
13288 </output>
13289 </layer>
13290 <layer id="924" name="data_add_2400124006102719554" type="Const" version="opset1">
13291 <data element_type="f16" offset="48906" shape="1,128,1,1" size="256"/>
13292 <output>
13293 <port id="0" precision="FP16">
13294 <dim>1</dim>
13295 <dim>128</dim>
13296 <dim>1</dim>
13297 <dim>1</dim>
13298 </port>
13299 </output>
13300 </layer>
13301 <layer id="925" name="bottleneck3_0/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
13302 <data auto_broadcast="numpy"/>
13303 <input>
13304 <port id="0">
13305 <dim>1</dim>
13306 <dim>128</dim>
13307 <dim>40</dim>
13308 <dim>68</dim>
13309 </port>
13310 <port id="1">
13311 <dim>1</dim>
13312 <dim>128</dim>
13313 <dim>1</dim>
13314 <dim>1</dim>
13315 </port>
13316 </input>
13317 <output>
13318 <port id="2" names="bottleneck3_0/dim_inc/conv" precision="FP16">
13319 <dim>1</dim>
13320 <dim>128</dim>
13321 <dim>40</dim>
13322 <dim>68</dim>
13323 </port>
13324 </output>
13325 </layer>
13326 <layer id="926" name="bottleneck3_0/add/fq_input_1" type="FakeQuantize" version="opset1">
13327 <data auto_broadcast="numpy" levels="256"/>
13328 <input>
13329 <port id="0">
13330 <dim>1</dim>
13331 <dim>128</dim>
13332 <dim>40</dim>
13333 <dim>68</dim>
13334 </port>
13335 <port id="1"/>
13336 <port id="2"/>
13337 <port id="3"/>
13338 <port id="4"/>
13339 </input>
13340 <output>
13341 <port id="5" precision="FP16">
13342 <dim>1</dim>
13343 <dim>128</dim>
13344 <dim>40</dim>
13345 <dim>68</dim>
13346 </port>
13347 </output>
13348 </layer>
13349 <layer id="927" name="bottleneck3_0/add" type="Add" version="opset1">
13350 <data auto_broadcast="numpy"/>
13351 <input>
13352 <port id="0">
13353 <dim>1</dim>
13354 <dim>128</dim>
13355 <dim>40</dim>
13356 <dim>68</dim>
13357 </port>
13358 <port id="1">
13359 <dim>1</dim>
13360 <dim>128</dim>
13361 <dim>40</dim>
13362 <dim>68</dim>
13363 </port>
13364 </input>
13365 <output>
13366 <port id="2" names="bottleneck3_0/add" precision="FP16">
13367 <dim>1</dim>
13368 <dim>128</dim>
13369 <dim>40</dim>
13370 <dim>68</dim>
13371 </port>
13372 </output>
13373 </layer>
13374 <layer id="928" name="bottleneck3_0/fn/weights31120402591030" type="Const" version="opset1">
13375 <data element_type="f32" offset="1576" shape="1" size="4"/>
13376 <output>
13377 <port id="0" precision="FP32">
13378 <dim>1</dim>
13379 </port>
13380 </output>
13381 </layer>
13382 <layer id="929" name="bottleneck3_0/fn" type="PReLU" version="opset1">
13383 <input>
13384 <port id="0">
13385 <dim>1</dim>
13386 <dim>128</dim>
13387 <dim>40</dim>
13388 <dim>68</dim>
13389 </port>
13390 <port id="1">
13391 <dim>1</dim>
13392 </port>
13393 </input>
13394 <output>
13395 <port id="2" names="bottleneck3_0/add" precision="FP16">
13396 <dim>1</dim>
13397 <dim>128</dim>
13398 <dim>40</dim>
13399 <dim>68</dim>
13400 </port>
13401 </output>
13402 </layer>
13403 <layer id="930" name="bottleneck3_1/add/fq_input_0" type="FakeQuantize" version="opset1">
13404 <data auto_broadcast="numpy" levels="256"/>
13405 <input>
13406 <port id="0">
13407 <dim>1</dim>
13408 <dim>128</dim>
13409 <dim>40</dim>
13410 <dim>68</dim>
13411 </port>
13412 <port id="1"/>
13413 <port id="2"/>
13414 <port id="3"/>
13415 <port id="4"/>
13416 </input>
13417 <output>
13418 <port id="5" precision="FP16">
13419 <dim>1</dim>
13420 <dim>128</dim>
13421 <dim>40</dim>
13422 <dim>68</dim>
13423 </port>
13424 </output>
13425 </layer>
13426 <layer id="931" name="5214521820256" type="Const" version="opset1">
13427 <data element_type="f16" offset="49162" shape="" size="2"/>
13428 <output>
13429 <port id="0" precision="FP16"/>
13430 </output>
13431 </layer>
13432 <layer id="932" name="5215521920289" type="Const" version="opset1">
13433 <data element_type="f16" offset="49164" shape="" size="2"/>
13434 <output>
13435 <port id="0" precision="FP16"/>
13436 </output>
13437 </layer>
13438 <layer id="933" name="5216522020670" type="Const" version="opset1">
13439 <data element_type="f16" offset="49162" shape="" size="2"/>
13440 <output>
13441 <port id="0" precision="FP16"/>
13442 </output>
13443 </layer>
13444 <layer id="934" name="5217522120055" type="Const" version="opset1">
13445 <data element_type="f16" offset="49164" shape="" size="2"/>
13446 <output>
13447 <port id="0" precision="FP16"/>
13448 </output>
13449 </layer>
13450 <layer id="935" name="2604260820451" type="Const" version="opset1">
13451 <data element_type="f16" offset="49166" shape="" size="2"/>
13452 <output>
13453 <port id="0" precision="FP16"/>
13454 </output>
13455 </layer>
13456 <layer id="936" name="2605260922074" type="Const" version="opset1">
13457 <data element_type="f16" offset="49168" shape="" size="2"/>
13458 <output>
13459 <port id="0" precision="FP16"/>
13460 </output>
13461 </layer>
13462 <layer id="937" name="2606261022275" type="Const" version="opset1">
13463 <data element_type="f16" offset="49166" shape="" size="2"/>
13464 <output>
13465 <port id="0" precision="FP16"/>
13466 </output>
13467 </layer>
13468 <layer id="938" name="2607261120814" type="Const" version="opset1">
13469 <data element_type="f16" offset="49168" shape="" size="2"/>
13470 <output>
13471 <port id="0" precision="FP16"/>
13472 </output>
13473 </layer>
13474 <layer id="939" name="3684368822539" type="Const" version="opset1">
13475 <data element_type="f16" offset="49170" shape="1,32,1,1" size="64"/>
13476 <output>
13477 <port id="0" precision="FP16">
13478 <dim>1</dim>
13479 <dim>32</dim>
13480 <dim>1</dim>
13481 <dim>1</dim>
13482 </port>
13483 </output>
13484 </layer>
13485 <layer id="940" name="3685368922827" type="Const" version="opset1">
13486 <data element_type="f16" offset="49234" shape="1,32,1,1" size="64"/>
13487 <output>
13488 <port id="0" precision="FP16">
13489 <dim>1</dim>
13490 <dim>32</dim>
13491 <dim>1</dim>
13492 <dim>1</dim>
13493 </port>
13494 </output>
13495 </layer>
13496 <layer id="941" name="3686369020463" type="Const" version="opset1">
13497 <data element_type="f16" offset="49170" shape="1,32,1,1" size="64"/>
13498 <output>
13499 <port id="0" precision="FP16">
13500 <dim>1</dim>
13501 <dim>32</dim>
13502 <dim>1</dim>
13503 <dim>1</dim>
13504 </port>
13505 </output>
13506 </layer>
13507 <layer id="942" name="3687369120550" type="Const" version="opset1">
13508 <data element_type="f16" offset="49234" shape="1,32,1,1" size="64"/>
13509 <output>
13510 <port id="0" precision="FP16">
13511 <dim>1</dim>
13512 <dim>32</dim>
13513 <dim>1</dim>
13514 <dim>1</dim>
13515 </port>
13516 </output>
13517 </layer>
13518 <layer id="943" name="bottleneck3_1/dim_red/bn/mean/Fused_Mul__copy103210212/quantized1400819425" type="Const" version="opset1">
13519 <data element_type="i8" offset="49298" shape="32,128,1,1" size="4096"/>
13520 <output>
13521 <port id="0" precision="I8">
13522 <dim>32</dim>
13523 <dim>128</dim>
13524 <dim>1</dim>
13525 <dim>1</dim>
13526 </port>
13527 </output>
13528 </layer>
13529 <layer id="944" name="bottleneck3_1/dim_red/bn/mean/Fused_Mul__copy103210212/quantized/to_f16" type="Convert" version="opset1">
13530 <data destination_type="f16"/>
13531 <input>
13532 <port id="0">
13533 <dim>32</dim>
13534 <dim>128</dim>
13535 <dim>1</dim>
13536 <dim>1</dim>
13537 </port>
13538 </input>
13539 <output>
13540 <port id="1" precision="FP16">
13541 <dim>32</dim>
13542 <dim>128</dim>
13543 <dim>1</dim>
13544 <dim>1</dim>
13545 </port>
13546 </output>
13547 </layer>
13548 <layer id="945" name="bottleneck3_1/dim_red/conv/fq_weights_1/zero_point1402122161" type="Const" version="opset1">
13549 <data element_type="f16" offset="53394" shape="32,1,1,1" size="64"/>
13550 <output>
13551 <port id="0" precision="FP16">
13552 <dim>32</dim>
13553 <dim>1</dim>
13554 <dim>1</dim>
13555 <dim>1</dim>
13556 </port>
13557 </output>
13558 </layer>
13559 <layer id="946" name="bottleneck3_1/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
13560 <data auto_broadcast="numpy"/>
13561 <input>
13562 <port id="0">
13563 <dim>32</dim>
13564 <dim>128</dim>
13565 <dim>1</dim>
13566 <dim>1</dim>
13567 </port>
13568 <port id="1">
13569 <dim>32</dim>
13570 <dim>1</dim>
13571 <dim>1</dim>
13572 <dim>1</dim>
13573 </port>
13574 </input>
13575 <output>
13576 <port id="2" precision="FP16">
13577 <dim>32</dim>
13578 <dim>128</dim>
13579 <dim>1</dim>
13580 <dim>1</dim>
13581 </port>
13582 </output>
13583 </layer>
13584 <layer id="947" name="bottleneck3_1/dim_red/conv/fq_weights_1/scale1401622887" type="Const" version="opset1">
13585 <data element_type="f16" offset="53458" shape="32,1,1,1" size="64"/>
13586 <output>
13587 <port id="0" precision="FP16">
13588 <dim>32</dim>
13589 <dim>1</dim>
13590 <dim>1</dim>
13591 <dim>1</dim>
13592 </port>
13593 </output>
13594 </layer>
13595 <layer id="948" name="bottleneck3_1/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
13596 <data auto_broadcast="numpy"/>
13597 <input>
13598 <port id="0">
13599 <dim>32</dim>
13600 <dim>128</dim>
13601 <dim>1</dim>
13602 <dim>1</dim>
13603 </port>
13604 <port id="1">
13605 <dim>32</dim>
13606 <dim>1</dim>
13607 <dim>1</dim>
13608 <dim>1</dim>
13609 </port>
13610 </input>
13611 <output>
13612 <port id="2" precision="FP16">
13613 <dim>32</dim>
13614 <dim>128</dim>
13615 <dim>1</dim>
13616 <dim>1</dim>
13617 </port>
13618 </output>
13619 </layer>
13620 <layer id="949" name="bottleneck3_1/dim_red/conv" type="Convolution" version="opset1">
13621 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
13622 <input>
13623 <port id="0">
13624 <dim>1</dim>
13625 <dim>128</dim>
13626 <dim>40</dim>
13627 <dim>68</dim>
13628 </port>
13629 <port id="1">
13630 <dim>32</dim>
13631 <dim>128</dim>
13632 <dim>1</dim>
13633 <dim>1</dim>
13634 </port>
13635 </input>
13636 <output>
13637 <port id="2" precision="FP16">
13638 <dim>1</dim>
13639 <dim>32</dim>
13640 <dim>40</dim>
13641 <dim>68</dim>
13642 </port>
13643 </output>
13644 </layer>
13645 <layer id="950" name="data_add_2400924014103419488" type="Const" version="opset1">
13646 <data element_type="f16" offset="53522" shape="1,32,1,1" size="64"/>
13647 <output>
13648 <port id="0" precision="FP16">
13649 <dim>1</dim>
13650 <dim>32</dim>
13651 <dim>1</dim>
13652 <dim>1</dim>
13653 </port>
13654 </output>
13655 </layer>
13656 <layer id="951" name="bottleneck3_1/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
13657 <data auto_broadcast="numpy"/>
13658 <input>
13659 <port id="0">
13660 <dim>1</dim>
13661 <dim>32</dim>
13662 <dim>40</dim>
13663 <dim>68</dim>
13664 </port>
13665 <port id="1">
13666 <dim>1</dim>
13667 <dim>32</dim>
13668 <dim>1</dim>
13669 <dim>1</dim>
13670 </port>
13671 </input>
13672 <output>
13673 <port id="2" names="bottleneck3_1/dim_red/conv" precision="FP16">
13674 <dim>1</dim>
13675 <dim>32</dim>
13676 <dim>40</dim>
13677 <dim>68</dim>
13678 </port>
13679 </output>
13680 </layer>
13681 <layer id="952" name="bottleneck3_1/dim_red/fn/weights31008405681036" type="Const" version="opset1">
13682 <data element_type="f32" offset="1576" shape="1" size="4"/>
13683 <output>
13684 <port id="0" precision="FP32">
13685 <dim>1</dim>
13686 </port>
13687 </output>
13688 </layer>
13689 <layer id="953" name="bottleneck3_1/dim_red/fn" type="PReLU" version="opset1">
13690 <input>
13691 <port id="0">
13692 <dim>1</dim>
13693 <dim>32</dim>
13694 <dim>40</dim>
13695 <dim>68</dim>
13696 </port>
13697 <port id="1">
13698 <dim>1</dim>
13699 </port>
13700 </input>
13701 <output>
13702 <port id="2" names="bottleneck3_1/dim_red/conv" precision="FP16">
13703 <dim>1</dim>
13704 <dim>32</dim>
13705 <dim>40</dim>
13706 <dim>68</dim>
13707 </port>
13708 </output>
13709 </layer>
13710 <layer id="954" name="bottleneck3_1/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
13711 <data auto_broadcast="numpy" levels="256"/>
13712 <input>
13713 <port id="0">
13714 <dim>1</dim>
13715 <dim>32</dim>
13716 <dim>40</dim>
13717 <dim>68</dim>
13718 </port>
13719 <port id="1">
13720 <dim>1</dim>
13721 <dim>32</dim>
13722 <dim>1</dim>
13723 <dim>1</dim>
13724 </port>
13725 <port id="2">
13726 <dim>1</dim>
13727 <dim>32</dim>
13728 <dim>1</dim>
13729 <dim>1</dim>
13730 </port>
13731 <port id="3">
13732 <dim>1</dim>
13733 <dim>32</dim>
13734 <dim>1</dim>
13735 <dim>1</dim>
13736 </port>
13737 <port id="4">
13738 <dim>1</dim>
13739 <dim>32</dim>
13740 <dim>1</dim>
13741 <dim>1</dim>
13742 </port>
13743 </input>
13744 <output>
13745 <port id="5" precision="FP16">
13746 <dim>1</dim>
13747 <dim>32</dim>
13748 <dim>40</dim>
13749 <dim>68</dim>
13750 </port>
13751 </output>
13752 </layer>
13753 <layer id="955" name="16843/value1684521144" type="Const" version="opset1">
13754 <data element_type="i64" offset="43778" shape="5" size="40"/>
13755 <output>
13756 <port id="0" precision="I64">
13757 <dim>5</dim>
13758 </port>
13759 </output>
13760 </layer>
13761 <layer id="956" name="bottleneck3_1/inner/dw1/bn/mean/Fused_Mul__copy103810215/quantized1283219938" type="Const" version="opset1">
13762 <data element_type="i8" offset="53586" shape="32,1,3,3" size="288"/>
13763 <output>
13764 <port id="0" precision="I8">
13765 <dim>32</dim>
13766 <dim>1</dim>
13767 <dim>3</dim>
13768 <dim>3</dim>
13769 </port>
13770 </output>
13771 </layer>
13772 <layer id="957" name="bottleneck3_1/inner/dw1/bn/mean/Fused_Mul__copy103810215/quantized/to_f16" type="Convert" version="opset1">
13773 <data destination_type="f16"/>
13774 <input>
13775 <port id="0">
13776 <dim>32</dim>
13777 <dim>1</dim>
13778 <dim>3</dim>
13779 <dim>3</dim>
13780 </port>
13781 </input>
13782 <output>
13783 <port id="1" precision="FP16">
13784 <dim>32</dim>
13785 <dim>1</dim>
13786 <dim>3</dim>
13787 <dim>3</dim>
13788 </port>
13789 </output>
13790 </layer>
13791 <layer id="958" name="bottleneck3_1/inner/dw1/conv/fq_weights_1/zero_point1284522812" type="Const" version="opset1">
13792 <data element_type="f16" offset="53874" shape="32,1,1,1" size="64"/>
13793 <output>
13794 <port id="0" precision="FP16">
13795 <dim>32</dim>
13796 <dim>1</dim>
13797 <dim>1</dim>
13798 <dim>1</dim>
13799 </port>
13800 </output>
13801 </layer>
13802 <layer id="959" name="bottleneck3_1/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
13803 <data auto_broadcast="numpy"/>
13804 <input>
13805 <port id="0">
13806 <dim>32</dim>
13807 <dim>1</dim>
13808 <dim>3</dim>
13809 <dim>3</dim>
13810 </port>
13811 <port id="1">
13812 <dim>32</dim>
13813 <dim>1</dim>
13814 <dim>1</dim>
13815 <dim>1</dim>
13816 </port>
13817 </input>
13818 <output>
13819 <port id="2" precision="FP16">
13820 <dim>32</dim>
13821 <dim>1</dim>
13822 <dim>3</dim>
13823 <dim>3</dim>
13824 </port>
13825 </output>
13826 </layer>
13827 <layer id="960" name="bottleneck3_1/inner/dw1/conv/fq_weights_1/scale1284019992" type="Const" version="opset1">
13828 <data element_type="f16" offset="53938" shape="32,1,1,1" size="64"/>
13829 <output>
13830 <port id="0" precision="FP16">
13831 <dim>32</dim>
13832 <dim>1</dim>
13833 <dim>1</dim>
13834 <dim>1</dim>
13835 </port>
13836 </output>
13837 </layer>
13838 <layer id="961" name="bottleneck3_1/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
13839 <data auto_broadcast="numpy"/>
13840 <input>
13841 <port id="0">
13842 <dim>32</dim>
13843 <dim>1</dim>
13844 <dim>3</dim>
13845 <dim>3</dim>
13846 </port>
13847 <port id="1">
13848 <dim>32</dim>
13849 <dim>1</dim>
13850 <dim>1</dim>
13851 <dim>1</dim>
13852 </port>
13853 </input>
13854 <output>
13855 <port id="2" precision="FP16">
13856 <dim>32</dim>
13857 <dim>1</dim>
13858 <dim>3</dim>
13859 <dim>3</dim>
13860 </port>
13861 </output>
13862 </layer>
13863 <layer id="962" name="16843" type="Reshape" version="opset1">
13864 <data special_zero="true"/>
13865 <input>
13866 <port id="0">
13867 <dim>32</dim>
13868 <dim>1</dim>
13869 <dim>3</dim>
13870 <dim>3</dim>
13871 </port>
13872 <port id="1">
13873 <dim>5</dim>
13874 </port>
13875 </input>
13876 <output>
13877 <port id="2" precision="FP16">
13878 <dim>32</dim>
13879 <dim>1</dim>
13880 <dim>1</dim>
13881 <dim>3</dim>
13882 <dim>3</dim>
13883 </port>
13884 </output>
13885 </layer>
13886 <layer id="963" name="bottleneck3_1/inner/dw1/conv" type="GroupConvolution" version="opset1">
13887 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
13888 <input>
13889 <port id="0">
13890 <dim>1</dim>
13891 <dim>32</dim>
13892 <dim>40</dim>
13893 <dim>68</dim>
13894 </port>
13895 <port id="1">
13896 <dim>32</dim>
13897 <dim>1</dim>
13898 <dim>1</dim>
13899 <dim>3</dim>
13900 <dim>3</dim>
13901 </port>
13902 </input>
13903 <output>
13904 <port id="2" precision="FP16">
13905 <dim>1</dim>
13906 <dim>32</dim>
13907 <dim>40</dim>
13908 <dim>68</dim>
13909 </port>
13910 </output>
13911 </layer>
13912 <layer id="964" name="data_add_2401724022104019401" type="Const" version="opset1">
13913 <data element_type="f16" offset="54002" shape="1,32,1,1" size="64"/>
13914 <output>
13915 <port id="0" precision="FP16">
13916 <dim>1</dim>
13917 <dim>32</dim>
13918 <dim>1</dim>
13919 <dim>1</dim>
13920 </port>
13921 </output>
13922 </layer>
13923 <layer id="965" name="bottleneck3_1/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
13924 <data auto_broadcast="numpy"/>
13925 <input>
13926 <port id="0">
13927 <dim>1</dim>
13928 <dim>32</dim>
13929 <dim>40</dim>
13930 <dim>68</dim>
13931 </port>
13932 <port id="1">
13933 <dim>1</dim>
13934 <dim>32</dim>
13935 <dim>1</dim>
13936 <dim>1</dim>
13937 </port>
13938 </input>
13939 <output>
13940 <port id="2" names="bottleneck3_1/inner/dw1/conv" precision="FP16">
13941 <dim>1</dim>
13942 <dim>32</dim>
13943 <dim>40</dim>
13944 <dim>68</dim>
13945 </port>
13946 </output>
13947 </layer>
13948 <layer id="966" name="bottleneck3_1/inner/dw1/fn/weights30968403911042" type="Const" version="opset1">
13949 <data element_type="f32" offset="1576" shape="1" size="4"/>
13950 <output>
13951 <port id="0" precision="FP32">
13952 <dim>1</dim>
13953 </port>
13954 </output>
13955 </layer>
13956 <layer id="967" name="bottleneck3_1/inner/dw1/fn" type="PReLU" version="opset1">
13957 <input>
13958 <port id="0">
13959 <dim>1</dim>
13960 <dim>32</dim>
13961 <dim>40</dim>
13962 <dim>68</dim>
13963 </port>
13964 <port id="1">
13965 <dim>1</dim>
13966 </port>
13967 </input>
13968 <output>
13969 <port id="2" names="bottleneck3_1/inner/dw1/conv" precision="FP16">
13970 <dim>1</dim>
13971 <dim>32</dim>
13972 <dim>40</dim>
13973 <dim>68</dim>
13974 </port>
13975 </output>
13976 </layer>
13977 <layer id="968" name="bottleneck3_1/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
13978 <data auto_broadcast="numpy" levels="256"/>
13979 <input>
13980 <port id="0">
13981 <dim>1</dim>
13982 <dim>32</dim>
13983 <dim>40</dim>
13984 <dim>68</dim>
13985 </port>
13986 <port id="1"/>
13987 <port id="2"/>
13988 <port id="3"/>
13989 <port id="4"/>
13990 </input>
13991 <output>
13992 <port id="5" precision="FP16">
13993 <dim>1</dim>
13994 <dim>32</dim>
13995 <dim>40</dim>
13996 <dim>68</dim>
13997 </port>
13998 </output>
13999 </layer>
14000 <layer id="969" name="bottleneck3_1/dim_inc/bn/mean/Fused_Mul__copy104410218/quantized1307222638" type="Const" version="opset1">
14001 <data element_type="i8" offset="54066" shape="128,32,1,1" size="4096"/>
14002 <output>
14003 <port id="0" precision="I8">
14004 <dim>128</dim>
14005 <dim>32</dim>
14006 <dim>1</dim>
14007 <dim>1</dim>
14008 </port>
14009 </output>
14010 </layer>
14011 <layer id="970" name="bottleneck3_1/dim_inc/bn/mean/Fused_Mul__copy104410218/quantized/to_f16" type="Convert" version="opset1">
14012 <data destination_type="f16"/>
14013 <input>
14014 <port id="0">
14015 <dim>128</dim>
14016 <dim>32</dim>
14017 <dim>1</dim>
14018 <dim>1</dim>
14019 </port>
14020 </input>
14021 <output>
14022 <port id="1" precision="FP16">
14023 <dim>128</dim>
14024 <dim>32</dim>
14025 <dim>1</dim>
14026 <dim>1</dim>
14027 </port>
14028 </output>
14029 </layer>
14030 <layer id="971" name="bottleneck3_1/dim_inc/conv/fq_weights_1/zero_point1308520343" type="Const" version="opset1">
14031 <data element_type="f16" offset="58162" shape="128,1,1,1" size="256"/>
14032 <output>
14033 <port id="0" precision="FP16">
14034 <dim>128</dim>
14035 <dim>1</dim>
14036 <dim>1</dim>
14037 <dim>1</dim>
14038 </port>
14039 </output>
14040 </layer>
14041 <layer id="972" name="bottleneck3_1/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
14042 <data auto_broadcast="numpy"/>
14043 <input>
14044 <port id="0">
14045 <dim>128</dim>
14046 <dim>32</dim>
14047 <dim>1</dim>
14048 <dim>1</dim>
14049 </port>
14050 <port id="1">
14051 <dim>128</dim>
14052 <dim>1</dim>
14053 <dim>1</dim>
14054 <dim>1</dim>
14055 </port>
14056 </input>
14057 <output>
14058 <port id="2" precision="FP16">
14059 <dim>128</dim>
14060 <dim>32</dim>
14061 <dim>1</dim>
14062 <dim>1</dim>
14063 </port>
14064 </output>
14065 </layer>
14066 <layer id="973" name="bottleneck3_1/dim_inc/conv/fq_weights_1/scale1308019578" type="Const" version="opset1">
14067 <data element_type="f16" offset="58418" shape="128,1,1,1" size="256"/>
14068 <output>
14069 <port id="0" precision="FP16">
14070 <dim>128</dim>
14071 <dim>1</dim>
14072 <dim>1</dim>
14073 <dim>1</dim>
14074 </port>
14075 </output>
14076 </layer>
14077 <layer id="974" name="bottleneck3_1/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
14078 <data auto_broadcast="numpy"/>
14079 <input>
14080 <port id="0">
14081 <dim>128</dim>
14082 <dim>32</dim>
14083 <dim>1</dim>
14084 <dim>1</dim>
14085 </port>
14086 <port id="1">
14087 <dim>128</dim>
14088 <dim>1</dim>
14089 <dim>1</dim>
14090 <dim>1</dim>
14091 </port>
14092 </input>
14093 <output>
14094 <port id="2" precision="FP16">
14095 <dim>128</dim>
14096 <dim>32</dim>
14097 <dim>1</dim>
14098 <dim>1</dim>
14099 </port>
14100 </output>
14101 </layer>
14102 <layer id="975" name="bottleneck3_1/dim_inc/conv" type="Convolution" version="opset1">
14103 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
14104 <input>
14105 <port id="0">
14106 <dim>1</dim>
14107 <dim>32</dim>
14108 <dim>40</dim>
14109 <dim>68</dim>
14110 </port>
14111 <port id="1">
14112 <dim>128</dim>
14113 <dim>32</dim>
14114 <dim>1</dim>
14115 <dim>1</dim>
14116 </port>
14117 </input>
14118 <output>
14119 <port id="2" precision="FP16">
14120 <dim>1</dim>
14121 <dim>128</dim>
14122 <dim>40</dim>
14123 <dim>68</dim>
14124 </port>
14125 </output>
14126 </layer>
14127 <layer id="976" name="data_add_2402524030104622374" type="Const" version="opset1">
14128 <data element_type="f16" offset="58674" shape="1,128,1,1" size="256"/>
14129 <output>
14130 <port id="0" precision="FP16">
14131 <dim>1</dim>
14132 <dim>128</dim>
14133 <dim>1</dim>
14134 <dim>1</dim>
14135 </port>
14136 </output>
14137 </layer>
14138 <layer id="977" name="bottleneck3_1/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
14139 <data auto_broadcast="numpy"/>
14140 <input>
14141 <port id="0">
14142 <dim>1</dim>
14143 <dim>128</dim>
14144 <dim>40</dim>
14145 <dim>68</dim>
14146 </port>
14147 <port id="1">
14148 <dim>1</dim>
14149 <dim>128</dim>
14150 <dim>1</dim>
14151 <dim>1</dim>
14152 </port>
14153 </input>
14154 <output>
14155 <port id="2" names="bottleneck3_1/dim_inc/conv" precision="FP16">
14156 <dim>1</dim>
14157 <dim>128</dim>
14158 <dim>40</dim>
14159 <dim>68</dim>
14160 </port>
14161 </output>
14162 </layer>
14163 <layer id="978" name="bottleneck3_1/add/fq_input_1" type="FakeQuantize" version="opset1">
14164 <data auto_broadcast="numpy" levels="256"/>
14165 <input>
14166 <port id="0">
14167 <dim>1</dim>
14168 <dim>128</dim>
14169 <dim>40</dim>
14170 <dim>68</dim>
14171 </port>
14172 <port id="1"/>
14173 <port id="2"/>
14174 <port id="3"/>
14175 <port id="4"/>
14176 </input>
14177 <output>
14178 <port id="5" precision="FP16">
14179 <dim>1</dim>
14180 <dim>128</dim>
14181 <dim>40</dim>
14182 <dim>68</dim>
14183 </port>
14184 </output>
14185 </layer>
14186 <layer id="979" name="bottleneck3_1/add" type="Add" version="opset1">
14187 <data auto_broadcast="numpy"/>
14188 <input>
14189 <port id="0">
14190 <dim>1</dim>
14191 <dim>128</dim>
14192 <dim>40</dim>
14193 <dim>68</dim>
14194 </port>
14195 <port id="1">
14196 <dim>1</dim>
14197 <dim>128</dim>
14198 <dim>40</dim>
14199 <dim>68</dim>
14200 </port>
14201 </input>
14202 <output>
14203 <port id="2" names="bottleneck3_1/add" precision="FP16">
14204 <dim>1</dim>
14205 <dim>128</dim>
14206 <dim>40</dim>
14207 <dim>68</dim>
14208 </port>
14209 </output>
14210 </layer>
14211 <layer id="980" name="bottleneck3_1/fn/weights31104406281049" type="Const" version="opset1">
14212 <data element_type="f32" offset="1576" shape="1" size="4"/>
14213 <output>
14214 <port id="0" precision="FP32">
14215 <dim>1</dim>
14216 </port>
14217 </output>
14218 </layer>
14219 <layer id="981" name="bottleneck3_1/fn" type="PReLU" version="opset1">
14220 <input>
14221 <port id="0">
14222 <dim>1</dim>
14223 <dim>128</dim>
14224 <dim>40</dim>
14225 <dim>68</dim>
14226 </port>
14227 <port id="1">
14228 <dim>1</dim>
14229 </port>
14230 </input>
14231 <output>
14232 <port id="2" names="bottleneck3_1/add" precision="FP16">
14233 <dim>1</dim>
14234 <dim>128</dim>
14235 <dim>40</dim>
14236 <dim>68</dim>
14237 </port>
14238 </output>
14239 </layer>
14240 <layer id="982" name="bottleneck3_2/add/fq_input_0" type="FakeQuantize" version="opset1">
14241 <data auto_broadcast="numpy" levels="256"/>
14242 <input>
14243 <port id="0">
14244 <dim>1</dim>
14245 <dim>128</dim>
14246 <dim>40</dim>
14247 <dim>68</dim>
14248 </port>
14249 <port id="1"/>
14250 <port id="2"/>
14251 <port id="3"/>
14252 <port id="4"/>
14253 </input>
14254 <output>
14255 <port id="5" precision="FP16">
14256 <dim>1</dim>
14257 <dim>128</dim>
14258 <dim>40</dim>
14259 <dim>68</dim>
14260 </port>
14261 </output>
14262 </layer>
14263 <layer id="983" name="4414441821747" type="Const" version="opset1">
14264 <data element_type="f16" offset="58930" shape="" size="2"/>
14265 <output>
14266 <port id="0" precision="FP16"/>
14267 </output>
14268 </layer>
14269 <layer id="984" name="4415441920628" type="Const" version="opset1">
14270 <data element_type="f16" offset="58932" shape="" size="2"/>
14271 <output>
14272 <port id="0" precision="FP16"/>
14273 </output>
14274 </layer>
14275 <layer id="985" name="4416442022383" type="Const" version="opset1">
14276 <data element_type="f16" offset="58930" shape="" size="2"/>
14277 <output>
14278 <port id="0" precision="FP16"/>
14279 </output>
14280 </layer>
14281 <layer id="986" name="4417442122740" type="Const" version="opset1">
14282 <data element_type="f16" offset="58932" shape="" size="2"/>
14283 <output>
14284 <port id="0" precision="FP16"/>
14285 </output>
14286 </layer>
14287 <layer id="987" name="4584458822818" type="Const" version="opset1">
14288 <data element_type="f16" offset="58934" shape="" size="2"/>
14289 <output>
14290 <port id="0" precision="FP16"/>
14291 </output>
14292 </layer>
14293 <layer id="988" name="4585458922263" type="Const" version="opset1">
14294 <data element_type="f16" offset="58936" shape="" size="2"/>
14295 <output>
14296 <port id="0" precision="FP16"/>
14297 </output>
14298 </layer>
14299 <layer id="989" name="4586459022260" type="Const" version="opset1">
14300 <data element_type="f16" offset="58934" shape="" size="2"/>
14301 <output>
14302 <port id="0" precision="FP16"/>
14303 </output>
14304 </layer>
14305 <layer id="990" name="4587459120019" type="Const" version="opset1">
14306 <data element_type="f16" offset="58936" shape="" size="2"/>
14307 <output>
14308 <port id="0" precision="FP16"/>
14309 </output>
14310 </layer>
14311 <layer id="991" name="5524552819980" type="Const" version="opset1">
14312 <data element_type="f16" offset="58938" shape="1,32,1,1" size="64"/>
14313 <output>
14314 <port id="0" precision="FP16">
14315 <dim>1</dim>
14316 <dim>32</dim>
14317 <dim>1</dim>
14318 <dim>1</dim>
14319 </port>
14320 </output>
14321 </layer>
14322 <layer id="992" name="5525552922809" type="Const" version="opset1">
14323 <data element_type="f16" offset="59002" shape="1,32,1,1" size="64"/>
14324 <output>
14325 <port id="0" precision="FP16">
14326 <dim>1</dim>
14327 <dim>32</dim>
14328 <dim>1</dim>
14329 <dim>1</dim>
14330 </port>
14331 </output>
14332 </layer>
14333 <layer id="993" name="5526553021516" type="Const" version="opset1">
14334 <data element_type="f16" offset="58938" shape="1,32,1,1" size="64"/>
14335 <output>
14336 <port id="0" precision="FP16">
14337 <dim>1</dim>
14338 <dim>32</dim>
14339 <dim>1</dim>
14340 <dim>1</dim>
14341 </port>
14342 </output>
14343 </layer>
14344 <layer id="994" name="5527553122677" type="Const" version="opset1">
14345 <data element_type="f16" offset="59002" shape="1,32,1,1" size="64"/>
14346 <output>
14347 <port id="0" precision="FP16">
14348 <dim>1</dim>
14349 <dim>32</dim>
14350 <dim>1</dim>
14351 <dim>1</dim>
14352 </port>
14353 </output>
14354 </layer>
14355 <layer id="995" name="bottleneck3_2/dim_red/bn/mean/Fused_Mul__copy105110221/quantized1170422995" type="Const" version="opset1">
14356 <data element_type="i8" offset="59066" shape="32,128,1,1" size="4096"/>
14357 <output>
14358 <port id="0" precision="I8">
14359 <dim>32</dim>
14360 <dim>128</dim>
14361 <dim>1</dim>
14362 <dim>1</dim>
14363 </port>
14364 </output>
14365 </layer>
14366 <layer id="996" name="bottleneck3_2/dim_red/bn/mean/Fused_Mul__copy105110221/quantized/to_f16" type="Convert" version="opset1">
14367 <data destination_type="f16"/>
14368 <input>
14369 <port id="0">
14370 <dim>32</dim>
14371 <dim>128</dim>
14372 <dim>1</dim>
14373 <dim>1</dim>
14374 </port>
14375 </input>
14376 <output>
14377 <port id="1" precision="FP16">
14378 <dim>32</dim>
14379 <dim>128</dim>
14380 <dim>1</dim>
14381 <dim>1</dim>
14382 </port>
14383 </output>
14384 </layer>
14385 <layer id="997" name="bottleneck3_2/dim_red/conv/fq_weights_1/zero_point1171720169" type="Const" version="opset1">
14386 <data element_type="f16" offset="63162" shape="32,1,1,1" size="64"/>
14387 <output>
14388 <port id="0" precision="FP16">
14389 <dim>32</dim>
14390 <dim>1</dim>
14391 <dim>1</dim>
14392 <dim>1</dim>
14393 </port>
14394 </output>
14395 </layer>
14396 <layer id="998" name="bottleneck3_2/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
14397 <data auto_broadcast="numpy"/>
14398 <input>
14399 <port id="0">
14400 <dim>32</dim>
14401 <dim>128</dim>
14402 <dim>1</dim>
14403 <dim>1</dim>
14404 </port>
14405 <port id="1">
14406 <dim>32</dim>
14407 <dim>1</dim>
14408 <dim>1</dim>
14409 <dim>1</dim>
14410 </port>
14411 </input>
14412 <output>
14413 <port id="2" precision="FP16">
14414 <dim>32</dim>
14415 <dim>128</dim>
14416 <dim>1</dim>
14417 <dim>1</dim>
14418 </port>
14419 </output>
14420 </layer>
14421 <layer id="999" name="bottleneck3_2/dim_red/conv/fq_weights_1/scale1171219386" type="Const" version="opset1">
14422 <data element_type="f16" offset="63226" shape="32,1,1,1" size="64"/>
14423 <output>
14424 <port id="0" precision="FP16">
14425 <dim>32</dim>
14426 <dim>1</dim>
14427 <dim>1</dim>
14428 <dim>1</dim>
14429 </port>
14430 </output>
14431 </layer>
14432 <layer id="1000" name="bottleneck3_2/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
14433 <data auto_broadcast="numpy"/>
14434 <input>
14435 <port id="0">
14436 <dim>32</dim>
14437 <dim>128</dim>
14438 <dim>1</dim>
14439 <dim>1</dim>
14440 </port>
14441 <port id="1">
14442 <dim>32</dim>
14443 <dim>1</dim>
14444 <dim>1</dim>
14445 <dim>1</dim>
14446 </port>
14447 </input>
14448 <output>
14449 <port id="2" precision="FP16">
14450 <dim>32</dim>
14451 <dim>128</dim>
14452 <dim>1</dim>
14453 <dim>1</dim>
14454 </port>
14455 </output>
14456 </layer>
14457 <layer id="1001" name="bottleneck3_2/dim_red/conv" type="Convolution" version="opset1">
14458 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
14459 <input>
14460 <port id="0">
14461 <dim>1</dim>
14462 <dim>128</dim>
14463 <dim>40</dim>
14464 <dim>68</dim>
14465 </port>
14466 <port id="1">
14467 <dim>32</dim>
14468 <dim>128</dim>
14469 <dim>1</dim>
14470 <dim>1</dim>
14471 </port>
14472 </input>
14473 <output>
14474 <port id="2" precision="FP16">
14475 <dim>1</dim>
14476 <dim>32</dim>
14477 <dim>40</dim>
14478 <dim>68</dim>
14479 </port>
14480 </output>
14481 </layer>
14482 <layer id="1002" name="data_add_2403324038105321561" type="Const" version="opset1">
14483 <data element_type="f16" offset="63290" shape="1,32,1,1" size="64"/>
14484 <output>
14485 <port id="0" precision="FP16">
14486 <dim>1</dim>
14487 <dim>32</dim>
14488 <dim>1</dim>
14489 <dim>1</dim>
14490 </port>
14491 </output>
14492 </layer>
14493 <layer id="1003" name="bottleneck3_2/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
14494 <data auto_broadcast="numpy"/>
14495 <input>
14496 <port id="0">
14497 <dim>1</dim>
14498 <dim>32</dim>
14499 <dim>40</dim>
14500 <dim>68</dim>
14501 </port>
14502 <port id="1">
14503 <dim>1</dim>
14504 <dim>32</dim>
14505 <dim>1</dim>
14506 <dim>1</dim>
14507 </port>
14508 </input>
14509 <output>
14510 <port id="2" names="bottleneck3_2/dim_red/conv" precision="FP16">
14511 <dim>1</dim>
14512 <dim>32</dim>
14513 <dim>40</dim>
14514 <dim>68</dim>
14515 </port>
14516 </output>
14517 </layer>
14518 <layer id="1004" name="bottleneck3_2/dim_red/fn/weights31028397161055" type="Const" version="opset1">
14519 <data element_type="f32" offset="1576" shape="1" size="4"/>
14520 <output>
14521 <port id="0" precision="FP32">
14522 <dim>1</dim>
14523 </port>
14524 </output>
14525 </layer>
14526 <layer id="1005" name="bottleneck3_2/dim_red/fn" type="PReLU" version="opset1">
14527 <input>
14528 <port id="0">
14529 <dim>1</dim>
14530 <dim>32</dim>
14531 <dim>40</dim>
14532 <dim>68</dim>
14533 </port>
14534 <port id="1">
14535 <dim>1</dim>
14536 </port>
14537 </input>
14538 <output>
14539 <port id="2" names="bottleneck3_2/dim_red/conv" precision="FP16">
14540 <dim>1</dim>
14541 <dim>32</dim>
14542 <dim>40</dim>
14543 <dim>68</dim>
14544 </port>
14545 </output>
14546 </layer>
14547 <layer id="1006" name="bottleneck3_2/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
14548 <data auto_broadcast="numpy" levels="256"/>
14549 <input>
14550 <port id="0">
14551 <dim>1</dim>
14552 <dim>32</dim>
14553 <dim>40</dim>
14554 <dim>68</dim>
14555 </port>
14556 <port id="1">
14557 <dim>1</dim>
14558 <dim>32</dim>
14559 <dim>1</dim>
14560 <dim>1</dim>
14561 </port>
14562 <port id="2">
14563 <dim>1</dim>
14564 <dim>32</dim>
14565 <dim>1</dim>
14566 <dim>1</dim>
14567 </port>
14568 <port id="3">
14569 <dim>1</dim>
14570 <dim>32</dim>
14571 <dim>1</dim>
14572 <dim>1</dim>
14573 </port>
14574 <port id="4">
14575 <dim>1</dim>
14576 <dim>32</dim>
14577 <dim>1</dim>
14578 <dim>1</dim>
14579 </port>
14580 </input>
14581 <output>
14582 <port id="5" precision="FP16">
14583 <dim>1</dim>
14584 <dim>32</dim>
14585 <dim>40</dim>
14586 <dim>68</dim>
14587 </port>
14588 </output>
14589 </layer>
14590 <layer id="1007" name="16939/value1694121888" type="Const" version="opset1">
14591 <data element_type="i64" offset="43778" shape="5" size="40"/>
14592 <output>
14593 <port id="0" precision="I64">
14594 <dim>5</dim>
14595 </port>
14596 </output>
14597 </layer>
14598 <layer id="1008" name="bottleneck3_2/inner/dw1/bn/mean/Fused_Mul__copy105710224/quantized1259221393" type="Const" version="opset1">
14599 <data element_type="i8" offset="63354" shape="32,1,3,3" size="288"/>
14600 <output>
14601 <port id="0" precision="I8">
14602 <dim>32</dim>
14603 <dim>1</dim>
14604 <dim>3</dim>
14605 <dim>3</dim>
14606 </port>
14607 </output>
14608 </layer>
14609 <layer id="1009" name="bottleneck3_2/inner/dw1/bn/mean/Fused_Mul__copy105710224/quantized/to_f16" type="Convert" version="opset1">
14610 <data destination_type="f16"/>
14611 <input>
14612 <port id="0">
14613 <dim>32</dim>
14614 <dim>1</dim>
14615 <dim>3</dim>
14616 <dim>3</dim>
14617 </port>
14618 </input>
14619 <output>
14620 <port id="1" precision="FP16">
14621 <dim>32</dim>
14622 <dim>1</dim>
14623 <dim>3</dim>
14624 <dim>3</dim>
14625 </port>
14626 </output>
14627 </layer>
14628 <layer id="1010" name="bottleneck3_2/inner/dw1/conv/fq_weights_1/zero_point1260522299" type="Const" version="opset1">
14629 <data element_type="f16" offset="63642" shape="32,1,1,1" size="64"/>
14630 <output>
14631 <port id="0" precision="FP16">
14632 <dim>32</dim>
14633 <dim>1</dim>
14634 <dim>1</dim>
14635 <dim>1</dim>
14636 </port>
14637 </output>
14638 </layer>
14639 <layer id="1011" name="bottleneck3_2/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
14640 <data auto_broadcast="numpy"/>
14641 <input>
14642 <port id="0">
14643 <dim>32</dim>
14644 <dim>1</dim>
14645 <dim>3</dim>
14646 <dim>3</dim>
14647 </port>
14648 <port id="1">
14649 <dim>32</dim>
14650 <dim>1</dim>
14651 <dim>1</dim>
14652 <dim>1</dim>
14653 </port>
14654 </input>
14655 <output>
14656 <port id="2" precision="FP16">
14657 <dim>32</dim>
14658 <dim>1</dim>
14659 <dim>3</dim>
14660 <dim>3</dim>
14661 </port>
14662 </output>
14663 </layer>
14664 <layer id="1012" name="bottleneck3_2/inner/dw1/conv/fq_weights_1/scale1260021615" type="Const" version="opset1">
14665 <data element_type="f16" offset="63706" shape="32,1,1,1" size="64"/>
14666 <output>
14667 <port id="0" precision="FP16">
14668 <dim>32</dim>
14669 <dim>1</dim>
14670 <dim>1</dim>
14671 <dim>1</dim>
14672 </port>
14673 </output>
14674 </layer>
14675 <layer id="1013" name="bottleneck3_2/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
14676 <data auto_broadcast="numpy"/>
14677 <input>
14678 <port id="0">
14679 <dim>32</dim>
14680 <dim>1</dim>
14681 <dim>3</dim>
14682 <dim>3</dim>
14683 </port>
14684 <port id="1">
14685 <dim>32</dim>
14686 <dim>1</dim>
14687 <dim>1</dim>
14688 <dim>1</dim>
14689 </port>
14690 </input>
14691 <output>
14692 <port id="2" precision="FP16">
14693 <dim>32</dim>
14694 <dim>1</dim>
14695 <dim>3</dim>
14696 <dim>3</dim>
14697 </port>
14698 </output>
14699 </layer>
14700 <layer id="1014" name="16939" type="Reshape" version="opset1">
14701 <data special_zero="true"/>
14702 <input>
14703 <port id="0">
14704 <dim>32</dim>
14705 <dim>1</dim>
14706 <dim>3</dim>
14707 <dim>3</dim>
14708 </port>
14709 <port id="1">
14710 <dim>5</dim>
14711 </port>
14712 </input>
14713 <output>
14714 <port id="2" precision="FP16">
14715 <dim>32</dim>
14716 <dim>1</dim>
14717 <dim>1</dim>
14718 <dim>3</dim>
14719 <dim>3</dim>
14720 </port>
14721 </output>
14722 </layer>
14723 <layer id="1015" name="bottleneck3_2/inner/dw1/conv" type="GroupConvolution" version="opset1">
14724 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
14725 <input>
14726 <port id="0">
14727 <dim>1</dim>
14728 <dim>32</dim>
14729 <dim>40</dim>
14730 <dim>68</dim>
14731 </port>
14732 <port id="1">
14733 <dim>32</dim>
14734 <dim>1</dim>
14735 <dim>1</dim>
14736 <dim>3</dim>
14737 <dim>3</dim>
14738 </port>
14739 </input>
14740 <output>
14741 <port id="2" precision="FP16">
14742 <dim>1</dim>
14743 <dim>32</dim>
14744 <dim>40</dim>
14745 <dim>68</dim>
14746 </port>
14747 </output>
14748 </layer>
14749 <layer id="1016" name="data_add_2404124046105922929" type="Const" version="opset1">
14750 <data element_type="f16" offset="63770" shape="1,32,1,1" size="64"/>
14751 <output>
14752 <port id="0" precision="FP16">
14753 <dim>1</dim>
14754 <dim>32</dim>
14755 <dim>1</dim>
14756 <dim>1</dim>
14757 </port>
14758 </output>
14759 </layer>
14760 <layer id="1017" name="bottleneck3_2/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
14761 <data auto_broadcast="numpy"/>
14762 <input>
14763 <port id="0">
14764 <dim>1</dim>
14765 <dim>32</dim>
14766 <dim>40</dim>
14767 <dim>68</dim>
14768 </port>
14769 <port id="1">
14770 <dim>1</dim>
14771 <dim>32</dim>
14772 <dim>1</dim>
14773 <dim>1</dim>
14774 </port>
14775 </input>
14776 <output>
14777 <port id="2" names="bottleneck3_2/inner/dw1/conv" precision="FP16">
14778 <dim>1</dim>
14779 <dim>32</dim>
14780 <dim>40</dim>
14781 <dim>68</dim>
14782 </port>
14783 </output>
14784 </layer>
14785 <layer id="1018" name="bottleneck3_2/inner/dw1/fn/weights31188400851061" type="Const" version="opset1">
14786 <data element_type="f32" offset="1576" shape="1" size="4"/>
14787 <output>
14788 <port id="0" precision="FP32">
14789 <dim>1</dim>
14790 </port>
14791 </output>
14792 </layer>
14793 <layer id="1019" name="bottleneck3_2/inner/dw1/fn" type="PReLU" version="opset1">
14794 <input>
14795 <port id="0">
14796 <dim>1</dim>
14797 <dim>32</dim>
14798 <dim>40</dim>
14799 <dim>68</dim>
14800 </port>
14801 <port id="1">
14802 <dim>1</dim>
14803 </port>
14804 </input>
14805 <output>
14806 <port id="2" names="bottleneck3_2/inner/dw1/conv" precision="FP16">
14807 <dim>1</dim>
14808 <dim>32</dim>
14809 <dim>40</dim>
14810 <dim>68</dim>
14811 </port>
14812 </output>
14813 </layer>
14814 <layer id="1020" name="bottleneck3_2/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
14815 <data auto_broadcast="numpy" levels="256"/>
14816 <input>
14817 <port id="0">
14818 <dim>1</dim>
14819 <dim>32</dim>
14820 <dim>40</dim>
14821 <dim>68</dim>
14822 </port>
14823 <port id="1"/>
14824 <port id="2"/>
14825 <port id="3"/>
14826 <port id="4"/>
14827 </input>
14828 <output>
14829 <port id="5" precision="FP16">
14830 <dim>1</dim>
14831 <dim>32</dim>
14832 <dim>40</dim>
14833 <dim>68</dim>
14834 </port>
14835 </output>
14836 </layer>
14837 <layer id="1021" name="bottleneck3_2/dim_inc/bn/mean/Fused_Mul__copy106310227/quantized1352820487" type="Const" version="opset1">
14838 <data element_type="i8" offset="63834" shape="128,32,1,1" size="4096"/>
14839 <output>
14840 <port id="0" precision="I8">
14841 <dim>128</dim>
14842 <dim>32</dim>
14843 <dim>1</dim>
14844 <dim>1</dim>
14845 </port>
14846 </output>
14847 </layer>
14848 <layer id="1022" name="bottleneck3_2/dim_inc/bn/mean/Fused_Mul__copy106310227/quantized/to_f16" type="Convert" version="opset1">
14849 <data destination_type="f16"/>
14850 <input>
14851 <port id="0">
14852 <dim>128</dim>
14853 <dim>32</dim>
14854 <dim>1</dim>
14855 <dim>1</dim>
14856 </port>
14857 </input>
14858 <output>
14859 <port id="1" precision="FP16">
14860 <dim>128</dim>
14861 <dim>32</dim>
14862 <dim>1</dim>
14863 <dim>1</dim>
14864 </port>
14865 </output>
14866 </layer>
14867 <layer id="1023" name="bottleneck3_2/dim_inc/conv/fq_weights_1/zero_point1354119497" type="Const" version="opset1">
14868 <data element_type="f16" offset="67930" shape="128,1,1,1" size="256"/>
14869 <output>
14870 <port id="0" precision="FP16">
14871 <dim>128</dim>
14872 <dim>1</dim>
14873 <dim>1</dim>
14874 <dim>1</dim>
14875 </port>
14876 </output>
14877 </layer>
14878 <layer id="1024" name="bottleneck3_2/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
14879 <data auto_broadcast="numpy"/>
14880 <input>
14881 <port id="0">
14882 <dim>128</dim>
14883 <dim>32</dim>
14884 <dim>1</dim>
14885 <dim>1</dim>
14886 </port>
14887 <port id="1">
14888 <dim>128</dim>
14889 <dim>1</dim>
14890 <dim>1</dim>
14891 <dim>1</dim>
14892 </port>
14893 </input>
14894 <output>
14895 <port id="2" precision="FP16">
14896 <dim>128</dim>
14897 <dim>32</dim>
14898 <dim>1</dim>
14899 <dim>1</dim>
14900 </port>
14901 </output>
14902 </layer>
14903 <layer id="1025" name="bottleneck3_2/dim_inc/conv/fq_weights_1/scale1353619404" type="Const" version="opset1">
14904 <data element_type="f16" offset="68186" shape="128,1,1,1" size="256"/>
14905 <output>
14906 <port id="0" precision="FP16">
14907 <dim>128</dim>
14908 <dim>1</dim>
14909 <dim>1</dim>
14910 <dim>1</dim>
14911 </port>
14912 </output>
14913 </layer>
14914 <layer id="1026" name="bottleneck3_2/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
14915 <data auto_broadcast="numpy"/>
14916 <input>
14917 <port id="0">
14918 <dim>128</dim>
14919 <dim>32</dim>
14920 <dim>1</dim>
14921 <dim>1</dim>
14922 </port>
14923 <port id="1">
14924 <dim>128</dim>
14925 <dim>1</dim>
14926 <dim>1</dim>
14927 <dim>1</dim>
14928 </port>
14929 </input>
14930 <output>
14931 <port id="2" precision="FP16">
14932 <dim>128</dim>
14933 <dim>32</dim>
14934 <dim>1</dim>
14935 <dim>1</dim>
14936 </port>
14937 </output>
14938 </layer>
14939 <layer id="1027" name="bottleneck3_2/dim_inc/conv" type="Convolution" version="opset1">
14940 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
14941 <input>
14942 <port id="0">
14943 <dim>1</dim>
14944 <dim>32</dim>
14945 <dim>40</dim>
14946 <dim>68</dim>
14947 </port>
14948 <port id="1">
14949 <dim>128</dim>
14950 <dim>32</dim>
14951 <dim>1</dim>
14952 <dim>1</dim>
14953 </port>
14954 </input>
14955 <output>
14956 <port id="2" precision="FP16">
14957 <dim>1</dim>
14958 <dim>128</dim>
14959 <dim>40</dim>
14960 <dim>68</dim>
14961 </port>
14962 </output>
14963 </layer>
14964 <layer id="1028" name="data_add_2404924054106522890" type="Const" version="opset1">
14965 <data element_type="f16" offset="68442" shape="1,128,1,1" size="256"/>
14966 <output>
14967 <port id="0" precision="FP16">
14968 <dim>1</dim>
14969 <dim>128</dim>
14970 <dim>1</dim>
14971 <dim>1</dim>
14972 </port>
14973 </output>
14974 </layer>
14975 <layer id="1029" name="bottleneck3_2/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
14976 <data auto_broadcast="numpy"/>
14977 <input>
14978 <port id="0">
14979 <dim>1</dim>
14980 <dim>128</dim>
14981 <dim>40</dim>
14982 <dim>68</dim>
14983 </port>
14984 <port id="1">
14985 <dim>1</dim>
14986 <dim>128</dim>
14987 <dim>1</dim>
14988 <dim>1</dim>
14989 </port>
14990 </input>
14991 <output>
14992 <port id="2" names="bottleneck3_2/dim_inc/conv" precision="FP16">
14993 <dim>1</dim>
14994 <dim>128</dim>
14995 <dim>40</dim>
14996 <dim>68</dim>
14997 </port>
14998 </output>
14999 </layer>
15000 <layer id="1030" name="bottleneck3_2/add/fq_input_1" type="FakeQuantize" version="opset1">
15001 <data auto_broadcast="numpy" levels="256"/>
15002 <input>
15003 <port id="0">
15004 <dim>1</dim>
15005 <dim>128</dim>
15006 <dim>40</dim>
15007 <dim>68</dim>
15008 </port>
15009 <port id="1"/>
15010 <port id="2"/>
15011 <port id="3"/>
15012 <port id="4"/>
15013 </input>
15014 <output>
15015 <port id="5" precision="FP16">
15016 <dim>1</dim>
15017 <dim>128</dim>
15018 <dim>40</dim>
15019 <dim>68</dim>
15020 </port>
15021 </output>
15022 </layer>
15023 <layer id="1031" name="bottleneck3_2/add" type="Add" version="opset1">
15024 <data auto_broadcast="numpy"/>
15025 <input>
15026 <port id="0">
15027 <dim>1</dim>
15028 <dim>128</dim>
15029 <dim>40</dim>
15030 <dim>68</dim>
15031 </port>
15032 <port id="1">
15033 <dim>1</dim>
15034 <dim>128</dim>
15035 <dim>40</dim>
15036 <dim>68</dim>
15037 </port>
15038 </input>
15039 <output>
15040 <port id="2" names="bottleneck3_2/add" precision="FP16">
15041 <dim>1</dim>
15042 <dim>128</dim>
15043 <dim>40</dim>
15044 <dim>68</dim>
15045 </port>
15046 </output>
15047 </layer>
15048 <layer id="1032" name="bottleneck3_2/fn/weights31052404151068" type="Const" version="opset1">
15049 <data element_type="f32" offset="1576" shape="1" size="4"/>
15050 <output>
15051 <port id="0" precision="FP32">
15052 <dim>1</dim>
15053 </port>
15054 </output>
15055 </layer>
15056 <layer id="1033" name="bottleneck3_2/fn" type="PReLU" version="opset1">
15057 <input>
15058 <port id="0">
15059 <dim>1</dim>
15060 <dim>128</dim>
15061 <dim>40</dim>
15062 <dim>68</dim>
15063 </port>
15064 <port id="1">
15065 <dim>1</dim>
15066 </port>
15067 </input>
15068 <output>
15069 <port id="2" names="bottleneck3_2/add" precision="FP16">
15070 <dim>1</dim>
15071 <dim>128</dim>
15072 <dim>40</dim>
15073 <dim>68</dim>
15074 </port>
15075 </output>
15076 </layer>
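	<!-- bottleneck3_2 ends here (residual Add + PReLU). bottleneck3_3 below repeats the same residual pattern: FakeQuantize on the input, 1x1 dim_red Convolution (128->32), 3x3 depthwise GroupConvolution, 1x1 dim_inc Convolution (32->128), Add with the shortcut, then PReLU. -->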
15077 <layer id="1034" name="bottleneck3_3/add/fq_input_0" type="FakeQuantize" version="opset1">
15078 <data auto_broadcast="numpy" levels="256"/>
15079 <input>
15080 <port id="0">
15081 <dim>1</dim>
15082 <dim>128</dim>
15083 <dim>40</dim>
15084 <dim>68</dim>
15085 </port>
15086 <port id="1"/>
15087 <port id="2"/>
15088 <port id="3"/>
15089 <port id="4"/>
15090 </input>
15091 <output>
15092 <port id="5" precision="FP16">
15093 <dim>1</dim>
15094 <dim>128</dim>
15095 <dim>40</dim>
15096 <dim>68</dim>
15097 </port>
15098 </output>
15099 </layer>
15100 <layer id="1035" name="3414341822776" type="Const" version="opset1">
15101 <data element_type="f16" offset="68698" shape="" size="2"/>
15102 <output>
15103 <port id="0" precision="FP16"/>
15104 </output>
15105 </layer>
15106 <layer id="1036" name="3415341921687" type="Const" version="opset1">
15107 <data element_type="f16" offset="68700" shape="" size="2"/>
15108 <output>
15109 <port id="0" precision="FP16"/>
15110 </output>
15111 </layer>
15112 <layer id="1037" name="3416342020697" type="Const" version="opset1">
15113 <data element_type="f16" offset="68698" shape="" size="2"/>
15114 <output>
15115 <port id="0" precision="FP16"/>
15116 </output>
15117 </layer>
15118 <layer id="1038" name="3417342121660" type="Const" version="opset1">
15119 <data element_type="f16" offset="68700" shape="" size="2"/>
15120 <output>
15121 <port id="0" precision="FP16"/>
15122 </output>
15123 </layer>
15124 <layer id="1039" name="3044304821864" type="Const" version="opset1">
15125 <data element_type="f16" offset="68702" shape="" size="2"/>
15126 <output>
15127 <port id="0" precision="FP16"/>
15128 </output>
15129 </layer>
15130 <layer id="1040" name="3045304922281" type="Const" version="opset1">
15131 <data element_type="f16" offset="68704" shape="" size="2"/>
15132 <output>
15133 <port id="0" precision="FP16"/>
15134 </output>
15135 </layer>
15136 <layer id="1041" name="3046305020424" type="Const" version="opset1">
15137 <data element_type="f16" offset="68702" shape="" size="2"/>
15138 <output>
15139 <port id="0" precision="FP16"/>
15140 </output>
15141 </layer>
15142 <layer id="1042" name="3047305119983" type="Const" version="opset1">
15143 <data element_type="f16" offset="68704" shape="" size="2"/>
15144 <output>
15145 <port id="0" precision="FP16"/>
15146 </output>
15147 </layer>
15148 <layer id="1043" name="3764376822410" type="Const" version="opset1">
15149 <data element_type="f16" offset="68706" shape="1,32,1,1" size="64"/>
15150 <output>
15151 <port id="0" precision="FP16">
15152 <dim>1</dim>
15153 <dim>32</dim>
15154 <dim>1</dim>
15155 <dim>1</dim>
15156 </port>
15157 </output>
15158 </layer>
15159 <layer id="1044" name="3765376919410" type="Const" version="opset1">
15160 <data element_type="f16" offset="68770" shape="1,32,1,1" size="64"/>
15161 <output>
15162 <port id="0" precision="FP16">
15163 <dim>1</dim>
15164 <dim>32</dim>
15165 <dim>1</dim>
15166 <dim>1</dim>
15167 </port>
15168 </output>
15169 </layer>
15170 <layer id="1045" name="3766377019854" type="Const" version="opset1">
15171 <data element_type="f16" offset="68706" shape="1,32,1,1" size="64"/>
15172 <output>
15173 <port id="0" precision="FP16">
15174 <dim>1</dim>
15175 <dim>32</dim>
15176 <dim>1</dim>
15177 <dim>1</dim>
15178 </port>
15179 </output>
15180 </layer>
15181 <layer id="1046" name="3767377120097" type="Const" version="opset1">
15182 <data element_type="f16" offset="68770" shape="1,32,1,1" size="64"/>
15183 <output>
15184 <port id="0" precision="FP16">
15185 <dim>1</dim>
15186 <dim>32</dim>
15187 <dim>1</dim>
15188 <dim>1</dim>
15189 </port>
15190 </output>
15191 </layer>
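	<!-- Quantized weights for bottleneck3_3/dim_red/conv: int8 Const -> Convert to f16 -> Subtract per-output-channel zero_point -> Multiply per-output-channel scale, with the result feeding port 1 of the 1x1 Convolution. -->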
15192 <layer id="1047" name="bottleneck3_3/dim_red/bn/mean/Fused_Mul__copy107010230/quantized1256821192" type="Const" version="opset1">
15193 <data element_type="i8" offset="68834" shape="32,128,1,1" size="4096"/>
15194 <output>
15195 <port id="0" precision="I8">
15196 <dim>32</dim>
15197 <dim>128</dim>
15198 <dim>1</dim>
15199 <dim>1</dim>
15200 </port>
15201 </output>
15202 </layer>
15203 <layer id="1048" name="bottleneck3_3/dim_red/bn/mean/Fused_Mul__copy107010230/quantized/to_f16" type="Convert" version="opset1">
15204 <data destination_type="f16"/>
15205 <input>
15206 <port id="0">
15207 <dim>32</dim>
15208 <dim>128</dim>
15209 <dim>1</dim>
15210 <dim>1</dim>
15211 </port>
15212 </input>
15213 <output>
15214 <port id="1" precision="FP16">
15215 <dim>32</dim>
15216 <dim>128</dim>
15217 <dim>1</dim>
15218 <dim>1</dim>
15219 </port>
15220 </output>
15221 </layer>
15222 <layer id="1049" name="bottleneck3_3/dim_red/conv/fq_weights_1/zero_point1258122926" type="Const" version="opset1">
15223 <data element_type="f16" offset="72930" shape="32,1,1,1" size="64"/>
15224 <output>
15225 <port id="0" precision="FP16">
15226 <dim>32</dim>
15227 <dim>1</dim>
15228 <dim>1</dim>
15229 <dim>1</dim>
15230 </port>
15231 </output>
15232 </layer>
15233 <layer id="1050" name="bottleneck3_3/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
15234 <data auto_broadcast="numpy"/>
15235 <input>
15236 <port id="0">
15237 <dim>32</dim>
15238 <dim>128</dim>
15239 <dim>1</dim>
15240 <dim>1</dim>
15241 </port>
15242 <port id="1">
15243 <dim>32</dim>
15244 <dim>1</dim>
15245 <dim>1</dim>
15246 <dim>1</dim>
15247 </port>
15248 </input>
15249 <output>
15250 <port id="2" precision="FP16">
15251 <dim>32</dim>
15252 <dim>128</dim>
15253 <dim>1</dim>
15254 <dim>1</dim>
15255 </port>
15256 </output>
15257 </layer>
15258 <layer id="1051" name="bottleneck3_3/dim_red/conv/fq_weights_1/scale1257622518" type="Const" version="opset1">
15259 <data element_type="f16" offset="72994" shape="32,1,1,1" size="64"/>
15260 <output>
15261 <port id="0" precision="FP16">
15262 <dim>32</dim>
15263 <dim>1</dim>
15264 <dim>1</dim>
15265 <dim>1</dim>
15266 </port>
15267 </output>
15268 </layer>
15269 <layer id="1052" name="bottleneck3_3/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
15270 <data auto_broadcast="numpy"/>
15271 <input>
15272 <port id="0">
15273 <dim>32</dim>
15274 <dim>128</dim>
15275 <dim>1</dim>
15276 <dim>1</dim>
15277 </port>
15278 <port id="1">
15279 <dim>32</dim>
15280 <dim>1</dim>
15281 <dim>1</dim>
15282 <dim>1</dim>
15283 </port>
15284 </input>
15285 <output>
15286 <port id="2" precision="FP16">
15287 <dim>32</dim>
15288 <dim>128</dim>
15289 <dim>1</dim>
15290 <dim>1</dim>
15291 </port>
15292 </output>
15293 </layer>
15294 <layer id="1053" name="bottleneck3_3/dim_red/conv" type="Convolution" version="opset1">
15295 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
15296 <input>
15297 <port id="0">
15298 <dim>1</dim>
15299 <dim>128</dim>
15300 <dim>40</dim>
15301 <dim>68</dim>
15302 </port>
15303 <port id="1">
15304 <dim>32</dim>
15305 <dim>128</dim>
15306 <dim>1</dim>
15307 <dim>1</dim>
15308 </port>
15309 </input>
15310 <output>
15311 <port id="2" precision="FP16">
15312 <dim>1</dim>
15313 <dim>32</dim>
15314 <dim>40</dim>
15315 <dim>68</dim>
15316 </port>
15317 </output>
15318 </layer>
15319 <layer id="1054" name="data_add_2405724062107219899" type="Const" version="opset1">
15320 <data element_type="f16" offset="73058" shape="1,32,1,1" size="64"/>
15321 <output>
15322 <port id="0" precision="FP16">
15323 <dim>1</dim>
15324 <dim>32</dim>
15325 <dim>1</dim>
15326 <dim>1</dim>
15327 </port>
15328 </output>
15329 </layer>
15330 <layer id="1055" name="bottleneck3_3/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
15331 <data auto_broadcast="numpy"/>
15332 <input>
15333 <port id="0">
15334 <dim>1</dim>
15335 <dim>32</dim>
15336 <dim>40</dim>
15337 <dim>68</dim>
15338 </port>
15339 <port id="1">
15340 <dim>1</dim>
15341 <dim>32</dim>
15342 <dim>1</dim>
15343 <dim>1</dim>
15344 </port>
15345 </input>
15346 <output>
15347 <port id="2" names="bottleneck3_3/dim_red/conv" precision="FP16">
15348 <dim>1</dim>
15349 <dim>32</dim>
15350 <dim>40</dim>
15351 <dim>68</dim>
15352 </port>
15353 </output>
15354 </layer>
15355 <layer id="1056" name="bottleneck3_3/dim_red/fn/weights30868401061074" type="Const" version="opset1">
15356 <data element_type="f32" offset="1576" shape="1" size="4"/>
15357 <output>
15358 <port id="0" precision="FP32">
15359 <dim>1</dim>
15360 </port>
15361 </output>
15362 </layer>
15363 <layer id="1057" name="bottleneck3_3/dim_red/fn" type="PReLU" version="opset1">
15364 <input>
15365 <port id="0">
15366 <dim>1</dim>
15367 <dim>32</dim>
15368 <dim>40</dim>
15369 <dim>68</dim>
15370 </port>
15371 <port id="1">
15372 <dim>1</dim>
15373 </port>
15374 </input>
15375 <output>
15376 <port id="2" names="bottleneck3_3/dim_red/conv" precision="FP16">
15377 <dim>1</dim>
15378 <dim>32</dim>
15379 <dim>40</dim>
15380 <dim>68</dim>
15381 </port>
15382 </output>
15383 </layer>
15384 <layer id="1058" name="bottleneck3_3/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
15385 <data auto_broadcast="numpy" levels="256"/>
15386 <input>
15387 <port id="0">
15388 <dim>1</dim>
15389 <dim>32</dim>
15390 <dim>40</dim>
15391 <dim>68</dim>
15392 </port>
15393 <port id="1">
15394 <dim>1</dim>
15395 <dim>32</dim>
15396 <dim>1</dim>
15397 <dim>1</dim>
15398 </port>
15399 <port id="2">
15400 <dim>1</dim>
15401 <dim>32</dim>
15402 <dim>1</dim>
15403 <dim>1</dim>
15404 </port>
15405 <port id="3">
15406 <dim>1</dim>
15407 <dim>32</dim>
15408 <dim>1</dim>
15409 <dim>1</dim>
15410 </port>
15411 <port id="4">
15412 <dim>1</dim>
15413 <dim>32</dim>
15414 <dim>1</dim>
15415 <dim>1</dim>
15416 </port>
15417 </input>
15418 <output>
15419 <port id="5" precision="FP16">
15420 <dim>1</dim>
15421 <dim>32</dim>
15422 <dim>40</dim>
15423 <dim>68</dim>
15424 </port>
15425 </output>
15426 </layer>
15427 <layer id="1059" name="16851/value1685320610" type="Const" version="opset1">
15428 <data element_type="i64" offset="43778" shape="5" size="40"/>
15429 <output>
15430 <port id="0" precision="I64">
15431 <dim>5</dim>
15432 </port>
15433 </output>
15434 </layer>
15435 <layer id="1060" name="bottleneck3_3/inner/dw1/bn/mean/Fused_Mul__copy107610233/quantized1276022884" type="Const" version="opset1">
15436 <data element_type="i8" offset="73122" shape="32,1,3,3" size="288"/>
15437 <output>
15438 <port id="0" precision="I8">
15439 <dim>32</dim>
15440 <dim>1</dim>
15441 <dim>3</dim>
15442 <dim>3</dim>
15443 </port>
15444 </output>
15445 </layer>
15446 <layer id="1061" name="bottleneck3_3/inner/dw1/bn/mean/Fused_Mul__copy107610233/quantized/to_f16" type="Convert" version="opset1">
15447 <data destination_type="f16"/>
15448 <input>
15449 <port id="0">
15450 <dim>32</dim>
15451 <dim>1</dim>
15452 <dim>3</dim>
15453 <dim>3</dim>
15454 </port>
15455 </input>
15456 <output>
15457 <port id="1" precision="FP16">
15458 <dim>32</dim>
15459 <dim>1</dim>
15460 <dim>3</dim>
15461 <dim>3</dim>
15462 </port>
15463 </output>
15464 </layer>
15465 <layer id="1062" name="bottleneck3_3/inner/dw1/conv/fq_weights_1/zero_point1277321207" type="Const" version="opset1">
15466 <data element_type="f16" offset="73410" shape="32,1,1,1" size="64"/>
15467 <output>
15468 <port id="0" precision="FP16">
15469 <dim>32</dim>
15470 <dim>1</dim>
15471 <dim>1</dim>
15472 <dim>1</dim>
15473 </port>
15474 </output>
15475 </layer>
15476 <layer id="1063" name="bottleneck3_3/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
15477 <data auto_broadcast="numpy"/>
15478 <input>
15479 <port id="0">
15480 <dim>32</dim>
15481 <dim>1</dim>
15482 <dim>3</dim>
15483 <dim>3</dim>
15484 </port>
15485 <port id="1">
15486 <dim>32</dim>
15487 <dim>1</dim>
15488 <dim>1</dim>
15489 <dim>1</dim>
15490 </port>
15491 </input>
15492 <output>
15493 <port id="2" precision="FP16">
15494 <dim>32</dim>
15495 <dim>1</dim>
15496 <dim>3</dim>
15497 <dim>3</dim>
15498 </port>
15499 </output>
15500 </layer>
15501 <layer id="1064" name="bottleneck3_3/inner/dw1/conv/fq_weights_1/scale1276820181" type="Const" version="opset1">
15502 <data element_type="f16" offset="73474" shape="32,1,1,1" size="64"/>
15503 <output>
15504 <port id="0" precision="FP16">
15505 <dim>32</dim>
15506 <dim>1</dim>
15507 <dim>1</dim>
15508 <dim>1</dim>
15509 </port>
15510 </output>
15511 </layer>
15512 <layer id="1065" name="bottleneck3_3/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
15513 <data auto_broadcast="numpy"/>
15514 <input>
15515 <port id="0">
15516 <dim>32</dim>
15517 <dim>1</dim>
15518 <dim>3</dim>
15519 <dim>3</dim>
15520 </port>
15521 <port id="1">
15522 <dim>32</dim>
15523 <dim>1</dim>
15524 <dim>1</dim>
15525 <dim>1</dim>
15526 </port>
15527 </input>
15528 <output>
15529 <port id="2" precision="FP16">
15530 <dim>32</dim>
15531 <dim>1</dim>
15532 <dim>3</dim>
15533 <dim>3</dim>
15534 </port>
15535 </output>
15536 </layer>
15537 <layer id="1066" name="16851" type="Reshape" version="opset1">
15538 <data special_zero="true"/>
15539 <input>
15540 <port id="0">
15541 <dim>32</dim>
15542 <dim>1</dim>
15543 <dim>3</dim>
15544 <dim>3</dim>
15545 </port>
15546 <port id="1">
15547 <dim>5</dim>
15548 </port>
15549 </input>
15550 <output>
15551 <port id="2" precision="FP16">
15552 <dim>32</dim>
15553 <dim>1</dim>
15554 <dim>1</dim>
15555 <dim>3</dim>
15556 <dim>3</dim>
15557 </port>
15558 </output>
15559 </layer>
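	<!-- The dequantized 32x1x3x3 depthwise kernel is reshaped to the 5-D GroupConvolution weight layout [32,1,1,3,3] (groups, out-per-group, in-per-group, kH, kW) before bottleneck3_3/inner/dw1/conv. -->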
15560 <layer id="1067" name="bottleneck3_3/inner/dw1/conv" type="GroupConvolution" version="opset1">
15561 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
15562 <input>
15563 <port id="0">
15564 <dim>1</dim>
15565 <dim>32</dim>
15566 <dim>40</dim>
15567 <dim>68</dim>
15568 </port>
15569 <port id="1">
15570 <dim>32</dim>
15571 <dim>1</dim>
15572 <dim>1</dim>
15573 <dim>3</dim>
15574 <dim>3</dim>
15575 </port>
15576 </input>
15577 <output>
15578 <port id="2" precision="FP16">
15579 <dim>1</dim>
15580 <dim>32</dim>
15581 <dim>40</dim>
15582 <dim>68</dim>
15583 </port>
15584 </output>
15585 </layer>
15586 <layer id="1068" name="data_add_2406524070107822515" type="Const" version="opset1">
15587 <data element_type="f16" offset="73538" shape="1,32,1,1" size="64"/>
15588 <output>
15589 <port id="0" precision="FP16">
15590 <dim>1</dim>
15591 <dim>32</dim>
15592 <dim>1</dim>
15593 <dim>1</dim>
15594 </port>
15595 </output>
15596 </layer>
15597 <layer id="1069" name="bottleneck3_3/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
15598 <data auto_broadcast="numpy"/>
15599 <input>
15600 <port id="0">
15601 <dim>1</dim>
15602 <dim>32</dim>
15603 <dim>40</dim>
15604 <dim>68</dim>
15605 </port>
15606 <port id="1">
15607 <dim>1</dim>
15608 <dim>32</dim>
15609 <dim>1</dim>
15610 <dim>1</dim>
15611 </port>
15612 </input>
15613 <output>
15614 <port id="2" names="bottleneck3_3/inner/dw1/conv" precision="FP16">
15615 <dim>1</dim>
15616 <dim>32</dim>
15617 <dim>40</dim>
15618 <dim>68</dim>
15619 </port>
15620 </output>
15621 </layer>
15622 <layer id="1070" name="bottleneck3_3/inner/dw1/fn/weights31136405621080" type="Const" version="opset1">
15623 <data element_type="f32" offset="1576" shape="1" size="4"/>
15624 <output>
15625 <port id="0" precision="FP32">
15626 <dim>1</dim>
15627 </port>
15628 </output>
15629 </layer>
15630 <layer id="1071" name="bottleneck3_3/inner/dw1/fn" type="PReLU" version="opset1">
15631 <input>
15632 <port id="0">
15633 <dim>1</dim>
15634 <dim>32</dim>
15635 <dim>40</dim>
15636 <dim>68</dim>
15637 </port>
15638 <port id="1">
15639 <dim>1</dim>
15640 </port>
15641 </input>
15642 <output>
15643 <port id="2" names="bottleneck3_3/inner/dw1/conv" precision="FP16">
15644 <dim>1</dim>
15645 <dim>32</dim>
15646 <dim>40</dim>
15647 <dim>68</dim>
15648 </port>
15649 </output>
15650 </layer>
15651 <layer id="1072" name="bottleneck3_3/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
15652 <data auto_broadcast="numpy" levels="256"/>
15653 <input>
15654 <port id="0">
15655 <dim>1</dim>
15656 <dim>32</dim>
15657 <dim>40</dim>
15658 <dim>68</dim>
15659 </port>
15660 <port id="1"/>
15661 <port id="2"/>
15662 <port id="3"/>
15663 <port id="4"/>
15664 </input>
15665 <output>
15666 <port id="5" precision="FP16">
15667 <dim>1</dim>
15668 <dim>32</dim>
15669 <dim>40</dim>
15670 <dim>68</dim>
15671 </port>
15672 </output>
15673 </layer>
15674 <layer id="1073" name="bottleneck3_3/dim_inc/bn/mean/Fused_Mul__copy108210236/quantized1220820997" type="Const" version="opset1">
15675 <data element_type="i8" offset="73602" shape="128,32,1,1" size="4096"/>
15676 <output>
15677 <port id="0" precision="I8">
15678 <dim>128</dim>
15679 <dim>32</dim>
15680 <dim>1</dim>
15681 <dim>1</dim>
15682 </port>
15683 </output>
15684 </layer>
15685 <layer id="1074" name="bottleneck3_3/dim_inc/bn/mean/Fused_Mul__copy108210236/quantized/to_f16" type="Convert" version="opset1">
15686 <data destination_type="f16"/>
15687 <input>
15688 <port id="0">
15689 <dim>128</dim>
15690 <dim>32</dim>
15691 <dim>1</dim>
15692 <dim>1</dim>
15693 </port>
15694 </input>
15695 <output>
15696 <port id="1" precision="FP16">
15697 <dim>128</dim>
15698 <dim>32</dim>
15699 <dim>1</dim>
15700 <dim>1</dim>
15701 </port>
15702 </output>
15703 </layer>
15704 <layer id="1075" name="bottleneck3_3/dim_inc/conv/fq_weights_1/zero_point1222120646" type="Const" version="opset1">
15705 <data element_type="f16" offset="77698" shape="128,1,1,1" size="256"/>
15706 <output>
15707 <port id="0" precision="FP16">
15708 <dim>128</dim>
15709 <dim>1</dim>
15710 <dim>1</dim>
15711 <dim>1</dim>
15712 </port>
15713 </output>
15714 </layer>
15715 <layer id="1076" name="bottleneck3_3/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
15716 <data auto_broadcast="numpy"/>
15717 <input>
15718 <port id="0">
15719 <dim>128</dim>
15720 <dim>32</dim>
15721 <dim>1</dim>
15722 <dim>1</dim>
15723 </port>
15724 <port id="1">
15725 <dim>128</dim>
15726 <dim>1</dim>
15727 <dim>1</dim>
15728 <dim>1</dim>
15729 </port>
15730 </input>
15731 <output>
15732 <port id="2" precision="FP16">
15733 <dim>128</dim>
15734 <dim>32</dim>
15735 <dim>1</dim>
15736 <dim>1</dim>
15737 </port>
15738 </output>
15739 </layer>
15740 <layer id="1077" name="bottleneck3_3/dim_inc/conv/fq_weights_1/scale1221622086" type="Const" version="opset1">
15741 <data element_type="f16" offset="77954" shape="128,1,1,1" size="256"/>
15742 <output>
15743 <port id="0" precision="FP16">
15744 <dim>128</dim>
15745 <dim>1</dim>
15746 <dim>1</dim>
15747 <dim>1</dim>
15748 </port>
15749 </output>
15750 </layer>
15751 <layer id="1078" name="bottleneck3_3/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
15752 <data auto_broadcast="numpy"/>
15753 <input>
15754 <port id="0">
15755 <dim>128</dim>
15756 <dim>32</dim>
15757 <dim>1</dim>
15758 <dim>1</dim>
15759 </port>
15760 <port id="1">
15761 <dim>128</dim>
15762 <dim>1</dim>
15763 <dim>1</dim>
15764 <dim>1</dim>
15765 </port>
15766 </input>
15767 <output>
15768 <port id="2" precision="FP16">
15769 <dim>128</dim>
15770 <dim>32</dim>
15771 <dim>1</dim>
15772 <dim>1</dim>
15773 </port>
15774 </output>
15775 </layer>
15776 <layer id="1079" name="bottleneck3_3/dim_inc/conv" type="Convolution" version="opset1">
15777 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
15778 <input>
15779 <port id="0">
15780 <dim>1</dim>
15781 <dim>32</dim>
15782 <dim>40</dim>
15783 <dim>68</dim>
15784 </port>
15785 <port id="1">
15786 <dim>128</dim>
15787 <dim>32</dim>
15788 <dim>1</dim>
15789 <dim>1</dim>
15790 </port>
15791 </input>
15792 <output>
15793 <port id="2" precision="FP16">
15794 <dim>1</dim>
15795 <dim>128</dim>
15796 <dim>40</dim>
15797 <dim>68</dim>
15798 </port>
15799 </output>
15800 </layer>
15801 <layer id="1080" name="data_add_2407324078108419977" type="Const" version="opset1">
15802 <data element_type="f16" offset="78210" shape="1,128,1,1" size="256"/>
15803 <output>
15804 <port id="0" precision="FP16">
15805 <dim>1</dim>
15806 <dim>128</dim>
15807 <dim>1</dim>
15808 <dim>1</dim>
15809 </port>
15810 </output>
15811 </layer>
15812 <layer id="1081" name="bottleneck3_3/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
15813 <data auto_broadcast="numpy"/>
15814 <input>
15815 <port id="0">
15816 <dim>1</dim>
15817 <dim>128</dim>
15818 <dim>40</dim>
15819 <dim>68</dim>
15820 </port>
15821 <port id="1">
15822 <dim>1</dim>
15823 <dim>128</dim>
15824 <dim>1</dim>
15825 <dim>1</dim>
15826 </port>
15827 </input>
15828 <output>
15829 <port id="2" names="bottleneck3_3/dim_inc/conv" precision="FP16">
15830 <dim>1</dim>
15831 <dim>128</dim>
15832 <dim>40</dim>
15833 <dim>68</dim>
15834 </port>
15835 </output>
15836 </layer>
15837 <layer id="1082" name="bottleneck3_3/add/fq_input_1" type="FakeQuantize" version="opset1">
15838 <data auto_broadcast="numpy" levels="256"/>
15839 <input>
15840 <port id="0">
15841 <dim>1</dim>
15842 <dim>128</dim>
15843 <dim>40</dim>
15844 <dim>68</dim>
15845 </port>
15846 <port id="1"/>
15847 <port id="2"/>
15848 <port id="3"/>
15849 <port id="4"/>
15850 </input>
15851 <output>
15852 <port id="5" precision="FP16">
15853 <dim>1</dim>
15854 <dim>128</dim>
15855 <dim>40</dim>
15856 <dim>68</dim>
15857 </port>
15858 </output>
15859 </layer>
15860 <layer id="1083" name="bottleneck3_3/add" type="Add" version="opset1">
15861 <data auto_broadcast="numpy"/>
15862 <input>
15863 <port id="0">
15864 <dim>1</dim>
15865 <dim>128</dim>
15866 <dim>40</dim>
15867 <dim>68</dim>
15868 </port>
15869 <port id="1">
15870 <dim>1</dim>
15871 <dim>128</dim>
15872 <dim>40</dim>
15873 <dim>68</dim>
15874 </port>
15875 </input>
15876 <output>
15877 <port id="2" names="bottleneck3_3/add" precision="FP16">
15878 <dim>1</dim>
15879 <dim>128</dim>
15880 <dim>40</dim>
15881 <dim>68</dim>
15882 </port>
15883 </output>
15884 </layer>
15885 <layer id="1084" name="bottleneck3_3/fn/weights31140400911087" type="Const" version="opset1">
15886 <data element_type="f32" offset="1576" shape="1" size="4"/>
15887 <output>
15888 <port id="0" precision="FP32">
15889 <dim>1</dim>
15890 </port>
15891 </output>
15892 </layer>
15893 <layer id="1085" name="bottleneck3_3/fn" type="PReLU" version="opset1">
15894 <input>
15895 <port id="0">
15896 <dim>1</dim>
15897 <dim>128</dim>
15898 <dim>40</dim>
15899 <dim>68</dim>
15900 </port>
15901 <port id="1">
15902 <dim>1</dim>
15903 </port>
15904 </input>
15905 <output>
15906 <port id="2" names="bottleneck3_3/add" precision="FP16">
15907 <dim>1</dim>
15908 <dim>128</dim>
15909 <dim>40</dim>
15910 <dim>68</dim>
15911 </port>
15912 </output>
15913 </layer>
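	<!-- bottleneck3_3 ends here; bottleneck3_4 below follows the same quantized residual bottleneck layout at the same 1x128x40x68 resolution. -->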
15914 <layer id="1086" name="bottleneck3_4/add/fq_input_0" type="FakeQuantize" version="opset1">
15915 <data auto_broadcast="numpy" levels="256"/>
15916 <input>
15917 <port id="0">
15918 <dim>1</dim>
15919 <dim>128</dim>
15920 <dim>40</dim>
15921 <dim>68</dim>
15922 </port>
15923 <port id="1"/>
15924 <port id="2"/>
15925 <port id="3"/>
15926 <port id="4"/>
15927 </input>
15928 <output>
15929 <port id="5" precision="FP16">
15930 <dim>1</dim>
15931 <dim>128</dim>
15932 <dim>40</dim>
15933 <dim>68</dim>
15934 </port>
15935 </output>
15936 </layer>
15937 <layer id="1087" name="2634263820241" type="Const" version="opset1">
15938 <data element_type="f16" offset="78466" shape="" size="2"/>
15939 <output>
15940 <port id="0" precision="FP16"/>
15941 </output>
15942 </layer>
15943 <layer id="1088" name="2635263920337" type="Const" version="opset1">
15944 <data element_type="f16" offset="78468" shape="" size="2"/>
15945 <output>
15946 <port id="0" precision="FP16"/>
15947 </output>
15948 </layer>
15949 <layer id="1089" name="2636264021465" type="Const" version="opset1">
15950 <data element_type="f16" offset="78466" shape="" size="2"/>
15951 <output>
15952 <port id="0" precision="FP16"/>
15953 </output>
15954 </layer>
15955 <layer id="1090" name="2637264121453" type="Const" version="opset1">
15956 <data element_type="f16" offset="78468" shape="" size="2"/>
15957 <output>
15958 <port id="0" precision="FP16"/>
15959 </output>
15960 </layer>
15961 <layer id="1091" name="4824482820742" type="Const" version="opset1">
15962 <data element_type="f16" offset="78470" shape="" size="2"/>
15963 <output>
15964 <port id="0" precision="FP16"/>
15965 </output>
15966 </layer>
15967 <layer id="1092" name="4825482922785" type="Const" version="opset1">
15968 <data element_type="f16" offset="78472" shape="" size="2"/>
15969 <output>
15970 <port id="0" precision="FP16"/>
15971 </output>
15972 </layer>
15973 <layer id="1093" name="4826483020187" type="Const" version="opset1">
15974 <data element_type="f16" offset="78470" shape="" size="2"/>
15975 <output>
15976 <port id="0" precision="FP16"/>
15977 </output>
15978 </layer>
15979 <layer id="1094" name="4827483120166" type="Const" version="opset1">
15980 <data element_type="f16" offset="78472" shape="" size="2"/>
15981 <output>
15982 <port id="0" precision="FP16"/>
15983 </output>
15984 </layer>
15985 <layer id="1095" name="5284528822872" type="Const" version="opset1">
15986 <data element_type="f16" offset="78474" shape="1,32,1,1" size="64"/>
15987 <output>
15988 <port id="0" precision="FP16">
15989 <dim>1</dim>
15990 <dim>32</dim>
15991 <dim>1</dim>
15992 <dim>1</dim>
15993 </port>
15994 </output>
15995 </layer>
15996 <layer id="1096" name="5285528919452" type="Const" version="opset1">
15997 <data element_type="f16" offset="78538" shape="1,32,1,1" size="64"/>
15998 <output>
15999 <port id="0" precision="FP16">
16000 <dim>1</dim>
16001 <dim>32</dim>
16002 <dim>1</dim>
16003 <dim>1</dim>
16004 </port>
16005 </output>
16006 </layer>
16007 <layer id="1097" name="5286529022962" type="Const" version="opset1">
16008 <data element_type="f16" offset="78474" shape="1,32,1,1" size="64"/>
16009 <output>
16010 <port id="0" precision="FP16">
16011 <dim>1</dim>
16012 <dim>32</dim>
16013 <dim>1</dim>
16014 <dim>1</dim>
16015 </port>
16016 </output>
16017 </layer>
16018 <layer id="1098" name="5287529121930" type="Const" version="opset1">
16019 <data element_type="f16" offset="78538" shape="1,32,1,1" size="64"/>
16020 <output>
16021 <port id="0" precision="FP16">
16022 <dim>1</dim>
16023 <dim>32</dim>
16024 <dim>1</dim>
16025 <dim>1</dim>
16026 </port>
16027 </output>
16028 </layer>
16029 <layer id="1099" name="bottleneck3_4/dim_red/bn/mean/Fused_Mul__copy108910239/quantized1319221501" type="Const" version="opset1">
16030 <data element_type="i8" offset="78602" shape="32,128,1,1" size="4096"/>
16031 <output>
16032 <port id="0" precision="I8">
16033 <dim>32</dim>
16034 <dim>128</dim>
16035 <dim>1</dim>
16036 <dim>1</dim>
16037 </port>
16038 </output>
16039 </layer>
16040 <layer id="1100" name="bottleneck3_4/dim_red/bn/mean/Fused_Mul__copy108910239/quantized/to_f16" type="Convert" version="opset1">
16041 <data destination_type="f16"/>
16042 <input>
16043 <port id="0">
16044 <dim>32</dim>
16045 <dim>128</dim>
16046 <dim>1</dim>
16047 <dim>1</dim>
16048 </port>
16049 </input>
16050 <output>
16051 <port id="1" precision="FP16">
16052 <dim>32</dim>
16053 <dim>128</dim>
16054 <dim>1</dim>
16055 <dim>1</dim>
16056 </port>
16057 </output>
16058 </layer>
16059 <layer id="1101" name="bottleneck3_4/dim_red/conv/fq_weights_1/zero_point1320522071" type="Const" version="opset1">
16060 <data element_type="f16" offset="82698" shape="32,1,1,1" size="64"/>
16061 <output>
16062 <port id="0" precision="FP16">
16063 <dim>32</dim>
16064 <dim>1</dim>
16065 <dim>1</dim>
16066 <dim>1</dim>
16067 </port>
16068 </output>
16069 </layer>
16070 <layer id="1102" name="bottleneck3_4/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
16071 <data auto_broadcast="numpy"/>
16072 <input>
16073 <port id="0">
16074 <dim>32</dim>
16075 <dim>128</dim>
16076 <dim>1</dim>
16077 <dim>1</dim>
16078 </port>
16079 <port id="1">
16080 <dim>32</dim>
16081 <dim>1</dim>
16082 <dim>1</dim>
16083 <dim>1</dim>
16084 </port>
16085 </input>
16086 <output>
16087 <port id="2" precision="FP16">
16088 <dim>32</dim>
16089 <dim>128</dim>
16090 <dim>1</dim>
16091 <dim>1</dim>
16092 </port>
16093 </output>
16094 </layer>
16095 <layer id="1103" name="bottleneck3_4/dim_red/conv/fq_weights_1/scale1320020601" type="Const" version="opset1">
16096 <data element_type="f16" offset="82762" shape="32,1,1,1" size="64"/>
16097 <output>
16098 <port id="0" precision="FP16">
16099 <dim>32</dim>
16100 <dim>1</dim>
16101 <dim>1</dim>
16102 <dim>1</dim>
16103 </port>
16104 </output>
16105 </layer>
16106 <layer id="1104" name="bottleneck3_4/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
16107 <data auto_broadcast="numpy"/>
16108 <input>
16109 <port id="0">
16110 <dim>32</dim>
16111 <dim>128</dim>
16112 <dim>1</dim>
16113 <dim>1</dim>
16114 </port>
16115 <port id="1">
16116 <dim>32</dim>
16117 <dim>1</dim>
16118 <dim>1</dim>
16119 <dim>1</dim>
16120 </port>
16121 </input>
16122 <output>
16123 <port id="2" precision="FP16">
16124 <dim>32</dim>
16125 <dim>128</dim>
16126 <dim>1</dim>
16127 <dim>1</dim>
16128 </port>
16129 </output>
16130 </layer>
16131 <layer id="1105" name="bottleneck3_4/dim_red/conv" type="Convolution" version="opset1">
16132 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
16133 <input>
16134 <port id="0">
16135 <dim>1</dim>
16136 <dim>128</dim>
16137 <dim>40</dim>
16138 <dim>68</dim>
16139 </port>
16140 <port id="1">
16141 <dim>32</dim>
16142 <dim>128</dim>
16143 <dim>1</dim>
16144 <dim>1</dim>
16145 </port>
16146 </input>
16147 <output>
16148 <port id="2" precision="FP16">
16149 <dim>1</dim>
16150 <dim>32</dim>
16151 <dim>40</dim>
16152 <dim>68</dim>
16153 </port>
16154 </output>
16155 </layer>
16156 <layer id="1106" name="data_add_2408124086109121555" type="Const" version="opset1">
16157 <data element_type="f16" offset="82826" shape="1,32,1,1" size="64"/>
16158 <output>
16159 <port id="0" precision="FP16">
16160 <dim>1</dim>
16161 <dim>32</dim>
16162 <dim>1</dim>
16163 <dim>1</dim>
16164 </port>
16165 </output>
16166 </layer>
16167 <layer id="1107" name="bottleneck3_4/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
16168 <data auto_broadcast="numpy"/>
16169 <input>
16170 <port id="0">
16171 <dim>1</dim>
16172 <dim>32</dim>
16173 <dim>40</dim>
16174 <dim>68</dim>
16175 </port>
16176 <port id="1">
16177 <dim>1</dim>
16178 <dim>32</dim>
16179 <dim>1</dim>
16180 <dim>1</dim>
16181 </port>
16182 </input>
16183 <output>
16184 <port id="2" names="bottleneck3_4/dim_red/conv" precision="FP16">
16185 <dim>1</dim>
16186 <dim>32</dim>
16187 <dim>40</dim>
16188 <dim>68</dim>
16189 </port>
16190 </output>
16191 </layer>
16192 <layer id="1108" name="bottleneck3_4/dim_red/fn/weights31152397191093" type="Const" version="opset1">
16193 <data element_type="f32" offset="1576" shape="1" size="4"/>
16194 <output>
16195 <port id="0" precision="FP32">
16196 <dim>1</dim>
16197 </port>
16198 </output>
16199 </layer>
16200 <layer id="1109" name="bottleneck3_4/dim_red/fn" type="PReLU" version="opset1">
16201 <input>
16202 <port id="0">
16203 <dim>1</dim>
16204 <dim>32</dim>
16205 <dim>40</dim>
16206 <dim>68</dim>
16207 </port>
16208 <port id="1">
16209 <dim>1</dim>
16210 </port>
16211 </input>
16212 <output>
16213 <port id="2" names="bottleneck3_4/dim_red/conv" precision="FP16">
16214 <dim>1</dim>
16215 <dim>32</dim>
16216 <dim>40</dim>
16217 <dim>68</dim>
16218 </port>
16219 </output>
16220 </layer>
16221 <layer id="1110" name="bottleneck3_4/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
16222 <data auto_broadcast="numpy" levels="256"/>
16223 <input>
16224 <port id="0">
16225 <dim>1</dim>
16226 <dim>32</dim>
16227 <dim>40</dim>
16228 <dim>68</dim>
16229 </port>
16230 <port id="1">
16231 <dim>1</dim>
16232 <dim>32</dim>
16233 <dim>1</dim>
16234 <dim>1</dim>
16235 </port>
16236 <port id="2">
16237 <dim>1</dim>
16238 <dim>32</dim>
16239 <dim>1</dim>
16240 <dim>1</dim>
16241 </port>
16242 <port id="3">
16243 <dim>1</dim>
16244 <dim>32</dim>
16245 <dim>1</dim>
16246 <dim>1</dim>
16247 </port>
16248 <port id="4">
16249 <dim>1</dim>
16250 <dim>32</dim>
16251 <dim>1</dim>
16252 <dim>1</dim>
16253 </port>
16254 </input>
16255 <output>
16256 <port id="5" precision="FP16">
16257 <dim>1</dim>
16258 <dim>32</dim>
16259 <dim>40</dim>
16260 <dim>68</dim>
16261 </port>
16262 </output>
16263 </layer>
16264 <layer id="1111" name="16927/value1692920505" type="Const" version="opset1">
16265 <data element_type="i64" offset="43778" shape="5" size="40"/>
16266 <output>
16267 <port id="0" precision="I64">
16268 <dim>5</dim>
16269 </port>
16270 </output>
16271 </layer>
16272 <layer id="1112" name="bottleneck3_4/inner/dw1/bn/mean/Fused_Mul__copy109510242/quantized1165622575" type="Const" version="opset1">
16273 <data element_type="i8" offset="82890" shape="32,1,3,3" size="288"/>
16274 <output>
16275 <port id="0" precision="I8">
16276 <dim>32</dim>
16277 <dim>1</dim>
16278 <dim>3</dim>
16279 <dim>3</dim>
16280 </port>
16281 </output>
16282 </layer>
16283 <layer id="1113" name="bottleneck3_4/inner/dw1/bn/mean/Fused_Mul__copy109510242/quantized/to_f16" type="Convert" version="opset1">
16284 <data destination_type="f16"/>
16285 <input>
16286 <port id="0">
16287 <dim>32</dim>
16288 <dim>1</dim>
16289 <dim>3</dim>
16290 <dim>3</dim>
16291 </port>
16292 </input>
16293 <output>
16294 <port id="1" precision="FP16">
16295 <dim>32</dim>
16296 <dim>1</dim>
16297 <dim>3</dim>
16298 <dim>3</dim>
16299 </port>
16300 </output>
16301 </layer>
16302 <layer id="1114" name="bottleneck3_4/inner/dw1/conv/fq_weights_1/zero_point1166921306" type="Const" version="opset1">
16303 <data element_type="f16" offset="83178" shape="32,1,1,1" size="64"/>
16304 <output>
16305 <port id="0" precision="FP16">
16306 <dim>32</dim>
16307 <dim>1</dim>
16308 <dim>1</dim>
16309 <dim>1</dim>
16310 </port>
16311 </output>
16312 </layer>
16313 <layer id="1115" name="bottleneck3_4/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
16314 <data auto_broadcast="numpy"/>
16315 <input>
16316 <port id="0">
16317 <dim>32</dim>
16318 <dim>1</dim>
16319 <dim>3</dim>
16320 <dim>3</dim>
16321 </port>
16322 <port id="1">
16323 <dim>32</dim>
16324 <dim>1</dim>
16325 <dim>1</dim>
16326 <dim>1</dim>
16327 </port>
16328 </input>
16329 <output>
16330 <port id="2" precision="FP16">
16331 <dim>32</dim>
16332 <dim>1</dim>
16333 <dim>3</dim>
16334 <dim>3</dim>
16335 </port>
16336 </output>
16337 </layer>
16338 <layer id="1116" name="bottleneck3_4/inner/dw1/conv/fq_weights_1/scale1166421477" type="Const" version="opset1">
16339 <data element_type="f16" offset="83242" shape="32,1,1,1" size="64"/>
16340 <output>
16341 <port id="0" precision="FP16">
16342 <dim>32</dim>
16343 <dim>1</dim>
16344 <dim>1</dim>
16345 <dim>1</dim>
16346 </port>
16347 </output>
16348 </layer>
16349 <layer id="1117" name="bottleneck3_4/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
16350 <data auto_broadcast="numpy"/>
16351 <input>
16352 <port id="0">
16353 <dim>32</dim>
16354 <dim>1</dim>
16355 <dim>3</dim>
16356 <dim>3</dim>
16357 </port>
16358 <port id="1">
16359 <dim>32</dim>
16360 <dim>1</dim>
16361 <dim>1</dim>
16362 <dim>1</dim>
16363 </port>
16364 </input>
16365 <output>
16366 <port id="2" precision="FP16">
16367 <dim>32</dim>
16368 <dim>1</dim>
16369 <dim>3</dim>
16370 <dim>3</dim>
16371 </port>
16372 </output>
16373 </layer>
16374 <layer id="1118" name="16927" type="Reshape" version="opset1">
16375 <data special_zero="true"/>
16376 <input>
16377 <port id="0">
16378 <dim>32</dim>
16379 <dim>1</dim>
16380 <dim>3</dim>
16381 <dim>3</dim>
16382 </port>
16383 <port id="1">
16384 <dim>5</dim>
16385 </port>
16386 </input>
16387 <output>
16388 <port id="2" precision="FP16">
16389 <dim>32</dim>
16390 <dim>1</dim>
16391 <dim>1</dim>
16392 <dim>3</dim>
16393 <dim>3</dim>
16394 </port>
16395 </output>
16396 </layer>
16397 <layer id="1119" name="bottleneck3_4/inner/dw1/conv" type="GroupConvolution" version="opset1">
16398 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
16399 <input>
16400 <port id="0">
16401 <dim>1</dim>
16402 <dim>32</dim>
16403 <dim>40</dim>
16404 <dim>68</dim>
16405 </port>
16406 <port id="1">
16407 <dim>32</dim>
16408 <dim>1</dim>
16409 <dim>1</dim>
16410 <dim>3</dim>
16411 <dim>3</dim>
16412 </port>
16413 </input>
16414 <output>
16415 <port id="2" precision="FP16">
16416 <dim>1</dim>
16417 <dim>32</dim>
16418 <dim>40</dim>
16419 <dim>68</dim>
16420 </port>
16421 </output>
16422 </layer>
16423 <layer id="1120" name="data_add_2408924094109719548" type="Const" version="opset1">
16424 <data element_type="f16" offset="83306" shape="1,32,1,1" size="64"/>
16425 <output>
16426 <port id="0" precision="FP16">
16427 <dim>1</dim>
16428 <dim>32</dim>
16429 <dim>1</dim>
16430 <dim>1</dim>
16431 </port>
16432 </output>
16433 </layer>
16434 <layer id="1121" name="bottleneck3_4/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
16435 <data auto_broadcast="numpy"/>
16436 <input>
16437 <port id="0">
16438 <dim>1</dim>
16439 <dim>32</dim>
16440 <dim>40</dim>
16441 <dim>68</dim>
16442 </port>
16443 <port id="1">
16444 <dim>1</dim>
16445 <dim>32</dim>
16446 <dim>1</dim>
16447 <dim>1</dim>
16448 </port>
16449 </input>
16450 <output>
16451 <port id="2" names="bottleneck3_4/inner/dw1/conv" precision="FP16">
16452 <dim>1</dim>
16453 <dim>32</dim>
16454 <dim>40</dim>
16455 <dim>68</dim>
16456 </port>
16457 </output>
16458 </layer>
16459 <layer id="1122" name="bottleneck3_4/inner/dw1/fn/weights30980405111099" type="Const" version="opset1">
16460 <data element_type="f32" offset="1576" shape="1" size="4"/>
16461 <output>
16462 <port id="0" precision="FP32">
16463 <dim>1</dim>
16464 </port>
16465 </output>
16466 </layer>
16467 <layer id="1123" name="bottleneck3_4/inner/dw1/fn" type="PReLU" version="opset1">
16468 <input>
16469 <port id="0">
16470 <dim>1</dim>
16471 <dim>32</dim>
16472 <dim>40</dim>
16473 <dim>68</dim>
16474 </port>
16475 <port id="1">
16476 <dim>1</dim>
16477 </port>
16478 </input>
16479 <output>
16480 <port id="2" names="bottleneck3_4/inner/dw1/conv" precision="FP16">
16481 <dim>1</dim>
16482 <dim>32</dim>
16483 <dim>40</dim>
16484 <dim>68</dim>
16485 </port>
16486 </output>
16487 </layer>
16488 <layer id="1124" name="bottleneck3_4/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
16489 <data auto_broadcast="numpy" levels="256"/>
16490 <input>
16491 <port id="0">
16492 <dim>1</dim>
16493 <dim>32</dim>
16494 <dim>40</dim>
16495 <dim>68</dim>
16496 </port>
16497 <port id="1"/>
16498 <port id="2"/>
16499 <port id="3"/>
16500 <port id="4"/>
16501 </input>
16502 <output>
16503 <port id="5" precision="FP16">
16504 <dim>1</dim>
16505 <dim>32</dim>
16506 <dim>40</dim>
16507 <dim>68</dim>
16508 </port>
16509 </output>
16510 </layer>
16511 <layer id="1125" name="bottleneck3_4/dim_inc/bn/mean/Fused_Mul__copy110110245/quantized1204021180" type="Const" version="opset1">
16512 <data element_type="i8" offset="83370" shape="128,32,1,1" size="4096"/>
16513 <output>
16514 <port id="0" precision="I8">
16515 <dim>128</dim>
16516 <dim>32</dim>
16517 <dim>1</dim>
16518 <dim>1</dim>
16519 </port>
16520 </output>
16521 </layer>
16522 <layer id="1126" name="bottleneck3_4/dim_inc/bn/mean/Fused_Mul__copy110110245/quantized/to_f16" type="Convert" version="opset1">
16523 <data destination_type="f16"/>
16524 <input>
16525 <port id="0">
16526 <dim>128</dim>
16527 <dim>32</dim>
16528 <dim>1</dim>
16529 <dim>1</dim>
16530 </port>
16531 </input>
16532 <output>
16533 <port id="1" precision="FP16">
16534 <dim>128</dim>
16535 <dim>32</dim>
16536 <dim>1</dim>
16537 <dim>1</dim>
16538 </port>
16539 </output>
16540 </layer>
16541 <layer id="1127" name="bottleneck3_4/dim_inc/conv/fq_weights_1/zero_point1205320961" type="Const" version="opset1">
16542 <data element_type="f16" offset="87466" shape="128,1,1,1" size="256"/>
16543 <output>
16544 <port id="0" precision="FP16">
16545 <dim>128</dim>
16546 <dim>1</dim>
16547 <dim>1</dim>
16548 <dim>1</dim>
16549 </port>
16550 </output>
16551 </layer>
16552 <layer id="1128" name="bottleneck3_4/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
16553 <data auto_broadcast="numpy"/>
16554 <input>
16555 <port id="0">
16556 <dim>128</dim>
16557 <dim>32</dim>
16558 <dim>1</dim>
16559 <dim>1</dim>
16560 </port>
16561 <port id="1">
16562 <dim>128</dim>
16563 <dim>1</dim>
16564 <dim>1</dim>
16565 <dim>1</dim>
16566 </port>
16567 </input>
16568 <output>
16569 <port id="2" precision="FP16">
16570 <dim>128</dim>
16571 <dim>32</dim>
16572 <dim>1</dim>
16573 <dim>1</dim>
16574 </port>
16575 </output>
16576 </layer>
16577 <layer id="1129" name="bottleneck3_4/dim_inc/conv/fq_weights_1/scale1204821165" type="Const" version="opset1">
16578 <data element_type="f16" offset="87722" shape="128,1,1,1" size="256"/>
16579 <output>
16580 <port id="0" precision="FP16">
16581 <dim>128</dim>
16582 <dim>1</dim>
16583 <dim>1</dim>
16584 <dim>1</dim>
16585 </port>
16586 </output>
16587 </layer>
16588 <layer id="1130" name="bottleneck3_4/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
16589 <data auto_broadcast="numpy"/>
16590 <input>
16591 <port id="0">
16592 <dim>128</dim>
16593 <dim>32</dim>
16594 <dim>1</dim>
16595 <dim>1</dim>
16596 </port>
16597 <port id="1">
16598 <dim>128</dim>
16599 <dim>1</dim>
16600 <dim>1</dim>
16601 <dim>1</dim>
16602 </port>
16603 </input>
16604 <output>
16605 <port id="2" precision="FP16">
16606 <dim>128</dim>
16607 <dim>32</dim>
16608 <dim>1</dim>
16609 <dim>1</dim>
16610 </port>
16611 </output>
16612 </layer>
16613 <layer id="1131" name="bottleneck3_4/dim_inc/conv" type="Convolution" version="opset1">
16614 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
16615 <input>
16616 <port id="0">
16617 <dim>1</dim>
16618 <dim>32</dim>
16619 <dim>40</dim>
16620 <dim>68</dim>
16621 </port>
16622 <port id="1">
16623 <dim>128</dim>
16624 <dim>32</dim>
16625 <dim>1</dim>
16626 <dim>1</dim>
16627 </port>
16628 </input>
16629 <output>
16630 <port id="2" precision="FP16">
16631 <dim>1</dim>
16632 <dim>128</dim>
16633 <dim>40</dim>
16634 <dim>68</dim>
16635 </port>
16636 </output>
16637 </layer>
16638 <layer id="1132" name="data_add_2409724102110321096" type="Const" version="opset1">
16639 <data element_type="f16" offset="87978" shape="1,128,1,1" size="256"/>
16640 <output>
16641 <port id="0" precision="FP16">
16642 <dim>1</dim>
16643 <dim>128</dim>
16644 <dim>1</dim>
16645 <dim>1</dim>
16646 </port>
16647 </output>
16648 </layer>
16649 <layer id="1133" name="bottleneck3_4/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
16650 <data auto_broadcast="numpy"/>
16651 <input>
16652 <port id="0">
16653 <dim>1</dim>
16654 <dim>128</dim>
16655 <dim>40</dim>
16656 <dim>68</dim>
16657 </port>
16658 <port id="1">
16659 <dim>1</dim>
16660 <dim>128</dim>
16661 <dim>1</dim>
16662 <dim>1</dim>
16663 </port>
16664 </input>
16665 <output>
16666 <port id="2" names="bottleneck3_4/dim_inc/conv" precision="FP16">
16667 <dim>1</dim>
16668 <dim>128</dim>
16669 <dim>40</dim>
16670 <dim>68</dim>
16671 </port>
16672 </output>
16673 </layer>
16674 <layer id="1134" name="bottleneck3_4/add/fq_input_1" type="FakeQuantize" version="opset1">
16675 <data auto_broadcast="numpy" levels="256"/>
16676 <input>
16677 <port id="0">
16678 <dim>1</dim>
16679 <dim>128</dim>
16680 <dim>40</dim>
16681 <dim>68</dim>
16682 </port>
16683 <port id="1"/>
16684 <port id="2"/>
16685 <port id="3"/>
16686 <port id="4"/>
16687 </input>
16688 <output>
16689 <port id="5" precision="FP16">
16690 <dim>1</dim>
16691 <dim>128</dim>
16692 <dim>40</dim>
16693 <dim>68</dim>
16694 </port>
16695 </output>
16696 </layer>
16697 <layer id="1135" name="bottleneck3_4/add" type="Add" version="opset1">
16698 <data auto_broadcast="numpy"/>
16699 <input>
16700 <port id="0">
16701 <dim>1</dim>
16702 <dim>128</dim>
16703 <dim>40</dim>
16704 <dim>68</dim>
16705 </port>
16706 <port id="1">
16707 <dim>1</dim>
16708 <dim>128</dim>
16709 <dim>40</dim>
16710 <dim>68</dim>
16711 </port>
16712 </input>
16713 <output>
16714 <port id="2" names="bottleneck3_4/add" precision="FP16">
16715 <dim>1</dim>
16716 <dim>128</dim>
16717 <dim>40</dim>
16718 <dim>68</dim>
16719 </port>
16720 </output>
16721 </layer>
16722 <layer id="1136" name="bottleneck3_4/fn/weights30792399381106" type="Const" version="opset1">
16723 <data element_type="f32" offset="1576" shape="1" size="4"/>
16724 <output>
16725 <port id="0" precision="FP32">
16726 <dim>1</dim>
16727 </port>
16728 </output>
16729 </layer>
16730 <layer id="1137" name="bottleneck3_4/fn" type="PReLU" version="opset1">
16731 <input>
16732 <port id="0">
16733 <dim>1</dim>
16734 <dim>128</dim>
16735 <dim>40</dim>
16736 <dim>68</dim>
16737 </port>
16738 <port id="1">
16739 <dim>1</dim>
16740 </port>
16741 </input>
16742 <output>
16743 <port id="2" names="bottleneck3_4/add" precision="FP16">
16744 <dim>1</dim>
16745 <dim>128</dim>
16746 <dim>40</dim>
16747 <dim>68</dim>
16748 </port>
16749 </output>
16750 </layer>
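	<!-- bottleneck3_4 ends here; bottleneck3_5 begins below with its input FakeQuantize and the same quantized 1x1 dim_red followed by the 3x3 depthwise stage. -->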
16751 <layer id="1138" name="bottleneck3_5/add/fq_input_0" type="FakeQuantize" version="opset1">
16752 <data auto_broadcast="numpy" levels="256"/>
16753 <input>
16754 <port id="0">
16755 <dim>1</dim>
16756 <dim>128</dim>
16757 <dim>40</dim>
16758 <dim>68</dim>
16759 </port>
16760 <port id="1"/>
16761 <port id="2"/>
16762 <port id="3"/>
16763 <port id="4"/>
16764 </input>
16765 <output>
16766 <port id="5" precision="FP16">
16767 <dim>1</dim>
16768 <dim>128</dim>
16769 <dim>40</dim>
16770 <dim>68</dim>
16771 </port>
16772 </output>
16773 </layer>
16774 <layer id="1139" name="4034403822017" type="Const" version="opset1">
16775 <data element_type="f16" offset="88234" shape="" size="2"/>
16776 <output>
16777 <port id="0" precision="FP16"/>
16778 </output>
16779 </layer>
16780 <layer id="1140" name="4035403922605" type="Const" version="opset1">
16781 <data element_type="f16" offset="88236" shape="" size="2"/>
16782 <output>
16783 <port id="0" precision="FP16"/>
16784 </output>
16785 </layer>
16786 <layer id="1141" name="4036404022398" type="Const" version="opset1">
16787 <data element_type="f16" offset="88234" shape="" size="2"/>
16788 <output>
16789 <port id="0" precision="FP16"/>
16790 </output>
16791 </layer>
16792 <layer id="1142" name="4037404119542" type="Const" version="opset1">
16793 <data element_type="f16" offset="88236" shape="" size="2"/>
16794 <output>
16795 <port id="0" precision="FP16"/>
16796 </output>
16797 </layer>
16798 <layer id="1143" name="2924292820574" type="Const" version="opset1">
16799 <data element_type="f16" offset="88238" shape="" size="2"/>
16800 <output>
16801 <port id="0" precision="FP16"/>
16802 </output>
16803 </layer>
16804 <layer id="1144" name="2925292921171" type="Const" version="opset1">
16805 <data element_type="f16" offset="88240" shape="" size="2"/>
16806 <output>
16807 <port id="0" precision="FP16"/>
16808 </output>
16809 </layer>
16810 <layer id="1145" name="2926293020640" type="Const" version="opset1">
16811 <data element_type="f16" offset="88238" shape="" size="2"/>
16812 <output>
16813 <port id="0" precision="FP16"/>
16814 </output>
16815 </layer>
16816 <layer id="1146" name="2927293122992" type="Const" version="opset1">
16817 <data element_type="f16" offset="88240" shape="" size="2"/>
16818 <output>
16819 <port id="0" precision="FP16"/>
16820 </output>
16821 </layer>
16822 <layer id="1147" name="5064506820307" type="Const" version="opset1">
16823 <data element_type="f16" offset="88242" shape="1,32,1,1" size="64"/>
16824 <output>
16825 <port id="0" precision="FP16">
16826 <dim>1</dim>
16827 <dim>32</dim>
16828 <dim>1</dim>
16829 <dim>1</dim>
16830 </port>
16831 </output>
16832 </layer>
16833 <layer id="1148" name="5065506919701" type="Const" version="opset1">
16834 <data element_type="f16" offset="88306" shape="1,32,1,1" size="64"/>
16835 <output>
16836 <port id="0" precision="FP16">
16837 <dim>1</dim>
16838 <dim>32</dim>
16839 <dim>1</dim>
16840 <dim>1</dim>
16841 </port>
16842 </output>
16843 </layer>
16844 <layer id="1149" name="5066507019914" type="Const" version="opset1">
16845 <data element_type="f16" offset="88242" shape="1,32,1,1" size="64"/>
16846 <output>
16847 <port id="0" precision="FP16">
16848 <dim>1</dim>
16849 <dim>32</dim>
16850 <dim>1</dim>
16851 <dim>1</dim>
16852 </port>
16853 </output>
16854 </layer>
16855 <layer id="1150" name="5067507122128" type="Const" version="opset1">
16856 <data element_type="f16" offset="88306" shape="1,32,1,1" size="64"/>
16857 <output>
16858 <port id="0" precision="FP16">
16859 <dim>1</dim>
16860 <dim>32</dim>
16861 <dim>1</dim>
16862 <dim>1</dim>
16863 </port>
16864 </output>
16865 </layer>
16866 <layer id="1151" name="bottleneck3_5/dim_red/bn/mean/Fused_Mul__copy110810248/quantized1405620034" type="Const" version="opset1">
16867 <data element_type="i8" offset="88370" shape="32,128,1,1" size="4096"/>
16868 <output>
16869 <port id="0" precision="I8">
16870 <dim>32</dim>
16871 <dim>128</dim>
16872 <dim>1</dim>
16873 <dim>1</dim>
16874 </port>
16875 </output>
16876 </layer>
16877 <layer id="1152" name="bottleneck3_5/dim_red/bn/mean/Fused_Mul__copy110810248/quantized/to_f16" type="Convert" version="opset1">
16878 <data destination_type="f16"/>
16879 <input>
16880 <port id="0">
16881 <dim>32</dim>
16882 <dim>128</dim>
16883 <dim>1</dim>
16884 <dim>1</dim>
16885 </port>
16886 </input>
16887 <output>
16888 <port id="1" precision="FP16">
16889 <dim>32</dim>
16890 <dim>128</dim>
16891 <dim>1</dim>
16892 <dim>1</dim>
16893 </port>
16894 </output>
16895 </layer>
16896 <layer id="1153" name="bottleneck3_5/dim_red/conv/fq_weights_1/zero_point1406921690" type="Const" version="opset1">
16897 <data element_type="f16" offset="92466" shape="32,1,1,1" size="64"/>
16898 <output>
16899 <port id="0" precision="FP16">
16900 <dim>32</dim>
16901 <dim>1</dim>
16902 <dim>1</dim>
16903 <dim>1</dim>
16904 </port>
16905 </output>
16906 </layer>
16907 <layer id="1154" name="bottleneck3_5/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
16908 <data auto_broadcast="numpy"/>
16909 <input>
16910 <port id="0">
16911 <dim>32</dim>
16912 <dim>128</dim>
16913 <dim>1</dim>
16914 <dim>1</dim>
16915 </port>
16916 <port id="1">
16917 <dim>32</dim>
16918 <dim>1</dim>
16919 <dim>1</dim>
16920 <dim>1</dim>
16921 </port>
16922 </input>
16923 <output>
16924 <port id="2" precision="FP16">
16925 <dim>32</dim>
16926 <dim>128</dim>
16927 <dim>1</dim>
16928 <dim>1</dim>
16929 </port>
16930 </output>
16931 </layer>
16932 <layer id="1155" name="bottleneck3_5/dim_red/conv/fq_weights_1/scale1406419581" type="Const" version="opset1">
16933 <data element_type="f16" offset="92530" shape="32,1,1,1" size="64"/>
16934 <output>
16935 <port id="0" precision="FP16">
16936 <dim>32</dim>
16937 <dim>1</dim>
16938 <dim>1</dim>
16939 <dim>1</dim>
16940 </port>
16941 </output>
16942 </layer>
16943 <layer id="1156" name="bottleneck3_5/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
16944 <data auto_broadcast="numpy"/>
16945 <input>
16946 <port id="0">
16947 <dim>32</dim>
16948 <dim>128</dim>
16949 <dim>1</dim>
16950 <dim>1</dim>
16951 </port>
16952 <port id="1">
16953 <dim>32</dim>
16954 <dim>1</dim>
16955 <dim>1</dim>
16956 <dim>1</dim>
16957 </port>
16958 </input>
16959 <output>
16960 <port id="2" precision="FP16">
16961 <dim>32</dim>
16962 <dim>128</dim>
16963 <dim>1</dim>
16964 <dim>1</dim>
16965 </port>
16966 </output>
16967 </layer>
16968 <layer id="1157" name="bottleneck3_5/dim_red/conv" type="Convolution" version="opset1">
16969 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
16970 <input>
16971 <port id="0">
16972 <dim>1</dim>
16973 <dim>128</dim>
16974 <dim>40</dim>
16975 <dim>68</dim>
16976 </port>
16977 <port id="1">
16978 <dim>32</dim>
16979 <dim>128</dim>
16980 <dim>1</dim>
16981 <dim>1</dim>
16982 </port>
16983 </input>
16984 <output>
16985 <port id="2" precision="FP16">
16986 <dim>1</dim>
16987 <dim>32</dim>
16988 <dim>40</dim>
16989 <dim>68</dim>
16990 </port>
16991 </output>
16992 </layer>
16993 <layer id="1158" name="data_add_2410524110111019524" type="Const" version="opset1">
16994 <data element_type="f16" offset="92594" shape="1,32,1,1" size="64"/>
16995 <output>
16996 <port id="0" precision="FP16">
16997 <dim>1</dim>
16998 <dim>32</dim>
16999 <dim>1</dim>
17000 <dim>1</dim>
17001 </port>
17002 </output>
17003 </layer>
17004 <layer id="1159" name="bottleneck3_5/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
17005 <data auto_broadcast="numpy"/>
17006 <input>
17007 <port id="0">
17008 <dim>1</dim>
17009 <dim>32</dim>
17010 <dim>40</dim>
17011 <dim>68</dim>
17012 </port>
17013 <port id="1">
17014 <dim>1</dim>
17015 <dim>32</dim>
17016 <dim>1</dim>
17017 <dim>1</dim>
17018 </port>
17019 </input>
17020 <output>
17021 <port id="2" names="bottleneck3_5/dim_red/conv" precision="FP16">
17022 <dim>1</dim>
17023 <dim>32</dim>
17024 <dim>40</dim>
17025 <dim>68</dim>
17026 </port>
17027 </output>
17028 </layer>
17029 <layer id="1160" name="bottleneck3_5/dim_red/fn/weights30788405831112" type="Const" version="opset1">
17030 <data element_type="f32" offset="1576" shape="1" size="4"/>
17031 <output>
17032 <port id="0" precision="FP32">
17033 <dim>1</dim>
17034 </port>
17035 </output>
17036 </layer>
17037 <layer id="1161" name="bottleneck3_5/dim_red/fn" type="PReLU" version="opset1">
17038 <input>
17039 <port id="0">
17040 <dim>1</dim>
17041 <dim>32</dim>
17042 <dim>40</dim>
17043 <dim>68</dim>
17044 </port>
17045 <port id="1">
17046 <dim>1</dim>
17047 </port>
17048 </input>
17049 <output>
17050 <port id="2" names="bottleneck3_5/dim_red/conv" precision="FP16">
17051 <dim>1</dim>
17052 <dim>32</dim>
17053 <dim>40</dim>
17054 <dim>68</dim>
17055 </port>
17056 </output>
17057 </layer>
17058 <layer id="1162" name="bottleneck3_5/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
17059 <data auto_broadcast="numpy" levels="256"/>
17060 <input>
17061 <port id="0">
17062 <dim>1</dim>
17063 <dim>32</dim>
17064 <dim>40</dim>
17065 <dim>68</dim>
17066 </port>
17067 <port id="1">
17068 <dim>1</dim>
17069 <dim>32</dim>
17070 <dim>1</dim>
17071 <dim>1</dim>
17072 </port>
17073 <port id="2">
17074 <dim>1</dim>
17075 <dim>32</dim>
17076 <dim>1</dim>
17077 <dim>1</dim>
17078 </port>
17079 <port id="3">
17080 <dim>1</dim>
17081 <dim>32</dim>
17082 <dim>1</dim>
17083 <dim>1</dim>
17084 </port>
17085 <port id="4">
17086 <dim>1</dim>
17087 <dim>32</dim>
17088 <dim>1</dim>
17089 <dim>1</dim>
17090 </port>
17091 </input>
17092 <output>
17093 <port id="5" precision="FP16">
17094 <dim>1</dim>
17095 <dim>32</dim>
17096 <dim>40</dim>
17097 <dim>68</dim>
17098 </port>
17099 </output>
17100 </layer>
17101 <layer id="1163" name="16919/value1692120664" type="Const" version="opset1">
17102 <data element_type="i64" offset="43778" shape="5" size="40"/>
17103 <output>
17104 <port id="0" precision="I64">
17105 <dim>5</dim>
17106 </port>
17107 </output>
17108 </layer>
17109 <layer id="1164" name="bottleneck3_5/inner/dw1/bn/mean/Fused_Mul__copy111410251/quantized1218421648" type="Const" version="opset1">
17110 <data element_type="i8" offset="92658" shape="32,1,3,3" size="288"/>
17111 <output>
17112 <port id="0" precision="I8">
17113 <dim>32</dim>
17114 <dim>1</dim>
17115 <dim>3</dim>
17116 <dim>3</dim>
17117 </port>
17118 </output>
17119 </layer>
17120 <layer id="1165" name="bottleneck3_5/inner/dw1/bn/mean/Fused_Mul__copy111410251/quantized/to_f16" type="Convert" version="opset1">
17121 <data destination_type="f16"/>
17122 <input>
17123 <port id="0">
17124 <dim>32</dim>
17125 <dim>1</dim>
17126 <dim>3</dim>
17127 <dim>3</dim>
17128 </port>
17129 </input>
17130 <output>
17131 <port id="1" precision="FP16">
17132 <dim>32</dim>
17133 <dim>1</dim>
17134 <dim>3</dim>
17135 <dim>3</dim>
17136 </port>
17137 </output>
17138 </layer>
17139 <layer id="1166" name="bottleneck3_5/inner/dw1/conv/fq_weights_1/zero_point1219720637" type="Const" version="opset1">
17140 <data element_type="f16" offset="92946" shape="32,1,1,1" size="64"/>
17141 <output>
17142 <port id="0" precision="FP16">
17143 <dim>32</dim>
17144 <dim>1</dim>
17145 <dim>1</dim>
17146 <dim>1</dim>
17147 </port>
17148 </output>
17149 </layer>
17150 <layer id="1167" name="bottleneck3_5/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
17151 <data auto_broadcast="numpy"/>
17152 <input>
17153 <port id="0">
17154 <dim>32</dim>
17155 <dim>1</dim>
17156 <dim>3</dim>
17157 <dim>3</dim>
17158 </port>
17159 <port id="1">
17160 <dim>32</dim>
17161 <dim>1</dim>
17162 <dim>1</dim>
17163 <dim>1</dim>
17164 </port>
17165 </input>
17166 <output>
17167 <port id="2" precision="FP16">
17168 <dim>32</dim>
17169 <dim>1</dim>
17170 <dim>3</dim>
17171 <dim>3</dim>
17172 </port>
17173 </output>
17174 </layer>
17175 <layer id="1168" name="bottleneck3_5/inner/dw1/conv/fq_weights_1/scale1219221819" type="Const" version="opset1">
17176 <data element_type="f16" offset="93010" shape="32,1,1,1" size="64"/>
17177 <output>
17178 <port id="0" precision="FP16">
17179 <dim>32</dim>
17180 <dim>1</dim>
17181 <dim>1</dim>
17182 <dim>1</dim>
17183 </port>
17184 </output>
17185 </layer>
17186 <layer id="1169" name="bottleneck3_5/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
17187 <data auto_broadcast="numpy"/>
17188 <input>
17189 <port id="0">
17190 <dim>32</dim>
17191 <dim>1</dim>
17192 <dim>3</dim>
17193 <dim>3</dim>
17194 </port>
17195 <port id="1">
17196 <dim>32</dim>
17197 <dim>1</dim>
17198 <dim>1</dim>
17199 <dim>1</dim>
17200 </port>
17201 </input>
17202 <output>
17203 <port id="2" precision="FP16">
17204 <dim>32</dim>
17205 <dim>1</dim>
17206 <dim>3</dim>
17207 <dim>3</dim>
17208 </port>
17209 </output>
17210 </layer>
17211 <layer id="1170" name="16919" type="Reshape" version="opset1">
17212 <data special_zero="true"/>
17213 <input>
17214 <port id="0">
17215 <dim>32</dim>
17216 <dim>1</dim>
17217 <dim>3</dim>
17218 <dim>3</dim>
17219 </port>
17220 <port id="1">
17221 <dim>5</dim>
17222 </port>
17223 </input>
17224 <output>
17225 <port id="2" precision="FP16">
17226 <dim>32</dim>
17227 <dim>1</dim>
17228 <dim>1</dim>
17229 <dim>3</dim>
17230 <dim>3</dim>
17231 </port>
17232 </output>
17233 </layer>
17234 <layer id="1171" name="bottleneck3_5/inner/dw1/conv" type="GroupConvolution" version="opset1">
17235 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
17236 <input>
17237 <port id="0">
17238 <dim>1</dim>
17239 <dim>32</dim>
17240 <dim>40</dim>
17241 <dim>68</dim>
17242 </port>
17243 <port id="1">
17244 <dim>32</dim>
17245 <dim>1</dim>
17246 <dim>1</dim>
17247 <dim>3</dim>
17248 <dim>3</dim>
17249 </port>
17250 </input>
17251 <output>
17252 <port id="2" precision="FP16">
17253 <dim>1</dim>
17254 <dim>32</dim>
17255 <dim>40</dim>
17256 <dim>68</dim>
17257 </port>
17258 </output>
17259 </layer>
17260 <layer id="1172" name="data_add_2411324118111621126" type="Const" version="opset1">
17261 <data element_type="f16" offset="93074" shape="1,32,1,1" size="64"/>
17262 <output>
17263 <port id="0" precision="FP16">
17264 <dim>1</dim>
17265 <dim>32</dim>
17266 <dim>1</dim>
17267 <dim>1</dim>
17268 </port>
17269 </output>
17270 </layer>
17271 <layer id="1173" name="bottleneck3_5/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
17272 <data auto_broadcast="numpy"/>
17273 <input>
17274 <port id="0">
17275 <dim>1</dim>
17276 <dim>32</dim>
17277 <dim>40</dim>
17278 <dim>68</dim>
17279 </port>
17280 <port id="1">
17281 <dim>1</dim>
17282 <dim>32</dim>
17283 <dim>1</dim>
17284 <dim>1</dim>
17285 </port>
17286 </input>
17287 <output>
17288 <port id="2" names="bottleneck3_5/inner/dw1/conv" precision="FP16">
17289 <dim>1</dim>
17290 <dim>32</dim>
17291 <dim>40</dim>
17292 <dim>68</dim>
17293 </port>
17294 </output>
17295 </layer>
17296 <layer id="1174" name="bottleneck3_5/inner/dw1/fn/weights30796403941118" type="Const" version="opset1">
17297 <data element_type="f32" offset="1576" shape="1" size="4"/>
17298 <output>
17299 <port id="0" precision="FP32">
17300 <dim>1</dim>
17301 </port>
17302 </output>
17303 </layer>
17304 <layer id="1175" name="bottleneck3_5/inner/dw1/fn" type="PReLU" version="opset1">
17305 <input>
17306 <port id="0">
17307 <dim>1</dim>
17308 <dim>32</dim>
17309 <dim>40</dim>
17310 <dim>68</dim>
17311 </port>
17312 <port id="1">
17313 <dim>1</dim>
17314 </port>
17315 </input>
17316 <output>
17317 <port id="2" names="bottleneck3_5/inner/dw1/conv" precision="FP16">
17318 <dim>1</dim>
17319 <dim>32</dim>
17320 <dim>40</dim>
17321 <dim>68</dim>
17322 </port>
17323 </output>
17324 </layer>
17325 <layer id="1176" name="bottleneck3_5/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
17326 <data auto_broadcast="numpy" levels="256"/>
17327 <input>
17328 <port id="0">
17329 <dim>1</dim>
17330 <dim>32</dim>
17331 <dim>40</dim>
17332 <dim>68</dim>
17333 </port>
17334 <port id="1"/>
17335 <port id="2"/>
17336 <port id="3"/>
17337 <port id="4"/>
17338 </input>
17339 <output>
17340 <port id="5" precision="FP16">
17341 <dim>1</dim>
17342 <dim>32</dim>
17343 <dim>40</dim>
17344 <dim>68</dim>
17345 </port>
17346 </output>
17347 </layer>
17348 <layer id="1177" name="bottleneck3_5/dim_inc/bn/mean/Fused_Mul__copy112010254/quantized1338420571" type="Const" version="opset1">
17349 <data element_type="i8" offset="93138" shape="128,32,1,1" size="4096"/>
17350 <output>
17351 <port id="0" precision="I8">
17352 <dim>128</dim>
17353 <dim>32</dim>
17354 <dim>1</dim>
17355 <dim>1</dim>
17356 </port>
17357 </output>
17358 </layer>
17359 <layer id="1178" name="bottleneck3_5/dim_inc/bn/mean/Fused_Mul__copy112010254/quantized/to_f16" type="Convert" version="opset1">
17360 <data destination_type="f16"/>
17361 <input>
17362 <port id="0">
17363 <dim>128</dim>
17364 <dim>32</dim>
17365 <dim>1</dim>
17366 <dim>1</dim>
17367 </port>
17368 </input>
17369 <output>
17370 <port id="1" precision="FP16">
17371 <dim>128</dim>
17372 <dim>32</dim>
17373 <dim>1</dim>
17374 <dim>1</dim>
17375 </port>
17376 </output>
17377 </layer>
17378 <layer id="1179" name="bottleneck3_5/dim_inc/conv/fq_weights_1/zero_point1339721069" type="Const" version="opset1">
17379 <data element_type="f16" offset="97234" shape="128,1,1,1" size="256"/>
17380 <output>
17381 <port id="0" precision="FP16">
17382 <dim>128</dim>
17383 <dim>1</dim>
17384 <dim>1</dim>
17385 <dim>1</dim>
17386 </port>
17387 </output>
17388 </layer>
17389 <layer id="1180" name="bottleneck3_5/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
17390 <data auto_broadcast="numpy"/>
17391 <input>
17392 <port id="0">
17393 <dim>128</dim>
17394 <dim>32</dim>
17395 <dim>1</dim>
17396 <dim>1</dim>
17397 </port>
17398 <port id="1">
17399 <dim>128</dim>
17400 <dim>1</dim>
17401 <dim>1</dim>
17402 <dim>1</dim>
17403 </port>
17404 </input>
17405 <output>
17406 <port id="2" precision="FP16">
17407 <dim>128</dim>
17408 <dim>32</dim>
17409 <dim>1</dim>
17410 <dim>1</dim>
17411 </port>
17412 </output>
17413 </layer>
17414 <layer id="1181" name="bottleneck3_5/dim_inc/conv/fq_weights_1/scale1339219731" type="Const" version="opset1">
17415 <data element_type="f16" offset="97490" shape="128,1,1,1" size="256"/>
17416 <output>
17417 <port id="0" precision="FP16">
17418 <dim>128</dim>
17419 <dim>1</dim>
17420 <dim>1</dim>
17421 <dim>1</dim>
17422 </port>
17423 </output>
17424 </layer>
17425 <layer id="1182" name="bottleneck3_5/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
17426 <data auto_broadcast="numpy"/>
17427 <input>
17428 <port id="0">
17429 <dim>128</dim>
17430 <dim>32</dim>
17431 <dim>1</dim>
17432 <dim>1</dim>
17433 </port>
17434 <port id="1">
17435 <dim>128</dim>
17436 <dim>1</dim>
17437 <dim>1</dim>
17438 <dim>1</dim>
17439 </port>
17440 </input>
17441 <output>
17442 <port id="2" precision="FP16">
17443 <dim>128</dim>
17444 <dim>32</dim>
17445 <dim>1</dim>
17446 <dim>1</dim>
17447 </port>
17448 </output>
17449 </layer>
17450 <layer id="1183" name="bottleneck3_5/dim_inc/conv" type="Convolution" version="opset1">
17451 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
17452 <input>
17453 <port id="0">
17454 <dim>1</dim>
17455 <dim>32</dim>
17456 <dim>40</dim>
17457 <dim>68</dim>
17458 </port>
17459 <port id="1">
17460 <dim>128</dim>
17461 <dim>32</dim>
17462 <dim>1</dim>
17463 <dim>1</dim>
17464 </port>
17465 </input>
17466 <output>
17467 <port id="2" precision="FP16">
17468 <dim>1</dim>
17469 <dim>128</dim>
17470 <dim>40</dim>
17471 <dim>68</dim>
17472 </port>
17473 </output>
17474 </layer>
17475 <layer id="1184" name="data_add_2412124126112222830" type="Const" version="opset1">
17476 <data element_type="f16" offset="97746" shape="1,128,1,1" size="256"/>
17477 <output>
17478 <port id="0" precision="FP16">
17479 <dim>1</dim>
17480 <dim>128</dim>
17481 <dim>1</dim>
17482 <dim>1</dim>
17483 </port>
17484 </output>
17485 </layer>
17486 <layer id="1185" name="bottleneck3_5/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
17487 <data auto_broadcast="numpy"/>
17488 <input>
17489 <port id="0">
17490 <dim>1</dim>
17491 <dim>128</dim>
17492 <dim>40</dim>
17493 <dim>68</dim>
17494 </port>
17495 <port id="1">
17496 <dim>1</dim>
17497 <dim>128</dim>
17498 <dim>1</dim>
17499 <dim>1</dim>
17500 </port>
17501 </input>
17502 <output>
17503 <port id="2" names="bottleneck3_5/dim_inc/conv" precision="FP16">
17504 <dim>1</dim>
17505 <dim>128</dim>
17506 <dim>40</dim>
17507 <dim>68</dim>
17508 </port>
17509 </output>
17510 </layer>
17511 <layer id="1186" name="bottleneck3_5/add/fq_input_1" type="FakeQuantize" version="opset1">
17512 <data auto_broadcast="numpy" levels="256"/>
17513 <input>
17514 <port id="0">
17515 <dim>1</dim>
17516 <dim>128</dim>
17517 <dim>40</dim>
17518 <dim>68</dim>
17519 </port>
17520 <port id="1"/>
17521 <port id="2"/>
17522 <port id="3"/>
17523 <port id="4"/>
17524 </input>
17525 <output>
17526 <port id="5" precision="FP16">
17527 <dim>1</dim>
17528 <dim>128</dim>
17529 <dim>40</dim>
17530 <dim>68</dim>
17531 </port>
17532 </output>
17533 </layer>
17534 <layer id="1187" name="bottleneck3_5/add" type="Add" version="opset1">
17535 <data auto_broadcast="numpy"/>
17536 <input>
17537 <port id="0">
17538 <dim>1</dim>
17539 <dim>128</dim>
17540 <dim>40</dim>
17541 <dim>68</dim>
17542 </port>
17543 <port id="1">
17544 <dim>1</dim>
17545 <dim>128</dim>
17546 <dim>40</dim>
17547 <dim>68</dim>
17548 </port>
17549 </input>
17550 <output>
17551 <port id="2" names="bottleneck3_5/add" precision="FP16">
17552 <dim>1</dim>
17553 <dim>128</dim>
17554 <dim>40</dim>
17555 <dim>68</dim>
17556 </port>
17557 </output>
17558 </layer>
17559 <layer id="1188" name="bottleneck3_5/fn/weights30828406371125" type="Const" version="opset1">
17560 <data element_type="f32" offset="1576" shape="1" size="4"/>
17561 <output>
17562 <port id="0" precision="FP32">
17563 <dim>1</dim>
17564 </port>
17565 </output>
17566 </layer>
17567 <layer id="1189" name="bottleneck3_5/fn" type="PReLU" version="opset1">
17568 <input>
17569 <port id="0">
17570 <dim>1</dim>
17571 <dim>128</dim>
17572 <dim>40</dim>
17573 <dim>68</dim>
17574 </port>
17575 <port id="1">
17576 <dim>1</dim>
17577 </port>
17578 </input>
17579 <output>
17580 <port id="2" names="bottleneck3_5/add" precision="FP16">
17581 <dim>1</dim>
17582 <dim>128</dim>
17583 <dim>40</dim>
17584 <dim>68</dim>
17585 </port>
17586 </output>
17587 </layer>
17588 <layer id="1190" name="bottleneck3_6/add/fq_input_0" type="FakeQuantize" version="opset1">
17589 <data auto_broadcast="numpy" levels="256"/>
17590 <input>
17591 <port id="0">
17592 <dim>1</dim>
17593 <dim>128</dim>
17594 <dim>40</dim>
17595 <dim>68</dim>
17596 </port>
17597 <port id="1"/>
17598 <port id="2"/>
17599 <port id="3"/>
17600 <port id="4"/>
17601 </input>
17602 <output>
17603 <port id="5" precision="FP16">
17604 <dim>1</dim>
17605 <dim>128</dim>
17606 <dim>40</dim>
17607 <dim>68</dim>
17608 </port>
17609 </output>
17610 </layer>
17611 <layer id="1191" name="3834383822431" type="Const" version="opset1">
17612 <data element_type="f16" offset="98002" shape="" size="2"/>
17613 <output>
17614 <port id="0" precision="FP16"/>
17615 </output>
17616 </layer>
17617 <layer id="1192" name="3835383921294" type="Const" version="opset1">
17618 <data element_type="f16" offset="98004" shape="" size="2"/>
17619 <output>
17620 <port id="0" precision="FP16"/>
17621 </output>
17622 </layer>
17623 <layer id="1193" name="3836384019779" type="Const" version="opset1">
17624 <data element_type="f16" offset="98002" shape="" size="2"/>
17625 <output>
17626 <port id="0" precision="FP16"/>
17627 </output>
17628 </layer>
17629 <layer id="1194" name="3837384121441" type="Const" version="opset1">
17630 <data element_type="f16" offset="98004" shape="" size="2"/>
17631 <output>
17632 <port id="0" precision="FP16"/>
17633 </output>
17634 </layer>
17635 <layer id="1195" name="3464346820745" type="Const" version="opset1">
17636 <data element_type="f16" offset="98006" shape="" size="2"/>
17637 <output>
17638 <port id="0" precision="FP16"/>
17639 </output>
17640 </layer>
17641 <layer id="1196" name="3465346922710" type="Const" version="opset1">
17642 <data element_type="f16" offset="98008" shape="" size="2"/>
17643 <output>
17644 <port id="0" precision="FP16"/>
17645 </output>
17646 </layer>
17647 <layer id="1197" name="3466347022125" type="Const" version="opset1">
17648 <data element_type="f16" offset="98006" shape="" size="2"/>
17649 <output>
17650 <port id="0" precision="FP16"/>
17651 </output>
17652 </layer>
17653 <layer id="1198" name="3467347119662" type="Const" version="opset1">
17654 <data element_type="f16" offset="98008" shape="" size="2"/>
17655 <output>
17656 <port id="0" precision="FP16"/>
17657 </output>
17658 </layer>
17659 <layer id="1199" name="3344334819866" type="Const" version="opset1">
17660 <data element_type="f16" offset="98010" shape="1,32,1,1" size="64"/>
17661 <output>
17662 <port id="0" precision="FP16">
17663 <dim>1</dim>
17664 <dim>32</dim>
17665 <dim>1</dim>
17666 <dim>1</dim>
17667 </port>
17668 </output>
17669 </layer>
17670 <layer id="1200" name="3345334922596" type="Const" version="opset1">
17671 <data element_type="f16" offset="98074" shape="1,32,1,1" size="64"/>
17672 <output>
17673 <port id="0" precision="FP16">
17674 <dim>1</dim>
17675 <dim>32</dim>
17676 <dim>1</dim>
17677 <dim>1</dim>
17678 </port>
17679 </output>
17680 </layer>
17681 <layer id="1201" name="3346335022317" type="Const" version="opset1">
17682 <data element_type="f16" offset="98010" shape="1,32,1,1" size="64"/>
17683 <output>
17684 <port id="0" precision="FP16">
17685 <dim>1</dim>
17686 <dim>32</dim>
17687 <dim>1</dim>
17688 <dim>1</dim>
17689 </port>
17690 </output>
17691 </layer>
17692 <layer id="1202" name="3347335119845" type="Const" version="opset1">
17693 <data element_type="f16" offset="98074" shape="1,32,1,1" size="64"/>
17694 <output>
17695 <port id="0" precision="FP16">
17696 <dim>1</dim>
17697 <dim>32</dim>
17698 <dim>1</dim>
17699 <dim>1</dim>
17700 </port>
17701 </output>
17702 </layer>
17703 <layer id="1203" name="bottleneck3_6/dim_red/bn/mean/Fused_Mul__copy112710257/quantized1295222950" type="Const" version="opset1">
17704 <data element_type="i8" offset="98138" shape="32,128,1,1" size="4096"/>
17705 <output>
17706 <port id="0" precision="I8">
17707 <dim>32</dim>
17708 <dim>128</dim>
17709 <dim>1</dim>
17710 <dim>1</dim>
17711 </port>
17712 </output>
17713 </layer>
17714 <layer id="1204" name="bottleneck3_6/dim_red/bn/mean/Fused_Mul__copy112710257/quantized/to_f16" type="Convert" version="opset1">
17715 <data destination_type="f16"/>
17716 <input>
17717 <port id="0">
17718 <dim>32</dim>
17719 <dim>128</dim>
17720 <dim>1</dim>
17721 <dim>1</dim>
17722 </port>
17723 </input>
17724 <output>
17725 <port id="1" precision="FP16">
17726 <dim>32</dim>
17727 <dim>128</dim>
17728 <dim>1</dim>
17729 <dim>1</dim>
17730 </port>
17731 </output>
17732 </layer>
17733 <layer id="1205" name="bottleneck3_6/dim_red/conv/fq_weights_1/zero_point1296520625" type="Const" version="opset1">
17734 <data element_type="f16" offset="102234" shape="32,1,1,1" size="64"/>
17735 <output>
17736 <port id="0" precision="FP16">
17737 <dim>32</dim>
17738 <dim>1</dim>
17739 <dim>1</dim>
17740 <dim>1</dim>
17741 </port>
17742 </output>
17743 </layer>
17744 <layer id="1206" name="bottleneck3_6/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
17745 <data auto_broadcast="numpy"/>
17746 <input>
17747 <port id="0">
17748 <dim>32</dim>
17749 <dim>128</dim>
17750 <dim>1</dim>
17751 <dim>1</dim>
17752 </port>
17753 <port id="1">
17754 <dim>32</dim>
17755 <dim>1</dim>
17756 <dim>1</dim>
17757 <dim>1</dim>
17758 </port>
17759 </input>
17760 <output>
17761 <port id="2" precision="FP16">
17762 <dim>32</dim>
17763 <dim>128</dim>
17764 <dim>1</dim>
17765 <dim>1</dim>
17766 </port>
17767 </output>
17768 </layer>
17769 <layer id="1207" name="bottleneck3_6/dim_red/conv/fq_weights_1/scale1296020553" type="Const" version="opset1">
17770 <data element_type="f16" offset="102298" shape="32,1,1,1" size="64"/>
17771 <output>
17772 <port id="0" precision="FP16">
17773 <dim>32</dim>
17774 <dim>1</dim>
17775 <dim>1</dim>
17776 <dim>1</dim>
17777 </port>
17778 </output>
17779 </layer>
17780 <layer id="1208" name="bottleneck3_6/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
17781 <data auto_broadcast="numpy"/>
17782 <input>
17783 <port id="0">
17784 <dim>32</dim>
17785 <dim>128</dim>
17786 <dim>1</dim>
17787 <dim>1</dim>
17788 </port>
17789 <port id="1">
17790 <dim>32</dim>
17791 <dim>1</dim>
17792 <dim>1</dim>
17793 <dim>1</dim>
17794 </port>
17795 </input>
17796 <output>
17797 <port id="2" precision="FP16">
17798 <dim>32</dim>
17799 <dim>128</dim>
17800 <dim>1</dim>
17801 <dim>1</dim>
17802 </port>
17803 </output>
17804 </layer>
17805 <layer id="1209" name="bottleneck3_6/dim_red/conv" type="Convolution" version="opset1">
17806 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
17807 <input>
17808 <port id="0">
17809 <dim>1</dim>
17810 <dim>128</dim>
17811 <dim>40</dim>
17812 <dim>68</dim>
17813 </port>
17814 <port id="1">
17815 <dim>32</dim>
17816 <dim>128</dim>
17817 <dim>1</dim>
17818 <dim>1</dim>
17819 </port>
17820 </input>
17821 <output>
17822 <port id="2" precision="FP16">
17823 <dim>1</dim>
17824 <dim>32</dim>
17825 <dim>40</dim>
17826 <dim>68</dim>
17827 </port>
17828 </output>
17829 </layer>
17830 <layer id="1210" name="data_add_2412924134112922899" type="Const" version="opset1">
17831 <data element_type="f16" offset="102362" shape="1,32,1,1" size="64"/>
17832 <output>
17833 <port id="0" precision="FP16">
17834 <dim>1</dim>
17835 <dim>32</dim>
17836 <dim>1</dim>
17837 <dim>1</dim>
17838 </port>
17839 </output>
17840 </layer>
17841 <layer id="1211" name="bottleneck3_6/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
17842 <data auto_broadcast="numpy"/>
17843 <input>
17844 <port id="0">
17845 <dim>1</dim>
17846 <dim>32</dim>
17847 <dim>40</dim>
17848 <dim>68</dim>
17849 </port>
17850 <port id="1">
17851 <dim>1</dim>
17852 <dim>32</dim>
17853 <dim>1</dim>
17854 <dim>1</dim>
17855 </port>
17856 </input>
17857 <output>
17858 <port id="2" names="bottleneck3_6/dim_red/conv" precision="FP16">
17859 <dim>1</dim>
17860 <dim>32</dim>
17861 <dim>40</dim>
17862 <dim>68</dim>
17863 </port>
17864 </output>
17865 </layer>
17866 <layer id="1212" name="bottleneck3_6/dim_red/fn/weights30772406221131" type="Const" version="opset1">
17867 <data element_type="f32" offset="1576" shape="1" size="4"/>
17868 <output>
17869 <port id="0" precision="FP32">
17870 <dim>1</dim>
17871 </port>
17872 </output>
17873 </layer>
17874 <layer id="1213" name="bottleneck3_6/dim_red/fn" type="PReLU" version="opset1">
17875 <input>
17876 <port id="0">
17877 <dim>1</dim>
17878 <dim>32</dim>
17879 <dim>40</dim>
17880 <dim>68</dim>
17881 </port>
17882 <port id="1">
17883 <dim>1</dim>
17884 </port>
17885 </input>
17886 <output>
17887 <port id="2" names="bottleneck3_6/dim_red/conv" precision="FP16">
17888 <dim>1</dim>
17889 <dim>32</dim>
17890 <dim>40</dim>
17891 <dim>68</dim>
17892 </port>
17893 </output>
17894 </layer>
17895 <layer id="1214" name="bottleneck3_6/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
17896 <data auto_broadcast="numpy" levels="256"/>
17897 <input>
17898 <port id="0">
17899 <dim>1</dim>
17900 <dim>32</dim>
17901 <dim>40</dim>
17902 <dim>68</dim>
17903 </port>
17904 <port id="1">
17905 <dim>1</dim>
17906 <dim>32</dim>
17907 <dim>1</dim>
17908 <dim>1</dim>
17909 </port>
17910 <port id="2">
17911 <dim>1</dim>
17912 <dim>32</dim>
17913 <dim>1</dim>
17914 <dim>1</dim>
17915 </port>
17916 <port id="3">
17917 <dim>1</dim>
17918 <dim>32</dim>
17919 <dim>1</dim>
17920 <dim>1</dim>
17921 </port>
17922 <port id="4">
17923 <dim>1</dim>
17924 <dim>32</dim>
17925 <dim>1</dim>
17926 <dim>1</dim>
17927 </port>
17928 </input>
17929 <output>
17930 <port id="5" precision="FP16">
17931 <dim>1</dim>
17932 <dim>32</dim>
17933 <dim>40</dim>
17934 <dim>68</dim>
17935 </port>
17936 </output>
17937 </layer>
17938 <layer id="1215" name="16831/value1683321768" type="Const" version="opset1">
17939 <data element_type="i64" offset="43778" shape="5" size="40"/>
17940 <output>
17941 <port id="0" precision="I64">
17942 <dim>5</dim>
17943 </port>
17944 </output>
17945 </layer>
17946 <layer id="1216" name="bottleneck3_6/inner/dw1/bn/mean/Fused_Mul__copy113310260/quantized1252022770" type="Const" version="opset1">
17947 <data element_type="i8" offset="102426" shape="32,1,3,3" size="288"/>
17948 <output>
17949 <port id="0" precision="I8">
17950 <dim>32</dim>
17951 <dim>1</dim>
17952 <dim>3</dim>
17953 <dim>3</dim>
17954 </port>
17955 </output>
17956 </layer>
17957 <layer id="1217" name="bottleneck3_6/inner/dw1/bn/mean/Fused_Mul__copy113310260/quantized/to_f16" type="Convert" version="opset1">
17958 <data destination_type="f16"/>
17959 <input>
17960 <port id="0">
17961 <dim>32</dim>
17962 <dim>1</dim>
17963 <dim>3</dim>
17964 <dim>3</dim>
17965 </port>
17966 </input>
17967 <output>
17968 <port id="1" precision="FP16">
17969 <dim>32</dim>
17970 <dim>1</dim>
17971 <dim>3</dim>
17972 <dim>3</dim>
17973 </port>
17974 </output>
17975 </layer>
17976 <layer id="1218" name="bottleneck3_6/inner/dw1/conv/fq_weights_1/zero_point1253322650" type="Const" version="opset1">
17977 <data element_type="f16" offset="102714" shape="32,1,1,1" size="64"/>
17978 <output>
17979 <port id="0" precision="FP16">
17980 <dim>32</dim>
17981 <dim>1</dim>
17982 <dim>1</dim>
17983 <dim>1</dim>
17984 </port>
17985 </output>
17986 </layer>
17987 <layer id="1219" name="bottleneck3_6/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
17988 <data auto_broadcast="numpy"/>
17989 <input>
17990 <port id="0">
17991 <dim>32</dim>
17992 <dim>1</dim>
17993 <dim>3</dim>
17994 <dim>3</dim>
17995 </port>
17996 <port id="1">
17997 <dim>32</dim>
17998 <dim>1</dim>
17999 <dim>1</dim>
18000 <dim>1</dim>
18001 </port>
18002 </input>
18003 <output>
18004 <port id="2" precision="FP16">
18005 <dim>32</dim>
18006 <dim>1</dim>
18007 <dim>3</dim>
18008 <dim>3</dim>
18009 </port>
18010 </output>
18011 </layer>
18012 <layer id="1220" name="bottleneck3_6/inner/dw1/conv/fq_weights_1/scale1252821573" type="Const" version="opset1">
18013 <data element_type="f16" offset="102778" shape="32,1,1,1" size="64"/>
18014 <output>
18015 <port id="0" precision="FP16">
18016 <dim>32</dim>
18017 <dim>1</dim>
18018 <dim>1</dim>
18019 <dim>1</dim>
18020 </port>
18021 </output>
18022 </layer>
18023 <layer id="1221" name="bottleneck3_6/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
18024 <data auto_broadcast="numpy"/>
18025 <input>
18026 <port id="0">
18027 <dim>32</dim>
18028 <dim>1</dim>
18029 <dim>3</dim>
18030 <dim>3</dim>
18031 </port>
18032 <port id="1">
18033 <dim>32</dim>
18034 <dim>1</dim>
18035 <dim>1</dim>
18036 <dim>1</dim>
18037 </port>
18038 </input>
18039 <output>
18040 <port id="2" precision="FP16">
18041 <dim>32</dim>
18042 <dim>1</dim>
18043 <dim>3</dim>
18044 <dim>3</dim>
18045 </port>
18046 </output>
18047 </layer>
18048 <layer id="1222" name="16831" type="Reshape" version="opset1">
18049 <data special_zero="true"/>
18050 <input>
18051 <port id="0">
18052 <dim>32</dim>
18053 <dim>1</dim>
18054 <dim>3</dim>
18055 <dim>3</dim>
18056 </port>
18057 <port id="1">
18058 <dim>5</dim>
18059 </port>
18060 </input>
18061 <output>
18062 <port id="2" precision="FP16">
18063 <dim>32</dim>
18064 <dim>1</dim>
18065 <dim>1</dim>
18066 <dim>3</dim>
18067 <dim>3</dim>
18068 </port>
18069 </output>
18070 </layer>
18071 <layer id="1223" name="bottleneck3_6/inner/dw1/conv" type="GroupConvolution" version="opset1">
18072 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
18073 <input>
18074 <port id="0">
18075 <dim>1</dim>
18076 <dim>32</dim>
18077 <dim>40</dim>
18078 <dim>68</dim>
18079 </port>
18080 <port id="1">
18081 <dim>32</dim>
18082 <dim>1</dim>
18083 <dim>1</dim>
18084 <dim>3</dim>
18085 <dim>3</dim>
18086 </port>
18087 </input>
18088 <output>
18089 <port id="2" precision="FP16">
18090 <dim>1</dim>
18091 <dim>32</dim>
18092 <dim>40</dim>
18093 <dim>68</dim>
18094 </port>
18095 </output>
18096 </layer>
18097 <layer id="1224" name="data_add_2413724142113520937" type="Const" version="opset1">
18098 <data element_type="f16" offset="102842" shape="1,32,1,1" size="64"/>
18099 <output>
18100 <port id="0" precision="FP16">
18101 <dim>1</dim>
18102 <dim>32</dim>
18103 <dim>1</dim>
18104 <dim>1</dim>
18105 </port>
18106 </output>
18107 </layer>
18108 <layer id="1225" name="bottleneck3_6/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
18109 <data auto_broadcast="numpy"/>
18110 <input>
18111 <port id="0">
18112 <dim>1</dim>
18113 <dim>32</dim>
18114 <dim>40</dim>
18115 <dim>68</dim>
18116 </port>
18117 <port id="1">
18118 <dim>1</dim>
18119 <dim>32</dim>
18120 <dim>1</dim>
18121 <dim>1</dim>
18122 </port>
18123 </input>
18124 <output>
18125 <port id="2" names="bottleneck3_6/inner/dw1/conv" precision="FP16">
18126 <dim>1</dim>
18127 <dim>32</dim>
18128 <dim>40</dim>
18129 <dim>68</dim>
18130 </port>
18131 </output>
18132 </layer>
18133 <layer id="1226" name="bottleneck3_6/inner/dw1/fn/weights31124397131137" type="Const" version="opset1">
18134 <data element_type="f32" offset="1576" shape="1" size="4"/>
18135 <output>
18136 <port id="0" precision="FP32">
18137 <dim>1</dim>
18138 </port>
18139 </output>
18140 </layer>
18141 <layer id="1227" name="bottleneck3_6/inner/dw1/fn" type="PReLU" version="opset1">
18142 <input>
18143 <port id="0">
18144 <dim>1</dim>
18145 <dim>32</dim>
18146 <dim>40</dim>
18147 <dim>68</dim>
18148 </port>
18149 <port id="1">
18150 <dim>1</dim>
18151 </port>
18152 </input>
18153 <output>
18154 <port id="2" names="bottleneck3_6/inner/dw1/conv" precision="FP16">
18155 <dim>1</dim>
18156 <dim>32</dim>
18157 <dim>40</dim>
18158 <dim>68</dim>
18159 </port>
18160 </output>
18161 </layer>
18162 <layer id="1228" name="bottleneck3_6/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
18163 <data auto_broadcast="numpy" levels="256"/>
18164 <input>
18165 <port id="0">
18166 <dim>1</dim>
18167 <dim>32</dim>
18168 <dim>40</dim>
18169 <dim>68</dim>
18170 </port>
18171 <port id="1"/>
18172 <port id="2"/>
18173 <port id="3"/>
18174 <port id="4"/>
18175 </input>
18176 <output>
18177 <port id="5" precision="FP16">
18178 <dim>1</dim>
18179 <dim>32</dim>
18180 <dim>40</dim>
18181 <dim>68</dim>
18182 </port>
18183 </output>
18184 </layer>
18185 <layer id="1229" name="bottleneck3_6/dim_inc/bn/mean/Fused_Mul__copy113910263/quantized1336021981" type="Const" version="opset1">
18186 <data element_type="i8" offset="102906" shape="128,32,1,1" size="4096"/>
18187 <output>
18188 <port id="0" precision="I8">
18189 <dim>128</dim>
18190 <dim>32</dim>
18191 <dim>1</dim>
18192 <dim>1</dim>
18193 </port>
18194 </output>
18195 </layer>
18196 <layer id="1230" name="bottleneck3_6/dim_inc/bn/mean/Fused_Mul__copy113910263/quantized/to_f16" type="Convert" version="opset1">
18197 <data destination_type="f16"/>
18198 <input>
18199 <port id="0">
18200 <dim>128</dim>
18201 <dim>32</dim>
18202 <dim>1</dim>
18203 <dim>1</dim>
18204 </port>
18205 </input>
18206 <output>
18207 <port id="1" precision="FP16">
18208 <dim>128</dim>
18209 <dim>32</dim>
18210 <dim>1</dim>
18211 <dim>1</dim>
18212 </port>
18213 </output>
18214 </layer>
18215 <layer id="1231" name="bottleneck3_6/dim_inc/conv/fq_weights_1/zero_point1337322548" type="Const" version="opset1">
18216 <data element_type="f16" offset="107002" shape="128,1,1,1" size="256"/>
18217 <output>
18218 <port id="0" precision="FP16">
18219 <dim>128</dim>
18220 <dim>1</dim>
18221 <dim>1</dim>
18222 <dim>1</dim>
18223 </port>
18224 </output>
18225 </layer>
18226 <layer id="1232" name="bottleneck3_6/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
18227 <data auto_broadcast="numpy"/>
18228 <input>
18229 <port id="0">
18230 <dim>128</dim>
18231 <dim>32</dim>
18232 <dim>1</dim>
18233 <dim>1</dim>
18234 </port>
18235 <port id="1">
18236 <dim>128</dim>
18237 <dim>1</dim>
18238 <dim>1</dim>
18239 <dim>1</dim>
18240 </port>
18241 </input>
18242 <output>
18243 <port id="2" precision="FP16">
18244 <dim>128</dim>
18245 <dim>32</dim>
18246 <dim>1</dim>
18247 <dim>1</dim>
18248 </port>
18249 </output>
18250 </layer>
18251 <layer id="1233" name="bottleneck3_6/dim_inc/conv/fq_weights_1/scale1336821696" type="Const" version="opset1">
18252 <data element_type="f16" offset="107258" shape="128,1,1,1" size="256"/>
18253 <output>
18254 <port id="0" precision="FP16">
18255 <dim>128</dim>
18256 <dim>1</dim>
18257 <dim>1</dim>
18258 <dim>1</dim>
18259 </port>
18260 </output>
18261 </layer>
18262 <layer id="1234" name="bottleneck3_6/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
18263 <data auto_broadcast="numpy"/>
18264 <input>
18265 <port id="0">
18266 <dim>128</dim>
18267 <dim>32</dim>
18268 <dim>1</dim>
18269 <dim>1</dim>
18270 </port>
18271 <port id="1">
18272 <dim>128</dim>
18273 <dim>1</dim>
18274 <dim>1</dim>
18275 <dim>1</dim>
18276 </port>
18277 </input>
18278 <output>
18279 <port id="2" precision="FP16">
18280 <dim>128</dim>
18281 <dim>32</dim>
18282 <dim>1</dim>
18283 <dim>1</dim>
18284 </port>
18285 </output>
18286 </layer>
18287 <layer id="1235" name="bottleneck3_6/dim_inc/conv" type="Convolution" version="opset1">
18288 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
18289 <input>
18290 <port id="0">
18291 <dim>1</dim>
18292 <dim>32</dim>
18293 <dim>40</dim>
18294 <dim>68</dim>
18295 </port>
18296 <port id="1">
18297 <dim>128</dim>
18298 <dim>32</dim>
18299 <dim>1</dim>
18300 <dim>1</dim>
18301 </port>
18302 </input>
18303 <output>
18304 <port id="2" precision="FP16">
18305 <dim>1</dim>
18306 <dim>128</dim>
18307 <dim>40</dim>
18308 <dim>68</dim>
18309 </port>
18310 </output>
18311 </layer>
18312 <layer id="1236" name="data_add_2414524150114120967" type="Const" version="opset1">
18313 <data element_type="f16" offset="107514" shape="1,128,1,1" size="256"/>
18314 <output>
18315 <port id="0" precision="FP16">
18316 <dim>1</dim>
18317 <dim>128</dim>
18318 <dim>1</dim>
18319 <dim>1</dim>
18320 </port>
18321 </output>
18322 </layer>
18323 <layer id="1237" name="bottleneck3_6/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
18324 <data auto_broadcast="numpy"/>
18325 <input>
18326 <port id="0">
18327 <dim>1</dim>
18328 <dim>128</dim>
18329 <dim>40</dim>
18330 <dim>68</dim>
18331 </port>
18332 <port id="1">
18333 <dim>1</dim>
18334 <dim>128</dim>
18335 <dim>1</dim>
18336 <dim>1</dim>
18337 </port>
18338 </input>
18339 <output>
18340 <port id="2" names="bottleneck3_6/dim_inc/conv" precision="FP16">
18341 <dim>1</dim>
18342 <dim>128</dim>
18343 <dim>40</dim>
18344 <dim>68</dim>
18345 </port>
18346 </output>
18347 </layer>
18348 <layer id="1238" name="bottleneck3_6/add/fq_input_1" type="FakeQuantize" version="opset1">
18349 <data auto_broadcast="numpy" levels="256"/>
18350 <input>
18351 <port id="0">
18352 <dim>1</dim>
18353 <dim>128</dim>
18354 <dim>40</dim>
18355 <dim>68</dim>
18356 </port>
18357 <port id="1"/>
18358 <port id="2"/>
18359 <port id="3"/>
18360 <port id="4"/>
18361 </input>
18362 <output>
18363 <port id="5" precision="FP16">
18364 <dim>1</dim>
18365 <dim>128</dim>
18366 <dim>40</dim>
18367 <dim>68</dim>
18368 </port>
18369 </output>
18370 </layer>
18371 <layer id="1239" name="bottleneck3_6/add" type="Add" version="opset1">
18372 <data auto_broadcast="numpy"/>
18373 <input>
18374 <port id="0">
18375 <dim>1</dim>
18376 <dim>128</dim>
18377 <dim>40</dim>
18378 <dim>68</dim>
18379 </port>
18380 <port id="1">
18381 <dim>1</dim>
18382 <dim>128</dim>
18383 <dim>40</dim>
18384 <dim>68</dim>
18385 </port>
18386 </input>
18387 <output>
18388 <port id="2" names="bottleneck3_6/add" precision="FP16">
18389 <dim>1</dim>
18390 <dim>128</dim>
18391 <dim>40</dim>
18392 <dim>68</dim>
18393 </port>
18394 </output>
18395 </layer>
18396 <layer id="1240" name="bottleneck3_6/fn/weights31032396591144" type="Const" version="opset1">
18397 <data element_type="f32" offset="1576" shape="1" size="4"/>
18398 <output>
18399 <port id="0" precision="FP32">
18400 <dim>1</dim>
18401 </port>
18402 </output>
18403 </layer>
18404 <layer id="1241" name="bottleneck3_6/fn" type="PReLU" version="opset1">
18405 <input>
18406 <port id="0">
18407 <dim>1</dim>
18408 <dim>128</dim>
18409 <dim>40</dim>
18410 <dim>68</dim>
18411 </port>
18412 <port id="1">
18413 <dim>1</dim>
18414 </port>
18415 </input>
18416 <output>
18417 <port id="2" names="bottleneck3_6/add" precision="FP16">
18418 <dim>1</dim>
18419 <dim>128</dim>
18420 <dim>40</dim>
18421 <dim>68</dim>
18422 </port>
18423 </output>
18424 </layer>
18425 <layer id="1242" name="bottleneck3_7/add/fq_input_0" type="FakeQuantize" version="opset1">
18426 <data auto_broadcast="numpy" levels="256"/>
18427 <input>
18428 <port id="0">
18429 <dim>1</dim>
18430 <dim>128</dim>
18431 <dim>40</dim>
18432 <dim>68</dim>
18433 </port>
18434 <port id="1"/>
18435 <port id="2"/>
18436 <port id="3"/>
18437 <port id="4"/>
18438 </input>
18439 <output>
18440 <port id="5" precision="FP16">
18441 <dim>1</dim>
18442 <dim>128</dim>
18443 <dim>40</dim>
18444 <dim>68</dim>
18445 </port>
18446 </output>
18447 </layer>
18448 <layer id="1243" name="4674467820403" type="Const" version="opset1">
18449 <data element_type="f16" offset="107770" shape="" size="2"/>
18450 <output>
18451 <port id="0" precision="FP16"/>
18452 </output>
18453 </layer>
18454 <layer id="1244" name="4675467921108" type="Const" version="opset1">
18455 <data element_type="f16" offset="107772" shape="" size="2"/>
18456 <output>
18457 <port id="0" precision="FP16"/>
18458 </output>
18459 </layer>
18460 <layer id="1245" name="4676468022062" type="Const" version="opset1">
18461 <data element_type="f16" offset="107770" shape="" size="2"/>
18462 <output>
18463 <port id="0" precision="FP16"/>
18464 </output>
18465 </layer>
18466 <layer id="1246" name="4677468119893" type="Const" version="opset1">
18467 <data element_type="f16" offset="107772" shape="" size="2"/>
18468 <output>
18469 <port id="0" precision="FP16"/>
18470 </output>
18471 </layer>
18472 <layer id="1247" name="4344434821423" type="Const" version="opset1">
18473 <data element_type="f16" offset="107774" shape="" size="2"/>
18474 <output>
18475 <port id="0" precision="FP16"/>
18476 </output>
18477 </layer>
18478 <layer id="1248" name="4345434919683" type="Const" version="opset1">
18479 <data element_type="f16" offset="107776" shape="" size="2"/>
18480 <output>
18481 <port id="0" precision="FP16"/>
18482 </output>
18483 </layer>
18484 <layer id="1249" name="4346435022674" type="Const" version="opset1">
18485 <data element_type="f16" offset="107774" shape="" size="2"/>
18486 <output>
18487 <port id="0" precision="FP16"/>
18488 </output>
18489 </layer>
18490 <layer id="1250" name="4347435121381" type="Const" version="opset1">
18491 <data element_type="f16" offset="107776" shape="" size="2"/>
18492 <output>
18493 <port id="0" precision="FP16"/>
18494 </output>
18495 </layer>
18496 <layer id="1251" name="4744474821639" type="Const" version="opset1">
18497 <data element_type="f16" offset="107778" shape="1,32,1,1" size="64"/>
18498 <output>
18499 <port id="0" precision="FP16">
18500 <dim>1</dim>
18501 <dim>32</dim>
18502 <dim>1</dim>
18503 <dim>1</dim>
18504 </port>
18505 </output>
18506 </layer>
18507 <layer id="1252" name="4745474921519" type="Const" version="opset1">
18508 <data element_type="f16" offset="107842" shape="1,32,1,1" size="64"/>
18509 <output>
18510 <port id="0" precision="FP16">
18511 <dim>1</dim>
18512 <dim>32</dim>
18513 <dim>1</dim>
18514 <dim>1</dim>
18515 </port>
18516 </output>
18517 </layer>
18518 <layer id="1253" name="4746475020751" type="Const" version="opset1">
18519 <data element_type="f16" offset="107778" shape="1,32,1,1" size="64"/>
18520 <output>
18521 <port id="0" precision="FP16">
18522 <dim>1</dim>
18523 <dim>32</dim>
18524 <dim>1</dim>
18525 <dim>1</dim>
18526 </port>
18527 </output>
18528 </layer>
18529 <layer id="1254" name="4747475120016" type="Const" version="opset1">
18530 <data element_type="f16" offset="107842" shape="1,32,1,1" size="64"/>
18531 <output>
18532 <port id="0" precision="FP16">
18533 <dim>1</dim>
18534 <dim>32</dim>
18535 <dim>1</dim>
18536 <dim>1</dim>
18537 </port>
18538 </output>
18539 </layer>
18540 <layer id="1255" name="bottleneck3_7/dim_red/bn/mean/Fused_Mul__copy114610266/quantized1254419824" type="Const" version="opset1">
18541 <data element_type="i8" offset="107906" shape="32,128,1,1" size="4096"/>
18542 <output>
18543 <port id="0" precision="I8">
18544 <dim>32</dim>
18545 <dim>128</dim>
18546 <dim>1</dim>
18547 <dim>1</dim>
18548 </port>
18549 </output>
18550 </layer>
18551 <layer id="1256" name="bottleneck3_7/dim_red/bn/mean/Fused_Mul__copy114610266/quantized/to_f16" type="Convert" version="opset1">
18552 <data destination_type="f16"/>
18553 <input>
18554 <port id="0">
18555 <dim>32</dim>
18556 <dim>128</dim>
18557 <dim>1</dim>
18558 <dim>1</dim>
18559 </port>
18560 </input>
18561 <output>
18562 <port id="1" precision="FP16">
18563 <dim>32</dim>
18564 <dim>128</dim>
18565 <dim>1</dim>
18566 <dim>1</dim>
18567 </port>
18568 </output>
18569 </layer>
18570 <layer id="1257" name="bottleneck3_7/dim_red/conv/fq_weights_1/zero_point1255722533" type="Const" version="opset1">
18571 <data element_type="f16" offset="112002" shape="32,1,1,1" size="64"/>
18572 <output>
18573 <port id="0" precision="FP16">
18574 <dim>32</dim>
18575 <dim>1</dim>
18576 <dim>1</dim>
18577 <dim>1</dim>
18578 </port>
18579 </output>
18580 </layer>
18581 <layer id="1258" name="bottleneck3_7/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
18582 <data auto_broadcast="numpy"/>
18583 <input>
18584 <port id="0">
18585 <dim>32</dim>
18586 <dim>128</dim>
18587 <dim>1</dim>
18588 <dim>1</dim>
18589 </port>
18590 <port id="1">
18591 <dim>32</dim>
18592 <dim>1</dim>
18593 <dim>1</dim>
18594 <dim>1</dim>
18595 </port>
18596 </input>
18597 <output>
18598 <port id="2" precision="FP16">
18599 <dim>32</dim>
18600 <dim>128</dim>
18601 <dim>1</dim>
18602 <dim>1</dim>
18603 </port>
18604 </output>
18605 </layer>
18606 <layer id="1259" name="bottleneck3_7/dim_red/conv/fq_weights_1/scale1255220076" type="Const" version="opset1">
18607 <data element_type="f16" offset="112066" shape="32,1,1,1" size="64"/>
18608 <output>
18609 <port id="0" precision="FP16">
18610 <dim>32</dim>
18611 <dim>1</dim>
18612 <dim>1</dim>
18613 <dim>1</dim>
18614 </port>
18615 </output>
18616 </layer>
18617 <layer id="1260" name="bottleneck3_7/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
18618 <data auto_broadcast="numpy"/>
18619 <input>
18620 <port id="0">
18621 <dim>32</dim>
18622 <dim>128</dim>
18623 <dim>1</dim>
18624 <dim>1</dim>
18625 </port>
18626 <port id="1">
18627 <dim>32</dim>
18628 <dim>1</dim>
18629 <dim>1</dim>
18630 <dim>1</dim>
18631 </port>
18632 </input>
18633 <output>
18634 <port id="2" precision="FP16">
18635 <dim>32</dim>
18636 <dim>128</dim>
18637 <dim>1</dim>
18638 <dim>1</dim>
18639 </port>
18640 </output>
18641 </layer>
18642 <layer id="1261" name="bottleneck3_7/dim_red/conv" type="Convolution" version="opset1">
18643 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
18644 <input>
18645 <port id="0">
18646 <dim>1</dim>
18647 <dim>128</dim>
18648 <dim>40</dim>
18649 <dim>68</dim>
18650 </port>
18651 <port id="1">
18652 <dim>32</dim>
18653 <dim>128</dim>
18654 <dim>1</dim>
18655 <dim>1</dim>
18656 </port>
18657 </input>
18658 <output>
18659 <port id="2" precision="FP16">
18660 <dim>1</dim>
18661 <dim>32</dim>
18662 <dim>40</dim>
18663 <dim>68</dim>
18664 </port>
18665 </output>
18666 </layer>
18667 <layer id="1262" name="data_add_2415324158114819506" type="Const" version="opset1">
18668 <data element_type="f16" offset="112130" shape="1,32,1,1" size="64"/>
18669 <output>
18670 <port id="0" precision="FP16">
18671 <dim>1</dim>
18672 <dim>32</dim>
18673 <dim>1</dim>
18674 <dim>1</dim>
18675 </port>
18676 </output>
18677 </layer>
18678 <layer id="1263" name="bottleneck3_7/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
18679 <data auto_broadcast="numpy"/>
18680 <input>
18681 <port id="0">
18682 <dim>1</dim>
18683 <dim>32</dim>
18684 <dim>40</dim>
18685 <dim>68</dim>
18686 </port>
18687 <port id="1">
18688 <dim>1</dim>
18689 <dim>32</dim>
18690 <dim>1</dim>
18691 <dim>1</dim>
18692 </port>
18693 </input>
18694 <output>
18695 <port id="2" names="bottleneck3_7/dim_red/conv" precision="FP16">
18696 <dim>1</dim>
18697 <dim>32</dim>
18698 <dim>40</dim>
18699 <dim>68</dim>
18700 </port>
18701 </output>
18702 </layer>
18703 <layer id="1264" name="bottleneck3_7/dim_red/fn/weights30988406521150" type="Const" version="opset1">
18704 <data element_type="f32" offset="1576" shape="1" size="4"/>
18705 <output>
18706 <port id="0" precision="FP32">
18707 <dim>1</dim>
18708 </port>
18709 </output>
18710 </layer>
18711 <layer id="1265" name="bottleneck3_7/dim_red/fn" type="PReLU" version="opset1">
18712 <input>
18713 <port id="0">
18714 <dim>1</dim>
18715 <dim>32</dim>
18716 <dim>40</dim>
18717 <dim>68</dim>
18718 </port>
18719 <port id="1">
18720 <dim>1</dim>
18721 </port>
18722 </input>
18723 <output>
18724 <port id="2" names="bottleneck3_7/dim_red/conv" precision="FP16">
18725 <dim>1</dim>
18726 <dim>32</dim>
18727 <dim>40</dim>
18728 <dim>68</dim>
18729 </port>
18730 </output>
18731 </layer>
18732 <layer id="1266" name="bottleneck3_7/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
18733 <data auto_broadcast="numpy" levels="256"/>
18734 <input>
18735 <port id="0">
18736 <dim>1</dim>
18737 <dim>32</dim>
18738 <dim>40</dim>
18739 <dim>68</dim>
18740 </port>
18741 <port id="1">
18742 <dim>1</dim>
18743 <dim>32</dim>
18744 <dim>1</dim>
18745 <dim>1</dim>
18746 </port>
18747 <port id="2">
18748 <dim>1</dim>
18749 <dim>32</dim>
18750 <dim>1</dim>
18751 <dim>1</dim>
18752 </port>
18753 <port id="3">
18754 <dim>1</dim>
18755 <dim>32</dim>
18756 <dim>1</dim>
18757 <dim>1</dim>
18758 </port>
18759 <port id="4">
18760 <dim>1</dim>
18761 <dim>32</dim>
18762 <dim>1</dim>
18763 <dim>1</dim>
18764 </port>
18765 </input>
18766 <output>
18767 <port id="5" precision="FP16">
18768 <dim>1</dim>
18769 <dim>32</dim>
18770 <dim>40</dim>
18771 <dim>68</dim>
18772 </port>
18773 </output>
18774 </layer>
18775 <layer id="1267" name="16899/value1690122560" type="Const" version="opset1">
18776 <data element_type="i64" offset="43778" shape="5" size="40"/>
18777 <output>
18778 <port id="0" precision="I64">
18779 <dim>5</dim>
18780 </port>
18781 </output>
18782 </layer>
18783 <layer id="1268" name="bottleneck3_7/inner/dw1/bn/mean/Fused_Mul__copy115210269/quantized1148821078" type="Const" version="opset1">
18784 <data element_type="i8" offset="112194" shape="32,1,3,3" size="288"/>
18785 <output>
18786 <port id="0" precision="I8">
18787 <dim>32</dim>
18788 <dim>1</dim>
18789 <dim>3</dim>
18790 <dim>3</dim>
18791 </port>
18792 </output>
18793 </layer>
18794 <layer id="1269" name="bottleneck3_7/inner/dw1/bn/mean/Fused_Mul__copy115210269/quantized/to_f16" type="Convert" version="opset1">
18795 <data destination_type="f16"/>
18796 <input>
18797 <port id="0">
18798 <dim>32</dim>
18799 <dim>1</dim>
18800 <dim>3</dim>
18801 <dim>3</dim>
18802 </port>
18803 </input>
18804 <output>
18805 <port id="1" precision="FP16">
18806 <dim>32</dim>
18807 <dim>1</dim>
18808 <dim>3</dim>
18809 <dim>3</dim>
18810 </port>
18811 </output>
18812 </layer>
18813 <layer id="1270" name="bottleneck3_7/inner/dw1/conv/fq_weights_1/zero_point1150120268" type="Const" version="opset1">
18814 <data element_type="f16" offset="112482" shape="32,1,1,1" size="64"/>
18815 <output>
18816 <port id="0" precision="FP16">
18817 <dim>32</dim>
18818 <dim>1</dim>
18819 <dim>1</dim>
18820 <dim>1</dim>
18821 </port>
18822 </output>
18823 </layer>
18824 <layer id="1271" name="bottleneck3_7/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
18825 <data auto_broadcast="numpy"/>
18826 <input>
18827 <port id="0">
18828 <dim>32</dim>
18829 <dim>1</dim>
18830 <dim>3</dim>
18831 <dim>3</dim>
18832 </port>
18833 <port id="1">
18834 <dim>32</dim>
18835 <dim>1</dim>
18836 <dim>1</dim>
18837 <dim>1</dim>
18838 </port>
18839 </input>
18840 <output>
18841 <port id="2" precision="FP16">
18842 <dim>32</dim>
18843 <dim>1</dim>
18844 <dim>3</dim>
18845 <dim>3</dim>
18846 </port>
18847 </output>
18848 </layer>
18849 <layer id="1272" name="bottleneck3_7/inner/dw1/conv/fq_weights_1/scale1149622107" type="Const" version="opset1">
18850 <data element_type="f16" offset="112546" shape="32,1,1,1" size="64"/>
18851 <output>
18852 <port id="0" precision="FP16">
18853 <dim>32</dim>
18854 <dim>1</dim>
18855 <dim>1</dim>
18856 <dim>1</dim>
18857 </port>
18858 </output>
18859 </layer>
18860 <layer id="1273" name="bottleneck3_7/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
18861 <data auto_broadcast="numpy"/>
18862 <input>
18863 <port id="0">
18864 <dim>32</dim>
18865 <dim>1</dim>
18866 <dim>3</dim>
18867 <dim>3</dim>
18868 </port>
18869 <port id="1">
18870 <dim>32</dim>
18871 <dim>1</dim>
18872 <dim>1</dim>
18873 <dim>1</dim>
18874 </port>
18875 </input>
18876 <output>
18877 <port id="2" precision="FP16">
18878 <dim>32</dim>
18879 <dim>1</dim>
18880 <dim>3</dim>
18881 <dim>3</dim>
18882 </port>
18883 </output>
18884 </layer>
18885 <layer id="1274" name="16899" type="Reshape" version="opset1">
18886 <data special_zero="true"/>
18887 <input>
18888 <port id="0">
18889 <dim>32</dim>
18890 <dim>1</dim>
18891 <dim>3</dim>
18892 <dim>3</dim>
18893 </port>
18894 <port id="1">
18895 <dim>5</dim>
18896 </port>
18897 </input>
18898 <output>
18899 <port id="2" precision="FP16">
18900 <dim>32</dim>
18901 <dim>1</dim>
18902 <dim>1</dim>
18903 <dim>3</dim>
18904 <dim>3</dim>
18905 </port>
18906 </output>
18907 </layer>
18908 <layer id="1275" name="bottleneck3_7/inner/dw1/conv" type="GroupConvolution" version="opset1">
18909 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
18910 <input>
18911 <port id="0">
18912 <dim>1</dim>
18913 <dim>32</dim>
18914 <dim>40</dim>
18915 <dim>68</dim>
18916 </port>
18917 <port id="1">
18918 <dim>32</dim>
18919 <dim>1</dim>
18920 <dim>1</dim>
18921 <dim>3</dim>
18922 <dim>3</dim>
18923 </port>
18924 </input>
18925 <output>
18926 <port id="2" precision="FP16">
18927 <dim>1</dim>
18928 <dim>32</dim>
18929 <dim>40</dim>
18930 <dim>68</dim>
18931 </port>
18932 </output>
18933 </layer>
18934 <layer id="1276" name="data_add_2416124166115421378" type="Const" version="opset1">
18935 <data element_type="f16" offset="112610" shape="1,32,1,1" size="64"/>
18936 <output>
18937 <port id="0" precision="FP16">
18938 <dim>1</dim>
18939 <dim>32</dim>
18940 <dim>1</dim>
18941 <dim>1</dim>
18942 </port>
18943 </output>
18944 </layer>
18945 <layer id="1277" name="bottleneck3_7/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
18946 <data auto_broadcast="numpy"/>
18947 <input>
18948 <port id="0">
18949 <dim>1</dim>
18950 <dim>32</dim>
18951 <dim>40</dim>
18952 <dim>68</dim>
18953 </port>
18954 <port id="1">
18955 <dim>1</dim>
18956 <dim>32</dim>
18957 <dim>1</dim>
18958 <dim>1</dim>
18959 </port>
18960 </input>
18961 <output>
18962 <port id="2" names="bottleneck3_7/inner/dw1/conv" precision="FP16">
18963 <dim>1</dim>
18964 <dim>32</dim>
18965 <dim>40</dim>
18966 <dim>68</dim>
18967 </port>
18968 </output>
18969 </layer>
18970 <layer id="1278" name="bottleneck3_7/inner/dw1/fn/weights31064397071156" type="Const" version="opset1">
18971 <data element_type="f32" offset="1576" shape="1" size="4"/>
18972 <output>
18973 <port id="0" precision="FP32">
18974 <dim>1</dim>
18975 </port>
18976 </output>
18977 </layer>
18978 <layer id="1279" name="bottleneck3_7/inner/dw1/fn" type="PReLU" version="opset1">
18979 <input>
18980 <port id="0">
18981 <dim>1</dim>
18982 <dim>32</dim>
18983 <dim>40</dim>
18984 <dim>68</dim>
18985 </port>
18986 <port id="1">
18987 <dim>1</dim>
18988 </port>
18989 </input>
18990 <output>
18991 <port id="2" names="bottleneck3_7/inner/dw1/conv" precision="FP16">
18992 <dim>1</dim>
18993 <dim>32</dim>
18994 <dim>40</dim>
18995 <dim>68</dim>
18996 </port>
18997 </output>
18998 </layer>
18999 <layer id="1280" name="bottleneck3_7/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
19000 <data auto_broadcast="numpy" levels="256"/>
19001 <input>
19002 <port id="0">
19003 <dim>1</dim>
19004 <dim>32</dim>
19005 <dim>40</dim>
19006 <dim>68</dim>
19007 </port>
19008 <port id="1"/>
19009 <port id="2"/>
19010 <port id="3"/>
19011 <port id="4"/>
19012 </input>
19013 <output>
19014 <port id="5" precision="FP16">
19015 <dim>1</dim>
19016 <dim>32</dim>
19017 <dim>40</dim>
19018 <dim>68</dim>
19019 </port>
19020 </output>
19021 </layer>
19022 <layer id="1281" name="bottleneck3_7/dim_inc/bn/mean/Fused_Mul__copy115810272/quantized1240021735" type="Const" version="opset1">
19023 <data element_type="i8" offset="112674" shape="128,32,1,1" size="4096"/>
19024 <output>
19025 <port id="0" precision="I8">
19026 <dim>128</dim>
19027 <dim>32</dim>
19028 <dim>1</dim>
19029 <dim>1</dim>
19030 </port>
19031 </output>
19032 </layer>
19033 <layer id="1282" name="bottleneck3_7/dim_inc/bn/mean/Fused_Mul__copy115810272/quantized/to_f16" type="Convert" version="opset1">
19034 <data destination_type="f16"/>
19035 <input>
19036 <port id="0">
19037 <dim>128</dim>
19038 <dim>32</dim>
19039 <dim>1</dim>
19040 <dim>1</dim>
19041 </port>
19042 </input>
19043 <output>
19044 <port id="1" precision="FP16">
19045 <dim>128</dim>
19046 <dim>32</dim>
19047 <dim>1</dim>
19048 <dim>1</dim>
19049 </port>
19050 </output>
19051 </layer>
19052 <layer id="1283" name="bottleneck3_7/dim_inc/conv/fq_weights_1/zero_point1241322959" type="Const" version="opset1">
19053 <data element_type="f16" offset="116770" shape="128,1,1,1" size="256"/>
19054 <output>
19055 <port id="0" precision="FP16">
19056 <dim>128</dim>
19057 <dim>1</dim>
19058 <dim>1</dim>
19059 <dim>1</dim>
19060 </port>
19061 </output>
19062 </layer>
19063 <layer id="1284" name="bottleneck3_7/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
19064 <data auto_broadcast="numpy"/>
19065 <input>
19066 <port id="0">
19067 <dim>128</dim>
19068 <dim>32</dim>
19069 <dim>1</dim>
19070 <dim>1</dim>
19071 </port>
19072 <port id="1">
19073 <dim>128</dim>
19074 <dim>1</dim>
19075 <dim>1</dim>
19076 <dim>1</dim>
19077 </port>
19078 </input>
19079 <output>
19080 <port id="2" precision="FP16">
19081 <dim>128</dim>
19082 <dim>32</dim>
19083 <dim>1</dim>
19084 <dim>1</dim>
19085 </port>
19086 </output>
19087 </layer>
19088 <layer id="1285" name="bottleneck3_7/dim_inc/conv/fq_weights_1/scale1240820940" type="Const" version="opset1">
19089 <data element_type="f16" offset="117026" shape="128,1,1,1" size="256"/>
19090 <output>
19091 <port id="0" precision="FP16">
19092 <dim>128</dim>
19093 <dim>1</dim>
19094 <dim>1</dim>
19095 <dim>1</dim>
19096 </port>
19097 </output>
19098 </layer>
19099 <layer id="1286" name="bottleneck3_7/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
19100 <data auto_broadcast="numpy"/>
19101 <input>
19102 <port id="0">
19103 <dim>128</dim>
19104 <dim>32</dim>
19105 <dim>1</dim>
19106 <dim>1</dim>
19107 </port>
19108 <port id="1">
19109 <dim>128</dim>
19110 <dim>1</dim>
19111 <dim>1</dim>
19112 <dim>1</dim>
19113 </port>
19114 </input>
19115 <output>
19116 <port id="2" precision="FP16">
19117 <dim>128</dim>
19118 <dim>32</dim>
19119 <dim>1</dim>
19120 <dim>1</dim>
19121 </port>
19122 </output>
19123 </layer>
19124 <layer id="1287" name="bottleneck3_7/dim_inc/conv" type="Convolution" version="opset1">
19125 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
19126 <input>
19127 <port id="0">
19128 <dim>1</dim>
19129 <dim>32</dim>
19130 <dim>40</dim>
19131 <dim>68</dim>
19132 </port>
19133 <port id="1">
19134 <dim>128</dim>
19135 <dim>32</dim>
19136 <dim>1</dim>
19137 <dim>1</dim>
19138 </port>
19139 </input>
19140 <output>
19141 <port id="2" precision="FP16">
19142 <dim>1</dim>
19143 <dim>128</dim>
19144 <dim>40</dim>
19145 <dim>68</dim>
19146 </port>
19147 </output>
19148 </layer>
19149 <layer id="1288" name="data_add_2416924174116020970" type="Const" version="opset1">
19150 <data element_type="f16" offset="117282" shape="1,128,1,1" size="256"/>
19151 <output>
19152 <port id="0" precision="FP16">
19153 <dim>1</dim>
19154 <dim>128</dim>
19155 <dim>1</dim>
19156 <dim>1</dim>
19157 </port>
19158 </output>
19159 </layer>
19160 <layer id="1289" name="bottleneck3_7/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
19161 <data auto_broadcast="numpy"/>
19162 <input>
19163 <port id="0">
19164 <dim>1</dim>
19165 <dim>128</dim>
19166 <dim>40</dim>
19167 <dim>68</dim>
19168 </port>
19169 <port id="1">
19170 <dim>1</dim>
19171 <dim>128</dim>
19172 <dim>1</dim>
19173 <dim>1</dim>
19174 </port>
19175 </input>
19176 <output>
19177 <port id="2" names="bottleneck3_7/dim_inc/conv" precision="FP16">
19178 <dim>1</dim>
19179 <dim>128</dim>
19180 <dim>40</dim>
19181 <dim>68</dim>
19182 </port>
19183 </output>
19184 </layer>
19185 <layer id="1290" name="bottleneck3_7/add/fq_input_1" type="FakeQuantize" version="opset1">
19186 <data auto_broadcast="numpy" levels="256"/>
19187 <input>
19188 <port id="0">
19189 <dim>1</dim>
19190 <dim>128</dim>
19191 <dim>40</dim>
19192 <dim>68</dim>
19193 </port>
19194 <port id="1"/>
19195 <port id="2"/>
19196 <port id="3"/>
19197 <port id="4"/>
19198 </input>
19199 <output>
19200 <port id="5" precision="FP16">
19201 <dim>1</dim>
19202 <dim>128</dim>
19203 <dim>40</dim>
19204 <dim>68</dim>
19205 </port>
19206 </output>
19207 </layer>
19208 <layer id="1291" name="bottleneck3_7/add" type="Add" version="opset1">
19209 <data auto_broadcast="numpy"/>
19210 <input>
19211 <port id="0">
19212 <dim>1</dim>
19213 <dim>128</dim>
19214 <dim>40</dim>
19215 <dim>68</dim>
19216 </port>
19217 <port id="1">
19218 <dim>1</dim>
19219 <dim>128</dim>
19220 <dim>40</dim>
19221 <dim>68</dim>
19222 </port>
19223 </input>
19224 <output>
19225 <port id="2" names="bottleneck3_7/add" precision="FP16">
19226 <dim>1</dim>
19227 <dim>128</dim>
19228 <dim>40</dim>
19229 <dim>68</dim>
19230 </port>
19231 </output>
19232 </layer>
19233 <layer id="1292" name="bottleneck3_7/fn/weights30912398481163" type="Const" version="opset1">
19234 <data element_type="f32" offset="1576" shape="1" size="4"/>
19235 <output>
19236 <port id="0" precision="FP32">
19237 <dim>1</dim>
19238 </port>
19239 </output>
19240 </layer>
19241 <layer id="1293" name="bottleneck3_7/fn" type="PReLU" version="opset1">
19242 <input>
19243 <port id="0">
19244 <dim>1</dim>
19245 <dim>128</dim>
19246 <dim>40</dim>
19247 <dim>68</dim>
19248 </port>
19249 <port id="1">
19250 <dim>1</dim>
19251 </port>
19252 </input>
19253 <output>
19254 <port id="2" names="bottleneck3_7/add" precision="FP16">
19255 <dim>1</dim>
19256 <dim>128</dim>
19257 <dim>40</dim>
19258 <dim>68</dim>
19259 </port>
19260 </output>
19261 </layer>
19262 <layer id="1294" name="bottleneck3_8/add/fq_input_0" type="FakeQuantize" version="opset1">
19263 <data auto_broadcast="numpy" levels="256"/>
19264 <input>
19265 <port id="0">
19266 <dim>1</dim>
19267 <dim>128</dim>
19268 <dim>40</dim>
19269 <dim>68</dim>
19270 </port>
19271 <port id="1"/>
19272 <port id="2"/>
19273 <port id="3"/>
19274 <port id="4"/>
19275 </input>
19276 <output>
19277 <port id="5" precision="FP16">
19278 <dim>1</dim>
19279 <dim>128</dim>
19280 <dim>40</dim>
19281 <dim>68</dim>
19282 </port>
19283 </output>
19284 </layer>
19285 <layer id="1295" name="4794479819713" type="Const" version="opset1">
19286 <data element_type="f16" offset="117538" shape="" size="2"/>
19287 <output>
19288 <port id="0" precision="FP16"/>
19289 </output>
19290 </layer>
19291 <layer id="1296" name="4795479920007" type="Const" version="opset1">
19292 <data element_type="f16" offset="117540" shape="" size="2"/>
19293 <output>
19294 <port id="0" precision="FP16"/>
19295 </output>
19296 </layer>
19297 <layer id="1297" name="4796480019725" type="Const" version="opset1">
19298 <data element_type="f16" offset="117538" shape="" size="2"/>
19299 <output>
19300 <port id="0" precision="FP16"/>
19301 </output>
19302 </layer>
19303 <layer id="1298" name="4797480122722" type="Const" version="opset1">
19304 <data element_type="f16" offset="117540" shape="" size="2"/>
19305 <output>
19306 <port id="0" precision="FP16"/>
19307 </output>
19308 </layer>
19309 <layer id="1299" name="3504350821351" type="Const" version="opset1">
19310 <data element_type="f16" offset="117542" shape="" size="2"/>
19311 <output>
19312 <port id="0" precision="FP16"/>
19313 </output>
19314 </layer>
19315 <layer id="1300" name="3505350919455" type="Const" version="opset1">
19316 <data element_type="f16" offset="117544" shape="" size="2"/>
19317 <output>
19318 <port id="0" precision="FP16"/>
19319 </output>
19320 </layer>
19321 <layer id="1301" name="3506351020973" type="Const" version="opset1">
19322 <data element_type="f16" offset="117542" shape="" size="2"/>
19323 <output>
19324 <port id="0" precision="FP16"/>
19325 </output>
19326 </layer>
19327 <layer id="1302" name="3507351122941" type="Const" version="opset1">
19328 <data element_type="f16" offset="117544" shape="" size="2"/>
19329 <output>
19330 <port id="0" precision="FP16"/>
19331 </output>
19332 </layer>
19333 <layer id="1303" name="4844484822203" type="Const" version="opset1">
19334 <data element_type="f16" offset="117546" shape="1,32,1,1" size="64"/>
19335 <output>
19336 <port id="0" precision="FP16">
19337 <dim>1</dim>
19338 <dim>32</dim>
19339 <dim>1</dim>
19340 <dim>1</dim>
19341 </port>
19342 </output>
19343 </layer>
19344 <layer id="1304" name="4845484920283" type="Const" version="opset1">
19345 <data element_type="f16" offset="117610" shape="1,32,1,1" size="64"/>
19346 <output>
19347 <port id="0" precision="FP16">
19348 <dim>1</dim>
19349 <dim>32</dim>
19350 <dim>1</dim>
19351 <dim>1</dim>
19352 </port>
19353 </output>
19354 </layer>
19355 <layer id="1305" name="4846485021891" type="Const" version="opset1">
19356 <data element_type="f16" offset="117546" shape="1,32,1,1" size="64"/>
19357 <output>
19358 <port id="0" precision="FP16">
19359 <dim>1</dim>
19360 <dim>32</dim>
19361 <dim>1</dim>
19362 <dim>1</dim>
19363 </port>
19364 </output>
19365 </layer>
19366 <layer id="1306" name="4847485121924" type="Const" version="opset1">
19367 <data element_type="f16" offset="117610" shape="1,32,1,1" size="64"/>
19368 <output>
19369 <port id="0" precision="FP16">
19370 <dim>1</dim>
19371 <dim>32</dim>
19372 <dim>1</dim>
19373 <dim>1</dim>
19374 </port>
19375 </output>
19376 </layer>
19377 <layer id="1307" name="bottleneck3_8/dim_red/bn/mean/Fused_Mul__copy116510275/quantized1333620232" type="Const" version="opset1">
19378 <data element_type="i8" offset="117674" shape="32,128,1,1" size="4096"/>
19379 <output>
19380 <port id="0" precision="I8">
19381 <dim>32</dim>
19382 <dim>128</dim>
19383 <dim>1</dim>
19384 <dim>1</dim>
19385 </port>
19386 </output>
19387 </layer>
19388 <layer id="1308" name="bottleneck3_8/dim_red/bn/mean/Fused_Mul__copy116510275/quantized/to_f16" type="Convert" version="opset1">
19389 <data destination_type="f16"/>
19390 <input>
19391 <port id="0">
19392 <dim>32</dim>
19393 <dim>128</dim>
19394 <dim>1</dim>
19395 <dim>1</dim>
19396 </port>
19397 </input>
19398 <output>
19399 <port id="1" precision="FP16">
19400 <dim>32</dim>
19401 <dim>128</dim>
19402 <dim>1</dim>
19403 <dim>1</dim>
19404 </port>
19405 </output>
19406 </layer>
19407 <layer id="1309" name="bottleneck3_8/dim_red/conv/fq_weights_1/zero_point1334919833" type="Const" version="opset1">
19408 <data element_type="f16" offset="121770" shape="32,1,1,1" size="64"/>
19409 <output>
19410 <port id="0" precision="FP16">
19411 <dim>32</dim>
19412 <dim>1</dim>
19413 <dim>1</dim>
19414 <dim>1</dim>
19415 </port>
19416 </output>
19417 </layer>
19418 <layer id="1310" name="bottleneck3_8/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
19419 <data auto_broadcast="numpy"/>
19420 <input>
19421 <port id="0">
19422 <dim>32</dim>
19423 <dim>128</dim>
19424 <dim>1</dim>
19425 <dim>1</dim>
19426 </port>
19427 <port id="1">
19428 <dim>32</dim>
19429 <dim>1</dim>
19430 <dim>1</dim>
19431 <dim>1</dim>
19432 </port>
19433 </input>
19434 <output>
19435 <port id="2" precision="FP16">
19436 <dim>32</dim>
19437 <dim>128</dim>
19438 <dim>1</dim>
19439 <dim>1</dim>
19440 </port>
19441 </output>
19442 </layer>
19443 <layer id="1311" name="bottleneck3_8/dim_red/conv/fq_weights_1/scale1334422725" type="Const" version="opset1">
19444 <data element_type="f16" offset="121834" shape="32,1,1,1" size="64"/>
19445 <output>
19446 <port id="0" precision="FP16">
19447 <dim>32</dim>
19448 <dim>1</dim>
19449 <dim>1</dim>
19450 <dim>1</dim>
19451 </port>
19452 </output>
19453 </layer>
19454 <layer id="1312" name="bottleneck3_8/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
19455 <data auto_broadcast="numpy"/>
19456 <input>
19457 <port id="0">
19458 <dim>32</dim>
19459 <dim>128</dim>
19460 <dim>1</dim>
19461 <dim>1</dim>
19462 </port>
19463 <port id="1">
19464 <dim>32</dim>
19465 <dim>1</dim>
19466 <dim>1</dim>
19467 <dim>1</dim>
19468 </port>
19469 </input>
19470 <output>
19471 <port id="2" precision="FP16">
19472 <dim>32</dim>
19473 <dim>128</dim>
19474 <dim>1</dim>
19475 <dim>1</dim>
19476 </port>
19477 </output>
19478 </layer>
19479 <layer id="1313" name="bottleneck3_8/dim_red/conv" type="Convolution" version="opset1">
19480 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
19481 <input>
19482 <port id="0">
19483 <dim>1</dim>
19484 <dim>128</dim>
19485 <dim>40</dim>
19486 <dim>68</dim>
19487 </port>
19488 <port id="1">
19489 <dim>32</dim>
19490 <dim>128</dim>
19491 <dim>1</dim>
19492 <dim>1</dim>
19493 </port>
19494 </input>
19495 <output>
19496 <port id="2" precision="FP16">
19497 <dim>1</dim>
19498 <dim>32</dim>
19499 <dim>40</dim>
19500 <dim>68</dim>
19501 </port>
19502 </output>
19503 </layer>
19504 <layer id="1314" name="data_add_2417724182116722404" type="Const" version="opset1">
19505 <data element_type="f16" offset="121898" shape="1,32,1,1" size="64"/>
19506 <output>
19507 <port id="0" precision="FP16">
19508 <dim>1</dim>
19509 <dim>32</dim>
19510 <dim>1</dim>
19511 <dim>1</dim>
19512 </port>
19513 </output>
19514 </layer>
19515 <layer id="1315" name="bottleneck3_8/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
19516 <data auto_broadcast="numpy"/>
19517 <input>
19518 <port id="0">
19519 <dim>1</dim>
19520 <dim>32</dim>
19521 <dim>40</dim>
19522 <dim>68</dim>
19523 </port>
19524 <port id="1">
19525 <dim>1</dim>
19526 <dim>32</dim>
19527 <dim>1</dim>
19528 <dim>1</dim>
19529 </port>
19530 </input>
19531 <output>
19532 <port id="2" names="bottleneck3_8/dim_red/conv" precision="FP16">
19533 <dim>1</dim>
19534 <dim>32</dim>
19535 <dim>40</dim>
19536 <dim>68</dim>
19537 </port>
19538 </output>
19539 </layer>
19540 <layer id="1316" name="bottleneck3_8/dim_red/fn/weights30896401691169" type="Const" version="opset1">
19541 <data element_type="f32" offset="1576" shape="1" size="4"/>
19542 <output>
19543 <port id="0" precision="FP32">
19544 <dim>1</dim>
19545 </port>
19546 </output>
19547 </layer>
19548 <layer id="1317" name="bottleneck3_8/dim_red/fn" type="PReLU" version="opset1">
19549 <input>
19550 <port id="0">
19551 <dim>1</dim>
19552 <dim>32</dim>
19553 <dim>40</dim>
19554 <dim>68</dim>
19555 </port>
19556 <port id="1">
19557 <dim>1</dim>
19558 </port>
19559 </input>
19560 <output>
19561 <port id="2" names="bottleneck3_8/dim_red/conv" precision="FP16">
19562 <dim>1</dim>
19563 <dim>32</dim>
19564 <dim>40</dim>
19565 <dim>68</dim>
19566 </port>
19567 </output>
19568 </layer>
19569 <layer id="1318" name="bottleneck3_8/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
19570 <data auto_broadcast="numpy" levels="256"/>
19571 <input>
19572 <port id="0">
19573 <dim>1</dim>
19574 <dim>32</dim>
19575 <dim>40</dim>
19576 <dim>68</dim>
19577 </port>
19578 <port id="1">
19579 <dim>1</dim>
19580 <dim>32</dim>
19581 <dim>1</dim>
19582 <dim>1</dim>
19583 </port>
19584 <port id="2">
19585 <dim>1</dim>
19586 <dim>32</dim>
19587 <dim>1</dim>
19588 <dim>1</dim>
19589 </port>
19590 <port id="3">
19591 <dim>1</dim>
19592 <dim>32</dim>
19593 <dim>1</dim>
19594 <dim>1</dim>
19595 </port>
19596 <port id="4">
19597 <dim>1</dim>
19598 <dim>32</dim>
19599 <dim>1</dim>
19600 <dim>1</dim>
19601 </port>
19602 </input>
19603 <output>
19604 <port id="5" precision="FP16">
19605 <dim>1</dim>
19606 <dim>32</dim>
19607 <dim>40</dim>
19608 <dim>68</dim>
19609 </port>
19610 </output>
19611 </layer>
19612 <layer id="1319" name="16907/value1690921534" type="Const" version="opset1">
19613 <data element_type="i64" offset="43778" shape="5" size="40"/>
19614 <output>
19615 <port id="0" precision="I64">
19616 <dim>5</dim>
19617 </port>
19618 </output>
19619 </layer>
19620 <layer id="1320" name="bottleneck3_8/inner/dw1/bn/mean/Fused_Mul__copy117110278/quantized1160820304" type="Const" version="opset1">
19621 <data element_type="i8" offset="121962" shape="32,1,3,3" size="288"/>
19622 <output>
19623 <port id="0" precision="I8">
19624 <dim>32</dim>
19625 <dim>1</dim>
19626 <dim>3</dim>
19627 <dim>3</dim>
19628 </port>
19629 </output>
19630 </layer>
19631 <layer id="1321" name="bottleneck3_8/inner/dw1/bn/mean/Fused_Mul__copy117110278/quantized/to_f16" type="Convert" version="opset1">
19632 <data destination_type="f16"/>
19633 <input>
19634 <port id="0">
19635 <dim>32</dim>
19636 <dim>1</dim>
19637 <dim>3</dim>
19638 <dim>3</dim>
19639 </port>
19640 </input>
19641 <output>
19642 <port id="1" precision="FP16">
19643 <dim>32</dim>
19644 <dim>1</dim>
19645 <dim>3</dim>
19646 <dim>3</dim>
19647 </port>
19648 </output>
19649 </layer>
19650 <layer id="1322" name="bottleneck3_8/inner/dw1/conv/fq_weights_1/zero_point1162120820" type="Const" version="opset1">
19651 <data element_type="f16" offset="122250" shape="32,1,1,1" size="64"/>
19652 <output>
19653 <port id="0" precision="FP16">
19654 <dim>32</dim>
19655 <dim>1</dim>
19656 <dim>1</dim>
19657 <dim>1</dim>
19658 </port>
19659 </output>
19660 </layer>
19661 <layer id="1323" name="bottleneck3_8/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
19662 <data auto_broadcast="numpy"/>
19663 <input>
19664 <port id="0">
19665 <dim>32</dim>
19666 <dim>1</dim>
19667 <dim>3</dim>
19668 <dim>3</dim>
19669 </port>
19670 <port id="1">
19671 <dim>32</dim>
19672 <dim>1</dim>
19673 <dim>1</dim>
19674 <dim>1</dim>
19675 </port>
19676 </input>
19677 <output>
19678 <port id="2" precision="FP16">
19679 <dim>32</dim>
19680 <dim>1</dim>
19681 <dim>3</dim>
19682 <dim>3</dim>
19683 </port>
19684 </output>
19685 </layer>
19686 <layer id="1324" name="bottleneck3_8/inner/dw1/conv/fq_weights_1/scale1161622749" type="Const" version="opset1">
19687 <data element_type="f16" offset="122314" shape="32,1,1,1" size="64"/>
19688 <output>
19689 <port id="0" precision="FP16">
19690 <dim>32</dim>
19691 <dim>1</dim>
19692 <dim>1</dim>
19693 <dim>1</dim>
19694 </port>
19695 </output>
19696 </layer>
19697 <layer id="1325" name="bottleneck3_8/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
19698 <data auto_broadcast="numpy"/>
19699 <input>
19700 <port id="0">
19701 <dim>32</dim>
19702 <dim>1</dim>
19703 <dim>3</dim>
19704 <dim>3</dim>
19705 </port>
19706 <port id="1">
19707 <dim>32</dim>
19708 <dim>1</dim>
19709 <dim>1</dim>
19710 <dim>1</dim>
19711 </port>
19712 </input>
19713 <output>
19714 <port id="2" precision="FP16">
19715 <dim>32</dim>
19716 <dim>1</dim>
19717 <dim>3</dim>
19718 <dim>3</dim>
19719 </port>
19720 </output>
19721 </layer>
19722 <layer id="1326" name="16907" type="Reshape" version="opset1">
19723 <data special_zero="true"/>
19724 <input>
19725 <port id="0">
19726 <dim>32</dim>
19727 <dim>1</dim>
19728 <dim>3</dim>
19729 <dim>3</dim>
19730 </port>
19731 <port id="1">
19732 <dim>5</dim>
19733 </port>
19734 </input>
19735 <output>
19736 <port id="2" precision="FP16">
19737 <dim>32</dim>
19738 <dim>1</dim>
19739 <dim>1</dim>
19740 <dim>3</dim>
19741 <dim>3</dim>
19742 </port>
19743 </output>
19744 </layer>
19745 <layer id="1327" name="bottleneck3_8/inner/dw1/conv" type="GroupConvolution" version="opset1">
19746 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
19747 <input>
19748 <port id="0">
19749 <dim>1</dim>
19750 <dim>32</dim>
19751 <dim>40</dim>
19752 <dim>68</dim>
19753 </port>
19754 <port id="1">
19755 <dim>32</dim>
19756 <dim>1</dim>
19757 <dim>1</dim>
19758 <dim>3</dim>
19759 <dim>3</dim>
19760 </port>
19761 </input>
19762 <output>
19763 <port id="2" precision="FP16">
19764 <dim>1</dim>
19765 <dim>32</dim>
19766 <dim>40</dim>
19767 <dim>68</dim>
19768 </port>
19769 </output>
19770 </layer>
19771 <layer id="1328" name="data_add_2418524190117321345" type="Const" version="opset1">
19772 <data element_type="f16" offset="122378" shape="1,32,1,1" size="64"/>
19773 <output>
19774 <port id="0" precision="FP16">
19775 <dim>1</dim>
19776 <dim>32</dim>
19777 <dim>1</dim>
19778 <dim>1</dim>
19779 </port>
19780 </output>
19781 </layer>
19782 <layer id="1329" name="bottleneck3_8/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
19783 <data auto_broadcast="numpy"/>
19784 <input>
19785 <port id="0">
19786 <dim>1</dim>
19787 <dim>32</dim>
19788 <dim>40</dim>
19789 <dim>68</dim>
19790 </port>
19791 <port id="1">
19792 <dim>1</dim>
19793 <dim>32</dim>
19794 <dim>1</dim>
19795 <dim>1</dim>
19796 </port>
19797 </input>
19798 <output>
19799 <port id="2" names="bottleneck3_8/inner/dw1/conv" precision="FP16">
19800 <dim>1</dim>
19801 <dim>32</dim>
19802 <dim>40</dim>
19803 <dim>68</dim>
19804 </port>
19805 </output>
19806 </layer>
19807 <layer id="1330" name="bottleneck3_8/inner/dw1/fn/weights31020399561175" type="Const" version="opset1">
19808 <data element_type="f32" offset="1576" shape="1" size="4"/>
19809 <output>
19810 <port id="0" precision="FP32">
19811 <dim>1</dim>
19812 </port>
19813 </output>
19814 </layer>
19815 <layer id="1331" name="bottleneck3_8/inner/dw1/fn" type="PReLU" version="opset1">
19816 <input>
19817 <port id="0">
19818 <dim>1</dim>
19819 <dim>32</dim>
19820 <dim>40</dim>
19821 <dim>68</dim>
19822 </port>
19823 <port id="1">
19824 <dim>1</dim>
19825 </port>
19826 </input>
19827 <output>
19828 <port id="2" names="bottleneck3_8/inner/dw1/conv" precision="FP16">
19829 <dim>1</dim>
19830 <dim>32</dim>
19831 <dim>40</dim>
19832 <dim>68</dim>
19833 </port>
19834 </output>
19835 </layer>
19836 <layer id="1332" name="bottleneck3_8/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
19837 <data auto_broadcast="numpy" levels="256"/>
19838 <input>
19839 <port id="0">
19840 <dim>1</dim>
19841 <dim>32</dim>
19842 <dim>40</dim>
19843 <dim>68</dim>
19844 </port>
19845 <port id="1"/>
19846 <port id="2"/>
19847 <port id="3"/>
19848 <port id="4"/>
19849 </input>
19850 <output>
19851 <port id="5" precision="FP16">
19852 <dim>1</dim>
19853 <dim>32</dim>
19854 <dim>40</dim>
19855 <dim>68</dim>
19856 </port>
19857 </output>
19858 </layer>
19859 <layer id="1333" name="bottleneck3_8/dim_inc/bn/mean/Fused_Mul__copy117710281/quantized1194421528" type="Const" version="opset1">
19860 <data element_type="i8" offset="122442" shape="128,32,1,1" size="4096"/>
19861 <output>
19862 <port id="0" precision="I8">
19863 <dim>128</dim>
19864 <dim>32</dim>
19865 <dim>1</dim>
19866 <dim>1</dim>
19867 </port>
19868 </output>
19869 </layer>
19870 <layer id="1334" name="bottleneck3_8/dim_inc/bn/mean/Fused_Mul__copy117710281/quantized/to_f16" type="Convert" version="opset1">
19871 <data destination_type="f16"/>
19872 <input>
19873 <port id="0">
19874 <dim>128</dim>
19875 <dim>32</dim>
19876 <dim>1</dim>
19877 <dim>1</dim>
19878 </port>
19879 </input>
19880 <output>
19881 <port id="1" precision="FP16">
19882 <dim>128</dim>
19883 <dim>32</dim>
19884 <dim>1</dim>
19885 <dim>1</dim>
19886 </port>
19887 </output>
19888 </layer>
19889 <layer id="1335" name="bottleneck3_8/dim_inc/conv/fq_weights_1/zero_point1195720862" type="Const" version="opset1">
19890 <data element_type="f16" offset="126538" shape="128,1,1,1" size="256"/>
19891 <output>
19892 <port id="0" precision="FP16">
19893 <dim>128</dim>
19894 <dim>1</dim>
19895 <dim>1</dim>
19896 <dim>1</dim>
19897 </port>
19898 </output>
19899 </layer>
19900 <layer id="1336" name="bottleneck3_8/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
19901 <data auto_broadcast="numpy"/>
19902 <input>
19903 <port id="0">
19904 <dim>128</dim>
19905 <dim>32</dim>
19906 <dim>1</dim>
19907 <dim>1</dim>
19908 </port>
19909 <port id="1">
19910 <dim>128</dim>
19911 <dim>1</dim>
19912 <dim>1</dim>
19913 <dim>1</dim>
19914 </port>
19915 </input>
19916 <output>
19917 <port id="2" precision="FP16">
19918 <dim>128</dim>
19919 <dim>32</dim>
19920 <dim>1</dim>
19921 <dim>1</dim>
19922 </port>
19923 </output>
19924 </layer>
19925 <layer id="1337" name="bottleneck3_8/dim_inc/conv/fq_weights_1/scale1195221756" type="Const" version="opset1">
19926 <data element_type="f16" offset="126794" shape="128,1,1,1" size="256"/>
19927 <output>
19928 <port id="0" precision="FP16">
19929 <dim>128</dim>
19930 <dim>1</dim>
19931 <dim>1</dim>
19932 <dim>1</dim>
19933 </port>
19934 </output>
19935 </layer>
19936 <layer id="1338" name="bottleneck3_8/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
19937 <data auto_broadcast="numpy"/>
19938 <input>
19939 <port id="0">
19940 <dim>128</dim>
19941 <dim>32</dim>
19942 <dim>1</dim>
19943 <dim>1</dim>
19944 </port>
19945 <port id="1">
19946 <dim>128</dim>
19947 <dim>1</dim>
19948 <dim>1</dim>
19949 <dim>1</dim>
19950 </port>
19951 </input>
19952 <output>
19953 <port id="2" precision="FP16">
19954 <dim>128</dim>
19955 <dim>32</dim>
19956 <dim>1</dim>
19957 <dim>1</dim>
19958 </port>
19959 </output>
19960 </layer>
19961 <layer id="1339" name="bottleneck3_8/dim_inc/conv" type="Convolution" version="opset1">
19962 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
19963 <input>
19964 <port id="0">
19965 <dim>1</dim>
19966 <dim>32</dim>
19967 <dim>40</dim>
19968 <dim>68</dim>
19969 </port>
19970 <port id="1">
19971 <dim>128</dim>
19972 <dim>32</dim>
19973 <dim>1</dim>
19974 <dim>1</dim>
19975 </port>
19976 </input>
19977 <output>
19978 <port id="2" precision="FP16">
19979 <dim>1</dim>
19980 <dim>128</dim>
19981 <dim>40</dim>
19982 <dim>68</dim>
19983 </port>
19984 </output>
19985 </layer>
19986 <layer id="1340" name="data_add_2419324198117920205" type="Const" version="opset1">
19987 <data element_type="f16" offset="127050" shape="1,128,1,1" size="256"/>
19988 <output>
19989 <port id="0" precision="FP16">
19990 <dim>1</dim>
19991 <dim>128</dim>
19992 <dim>1</dim>
19993 <dim>1</dim>
19994 </port>
19995 </output>
19996 </layer>
19997 <layer id="1341" name="bottleneck3_8/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
19998 <data auto_broadcast="numpy"/>
19999 <input>
20000 <port id="0">
20001 <dim>1</dim>
20002 <dim>128</dim>
20003 <dim>40</dim>
20004 <dim>68</dim>
20005 </port>
20006 <port id="1">
20007 <dim>1</dim>
20008 <dim>128</dim>
20009 <dim>1</dim>
20010 <dim>1</dim>
20011 </port>
20012 </input>
20013 <output>
20014 <port id="2" names="bottleneck3_8/dim_inc/conv" precision="FP16">
20015 <dim>1</dim>
20016 <dim>128</dim>
20017 <dim>40</dim>
20018 <dim>68</dim>
20019 </port>
20020 </output>
20021 </layer>
20022 <layer id="1342" name="bottleneck3_8/add/fq_input_1" type="FakeQuantize" version="opset1">
20023 <data auto_broadcast="numpy" levels="256"/>
20024 <input>
20025 <port id="0">
20026 <dim>1</dim>
20027 <dim>128</dim>
20028 <dim>40</dim>
20029 <dim>68</dim>
20030 </port>
20031 <port id="1"/>
20032 <port id="2"/>
20033 <port id="3"/>
20034 <port id="4"/>
20035 </input>
20036 <output>
20037 <port id="5" precision="FP16">
20038 <dim>1</dim>
20039 <dim>128</dim>
20040 <dim>40</dim>
20041 <dim>68</dim>
20042 </port>
20043 </output>
20044 </layer>
20045 <layer id="1343" name="bottleneck3_8/add" type="Add" version="opset1">
20046 <data auto_broadcast="numpy"/>
20047 <input>
20048 <port id="0">
20049 <dim>1</dim>
20050 <dim>128</dim>
20051 <dim>40</dim>
20052 <dim>68</dim>
20053 </port>
20054 <port id="1">
20055 <dim>1</dim>
20056 <dim>128</dim>
20057 <dim>40</dim>
20058 <dim>68</dim>
20059 </port>
20060 </input>
20061 <output>
20062 <port id="2" names="bottleneck3_8/add" precision="FP16">
20063 <dim>1</dim>
20064 <dim>128</dim>
20065 <dim>40</dim>
20066 <dim>68</dim>
20067 </port>
20068 </output>
20069 </layer>
20070 <layer id="1344" name="bottleneck3_8/fn/weights31160404061182" type="Const" version="opset1">
20071 <data element_type="f32" offset="1576" shape="1" size="4"/>
20072 <output>
20073 <port id="0" precision="FP32">
20074 <dim>1</dim>
20075 </port>
20076 </output>
20077 </layer>
20078 <layer id="1345" name="bottleneck3_8/fn" type="PReLU" version="opset1">
20079 <input>
20080 <port id="0">
20081 <dim>1</dim>
20082 <dim>128</dim>
20083 <dim>40</dim>
20084 <dim>68</dim>
20085 </port>
20086 <port id="1">
20087 <dim>1</dim>
20088 </port>
20089 </input>
20090 <output>
20091 <port id="2" names="bottleneck3_8/add" precision="FP16">
20092 <dim>1</dim>
20093 <dim>128</dim>
20094 <dim>40</dim>
20095 <dim>68</dim>
20096 </port>
20097 </output>
20098 </layer>
20099 <layer id="1346" name="bottleneck3_9/add/fq_input_0" type="FakeQuantize" version="opset1">
20100 <data auto_broadcast="numpy" levels="256"/>
20101 <input>
20102 <port id="0">
20103 <dim>1</dim>
20104 <dim>128</dim>
20105 <dim>40</dim>
20106 <dim>68</dim>
20107 </port>
20108 <port id="1"/>
20109 <port id="2"/>
20110 <port id="3"/>
20111 <port id="4"/>
20112 </input>
20113 <output>
20114 <port id="5" precision="FP16">
20115 <dim>1</dim>
20116 <dim>128</dim>
20117 <dim>40</dim>
20118 <dim>68</dim>
20119 </port>
20120 </output>
20121 </layer>
20122 <layer id="1347" name="4514451822098" type="Const" version="opset1">
20123 <data element_type="f16" offset="127306" shape="" size="2"/>
20124 <output>
20125 <port id="0" precision="FP16"/>
20126 </output>
20127 </layer>
20128 <layer id="1348" name="4515451919431" type="Const" version="opset1">
20129 <data element_type="f16" offset="127308" shape="" size="2"/>
20130 <output>
20131 <port id="0" precision="FP16"/>
20132 </output>
20133 </layer>
20134 <layer id="1349" name="4516452020946" type="Const" version="opset1">
20135 <data element_type="f16" offset="127306" shape="" size="2"/>
20136 <output>
20137 <port id="0" precision="FP16"/>
20138 </output>
20139 </layer>
20140 <layer id="1350" name="4517452122110" type="Const" version="opset1">
20141 <data element_type="f16" offset="127308" shape="" size="2"/>
20142 <output>
20143 <port id="0" precision="FP16"/>
20144 </output>
20145 </layer>
20146 <layer id="1351" name="4144414821894" type="Const" version="opset1">
20147 <data element_type="f16" offset="127310" shape="" size="2"/>
20148 <output>
20149 <port id="0" precision="FP16"/>
20150 </output>
20151 </layer>
20152 <layer id="1352" name="4145414919884" type="Const" version="opset1">
20153 <data element_type="f16" offset="49164" shape="" size="2"/>
20154 <output>
20155 <port id="0" precision="FP16"/>
20156 </output>
20157 </layer>
20158 <layer id="1353" name="4146415022497" type="Const" version="opset1">
20159 <data element_type="f16" offset="127310" shape="" size="2"/>
20160 <output>
20161 <port id="0" precision="FP16"/>
20162 </output>
20163 </layer>
20164 <layer id="1354" name="4147415121213" type="Const" version="opset1">
20165 <data element_type="f16" offset="49164" shape="" size="2"/>
20166 <output>
20167 <port id="0" precision="FP16"/>
20168 </output>
20169 </layer>
20170 <layer id="1355" name="3164316820091" type="Const" version="opset1">
20171 <data element_type="f16" offset="127312" shape="1,32,1,1" size="64"/>
20172 <output>
20173 <port id="0" precision="FP16">
20174 <dim>1</dim>
20175 <dim>32</dim>
20176 <dim>1</dim>
20177 <dim>1</dim>
20178 </port>
20179 </output>
20180 </layer>
20181 <layer id="1356" name="3165316920031" type="Const" version="opset1">
20182 <data element_type="f16" offset="127376" shape="1,32,1,1" size="64"/>
20183 <output>
20184 <port id="0" precision="FP16">
20185 <dim>1</dim>
20186 <dim>32</dim>
20187 <dim>1</dim>
20188 <dim>1</dim>
20189 </port>
20190 </output>
20191 </layer>
20192 <layer id="1357" name="3166317021051" type="Const" version="opset1">
20193 <data element_type="f16" offset="127312" shape="1,32,1,1" size="64"/>
20194 <output>
20195 <port id="0" precision="FP16">
20196 <dim>1</dim>
20197 <dim>32</dim>
20198 <dim>1</dim>
20199 <dim>1</dim>
20200 </port>
20201 </output>
20202 </layer>
20203 <layer id="1358" name="3167317120763" type="Const" version="opset1">
20204 <data element_type="f16" offset="127376" shape="1,32,1,1" size="64"/>
20205 <output>
20206 <port id="0" precision="FP16">
20207 <dim>1</dim>
20208 <dim>32</dim>
20209 <dim>1</dim>
20210 <dim>1</dim>
20211 </port>
20212 </output>
20213 </layer>
20214 <layer id="1359" name="bottleneck3_9/dim_red/bn/mean/Fused_Mul__copy118410284/quantized1168019614" type="Const" version="opset1">
20215 <data element_type="i8" offset="127440" shape="32,128,1,1" size="4096"/>
20216 <output>
20217 <port id="0" precision="I8">
20218 <dim>32</dim>
20219 <dim>128</dim>
20220 <dim>1</dim>
20221 <dim>1</dim>
20222 </port>
20223 </output>
20224 </layer>
20225 <layer id="1360" name="bottleneck3_9/dim_red/bn/mean/Fused_Mul__copy118410284/quantized/to_f16" type="Convert" version="opset1">
20226 <data destination_type="f16"/>
20227 <input>
20228 <port id="0">
20229 <dim>32</dim>
20230 <dim>128</dim>
20231 <dim>1</dim>
20232 <dim>1</dim>
20233 </port>
20234 </input>
20235 <output>
20236 <port id="1" precision="FP16">
20237 <dim>32</dim>
20238 <dim>128</dim>
20239 <dim>1</dim>
20240 <dim>1</dim>
20241 </port>
20242 </output>
20243 </layer>
20244 <layer id="1361" name="bottleneck3_9/dim_red/conv/fq_weights_1/zero_point1169322359" type="Const" version="opset1">
20245 <data element_type="f16" offset="131536" shape="32,1,1,1" size="64"/>
20246 <output>
20247 <port id="0" precision="FP16">
20248 <dim>32</dim>
20249 <dim>1</dim>
20250 <dim>1</dim>
20251 <dim>1</dim>
20252 </port>
20253 </output>
20254 </layer>
20255 <layer id="1362" name="bottleneck3_9/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
20256 <data auto_broadcast="numpy"/>
20257 <input>
20258 <port id="0">
20259 <dim>32</dim>
20260 <dim>128</dim>
20261 <dim>1</dim>
20262 <dim>1</dim>
20263 </port>
20264 <port id="1">
20265 <dim>32</dim>
20266 <dim>1</dim>
20267 <dim>1</dim>
20268 <dim>1</dim>
20269 </port>
20270 </input>
20271 <output>
20272 <port id="2" precision="FP16">
20273 <dim>32</dim>
20274 <dim>128</dim>
20275 <dim>1</dim>
20276 <dim>1</dim>
20277 </port>
20278 </output>
20279 </layer>
20280 <layer id="1363" name="bottleneck3_9/dim_red/conv/fq_weights_1/scale1168820730" type="Const" version="opset1">
20281 <data element_type="f16" offset="131600" shape="32,1,1,1" size="64"/>
20282 <output>
20283 <port id="0" precision="FP16">
20284 <dim>32</dim>
20285 <dim>1</dim>
20286 <dim>1</dim>
20287 <dim>1</dim>
20288 </port>
20289 </output>
20290 </layer>
20291 <layer id="1364" name="bottleneck3_9/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
20292 <data auto_broadcast="numpy"/>
20293 <input>
20294 <port id="0">
20295 <dim>32</dim>
20296 <dim>128</dim>
20297 <dim>1</dim>
20298 <dim>1</dim>
20299 </port>
20300 <port id="1">
20301 <dim>32</dim>
20302 <dim>1</dim>
20303 <dim>1</dim>
20304 <dim>1</dim>
20305 </port>
20306 </input>
20307 <output>
20308 <port id="2" precision="FP16">
20309 <dim>32</dim>
20310 <dim>128</dim>
20311 <dim>1</dim>
20312 <dim>1</dim>
20313 </port>
20314 </output>
20315 </layer>
20316 <layer id="1365" name="bottleneck3_9/dim_red/conv" type="Convolution" version="opset1">
20317 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
20318 <input>
20319 <port id="0">
20320 <dim>1</dim>
20321 <dim>128</dim>
20322 <dim>40</dim>
20323 <dim>68</dim>
20324 </port>
20325 <port id="1">
20326 <dim>32</dim>
20327 <dim>128</dim>
20328 <dim>1</dim>
20329 <dim>1</dim>
20330 </port>
20331 </input>
20332 <output>
20333 <port id="2" precision="FP16">
20334 <dim>1</dim>
20335 <dim>32</dim>
20336 <dim>40</dim>
20337 <dim>68</dim>
20338 </port>
20339 </output>
20340 </layer>
20341 <layer id="1366" name="data_add_2420124206118620067" type="Const" version="opset1">
20342 <data element_type="f16" offset="131664" shape="1,32,1,1" size="64"/>
20343 <output>
20344 <port id="0" precision="FP16">
20345 <dim>1</dim>
20346 <dim>32</dim>
20347 <dim>1</dim>
20348 <dim>1</dim>
20349 </port>
20350 </output>
20351 </layer>
20352 <layer id="1367" name="bottleneck3_9/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
20353 <data auto_broadcast="numpy"/>
20354 <input>
20355 <port id="0">
20356 <dim>1</dim>
20357 <dim>32</dim>
20358 <dim>40</dim>
20359 <dim>68</dim>
20360 </port>
20361 <port id="1">
20362 <dim>1</dim>
20363 <dim>32</dim>
20364 <dim>1</dim>
20365 <dim>1</dim>
20366 </port>
20367 </input>
20368 <output>
20369 <port id="2" names="bottleneck3_9/dim_red/conv" precision="FP16">
20370 <dim>1</dim>
20371 <dim>32</dim>
20372 <dim>40</dim>
20373 <dim>68</dim>
20374 </port>
20375 </output>
20376 </layer>
20377 <layer id="1368" name="bottleneck3_9/dim_red/fn/weights30848398691188" type="Const" version="opset1">
20378 <data element_type="f32" offset="1576" shape="1" size="4"/>
20379 <output>
20380 <port id="0" precision="FP32">
20381 <dim>1</dim>
20382 </port>
20383 </output>
20384 </layer>
20385 <layer id="1369" name="bottleneck3_9/dim_red/fn" type="PReLU" version="opset1">
20386 <input>
20387 <port id="0">
20388 <dim>1</dim>
20389 <dim>32</dim>
20390 <dim>40</dim>
20391 <dim>68</dim>
20392 </port>
20393 <port id="1">
20394 <dim>1</dim>
20395 </port>
20396 </input>
20397 <output>
20398 <port id="2" names="bottleneck3_9/dim_red/conv" precision="FP16">
20399 <dim>1</dim>
20400 <dim>32</dim>
20401 <dim>40</dim>
20402 <dim>68</dim>
20403 </port>
20404 </output>
20405 </layer>
20406 <layer id="1370" name="bottleneck3_9/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
20407 <data auto_broadcast="numpy" levels="256"/>
20408 <input>
20409 <port id="0">
20410 <dim>1</dim>
20411 <dim>32</dim>
20412 <dim>40</dim>
20413 <dim>68</dim>
20414 </port>
20415 <port id="1">
20416 <dim>1</dim>
20417 <dim>32</dim>
20418 <dim>1</dim>
20419 <dim>1</dim>
20420 </port>
20421 <port id="2">
20422 <dim>1</dim>
20423 <dim>32</dim>
20424 <dim>1</dim>
20425 <dim>1</dim>
20426 </port>
20427 <port id="3">
20428 <dim>1</dim>
20429 <dim>32</dim>
20430 <dim>1</dim>
20431 <dim>1</dim>
20432 </port>
20433 <port id="4">
20434 <dim>1</dim>
20435 <dim>32</dim>
20436 <dim>1</dim>
20437 <dim>1</dim>
20438 </port>
20439 </input>
20440 <output>
20441 <port id="5" precision="FP16">
20442 <dim>1</dim>
20443 <dim>32</dim>
20444 <dim>40</dim>
20445 <dim>68</dim>
20446 </port>
20447 </output>
20448 </layer>
20449 <layer id="1371" name="16819/value1682120793" type="Const" version="opset1">
20450 <data element_type="i64" offset="43778" shape="5" size="40"/>
20451 <output>
20452 <port id="0" precision="I64">
20453 <dim>5</dim>
20454 </port>
20455 </output>
20456 </layer>
20457 <layer id="1372" name="bottleneck3_9/inner/dw1/bn/mean/Fused_Mul__copy119010287/quantized1184819482" type="Const" version="opset1">
20458 <data element_type="i8" offset="131728" shape="32,1,3,3" size="288"/>
20459 <output>
20460 <port id="0" precision="I8">
20461 <dim>32</dim>
20462 <dim>1</dim>
20463 <dim>3</dim>
20464 <dim>3</dim>
20465 </port>
20466 </output>
20467 </layer>
20468 <layer id="1373" name="bottleneck3_9/inner/dw1/bn/mean/Fused_Mul__copy119010287/quantized/to_f16" type="Convert" version="opset1">
20469 <data destination_type="f16"/>
20470 <input>
20471 <port id="0">
20472 <dim>32</dim>
20473 <dim>1</dim>
20474 <dim>3</dim>
20475 <dim>3</dim>
20476 </port>
20477 </input>
20478 <output>
20479 <port id="1" precision="FP16">
20480 <dim>32</dim>
20481 <dim>1</dim>
20482 <dim>3</dim>
20483 <dim>3</dim>
20484 </port>
20485 </output>
20486 </layer>
20487 <layer id="1374" name="bottleneck3_9/inner/dw1/conv/fq_weights_1/zero_point1186120949" type="Const" version="opset1">
20488 <data element_type="f16" offset="132016" shape="32,1,1,1" size="64"/>
20489 <output>
20490 <port id="0" precision="FP16">
20491 <dim>32</dim>
20492 <dim>1</dim>
20493 <dim>1</dim>
20494 <dim>1</dim>
20495 </port>
20496 </output>
20497 </layer>
20498 <layer id="1375" name="bottleneck3_9/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
20499 <data auto_broadcast="numpy"/>
20500 <input>
20501 <port id="0">
20502 <dim>32</dim>
20503 <dim>1</dim>
20504 <dim>3</dim>
20505 <dim>3</dim>
20506 </port>
20507 <port id="1">
20508 <dim>32</dim>
20509 <dim>1</dim>
20510 <dim>1</dim>
20511 <dim>1</dim>
20512 </port>
20513 </input>
20514 <output>
20515 <port id="2" precision="FP16">
20516 <dim>32</dim>
20517 <dim>1</dim>
20518 <dim>3</dim>
20519 <dim>3</dim>
20520 </port>
20521 </output>
20522 </layer>
20523 <layer id="1376" name="bottleneck3_9/inner/dw1/conv/fq_weights_1/scale1185621012" type="Const" version="opset1">
20524 <data element_type="f16" offset="132080" shape="32,1,1,1" size="64"/>
20525 <output>
20526 <port id="0" precision="FP16">
20527 <dim>32</dim>
20528 <dim>1</dim>
20529 <dim>1</dim>
20530 <dim>1</dim>
20531 </port>
20532 </output>
20533 </layer>
20534 <layer id="1377" name="bottleneck3_9/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
20535 <data auto_broadcast="numpy"/>
20536 <input>
20537 <port id="0">
20538 <dim>32</dim>
20539 <dim>1</dim>
20540 <dim>3</dim>
20541 <dim>3</dim>
20542 </port>
20543 <port id="1">
20544 <dim>32</dim>
20545 <dim>1</dim>
20546 <dim>1</dim>
20547 <dim>1</dim>
20548 </port>
20549 </input>
20550 <output>
20551 <port id="2" precision="FP16">
20552 <dim>32</dim>
20553 <dim>1</dim>
20554 <dim>3</dim>
20555 <dim>3</dim>
20556 </port>
20557 </output>
20558 </layer>
20559 <layer id="1378" name="16819" type="Reshape" version="opset1">
20560 <data special_zero="true"/>
20561 <input>
20562 <port id="0">
20563 <dim>32</dim>
20564 <dim>1</dim>
20565 <dim>3</dim>
20566 <dim>3</dim>
20567 </port>
20568 <port id="1">
20569 <dim>5</dim>
20570 </port>
20571 </input>
20572 <output>
20573 <port id="2" precision="FP16">
20574 <dim>32</dim>
20575 <dim>1</dim>
20576 <dim>1</dim>
20577 <dim>3</dim>
20578 <dim>3</dim>
20579 </port>
20580 </output>
20581 </layer>
20582 <layer id="1379" name="bottleneck3_9/inner/dw1/conv" type="GroupConvolution" version="opset1">
20583 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
20584 <input>
20585 <port id="0">
20586 <dim>1</dim>
20587 <dim>32</dim>
20588 <dim>40</dim>
20589 <dim>68</dim>
20590 </port>
20591 <port id="1">
20592 <dim>32</dim>
20593 <dim>1</dim>
20594 <dim>1</dim>
20595 <dim>3</dim>
20596 <dim>3</dim>
20597 </port>
20598 </input>
20599 <output>
20600 <port id="2" precision="FP16">
20601 <dim>1</dim>
20602 <dim>32</dim>
20603 <dim>40</dim>
20604 <dim>68</dim>
20605 </port>
20606 </output>
20607 </layer>
20608 <layer id="1380" name="data_add_2420924214119219692" type="Const" version="opset1">
20609 <data element_type="f16" offset="132144" shape="1,32,1,1" size="64"/>
20610 <output>
20611 <port id="0" precision="FP16">
20612 <dim>1</dim>
20613 <dim>32</dim>
20614 <dim>1</dim>
20615 <dim>1</dim>
20616 </port>
20617 </output>
20618 </layer>
20619 <layer id="1381" name="bottleneck3_9/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
20620 <data auto_broadcast="numpy"/>
20621 <input>
20622 <port id="0">
20623 <dim>1</dim>
20624 <dim>32</dim>
20625 <dim>40</dim>
20626 <dim>68</dim>
20627 </port>
20628 <port id="1">
20629 <dim>1</dim>
20630 <dim>32</dim>
20631 <dim>1</dim>
20632 <dim>1</dim>
20633 </port>
20634 </input>
20635 <output>
20636 <port id="2" names="bottleneck3_9/inner/dw1/conv" precision="FP16">
20637 <dim>1</dim>
20638 <dim>32</dim>
20639 <dim>40</dim>
20640 <dim>68</dim>
20641 </port>
20642 </output>
20643 </layer>
20644 <layer id="1382" name="bottleneck3_9/inner/dw1/fn/weights30768404031194" type="Const" version="opset1">
20645 <data element_type="f32" offset="1576" shape="1" size="4"/>
20646 <output>
20647 <port id="0" precision="FP32">
20648 <dim>1</dim>
20649 </port>
20650 </output>
20651 </layer>
20652 <layer id="1383" name="bottleneck3_9/inner/dw1/fn" type="PReLU" version="opset1">
20653 <input>
20654 <port id="0">
20655 <dim>1</dim>
20656 <dim>32</dim>
20657 <dim>40</dim>
20658 <dim>68</dim>
20659 </port>
20660 <port id="1">
20661 <dim>1</dim>
20662 </port>
20663 </input>
20664 <output>
20665 <port id="2" names="bottleneck3_9/inner/dw1/conv" precision="FP16">
20666 <dim>1</dim>
20667 <dim>32</dim>
20668 <dim>40</dim>
20669 <dim>68</dim>
20670 </port>
20671 </output>
20672 </layer>
20673 <layer id="1384" name="bottleneck3_9/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
20674 <data auto_broadcast="numpy" levels="256"/>
20675 <input>
20676 <port id="0">
20677 <dim>1</dim>
20678 <dim>32</dim>
20679 <dim>40</dim>
20680 <dim>68</dim>
20681 </port>
20682 <port id="1"/>
20683 <port id="2"/>
20684 <port id="3"/>
20685 <port id="4"/>
20686 </input>
20687 <output>
20688 <port id="5" precision="FP16">
20689 <dim>1</dim>
20690 <dim>32</dim>
20691 <dim>40</dim>
20692 <dim>68</dim>
20693 </port>
20694 </output>
20695 </layer>
20696 <layer id="1385" name="bottleneck3_9/dim_inc/bn/mean/Fused_Mul__copy119610290/quantized1403221969" type="Const" version="opset1">
20697 <data element_type="i8" offset="132208" shape="128,32,1,1" size="4096"/>
20698 <output>
20699 <port id="0" precision="I8">
20700 <dim>128</dim>
20701 <dim>32</dim>
20702 <dim>1</dim>
20703 <dim>1</dim>
20704 </port>
20705 </output>
20706 </layer>
20707 <layer id="1386" name="bottleneck3_9/dim_inc/bn/mean/Fused_Mul__copy119610290/quantized/to_f16" type="Convert" version="opset1">
20708 <data destination_type="f16"/>
20709 <input>
20710 <port id="0">
20711 <dim>128</dim>
20712 <dim>32</dim>
20713 <dim>1</dim>
20714 <dim>1</dim>
20715 </port>
20716 </input>
20717 <output>
20718 <port id="1" precision="FP16">
20719 <dim>128</dim>
20720 <dim>32</dim>
20721 <dim>1</dim>
20722 <dim>1</dim>
20723 </port>
20724 </output>
20725 </layer>
20726 <layer id="1387" name="bottleneck3_9/dim_inc/conv/fq_weights_1/zero_point1404521216" type="Const" version="opset1">
20727 <data element_type="f16" offset="136304" shape="128,1,1,1" size="256"/>
20728 <output>
20729 <port id="0" precision="FP16">
20730 <dim>128</dim>
20731 <dim>1</dim>
20732 <dim>1</dim>
20733 <dim>1</dim>
20734 </port>
20735 </output>
20736 </layer>
20737 <layer id="1388" name="bottleneck3_9/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
20738 <data auto_broadcast="numpy"/>
20739 <input>
20740 <port id="0">
20741 <dim>128</dim>
20742 <dim>32</dim>
20743 <dim>1</dim>
20744 <dim>1</dim>
20745 </port>
20746 <port id="1">
20747 <dim>128</dim>
20748 <dim>1</dim>
20749 <dim>1</dim>
20750 <dim>1</dim>
20751 </port>
20752 </input>
20753 <output>
20754 <port id="2" precision="FP16">
20755 <dim>128</dim>
20756 <dim>32</dim>
20757 <dim>1</dim>
20758 <dim>1</dim>
20759 </port>
20760 </output>
20761 </layer>
20762 <layer id="1389" name="bottleneck3_9/dim_inc/conv/fq_weights_1/scale1404021906" type="Const" version="opset1">
20763 <data element_type="f16" offset="136560" shape="128,1,1,1" size="256"/>
20764 <output>
20765 <port id="0" precision="FP16">
20766 <dim>128</dim>
20767 <dim>1</dim>
20768 <dim>1</dim>
20769 <dim>1</dim>
20770 </port>
20771 </output>
20772 </layer>
20773 <layer id="1390" name="bottleneck3_9/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
20774 <data auto_broadcast="numpy"/>
20775 <input>
20776 <port id="0">
20777 <dim>128</dim>
20778 <dim>32</dim>
20779 <dim>1</dim>
20780 <dim>1</dim>
20781 </port>
20782 <port id="1">
20783 <dim>128</dim>
20784 <dim>1</dim>
20785 <dim>1</dim>
20786 <dim>1</dim>
20787 </port>
20788 </input>
20789 <output>
20790 <port id="2" precision="FP16">
20791 <dim>128</dim>
20792 <dim>32</dim>
20793 <dim>1</dim>
20794 <dim>1</dim>
20795 </port>
20796 </output>
20797 </layer>
20798 <layer id="1391" name="bottleneck3_9/dim_inc/conv" type="Convolution" version="opset1">
20799 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
20800 <input>
20801 <port id="0">
20802 <dim>1</dim>
20803 <dim>32</dim>
20804 <dim>40</dim>
20805 <dim>68</dim>
20806 </port>
20807 <port id="1">
20808 <dim>128</dim>
20809 <dim>32</dim>
20810 <dim>1</dim>
20811 <dim>1</dim>
20812 </port>
20813 </input>
20814 <output>
20815 <port id="2" precision="FP16">
20816 <dim>1</dim>
20817 <dim>128</dim>
20818 <dim>40</dim>
20819 <dim>68</dim>
20820 </port>
20821 </output>
20822 </layer>
20823 <layer id="1392" name="data_add_2421724222119821699" type="Const" version="opset1">
20824 <data element_type="f16" offset="136816" shape="1,128,1,1" size="256"/>
20825 <output>
20826 <port id="0" precision="FP16">
20827 <dim>1</dim>
20828 <dim>128</dim>
20829 <dim>1</dim>
20830 <dim>1</dim>
20831 </port>
20832 </output>
20833 </layer>
20834 <layer id="1393" name="bottleneck3_9/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
20835 <data auto_broadcast="numpy"/>
20836 <input>
20837 <port id="0">
20838 <dim>1</dim>
20839 <dim>128</dim>
20840 <dim>40</dim>
20841 <dim>68</dim>
20842 </port>
20843 <port id="1">
20844 <dim>1</dim>
20845 <dim>128</dim>
20846 <dim>1</dim>
20847 <dim>1</dim>
20848 </port>
20849 </input>
20850 <output>
20851 <port id="2" names="bottleneck3_9/dim_inc/conv" precision="FP16">
20852 <dim>1</dim>
20853 <dim>128</dim>
20854 <dim>40</dim>
20855 <dim>68</dim>
20856 </port>
20857 </output>
20858 </layer>
20859 <layer id="1394" name="bottleneck3_9/add/fq_input_1" type="FakeQuantize" version="opset1">
20860 <data auto_broadcast="numpy" levels="256"/>
20861 <input>
20862 <port id="0">
20863 <dim>1</dim>
20864 <dim>128</dim>
20865 <dim>40</dim>
20866 <dim>68</dim>
20867 </port>
20868 <port id="1"/>
20869 <port id="2"/>
20870 <port id="3"/>
20871 <port id="4"/>
20872 </input>
20873 <output>
20874 <port id="5" precision="FP16">
20875 <dim>1</dim>
20876 <dim>128</dim>
20877 <dim>40</dim>
20878 <dim>68</dim>
20879 </port>
20880 </output>
20881 </layer>
20882 <layer id="1395" name="bottleneck3_9/add" type="Add" version="opset1">
20883 <data auto_broadcast="numpy"/>
20884 <input>
20885 <port id="0">
20886 <dim>1</dim>
20887 <dim>128</dim>
20888 <dim>40</dim>
20889 <dim>68</dim>
20890 </port>
20891 <port id="1">
20892 <dim>1</dim>
20893 <dim>128</dim>
20894 <dim>40</dim>
20895 <dim>68</dim>
20896 </port>
20897 </input>
20898 <output>
20899 <port id="2" names="bottleneck3_9/add" precision="FP16">
20900 <dim>1</dim>
20901 <dim>128</dim>
20902 <dim>40</dim>
20903 <dim>68</dim>
20904 </port>
20905 </output>
20906 </layer>
20907 <layer id="1396" name="bottleneck3_9/fn/weights31024406761201" type="Const" version="opset1">
20908 <data element_type="f32" offset="1576" shape="1" size="4"/>
20909 <output>
20910 <port id="0" precision="FP32">
20911 <dim>1</dim>
20912 </port>
20913 </output>
20914 </layer>
20915 <layer id="1397" name="bottleneck3_9/fn" type="PReLU" version="opset1">
20916 <input>
20917 <port id="0">
20918 <dim>1</dim>
20919 <dim>128</dim>
20920 <dim>40</dim>
20921 <dim>68</dim>
20922 </port>
20923 <port id="1">
20924 <dim>1</dim>
20925 </port>
20926 </input>
20927 <output>
20928 <port id="2" names="bottleneck3_9/add" precision="FP16">
20929 <dim>1</dim>
20930 <dim>128</dim>
20931 <dim>40</dim>
20932 <dim>68</dim>
20933 </port>
20934 </output>
20935 </layer>
20936 <layer id="1398" name="bottleneck3_10/add/fq_input_0" type="FakeQuantize" version="opset1">
20937 <data auto_broadcast="numpy" levels="256"/>
20938 <input>
20939 <port id="0">
20940 <dim>1</dim>
20941 <dim>128</dim>
20942 <dim>40</dim>
20943 <dim>68</dim>
20944 </port>
20945 <port id="1"/>
20946 <port id="2"/>
20947 <port id="3"/>
20948 <port id="4"/>
20949 </input>
20950 <output>
20951 <port id="5" precision="FP16">
20952 <dim>1</dim>
20953 <dim>128</dim>
20954 <dim>40</dim>
20955 <dim>68</dim>
20956 </port>
20957 </output>
20958 </layer>
20959 <layer id="1399" name="4474447819539" type="Const" version="opset1">
20960 <data element_type="f16" offset="137072" shape="" size="2"/>
20961 <output>
20962 <port id="0" precision="FP16"/>
20963 </output>
20964 </layer>
20965 <layer id="1400" name="4475447919491" type="Const" version="opset1">
20966 <data element_type="f16" offset="137074" shape="" size="2"/>
20967 <output>
20968 <port id="0" precision="FP16"/>
20969 </output>
20970 </layer>
20971 <layer id="1401" name="4476448020022" type="Const" version="opset1">
20972 <data element_type="f16" offset="137072" shape="" size="2"/>
20973 <output>
20974 <port id="0" precision="FP16"/>
20975 </output>
20976 </layer>
20977 <layer id="1402" name="4477448121588" type="Const" version="opset1">
20978 <data element_type="f16" offset="137074" shape="" size="2"/>
20979 <output>
20980 <port id="0" precision="FP16"/>
20981 </output>
20982 </layer>
20983 <layer id="1403" name="2944294820112" type="Const" version="opset1">
20984 <data element_type="f16" offset="137076" shape="" size="2"/>
20985 <output>
20986 <port id="0" precision="FP16"/>
20987 </output>
20988 </layer>
20989 <layer id="1404" name="2945294919929" type="Const" version="opset1">
20990 <data element_type="f16" offset="137078" shape="" size="2"/>
20991 <output>
20992 <port id="0" precision="FP16"/>
20993 </output>
20994 </layer>
20995 <layer id="1405" name="2946295019737" type="Const" version="opset1">
20996 <data element_type="f16" offset="137076" shape="" size="2"/>
20997 <output>
20998 <port id="0" precision="FP16"/>
20999 </output>
21000 </layer>
21001 <layer id="1406" name="2947295120466" type="Const" version="opset1">
21002 <data element_type="f16" offset="137078" shape="" size="2"/>
21003 <output>
21004 <port id="0" precision="FP16"/>
21005 </output>
21006 </layer>
21007 <layer id="1407" name="4224422820349" type="Const" version="opset1">
21008 <data element_type="f16" offset="137080" shape="1,32,1,1" size="64"/>
21009 <output>
21010 <port id="0" precision="FP16">
21011 <dim>1</dim>
21012 <dim>32</dim>
21013 <dim>1</dim>
21014 <dim>1</dim>
21015 </port>
21016 </output>
21017 </layer>
21018 <layer id="1408" name="4225422922002" type="Const" version="opset1">
21019 <data element_type="f16" offset="137144" shape="1,32,1,1" size="64"/>
21020 <output>
21021 <port id="0" precision="FP16">
21022 <dim>1</dim>
21023 <dim>32</dim>
21024 <dim>1</dim>
21025 <dim>1</dim>
21026 </port>
21027 </output>
21028 </layer>
21029 <layer id="1409" name="4226423020208" type="Const" version="opset1">
21030 <data element_type="f16" offset="137080" shape="1,32,1,1" size="64"/>
21031 <output>
21032 <port id="0" precision="FP16">
21033 <dim>1</dim>
21034 <dim>32</dim>
21035 <dim>1</dim>
21036 <dim>1</dim>
21037 </port>
21038 </output>
21039 </layer>
21040 <layer id="1410" name="4227423122239" type="Const" version="opset1">
21041 <data element_type="f16" offset="137144" shape="1,32,1,1" size="64"/>
21042 <output>
21043 <port id="0" precision="FP16">
21044 <dim>1</dim>
21045 <dim>32</dim>
21046 <dim>1</dim>
21047 <dim>1</dim>
21048 </port>
21049 </output>
21050 </layer>
21051 <layer id="1411" name="bottleneck3_10/dim_red/bn/mean/Fused_Mul__copy120310293/quantized1235221372" type="Const" version="opset1">
21052 <data element_type="i8" offset="137208" shape="32,128,1,1" size="4096"/>
21053 <output>
21054 <port id="0" precision="I8">
21055 <dim>32</dim>
21056 <dim>128</dim>
21057 <dim>1</dim>
21058 <dim>1</dim>
21059 </port>
21060 </output>
21061 </layer>
21062 <layer id="1412" name="bottleneck3_10/dim_red/bn/mean/Fused_Mul__copy120310293/quantized/to_f16" type="Convert" version="opset1">
21063 <data destination_type="f16"/>
21064 <input>
21065 <port id="0">
21066 <dim>32</dim>
21067 <dim>128</dim>
21068 <dim>1</dim>
21069 <dim>1</dim>
21070 </port>
21071 </input>
21072 <output>
21073 <port id="1" precision="FP16">
21074 <dim>32</dim>
21075 <dim>128</dim>
21076 <dim>1</dim>
21077 <dim>1</dim>
21078 </port>
21079 </output>
21080 </layer>
21081 <layer id="1413" name="bottleneck3_10/dim_red/conv/fq_weights_1/zero_point1236520145" type="Const" version="opset1">
21082 <data element_type="f16" offset="141304" shape="32,1,1,1" size="64"/>
21083 <output>
21084 <port id="0" precision="FP16">
21085 <dim>32</dim>
21086 <dim>1</dim>
21087 <dim>1</dim>
21088 <dim>1</dim>
21089 </port>
21090 </output>
21091 </layer>
21092 <layer id="1414" name="bottleneck3_10/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
21093 <data auto_broadcast="numpy"/>
21094 <input>
21095 <port id="0">
21096 <dim>32</dim>
21097 <dim>128</dim>
21098 <dim>1</dim>
21099 <dim>1</dim>
21100 </port>
21101 <port id="1">
21102 <dim>32</dim>
21103 <dim>1</dim>
21104 <dim>1</dim>
21105 <dim>1</dim>
21106 </port>
21107 </input>
21108 <output>
21109 <port id="2" precision="FP16">
21110 <dim>32</dim>
21111 <dim>128</dim>
21112 <dim>1</dim>
21113 <dim>1</dim>
21114 </port>
21115 </output>
21116 </layer>
21117 <layer id="1415" name="bottleneck3_10/dim_red/conv/fq_weights_1/scale1236020934" type="Const" version="opset1">
21118 <data element_type="f16" offset="141368" shape="32,1,1,1" size="64"/>
21119 <output>
21120 <port id="0" precision="FP16">
21121 <dim>32</dim>
21122 <dim>1</dim>
21123 <dim>1</dim>
21124 <dim>1</dim>
21125 </port>
21126 </output>
21127 </layer>
21128 <layer id="1416" name="bottleneck3_10/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
21129 <data auto_broadcast="numpy"/>
21130 <input>
21131 <port id="0">
21132 <dim>32</dim>
21133 <dim>128</dim>
21134 <dim>1</dim>
21135 <dim>1</dim>
21136 </port>
21137 <port id="1">
21138 <dim>32</dim>
21139 <dim>1</dim>
21140 <dim>1</dim>
21141 <dim>1</dim>
21142 </port>
21143 </input>
21144 <output>
21145 <port id="2" precision="FP16">
21146 <dim>32</dim>
21147 <dim>128</dim>
21148 <dim>1</dim>
21149 <dim>1</dim>
21150 </port>
21151 </output>
21152 </layer>
21153 <layer id="1417" name="bottleneck3_10/dim_red/conv" type="Convolution" version="opset1">
21154 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
21155 <input>
21156 <port id="0">
21157 <dim>1</dim>
21158 <dim>128</dim>
21159 <dim>40</dim>
21160 <dim>68</dim>
21161 </port>
21162 <port id="1">
21163 <dim>32</dim>
21164 <dim>128</dim>
21165 <dim>1</dim>
21166 <dim>1</dim>
21167 </port>
21168 </input>
21169 <output>
21170 <port id="2" precision="FP16">
21171 <dim>1</dim>
21172 <dim>32</dim>
21173 <dim>40</dim>
21174 <dim>68</dim>
21175 </port>
21176 </output>
21177 </layer>
21178 <layer id="1418" name="data_add_2422524230120519773" type="Const" version="opset1">
21179 <data element_type="f16" offset="141432" shape="1,32,1,1" size="64"/>
21180 <output>
21181 <port id="0" precision="FP16">
21182 <dim>1</dim>
21183 <dim>32</dim>
21184 <dim>1</dim>
21185 <dim>1</dim>
21186 </port>
21187 </output>
21188 </layer>
21189 <layer id="1419" name="bottleneck3_10/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
21190 <data auto_broadcast="numpy"/>
21191 <input>
21192 <port id="0">
21193 <dim>1</dim>
21194 <dim>32</dim>
21195 <dim>40</dim>
21196 <dim>68</dim>
21197 </port>
21198 <port id="1">
21199 <dim>1</dim>
21200 <dim>32</dim>
21201 <dim>1</dim>
21202 <dim>1</dim>
21203 </port>
21204 </input>
21205 <output>
21206 <port id="2" names="bottleneck3_10/dim_red/conv" precision="FP16">
21207 <dim>1</dim>
21208 <dim>32</dim>
21209 <dim>40</dim>
21210 <dim>68</dim>
21211 </port>
21212 </output>
21213 </layer>
21214 <layer id="1420" name="bottleneck3_10/dim_red/fn/weights30976405021207" type="Const" version="opset1">
21215 <data element_type="f32" offset="1576" shape="1" size="4"/>
21216 <output>
21217 <port id="0" precision="FP32">
21218 <dim>1</dim>
21219 </port>
21220 </output>
21221 </layer>
21222 <layer id="1421" name="bottleneck3_10/dim_red/fn" type="PReLU" version="opset1">
21223 <input>
21224 <port id="0">
21225 <dim>1</dim>
21226 <dim>32</dim>
21227 <dim>40</dim>
21228 <dim>68</dim>
21229 </port>
21230 <port id="1">
21231 <dim>1</dim>
21232 </port>
21233 </input>
21234 <output>
21235 <port id="2" names="bottleneck3_10/dim_red/conv" precision="FP16">
21236 <dim>1</dim>
21237 <dim>32</dim>
21238 <dim>40</dim>
21239 <dim>68</dim>
21240 </port>
21241 </output>
21242 </layer>
21243 <layer id="1422" name="bottleneck3_10/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
21244 <data auto_broadcast="numpy" levels="256"/>
21245 <input>
21246 <port id="0">
21247 <dim>1</dim>
21248 <dim>32</dim>
21249 <dim>40</dim>
21250 <dim>68</dim>
21251 </port>
21252 <port id="1">
21253 <dim>1</dim>
21254 <dim>32</dim>
21255 <dim>1</dim>
21256 <dim>1</dim>
21257 </port>
21258 <port id="2">
21259 <dim>1</dim>
21260 <dim>32</dim>
21261 <dim>1</dim>
21262 <dim>1</dim>
21263 </port>
21264 <port id="3">
21265 <dim>1</dim>
21266 <dim>32</dim>
21267 <dim>1</dim>
21268 <dim>1</dim>
21269 </port>
21270 <port id="4">
21271 <dim>1</dim>
21272 <dim>32</dim>
21273 <dim>1</dim>
21274 <dim>1</dim>
21275 </port>
21276 </input>
21277 <output>
21278 <port id="5" precision="FP16">
21279 <dim>1</dim>
21280 <dim>32</dim>
21281 <dim>40</dim>
21282 <dim>68</dim>
21283 </port>
21284 </output>
21285 </layer>
21286 <layer id="1423" name="16875/value1687722320" type="Const" version="opset1">
21287 <data element_type="i64" offset="43778" shape="5" size="40"/>
21288 <output>
21289 <port id="0" precision="I64">
21290 <dim>5</dim>
21291 </port>
21292 </output>
21293 </layer>
21294 <layer id="1424" name="bottleneck3_10/inner/dw1/bn/mean/Fused_Mul__copy120910296/quantized1369620472" type="Const" version="opset1">
21295 <data element_type="i8" offset="141496" shape="32,1,3,3" size="288"/>
21296 <output>
21297 <port id="0" precision="I8">
21298 <dim>32</dim>
21299 <dim>1</dim>
21300 <dim>3</dim>
21301 <dim>3</dim>
21302 </port>
21303 </output>
21304 </layer>
21305 <layer id="1425" name="bottleneck3_10/inner/dw1/bn/mean/Fused_Mul__copy120910296/quantized/to_f16" type="Convert" version="opset1">
21306 <data destination_type="f16"/>
21307 <input>
21308 <port id="0">
21309 <dim>32</dim>
21310 <dim>1</dim>
21311 <dim>3</dim>
21312 <dim>3</dim>
21313 </port>
21314 </input>
21315 <output>
21316 <port id="1" precision="FP16">
21317 <dim>32</dim>
21318 <dim>1</dim>
21319 <dim>3</dim>
21320 <dim>3</dim>
21321 </port>
21322 </output>
21323 </layer>
21324 <layer id="1426" name="bottleneck3_10/inner/dw1/conv/fq_weights_1/zero_point1370919593" type="Const" version="opset1">
21325 <data element_type="f16" offset="141784" shape="32,1,1,1" size="64"/>
21326 <output>
21327 <port id="0" precision="FP16">
21328 <dim>32</dim>
21329 <dim>1</dim>
21330 <dim>1</dim>
21331 <dim>1</dim>
21332 </port>
21333 </output>
21334 </layer>
21335 <layer id="1427" name="bottleneck3_10/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
21336 <data auto_broadcast="numpy"/>
21337 <input>
21338 <port id="0">
21339 <dim>32</dim>
21340 <dim>1</dim>
21341 <dim>3</dim>
21342 <dim>3</dim>
21343 </port>
21344 <port id="1">
21345 <dim>32</dim>
21346 <dim>1</dim>
21347 <dim>1</dim>
21348 <dim>1</dim>
21349 </port>
21350 </input>
21351 <output>
21352 <port id="2" precision="FP16">
21353 <dim>32</dim>
21354 <dim>1</dim>
21355 <dim>3</dim>
21356 <dim>3</dim>
21357 </port>
21358 </output>
21359 </layer>
21360 <layer id="1428" name="bottleneck3_10/inner/dw1/conv/fq_weights_1/scale1370420757" type="Const" version="opset1">
21361 <data element_type="f16" offset="141848" shape="32,1,1,1" size="64"/>
21362 <output>
21363 <port id="0" precision="FP16">
21364 <dim>32</dim>
21365 <dim>1</dim>
21366 <dim>1</dim>
21367 <dim>1</dim>
21368 </port>
21369 </output>
21370 </layer>
21371 <layer id="1429" name="bottleneck3_10/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
21372 <data auto_broadcast="numpy"/>
21373 <input>
21374 <port id="0">
21375 <dim>32</dim>
21376 <dim>1</dim>
21377 <dim>3</dim>
21378 <dim>3</dim>
21379 </port>
21380 <port id="1">
21381 <dim>32</dim>
21382 <dim>1</dim>
21383 <dim>1</dim>
21384 <dim>1</dim>
21385 </port>
21386 </input>
21387 <output>
21388 <port id="2" precision="FP16">
21389 <dim>32</dim>
21390 <dim>1</dim>
21391 <dim>3</dim>
21392 <dim>3</dim>
21393 </port>
21394 </output>
21395 </layer>
21396 <layer id="1430" name="16875" type="Reshape" version="opset1">
21397 <data special_zero="true"/>
21398 <input>
21399 <port id="0">
21400 <dim>32</dim>
21401 <dim>1</dim>
21402 <dim>3</dim>
21403 <dim>3</dim>
21404 </port>
21405 <port id="1">
21406 <dim>5</dim>
21407 </port>
21408 </input>
21409 <output>
21410 <port id="2" precision="FP16">
21411 <dim>32</dim>
21412 <dim>1</dim>
21413 <dim>1</dim>
21414 <dim>3</dim>
21415 <dim>3</dim>
21416 </port>
21417 </output>
21418 </layer>
21419 <layer id="1431" name="bottleneck3_10/inner/dw1/conv" type="GroupConvolution" version="opset1">
21420 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
21421 <input>
21422 <port id="0">
21423 <dim>1</dim>
21424 <dim>32</dim>
21425 <dim>40</dim>
21426 <dim>68</dim>
21427 </port>
21428 <port id="1">
21429 <dim>32</dim>
21430 <dim>1</dim>
21431 <dim>1</dim>
21432 <dim>3</dim>
21433 <dim>3</dim>
21434 </port>
21435 </input>
21436 <output>
21437 <port id="2" precision="FP16">
21438 <dim>1</dim>
21439 <dim>32</dim>
21440 <dim>40</dim>
21441 <dim>68</dim>
21442 </port>
21443 </output>
21444 </layer>
21445 <layer id="1432" name="data_add_2423324238121120427" type="Const" version="opset1">
21446 <data element_type="f16" offset="141912" shape="1,32,1,1" size="64"/>
21447 <output>
21448 <port id="0" precision="FP16">
21449 <dim>1</dim>
21450 <dim>32</dim>
21451 <dim>1</dim>
21452 <dim>1</dim>
21453 </port>
21454 </output>
21455 </layer>
21456 <layer id="1433" name="bottleneck3_10/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
21457 <data auto_broadcast="numpy"/>
21458 <input>
21459 <port id="0">
21460 <dim>1</dim>
21461 <dim>32</dim>
21462 <dim>40</dim>
21463 <dim>68</dim>
21464 </port>
21465 <port id="1">
21466 <dim>1</dim>
21467 <dim>32</dim>
21468 <dim>1</dim>
21469 <dim>1</dim>
21470 </port>
21471 </input>
21472 <output>
21473 <port id="2" names="bottleneck3_10/inner/dw1/conv" precision="FP16">
21474 <dim>1</dim>
21475 <dim>32</dim>
21476 <dim>40</dim>
21477 <dim>68</dim>
21478 </port>
21479 </output>
21480 </layer>
21481 <layer id="1434" name="bottleneck3_10/inner/dw1/fn/weights31016401511213" type="Const" version="opset1">
21482 <data element_type="f32" offset="1576" shape="1" size="4"/>
21483 <output>
21484 <port id="0" precision="FP32">
21485 <dim>1</dim>
21486 </port>
21487 </output>
21488 </layer>
21489 <layer id="1435" name="bottleneck3_10/inner/dw1/fn" type="PReLU" version="opset1">
21490 <input>
21491 <port id="0">
21492 <dim>1</dim>
21493 <dim>32</dim>
21494 <dim>40</dim>
21495 <dim>68</dim>
21496 </port>
21497 <port id="1">
21498 <dim>1</dim>
21499 </port>
21500 </input>
21501 <output>
21502 <port id="2" names="bottleneck3_10/inner/dw1/conv" precision="FP16">
21503 <dim>1</dim>
21504 <dim>32</dim>
21505 <dim>40</dim>
21506 <dim>68</dim>
21507 </port>
21508 </output>
21509 </layer>
21510 <layer id="1436" name="bottleneck3_10/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
21511 <data auto_broadcast="numpy" levels="256"/>
21512 <input>
21513 <port id="0">
21514 <dim>1</dim>
21515 <dim>32</dim>
21516 <dim>40</dim>
21517 <dim>68</dim>
21518 </port>
21519 <port id="1"/>
21520 <port id="2"/>
21521 <port id="3"/>
21522 <port id="4"/>
21523 </input>
21524 <output>
21525 <port id="5" precision="FP16">
21526 <dim>1</dim>
21527 <dim>32</dim>
21528 <dim>40</dim>
21529 <dim>68</dim>
21530 </port>
21531 </output>
21532 </layer>
21533 <layer id="1437" name="bottleneck3_10/dim_inc/bn/mean/Fused_Mul__copy121510299/quantized1151222302" type="Const" version="opset1">
21534 <data element_type="i8" offset="141976" shape="128,32,1,1" size="4096"/>
21535 <output>
21536 <port id="0" precision="I8">
21537 <dim>128</dim>
21538 <dim>32</dim>
21539 <dim>1</dim>
21540 <dim>1</dim>
21541 </port>
21542 </output>
21543 </layer>
21544 <layer id="1438" name="bottleneck3_10/dim_inc/bn/mean/Fused_Mul__copy121510299/quantized/to_f16" type="Convert" version="opset1">
21545 <data destination_type="f16"/>
21546 <input>
21547 <port id="0">
21548 <dim>128</dim>
21549 <dim>32</dim>
21550 <dim>1</dim>
21551 <dim>1</dim>
21552 </port>
21553 </input>
21554 <output>
21555 <port id="1" precision="FP16">
21556 <dim>128</dim>
21557 <dim>32</dim>
21558 <dim>1</dim>
21559 <dim>1</dim>
21560 </port>
21561 </output>
21562 </layer>
21563 <layer id="1439" name="bottleneck3_10/dim_inc/conv/fq_weights_1/zero_point1152520049" type="Const" version="opset1">
21564 <data element_type="f16" offset="146072" shape="128,1,1,1" size="256"/>
21565 <output>
21566 <port id="0" precision="FP16">
21567 <dim>128</dim>
21568 <dim>1</dim>
21569 <dim>1</dim>
21570 <dim>1</dim>
21571 </port>
21572 </output>
21573 </layer>
21574 <layer id="1440" name="bottleneck3_10/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
21575 <data auto_broadcast="numpy"/>
21576 <input>
21577 <port id="0">
21578 <dim>128</dim>
21579 <dim>32</dim>
21580 <dim>1</dim>
21581 <dim>1</dim>
21582 </port>
21583 <port id="1">
21584 <dim>128</dim>
21585 <dim>1</dim>
21586 <dim>1</dim>
21587 <dim>1</dim>
21588 </port>
21589 </input>
21590 <output>
21591 <port id="2" precision="FP16">
21592 <dim>128</dim>
21593 <dim>32</dim>
21594 <dim>1</dim>
21595 <dim>1</dim>
21596 </port>
21597 </output>
21598 </layer>
21599 <layer id="1441" name="bottleneck3_10/dim_inc/conv/fq_weights_1/scale1152021642" type="Const" version="opset1">
21600 <data element_type="f16" offset="146328" shape="128,1,1,1" size="256"/>
21601 <output>
21602 <port id="0" precision="FP16">
21603 <dim>128</dim>
21604 <dim>1</dim>
21605 <dim>1</dim>
21606 <dim>1</dim>
21607 </port>
21608 </output>
21609 </layer>
21610 <layer id="1442" name="bottleneck3_10/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
21611 <data auto_broadcast="numpy"/>
21612 <input>
21613 <port id="0">
21614 <dim>128</dim>
21615 <dim>32</dim>
21616 <dim>1</dim>
21617 <dim>1</dim>
21618 </port>
21619 <port id="1">
21620 <dim>128</dim>
21621 <dim>1</dim>
21622 <dim>1</dim>
21623 <dim>1</dim>
21624 </port>
21625 </input>
21626 <output>
21627 <port id="2" precision="FP16">
21628 <dim>128</dim>
21629 <dim>32</dim>
21630 <dim>1</dim>
21631 <dim>1</dim>
21632 </port>
21633 </output>
21634 </layer>
21635 <layer id="1443" name="bottleneck3_10/dim_inc/conv" type="Convolution" version="opset1">
21636 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
21637 <input>
21638 <port id="0">
21639 <dim>1</dim>
21640 <dim>32</dim>
21641 <dim>40</dim>
21642 <dim>68</dim>
21643 </port>
21644 <port id="1">
21645 <dim>128</dim>
21646 <dim>32</dim>
21647 <dim>1</dim>
21648 <dim>1</dim>
21649 </port>
21650 </input>
21651 <output>
21652 <port id="2" precision="FP16">
21653 <dim>1</dim>
21654 <dim>128</dim>
21655 <dim>40</dim>
21656 <dim>68</dim>
21657 </port>
21658 </output>
21659 </layer>
21660 <layer id="1444" name="data_add_2424124246121722014" type="Const" version="opset1">
21661 <data element_type="f16" offset="146584" shape="1,128,1,1" size="256"/>
21662 <output>
21663 <port id="0" precision="FP16">
21664 <dim>1</dim>
21665 <dim>128</dim>
21666 <dim>1</dim>
21667 <dim>1</dim>
21668 </port>
21669 </output>
21670 </layer>
21671 <layer id="1445" name="bottleneck3_10/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
21672 <data auto_broadcast="numpy"/>
21673 <input>
21674 <port id="0">
21675 <dim>1</dim>
21676 <dim>128</dim>
21677 <dim>40</dim>
21678 <dim>68</dim>
21679 </port>
21680 <port id="1">
21681 <dim>1</dim>
21682 <dim>128</dim>
21683 <dim>1</dim>
21684 <dim>1</dim>
21685 </port>
21686 </input>
21687 <output>
21688 <port id="2" names="bottleneck3_10/dim_inc/conv" precision="FP16">
21689 <dim>1</dim>
21690 <dim>128</dim>
21691 <dim>40</dim>
21692 <dim>68</dim>
21693 </port>
21694 </output>
21695 </layer>
21696 <layer id="1446" name="bottleneck3_10/add/fq_input_1" type="FakeQuantize" version="opset1">
21697 <data auto_broadcast="numpy" levels="256"/>
21698 <input>
21699 <port id="0">
21700 <dim>1</dim>
21701 <dim>128</dim>
21702 <dim>40</dim>
21703 <dim>68</dim>
21704 </port>
21705 <port id="1"/>
21706 <port id="2"/>
21707 <port id="3"/>
21708 <port id="4"/>
21709 </input>
21710 <output>
21711 <port id="5" precision="FP16">
21712 <dim>1</dim>
21713 <dim>128</dim>
21714 <dim>40</dim>
21715 <dim>68</dim>
21716 </port>
21717 </output>
21718 </layer>
21719 <layer id="1447" name="bottleneck3_10/add" type="Add" version="opset1">
21720 <data auto_broadcast="numpy"/>
21721 <input>
21722 <port id="0">
21723 <dim>1</dim>
21724 <dim>128</dim>
21725 <dim>40</dim>
21726 <dim>68</dim>
21727 </port>
21728 <port id="1">
21729 <dim>1</dim>
21730 <dim>128</dim>
21731 <dim>40</dim>
21732 <dim>68</dim>
21733 </port>
21734 </input>
21735 <output>
21736 <port id="2" names="bottleneck3_10/add" precision="FP16">
21737 <dim>1</dim>
21738 <dim>128</dim>
21739 <dim>40</dim>
21740 <dim>68</dim>
21741 </port>
21742 </output>
21743 </layer>
21744 <layer id="1448" name="bottleneck3_10/fn/weights30940401451220" type="Const" version="opset1">
21745 <data element_type="f32" offset="1576" shape="1" size="4"/>
21746 <output>
21747 <port id="0" precision="FP32">
21748 <dim>1</dim>
21749 </port>
21750 </output>
21751 </layer>
21752 <layer id="1449" name="bottleneck3_10/fn" type="PReLU" version="opset1">
21753 <input>
21754 <port id="0">
21755 <dim>1</dim>
21756 <dim>128</dim>
21757 <dim>40</dim>
21758 <dim>68</dim>
21759 </port>
21760 <port id="1">
21761 <dim>1</dim>
21762 </port>
21763 </input>
21764 <output>
21765 <port id="2" names="bottleneck3_10/add" precision="FP16">
21766 <dim>1</dim>
21767 <dim>128</dim>
21768 <dim>40</dim>
21769 <dim>68</dim>
21770 </port>
21771 </output>
21772 </layer>
21773 <layer id="1450" name="bottleneck4_0/dim_red/conv/fq_input_0" type="FakeQuantize" version="opset1">
21774 <data auto_broadcast="numpy" levels="256"/>
21775 <input>
21776 <port id="0">
21777 <dim>1</dim>
21778 <dim>128</dim>
21779 <dim>40</dim>
21780 <dim>68</dim>
21781 </port>
21782 <port id="1"/>
21783 <port id="2"/>
21784 <port id="3"/>
21785 <port id="4"/>
21786 </input>
21787 <output>
21788 <port id="5" precision="FP16">
21789 <dim>1</dim>
21790 <dim>128</dim>
21791 <dim>40</dim>
21792 <dim>68</dim>
21793 </port>
21794 </output>
21795 </layer>
21796 <layer id="1451" name="bottleneck4_0/skip/pooling" type="MaxPool" version="opset1">
21797 <data auto_pad="explicit" kernel="2,2" pads_begin="0,0" pads_end="0,0" rounding_type="ceil" strides="2,2"/>
21798 <input>
21799 <port id="0">
21800 <dim>1</dim>
21801 <dim>128</dim>
21802 <dim>40</dim>
21803 <dim>68</dim>
21804 </port>
21805 </input>
21806 <output>
21807 <port id="1" names="bottleneck4_0/skip/pooling" precision="FP16">
21808 <dim>1</dim>
21809 <dim>128</dim>
21810 <dim>20</dim>
21811 <dim>34</dim>
21812 </port>
21813 </output>
21814 </layer>
21815 <layer id="1452" name="bottleneck4_0/skip/bn/mean/Fused_Mul__copy122310302/quantized1266421063" type="Const" version="opset1">
21816 <data element_type="i8" offset="146840" shape="256,128,1,1" size="32768"/>
21817 <output>
21818 <port id="0" precision="I8">
21819 <dim>256</dim>
21820 <dim>128</dim>
21821 <dim>1</dim>
21822 <dim>1</dim>
21823 </port>
21824 </output>
21825 </layer>
21826 <layer id="1453" name="bottleneck4_0/skip/bn/mean/Fused_Mul__copy122310302/quantized/to_f16" type="Convert" version="opset1">
21827 <data destination_type="f16"/>
21828 <input>
21829 <port id="0">
21830 <dim>256</dim>
21831 <dim>128</dim>
21832 <dim>1</dim>
21833 <dim>1</dim>
21834 </port>
21835 </input>
21836 <output>
21837 <port id="1" precision="FP16">
21838 <dim>256</dim>
21839 <dim>128</dim>
21840 <dim>1</dim>
21841 <dim>1</dim>
21842 </port>
21843 </output>
21844 </layer>
21845 <layer id="1454" name="bottleneck4_0/skip/conv/fq_weights_1/zero_point1267719407" type="Const" version="opset1">
21846 <data element_type="f16" offset="179608" shape="256,1,1,1" size="512"/>
21847 <output>
21848 <port id="0" precision="FP16">
21849 <dim>256</dim>
21850 <dim>1</dim>
21851 <dim>1</dim>
21852 <dim>1</dim>
21853 </port>
21854 </output>
21855 </layer>
21856 <layer id="1455" name="bottleneck4_0/skip/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
21857 <data auto_broadcast="numpy"/>
21858 <input>
21859 <port id="0">
21860 <dim>256</dim>
21861 <dim>128</dim>
21862 <dim>1</dim>
21863 <dim>1</dim>
21864 </port>
21865 <port id="1">
21866 <dim>256</dim>
21867 <dim>1</dim>
21868 <dim>1</dim>
21869 <dim>1</dim>
21870 </port>
21871 </input>
21872 <output>
21873 <port id="2" precision="FP16">
21874 <dim>256</dim>
21875 <dim>128</dim>
21876 <dim>1</dim>
21877 <dim>1</dim>
21878 </port>
21879 </output>
21880 </layer>
21881 <layer id="1456" name="bottleneck4_0/skip/conv/fq_weights_1/scale1267219896" type="Const" version="opset1">
21882 <data element_type="f16" offset="180120" shape="256,1,1,1" size="512"/>
21883 <output>
21884 <port id="0" precision="FP16">
21885 <dim>256</dim>
21886 <dim>1</dim>
21887 <dim>1</dim>
21888 <dim>1</dim>
21889 </port>
21890 </output>
21891 </layer>
21892 <layer id="1457" name="bottleneck4_0/skip/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
21893 <data auto_broadcast="numpy"/>
21894 <input>
21895 <port id="0">
21896 <dim>256</dim>
21897 <dim>128</dim>
21898 <dim>1</dim>
21899 <dim>1</dim>
21900 </port>
21901 <port id="1">
21902 <dim>256</dim>
21903 <dim>1</dim>
21904 <dim>1</dim>
21905 <dim>1</dim>
21906 </port>
21907 </input>
21908 <output>
21909 <port id="2" precision="FP16">
21910 <dim>256</dim>
21911 <dim>128</dim>
21912 <dim>1</dim>
21913 <dim>1</dim>
21914 </port>
21915 </output>
21916 </layer>
21917 <layer id="1458" name="bottleneck4_0/skip/conv" type="Convolution" version="opset1">
21918 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
21919 <input>
21920 <port id="0">
21921 <dim>1</dim>
21922 <dim>128</dim>
21923 <dim>20</dim>
21924 <dim>34</dim>
21925 </port>
21926 <port id="1">
21927 <dim>256</dim>
21928 <dim>128</dim>
21929 <dim>1</dim>
21930 <dim>1</dim>
21931 </port>
21932 </input>
21933 <output>
21934 <port id="2" precision="FP16">
21935 <dim>1</dim>
21936 <dim>256</dim>
21937 <dim>20</dim>
21938 <dim>34</dim>
21939 </port>
21940 </output>
21941 </layer>
21942 <layer id="1459" name="data_add_2424924254122520880" type="Const" version="opset1">
21943 <data element_type="f16" offset="180632" shape="1,256,1,1" size="512"/>
21944 <output>
21945 <port id="0" precision="FP16">
21946 <dim>1</dim>
21947 <dim>256</dim>
21948 <dim>1</dim>
21949 <dim>1</dim>
21950 </port>
21951 </output>
21952 </layer>
21953 <layer id="1460" name="bottleneck4_0/skip/bn/variance/Fused_Add_" type="Add" version="opset1">
21954 <data auto_broadcast="numpy"/>
21955 <input>
21956 <port id="0">
21957 <dim>1</dim>
21958 <dim>256</dim>
21959 <dim>20</dim>
21960 <dim>34</dim>
21961 </port>
21962 <port id="1">
21963 <dim>1</dim>
21964 <dim>256</dim>
21965 <dim>1</dim>
21966 <dim>1</dim>
21967 </port>
21968 </input>
21969 <output>
21970 <port id="2" names="bottleneck4_0/skip/conv" precision="FP16">
21971 <dim>1</dim>
21972 <dim>256</dim>
21973 <dim>20</dim>
21974 <dim>34</dim>
21975 </port>
21976 </output>
21977 </layer>
21978 <layer id="1461" name="bottleneck4_0/add/fq_input_0" type="FakeQuantize" version="opset1">
21979 <data auto_broadcast="numpy" levels="256"/>
21980 <input>
21981 <port id="0">
21982 <dim>1</dim>
21983 <dim>256</dim>
21984 <dim>20</dim>
21985 <dim>34</dim>
21986 </port>
21987 <port id="1"/>
21988 <port id="2"/>
21989 <port id="3"/>
21990 <port id="4"/>
21991 </input>
21992 <output>
21993 <port id="5" precision="FP16">
21994 <dim>1</dim>
21995 <dim>256</dim>
21996 <dim>20</dim>
21997 <dim>34</dim>
21998 </port>
21999 </output>
22000 </layer>
22001 <layer id="1462" name="3114311822758" type="Const" version="opset1">
22002 <data element_type="f16" offset="181144" shape="" size="2"/>
22003 <output>
22004 <port id="0" precision="FP16"/>
22005 </output>
22006 </layer>
22007 <layer id="1463" name="3115311920622" type="Const" version="opset1">
22008 <data element_type="f16" offset="181146" shape="" size="2"/>
22009 <output>
22010 <port id="0" precision="FP16"/>
22011 </output>
22012 </layer>
22013 <layer id="1464" name="3116312020928" type="Const" version="opset1">
22014 <data element_type="f16" offset="181144" shape="" size="2"/>
22015 <output>
22016 <port id="0" precision="FP16"/>
22017 </output>
22018 </layer>
22019 <layer id="1465" name="3117312121978" type="Const" version="opset1">
22020 <data element_type="f16" offset="181146" shape="" size="2"/>
22021 <output>
22022 <port id="0" precision="FP16"/>
22023 </output>
22024 </layer>
22025 <layer id="1466" name="3004300822833" type="Const" version="opset1">
22026 <data element_type="f16" offset="181148" shape="" size="2"/>
22027 <output>
22028 <port id="0" precision="FP16"/>
22029 </output>
22030 </layer>
22031 <layer id="1467" name="3005300922149" type="Const" version="opset1">
22032 <data element_type="f16" offset="181150" shape="" size="2"/>
22033 <output>
22034 <port id="0" precision="FP16"/>
22035 </output>
22036 </layer>
22037 <layer id="1468" name="3006301022056" type="Const" version="opset1">
22038 <data element_type="f16" offset="181148" shape="" size="2"/>
22039 <output>
22040 <port id="0" precision="FP16"/>
22041 </output>
22042 </layer>
22043 <layer id="1469" name="3007301120124" type="Const" version="opset1">
22044 <data element_type="f16" offset="181150" shape="" size="2"/>
22045 <output>
22046 <port id="0" precision="FP16"/>
22047 </output>
22048 </layer>
22049 <layer id="1470" name="4244424822983" type="Const" version="opset1">
22050 <data element_type="f16" offset="181152" shape="1,64,1,1" size="128"/>
22051 <output>
22052 <port id="0" precision="FP16">
22053 <dim>1</dim>
22054 <dim>64</dim>
22055 <dim>1</dim>
22056 <dim>1</dim>
22057 </port>
22058 </output>
22059 </layer>
22060 <layer id="1471" name="4245424921591" type="Const" version="opset1">
22061 <data element_type="f16" offset="181280" shape="1,64,1,1" size="128"/>
22062 <output>
22063 <port id="0" precision="FP16">
22064 <dim>1</dim>
22065 <dim>64</dim>
22066 <dim>1</dim>
22067 <dim>1</dim>
22068 </port>
22069 </output>
22070 </layer>
22071 <layer id="1472" name="4246425019413" type="Const" version="opset1">
22072 <data element_type="f16" offset="181152" shape="1,64,1,1" size="128"/>
22073 <output>
22074 <port id="0" precision="FP16">
22075 <dim>1</dim>
22076 <dim>64</dim>
22077 <dim>1</dim>
22078 <dim>1</dim>
22079 </port>
22080 </output>
22081 </layer>
22082 <layer id="1473" name="4247425121324" type="Const" version="opset1">
22083 <data element_type="f16" offset="181280" shape="1,64,1,1" size="128"/>
22084 <output>
22085 <port id="0" precision="FP16">
22086 <dim>1</dim>
22087 <dim>64</dim>
22088 <dim>1</dim>
22089 <dim>1</dim>
22090 </port>
22091 </output>
22092 </layer>
22093 <layer id="1474" name="bottleneck4_0/dim_red/bn/mean/Fused_Mul__copy122710304/quantized1182421375" type="Const" version="opset1">
22094 <data element_type="i8" offset="181408" shape="64,128,1,1" size="8192"/>
22095 <output>
22096 <port id="0" precision="I8">
22097 <dim>64</dim>
22098 <dim>128</dim>
22099 <dim>1</dim>
22100 <dim>1</dim>
22101 </port>
22102 </output>
22103 </layer>
22104 <layer id="1475" name="bottleneck4_0/dim_red/bn/mean/Fused_Mul__copy122710304/quantized/to_f16" type="Convert" version="opset1">
22105 <data destination_type="f16"/>
22106 <input>
22107 <port id="0">
22108 <dim>64</dim>
22109 <dim>128</dim>
22110 <dim>1</dim>
22111 <dim>1</dim>
22112 </port>
22113 </input>
22114 <output>
22115 <port id="1" precision="FP16">
22116 <dim>64</dim>
22117 <dim>128</dim>
22118 <dim>1</dim>
22119 <dim>1</dim>
22120 </port>
22121 </output>
22122 </layer>
22123 <layer id="1476" name="bottleneck4_0/dim_red/conv/fq_weights_1/zero_point1183720238" type="Const" version="opset1">
22124 <data element_type="f16" offset="189600" shape="64,1,1,1" size="128"/>
22125 <output>
22126 <port id="0" precision="FP16">
22127 <dim>64</dim>
22128 <dim>1</dim>
22129 <dim>1</dim>
22130 <dim>1</dim>
22131 </port>
22132 </output>
22133 </layer>
22134 <layer id="1477" name="bottleneck4_0/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
22135 <data auto_broadcast="numpy"/>
22136 <input>
22137 <port id="0">
22138 <dim>64</dim>
22139 <dim>128</dim>
22140 <dim>1</dim>
22141 <dim>1</dim>
22142 </port>
22143 <port id="1">
22144 <dim>64</dim>
22145 <dim>1</dim>
22146 <dim>1</dim>
22147 <dim>1</dim>
22148 </port>
22149 </input>
22150 <output>
22151 <port id="2" precision="FP16">
22152 <dim>64</dim>
22153 <dim>128</dim>
22154 <dim>1</dim>
22155 <dim>1</dim>
22156 </port>
22157 </output>
22158 </layer>
22159 <layer id="1478" name="bottleneck4_0/dim_red/conv/fq_weights_1/scale1183221075" type="Const" version="opset1">
22160 <data element_type="f16" offset="189728" shape="64,1,1,1" size="128"/>
22161 <output>
22162 <port id="0" precision="FP16">
22163 <dim>64</dim>
22164 <dim>1</dim>
22165 <dim>1</dim>
22166 <dim>1</dim>
22167 </port>
22168 </output>
22169 </layer>
22170 <layer id="1479" name="bottleneck4_0/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
22171 <data auto_broadcast="numpy"/>
22172 <input>
22173 <port id="0">
22174 <dim>64</dim>
22175 <dim>128</dim>
22176 <dim>1</dim>
22177 <dim>1</dim>
22178 </port>
22179 <port id="1">
22180 <dim>64</dim>
22181 <dim>1</dim>
22182 <dim>1</dim>
22183 <dim>1</dim>
22184 </port>
22185 </input>
22186 <output>
22187 <port id="2" precision="FP16">
22188 <dim>64</dim>
22189 <dim>128</dim>
22190 <dim>1</dim>
22191 <dim>1</dim>
22192 </port>
22193 </output>
22194 </layer>
22195 <layer id="1480" name="bottleneck4_0/dim_red/conv" type="Convolution" version="opset1">
22196 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
22197 <input>
22198 <port id="0">
22199 <dim>1</dim>
22200 <dim>128</dim>
22201 <dim>40</dim>
22202 <dim>68</dim>
22203 </port>
22204 <port id="1">
22205 <dim>64</dim>
22206 <dim>128</dim>
22207 <dim>1</dim>
22208 <dim>1</dim>
22209 </port>
22210 </input>
22211 <output>
22212 <port id="2" precision="FP16">
22213 <dim>1</dim>
22214 <dim>64</dim>
22215 <dim>40</dim>
22216 <dim>68</dim>
22217 </port>
22218 </output>
22219 </layer>
22220 <layer id="1481" name="data_add_2425724262122922353" type="Const" version="opset1">
22221 <data element_type="f16" offset="189856" shape="1,64,1,1" size="128"/>
22222 <output>
22223 <port id="0" precision="FP16">
22224 <dim>1</dim>
22225 <dim>64</dim>
22226 <dim>1</dim>
22227 <dim>1</dim>
22228 </port>
22229 </output>
22230 </layer>
22231 <layer id="1482" name="bottleneck4_0/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
22232 <data auto_broadcast="numpy"/>
22233 <input>
22234 <port id="0">
22235 <dim>1</dim>
22236 <dim>64</dim>
22237 <dim>40</dim>
22238 <dim>68</dim>
22239 </port>
22240 <port id="1">
22241 <dim>1</dim>
22242 <dim>64</dim>
22243 <dim>1</dim>
22244 <dim>1</dim>
22245 </port>
22246 </input>
22247 <output>
22248 <port id="2" names="bottleneck4_0/dim_red/conv" precision="FP16">
22249 <dim>1</dim>
22250 <dim>64</dim>
22251 <dim>40</dim>
22252 <dim>68</dim>
22253 </port>
22254 </output>
22255 </layer>
22256 <layer id="1483" name="bottleneck4_0/dim_red/fn/weights30984402531231" type="Const" version="opset1">
22257 <data element_type="f32" offset="1576" shape="1" size="4"/>
22258 <output>
22259 <port id="0" precision="FP32">
22260 <dim>1</dim>
22261 </port>
22262 </output>
22263 </layer>
22264 <layer id="1484" name="bottleneck4_0/dim_red/fn" type="PReLU" version="opset1">
22265 <input>
22266 <port id="0">
22267 <dim>1</dim>
22268 <dim>64</dim>
22269 <dim>40</dim>
22270 <dim>68</dim>
22271 </port>
22272 <port id="1">
22273 <dim>1</dim>
22274 </port>
22275 </input>
22276 <output>
22277 <port id="2" names="bottleneck4_0/dim_red/conv" precision="FP16">
22278 <dim>1</dim>
22279 <dim>64</dim>
22280 <dim>40</dim>
22281 <dim>68</dim>
22282 </port>
22283 </output>
22284 </layer>
22285 <layer id="1485" name="bottleneck4_0/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
22286 <data auto_broadcast="numpy" levels="256"/>
22287 <input>
22288 <port id="0">
22289 <dim>1</dim>
22290 <dim>64</dim>
22291 <dim>40</dim>
22292 <dim>68</dim>
22293 </port>
22294 <port id="1">
22295 <dim>1</dim>
22296 <dim>64</dim>
22297 <dim>1</dim>
22298 <dim>1</dim>
22299 </port>
22300 <port id="2">
22301 <dim>1</dim>
22302 <dim>64</dim>
22303 <dim>1</dim>
22304 <dim>1</dim>
22305 </port>
22306 <port id="3">
22307 <dim>1</dim>
22308 <dim>64</dim>
22309 <dim>1</dim>
22310 <dim>1</dim>
22311 </port>
22312 <port id="4">
22313 <dim>1</dim>
22314 <dim>64</dim>
22315 <dim>1</dim>
22316 <dim>1</dim>
22317 </port>
22318 </input>
22319 <output>
22320 <port id="5" precision="FP16">
22321 <dim>1</dim>
22322 <dim>64</dim>
22323 <dim>40</dim>
22324 <dim>68</dim>
22325 </port>
22326 </output>
22327 </layer>
22328 <layer id="1486" name="16879/value1688121138" type="Const" version="opset1">
22329 <data element_type="i64" offset="189984" shape="5" size="40"/>
22330 <output>
22331 <port id="0" precision="I64">
22332 <dim>5</dim>
22333 </port>
22334 </output>
22335 </layer>
22336 <layer id="1487" name="bottleneck4_0/inner/dw1/bn/mean/Fused_Mul__copy123310307/quantized1393620607" type="Const" version="opset1">
22337 <data element_type="i8" offset="190024" shape="64,1,3,3" size="576"/>
22338 <output>
22339 <port id="0" precision="I8">
22340 <dim>64</dim>
22341 <dim>1</dim>
22342 <dim>3</dim>
22343 <dim>3</dim>
22344 </port>
22345 </output>
22346 </layer>
22347 <layer id="1488" name="bottleneck4_0/inner/dw1/bn/mean/Fused_Mul__copy123310307/quantized/to_f16" type="Convert" version="opset1">
22348 <data destination_type="f16"/>
22349 <input>
22350 <port id="0">
22351 <dim>64</dim>
22352 <dim>1</dim>
22353 <dim>3</dim>
22354 <dim>3</dim>
22355 </port>
22356 </input>
22357 <output>
22358 <port id="1" precision="FP16">
22359 <dim>64</dim>
22360 <dim>1</dim>
22361 <dim>3</dim>
22362 <dim>3</dim>
22363 </port>
22364 </output>
22365 </layer>
22366 <layer id="1489" name="bottleneck4_0/inner/dw1/conv/fq_weights_1/zero_point1394920988" type="Const" version="opset1">
22367 <data element_type="f16" offset="190600" shape="64,1,1,1" size="128"/>
22368 <output>
22369 <port id="0" precision="FP16">
22370 <dim>64</dim>
22371 <dim>1</dim>
22372 <dim>1</dim>
22373 <dim>1</dim>
22374 </port>
22375 </output>
22376 </layer>
22377 <layer id="1490" name="bottleneck4_0/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
22378 <data auto_broadcast="numpy"/>
22379 <input>
22380 <port id="0">
22381 <dim>64</dim>
22382 <dim>1</dim>
22383 <dim>3</dim>
22384 <dim>3</dim>
22385 </port>
22386 <port id="1">
22387 <dim>64</dim>
22388 <dim>1</dim>
22389 <dim>1</dim>
22390 <dim>1</dim>
22391 </port>
22392 </input>
22393 <output>
22394 <port id="2" precision="FP16">
22395 <dim>64</dim>
22396 <dim>1</dim>
22397 <dim>3</dim>
22398 <dim>3</dim>
22399 </port>
22400 </output>
22401 </layer>
22402 <layer id="1491" name="bottleneck4_0/inner/dw1/conv/fq_weights_1/scale1394419923" type="Const" version="opset1">
22403 <data element_type="f16" offset="190728" shape="64,1,1,1" size="128"/>
22404 <output>
22405 <port id="0" precision="FP16">
22406 <dim>64</dim>
22407 <dim>1</dim>
22408 <dim>1</dim>
22409 <dim>1</dim>
22410 </port>
22411 </output>
22412 </layer>
22413 <layer id="1492" name="bottleneck4_0/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
22414 <data auto_broadcast="numpy"/>
22415 <input>
22416 <port id="0">
22417 <dim>64</dim>
22418 <dim>1</dim>
22419 <dim>3</dim>
22420 <dim>3</dim>
22421 </port>
22422 <port id="1">
22423 <dim>64</dim>
22424 <dim>1</dim>
22425 <dim>1</dim>
22426 <dim>1</dim>
22427 </port>
22428 </input>
22429 <output>
22430 <port id="2" precision="FP16">
22431 <dim>64</dim>
22432 <dim>1</dim>
22433 <dim>3</dim>
22434 <dim>3</dim>
22435 </port>
22436 </output>
22437 </layer>
22438 <layer id="1493" name="16879" type="Reshape" version="opset1">
22439 <data special_zero="true"/>
22440 <input>
22441 <port id="0">
22442 <dim>64</dim>
22443 <dim>1</dim>
22444 <dim>3</dim>
22445 <dim>3</dim>
22446 </port>
22447 <port id="1">
22448 <dim>5</dim>
22449 </port>
22450 </input>
22451 <output>
22452 <port id="2" precision="FP16">
22453 <dim>64</dim>
22454 <dim>1</dim>
22455 <dim>1</dim>
22456 <dim>3</dim>
22457 <dim>3</dim>
22458 </port>
22459 </output>
22460 </layer>
22461 <layer id="1494" name="bottleneck4_0/inner/dw1/conv" type="GroupConvolution" version="opset1">
22462 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="2,2"/>
22463 <input>
22464 <port id="0">
22465 <dim>1</dim>
22466 <dim>64</dim>
22467 <dim>40</dim>
22468 <dim>68</dim>
22469 </port>
22470 <port id="1">
22471 <dim>64</dim>
22472 <dim>1</dim>
22473 <dim>1</dim>
22474 <dim>3</dim>
22475 <dim>3</dim>
22476 </port>
22477 </input>
22478 <output>
22479 <port id="2" precision="FP16">
22480 <dim>1</dim>
22481 <dim>64</dim>
22482 <dim>20</dim>
22483 <dim>34</dim>
22484 </port>
22485 </output>
22486 </layer>
22487 <layer id="1495" name="data_add_2426524270123522653" type="Const" version="opset1">
22488 <data element_type="f16" offset="190856" shape="1,64,1,1" size="128"/>
22489 <output>
22490 <port id="0" precision="FP16">
22491 <dim>1</dim>
22492 <dim>64</dim>
22493 <dim>1</dim>
22494 <dim>1</dim>
22495 </port>
22496 </output>
22497 </layer>
22498 <layer id="1496" name="bottleneck4_0/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
22499 <data auto_broadcast="numpy"/>
22500 <input>
22501 <port id="0">
22502 <dim>1</dim>
22503 <dim>64</dim>
22504 <dim>20</dim>
22505 <dim>34</dim>
22506 </port>
22507 <port id="1">
22508 <dim>1</dim>
22509 <dim>64</dim>
22510 <dim>1</dim>
22511 <dim>1</dim>
22512 </port>
22513 </input>
22514 <output>
22515 <port id="2" names="bottleneck4_0/inner/dw1/conv" precision="FP16">
22516 <dim>1</dim>
22517 <dim>64</dim>
22518 <dim>20</dim>
22519 <dim>34</dim>
22520 </port>
22521 </output>
22522 </layer>
22523 <layer id="1497" name="bottleneck4_0/inner/dw1/fn/weights31072399351237" type="Const" version="opset1">
22524 <data element_type="f32" offset="1576" shape="1" size="4"/>
22525 <output>
22526 <port id="0" precision="FP32">
22527 <dim>1</dim>
22528 </port>
22529 </output>
22530 </layer>
22531 <layer id="1498" name="bottleneck4_0/inner/dw1/fn" type="PReLU" version="opset1">
22532 <input>
22533 <port id="0">
22534 <dim>1</dim>
22535 <dim>64</dim>
22536 <dim>20</dim>
22537 <dim>34</dim>
22538 </port>
22539 <port id="1">
22540 <dim>1</dim>
22541 </port>
22542 </input>
22543 <output>
22544 <port id="2" names="bottleneck4_0/inner/dw1/conv" precision="FP16">
22545 <dim>1</dim>
22546 <dim>64</dim>
22547 <dim>20</dim>
22548 <dim>34</dim>
22549 </port>
22550 </output>
22551 </layer>
22552 <layer id="1499" name="bottleneck4_0/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
22553 <data auto_broadcast="numpy" levels="256"/>
22554 <input>
22555 <port id="0">
22556 <dim>1</dim>
22557 <dim>64</dim>
22558 <dim>20</dim>
22559 <dim>34</dim>
22560 </port>
22561 <port id="1"/>
22562 <port id="2"/>
22563 <port id="3"/>
22564 <port id="4"/>
22565 </input>
22566 <output>
22567 <port id="5" precision="FP16">
22568 <dim>1</dim>
22569 <dim>64</dim>
22570 <dim>20</dim>
22571 <dim>34</dim>
22572 </port>
22573 </output>
22574 </layer>
22575 <layer id="1500" name="bottleneck4_0/dim_inc/bn/mean/Fused_Mul__copy123910310/quantized1326420133" type="Const" version="opset1">
22576 <data element_type="i8" offset="190984" shape="256,64,1,1" size="16384"/>
22577 <output>
22578 <port id="0" precision="I8">
22579 <dim>256</dim>
22580 <dim>64</dim>
22581 <dim>1</dim>
22582 <dim>1</dim>
22583 </port>
22584 </output>
22585 </layer>
22586 <layer id="1501" name="bottleneck4_0/dim_inc/bn/mean/Fused_Mul__copy123910310/quantized/to_f16" type="Convert" version="opset1">
22587 <data destination_type="f16"/>
22588 <input>
22589 <port id="0">
22590 <dim>256</dim>
22591 <dim>64</dim>
22592 <dim>1</dim>
22593 <dim>1</dim>
22594 </port>
22595 </input>
22596 <output>
22597 <port id="1" precision="FP16">
22598 <dim>256</dim>
22599 <dim>64</dim>
22600 <dim>1</dim>
22601 <dim>1</dim>
22602 </port>
22603 </output>
22604 </layer>
22605 <layer id="1502" name="bottleneck4_0/dim_inc/conv/fq_weights_1/zero_point1327721255" type="Const" version="opset1">
22606 <data element_type="f16" offset="207368" shape="256,1,1,1" size="512"/>
22607 <output>
22608 <port id="0" precision="FP16">
22609 <dim>256</dim>
22610 <dim>1</dim>
22611 <dim>1</dim>
22612 <dim>1</dim>
22613 </port>
22614 </output>
22615 </layer>
22616 <layer id="1503" name="bottleneck4_0/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
22617 <data auto_broadcast="numpy"/>
22618 <input>
22619 <port id="0">
22620 <dim>256</dim>
22621 <dim>64</dim>
22622 <dim>1</dim>
22623 <dim>1</dim>
22624 </port>
22625 <port id="1">
22626 <dim>256</dim>
22627 <dim>1</dim>
22628 <dim>1</dim>
22629 <dim>1</dim>
22630 </port>
22631 </input>
22632 <output>
22633 <port id="2" precision="FP16">
22634 <dim>256</dim>
22635 <dim>64</dim>
22636 <dim>1</dim>
22637 <dim>1</dim>
22638 </port>
22639 </output>
22640 </layer>
22641 <layer id="1504" name="bottleneck4_0/dim_inc/conv/fq_weights_1/scale1327221849" type="Const" version="opset1">
22642 <data element_type="f16" offset="207880" shape="256,1,1,1" size="512"/>
22643 <output>
22644 <port id="0" precision="FP16">
22645 <dim>256</dim>
22646 <dim>1</dim>
22647 <dim>1</dim>
22648 <dim>1</dim>
22649 </port>
22650 </output>
22651 </layer>
22652 <layer id="1505" name="bottleneck4_0/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
22653 <data auto_broadcast="numpy"/>
22654 <input>
22655 <port id="0">
22656 <dim>256</dim>
22657 <dim>64</dim>
22658 <dim>1</dim>
22659 <dim>1</dim>
22660 </port>
22661 <port id="1">
22662 <dim>256</dim>
22663 <dim>1</dim>
22664 <dim>1</dim>
22665 <dim>1</dim>
22666 </port>
22667 </input>
22668 <output>
22669 <port id="2" precision="FP16">
22670 <dim>256</dim>
22671 <dim>64</dim>
22672 <dim>1</dim>
22673 <dim>1</dim>
22674 </port>
22675 </output>
22676 </layer>
22677 <layer id="1506" name="bottleneck4_0/dim_inc/conv" type="Convolution" version="opset1">
22678 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
22679 <input>
22680 <port id="0">
22681 <dim>1</dim>
22682 <dim>64</dim>
22683 <dim>20</dim>
22684 <dim>34</dim>
22685 </port>
22686 <port id="1">
22687 <dim>256</dim>
22688 <dim>64</dim>
22689 <dim>1</dim>
22690 <dim>1</dim>
22691 </port>
22692 </input>
22693 <output>
22694 <port id="2" precision="FP16">
22695 <dim>1</dim>
22696 <dim>256</dim>
22697 <dim>20</dim>
22698 <dim>34</dim>
22699 </port>
22700 </output>
22701 </layer>
22702 <layer id="1507" name="data_add_2427324278124122158" type="Const" version="opset1">
22703 <data element_type="f16" offset="208392" shape="1,256,1,1" size="512"/>
22704 <output>
22705 <port id="0" precision="FP16">
22706 <dim>1</dim>
22707 <dim>256</dim>
22708 <dim>1</dim>
22709 <dim>1</dim>
22710 </port>
22711 </output>
22712 </layer>
22713 <layer id="1508" name="bottleneck4_0/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
22714 <data auto_broadcast="numpy"/>
22715 <input>
22716 <port id="0">
22717 <dim>1</dim>
22718 <dim>256</dim>
22719 <dim>20</dim>
22720 <dim>34</dim>
22721 </port>
22722 <port id="1">
22723 <dim>1</dim>
22724 <dim>256</dim>
22725 <dim>1</dim>
22726 <dim>1</dim>
22727 </port>
22728 </input>
22729 <output>
22730 <port id="2" names="bottleneck4_0/dim_inc/conv" precision="FP16">
22731 <dim>1</dim>
22732 <dim>256</dim>
22733 <dim>20</dim>
22734 <dim>34</dim>
22735 </port>
22736 </output>
22737 </layer>
22738 <layer id="1509" name="bottleneck4_0/add/fq_input_1" type="FakeQuantize" version="opset1">
22739 <data auto_broadcast="numpy" levels="256"/>
22740 <input>
22741 <port id="0">
22742 <dim>1</dim>
22743 <dim>256</dim>
22744 <dim>20</dim>
22745 <dim>34</dim>
22746 </port>
22747 <port id="1"/>
22748 <port id="2"/>
22749 <port id="3"/>
22750 <port id="4"/>
22751 </input>
22752 <output>
22753 <port id="5" precision="FP16">
22754 <dim>1</dim>
22755 <dim>256</dim>
22756 <dim>20</dim>
22757 <dim>34</dim>
22758 </port>
22759 </output>
22760 </layer>
22761 <layer id="1510" name="bottleneck4_0/add" type="Add" version="opset1">
22762 <data auto_broadcast="numpy"/>
22763 <input>
22764 <port id="0">
22765 <dim>1</dim>
22766 <dim>256</dim>
22767 <dim>20</dim>
22768 <dim>34</dim>
22769 </port>
22770 <port id="1">
22771 <dim>1</dim>
22772 <dim>256</dim>
22773 <dim>20</dim>
22774 <dim>34</dim>
22775 </port>
22776 </input>
22777 <output>
22778 <port id="2" names="bottleneck4_0/add" precision="FP16">
22779 <dim>1</dim>
22780 <dim>256</dim>
22781 <dim>20</dim>
22782 <dim>34</dim>
22783 </port>
22784 </output>
22785 </layer>
22786 <layer id="1511" name="bottleneck4_0/fn/weights31080401151244" type="Const" version="opset1">
22787 <data element_type="f32" offset="1576" shape="1" size="4"/>
22788 <output>
22789 <port id="0" precision="FP32">
22790 <dim>1</dim>
22791 </port>
22792 </output>
22793 </layer>
22794 <layer id="1512" name="bottleneck4_0/fn" type="PReLU" version="opset1">
22795 <input>
22796 <port id="0">
22797 <dim>1</dim>
22798 <dim>256</dim>
22799 <dim>20</dim>
22800 <dim>34</dim>
22801 </port>
22802 <port id="1">
22803 <dim>1</dim>
22804 </port>
22805 </input>
22806 <output>
22807 <port id="2" names="bottleneck4_0/add" precision="FP16">
22808 <dim>1</dim>
22809 <dim>256</dim>
22810 <dim>20</dim>
22811 <dim>34</dim>
22812 </port>
22813 </output>
22814 </layer>
22815 <layer id="1513" name="bottleneck4_1/add/fq_input_0" type="FakeQuantize" version="opset1">
22816 <data auto_broadcast="numpy" levels="256"/>
22817 <input>
22818 <port id="0">
22819 <dim>1</dim>
22820 <dim>256</dim>
22821 <dim>20</dim>
22822 <dim>34</dim>
22823 </port>
22824 <port id="1"/>
22825 <port id="2"/>
22826 <port id="3"/>
22827 <port id="4"/>
22828 </input>
22829 <output>
22830 <port id="5" precision="FP16">
22831 <dim>1</dim>
22832 <dim>256</dim>
22833 <dim>20</dim>
22834 <dim>34</dim>
22835 </port>
22836 </output>
22837 </layer>
22838 <layer id="1514" name="3974397819956" type="Const" version="opset1">
22839 <data element_type="f16" offset="208904" shape="" size="2"/>
22840 <output>
22841 <port id="0" precision="FP16"/>
22842 </output>
22843 </layer>
22844 <layer id="1515" name="3975397919617" type="Const" version="opset1">
22845 <data element_type="f16" offset="208906" shape="" size="2"/>
22846 <output>
22847 <port id="0" precision="FP16"/>
22848 </output>
22849 </layer>
22850 <layer id="1516" name="3976398019632" type="Const" version="opset1">
22851 <data element_type="f16" offset="208904" shape="" size="2"/>
22852 <output>
22853 <port id="0" precision="FP16"/>
22854 </output>
22855 </layer>
22856 <layer id="1517" name="3977398122686" type="Const" version="opset1">
22857 <data element_type="f16" offset="208906" shape="" size="2"/>
22858 <output>
22859 <port id="0" precision="FP16"/>
22860 </output>
22861 </layer>
22862 <layer id="1518" name="4544454822989" type="Const" version="opset1">
22863 <data element_type="f16" offset="208908" shape="" size="2"/>
22864 <output>
22865 <port id="0" precision="FP16"/>
22866 </output>
22867 </layer>
22868 <layer id="1519" name="4545454920721" type="Const" version="opset1">
22869 <data element_type="f16" offset="208910" shape="" size="2"/>
22870 <output>
22871 <port id="0" precision="FP16"/>
22872 </output>
22873 </layer>
22874 <layer id="1520" name="4546455021402" type="Const" version="opset1">
22875 <data element_type="f16" offset="208908" shape="" size="2"/>
22876 <output>
22877 <port id="0" precision="FP16"/>
22878 </output>
22879 </layer>
22880 <layer id="1521" name="4547455119971" type="Const" version="opset1">
22881 <data element_type="f16" offset="208910" shape="" size="2"/>
22882 <output>
22883 <port id="0" precision="FP16"/>
22884 </output>
22885 </layer>
22886 <layer id="1522" name="5104510821027" type="Const" version="opset1">
22887 <data element_type="f16" offset="208912" shape="1,64,1,1" size="128"/>
22888 <output>
22889 <port id="0" precision="FP16">
22890 <dim>1</dim>
22891 <dim>64</dim>
22892 <dim>1</dim>
22893 <dim>1</dim>
22894 </port>
22895 </output>
22896 </layer>
22897 <layer id="1523" name="5105510922038" type="Const" version="opset1">
22898 <data element_type="f16" offset="209040" shape="1,64,1,1" size="128"/>
22899 <output>
22900 <port id="0" precision="FP16">
22901 <dim>1</dim>
22902 <dim>64</dim>
22903 <dim>1</dim>
22904 <dim>1</dim>
22905 </port>
22906 </output>
22907 </layer>
22908 <layer id="1524" name="5106511021408" type="Const" version="opset1">
22909 <data element_type="f16" offset="208912" shape="1,64,1,1" size="128"/>
22910 <output>
22911 <port id="0" precision="FP16">
22912 <dim>1</dim>
22913 <dim>64</dim>
22914 <dim>1</dim>
22915 <dim>1</dim>
22916 </port>
22917 </output>
22918 </layer>
22919 <layer id="1525" name="5107511121114" type="Const" version="opset1">
22920 <data element_type="f16" offset="209040" shape="1,64,1,1" size="128"/>
22921 <output>
22922 <port id="0" precision="FP16">
22923 <dim>1</dim>
22924 <dim>64</dim>
22925 <dim>1</dim>
22926 <dim>1</dim>
22927 </port>
22928 </output>
22929 </layer>
22930 <layer id="1526" name="bottleneck4_1/dim_red/bn/mean/Fused_Mul__copy124610313/quantized1180020589" type="Const" version="opset1">
22931 <data element_type="i8" offset="209168" shape="64,256,1,1" size="16384"/>
22932 <output>
22933 <port id="0" precision="I8">
22934 <dim>64</dim>
22935 <dim>256</dim>
22936 <dim>1</dim>
22937 <dim>1</dim>
22938 </port>
22939 </output>
22940 </layer>
22941 <layer id="1527" name="bottleneck4_1/dim_red/bn/mean/Fused_Mul__copy124610313/quantized/to_f16" type="Convert" version="opset1">
22942 <data destination_type="f16"/>
22943 <input>
22944 <port id="0">
22945 <dim>64</dim>
22946 <dim>256</dim>
22947 <dim>1</dim>
22948 <dim>1</dim>
22949 </port>
22950 </input>
22951 <output>
22952 <port id="1" precision="FP16">
22953 <dim>64</dim>
22954 <dim>256</dim>
22955 <dim>1</dim>
22956 <dim>1</dim>
22957 </port>
22958 </output>
22959 </layer>
22960 <layer id="1528" name="bottleneck4_1/dim_red/conv/fq_weights_1/zero_point1181319629" type="Const" version="opset1">
22961 <data element_type="f16" offset="225552" shape="64,1,1,1" size="128"/>
22962 <output>
22963 <port id="0" precision="FP16">
22964 <dim>64</dim>
22965 <dim>1</dim>
22966 <dim>1</dim>
22967 <dim>1</dim>
22968 </port>
22969 </output>
22970 </layer>
22971 <layer id="1529" name="bottleneck4_1/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
22972 <data auto_broadcast="numpy"/>
22973 <input>
22974 <port id="0">
22975 <dim>64</dim>
22976 <dim>256</dim>
22977 <dim>1</dim>
22978 <dim>1</dim>
22979 </port>
22980 <port id="1">
22981 <dim>64</dim>
22982 <dim>1</dim>
22983 <dim>1</dim>
22984 <dim>1</dim>
22985 </port>
22986 </input>
22987 <output>
22988 <port id="2" precision="FP16">
22989 <dim>64</dim>
22990 <dim>256</dim>
22991 <dim>1</dim>
22992 <dim>1</dim>
22993 </port>
22994 </output>
22995 </layer>
22996 <layer id="1530" name="bottleneck4_1/dim_red/conv/fq_weights_1/scale1180820541" type="Const" version="opset1">
22997 <data element_type="f16" offset="225680" shape="64,1,1,1" size="128"/>
22998 <output>
22999 <port id="0" precision="FP16">
23000 <dim>64</dim>
23001 <dim>1</dim>
23002 <dim>1</dim>
23003 <dim>1</dim>
23004 </port>
23005 </output>
23006 </layer>
23007 <layer id="1531" name="bottleneck4_1/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
23008 <data auto_broadcast="numpy"/>
23009 <input>
23010 <port id="0">
23011 <dim>64</dim>
23012 <dim>256</dim>
23013 <dim>1</dim>
23014 <dim>1</dim>
23015 </port>
23016 <port id="1">
23017 <dim>64</dim>
23018 <dim>1</dim>
23019 <dim>1</dim>
23020 <dim>1</dim>
23021 </port>
23022 </input>
23023 <output>
23024 <port id="2" precision="FP16">
23025 <dim>64</dim>
23026 <dim>256</dim>
23027 <dim>1</dim>
23028 <dim>1</dim>
23029 </port>
23030 </output>
23031 </layer>
23032 <layer id="1532" name="bottleneck4_1/dim_red/conv" type="Convolution" version="opset1">
23033 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
23034 <input>
23035 <port id="0">
23036 <dim>1</dim>
23037 <dim>256</dim>
23038 <dim>20</dim>
23039 <dim>34</dim>
23040 </port>
23041 <port id="1">
23042 <dim>64</dim>
23043 <dim>256</dim>
23044 <dim>1</dim>
23045 <dim>1</dim>
23046 </port>
23047 </input>
23048 <output>
23049 <port id="2" precision="FP16">
23050 <dim>1</dim>
23051 <dim>64</dim>
23052 <dim>20</dim>
23053 <dim>34</dim>
23054 </port>
23055 </output>
23056 </layer>
23057 <layer id="1533" name="data_add_2428124286124820064" type="Const" version="opset1">
23058 <data element_type="f16" offset="225808" shape="1,64,1,1" size="128"/>
23059 <output>
23060 <port id="0" precision="FP16">
23061 <dim>1</dim>
23062 <dim>64</dim>
23063 <dim>1</dim>
23064 <dim>1</dim>
23065 </port>
23066 </output>
23067 </layer>
23068 <layer id="1534" name="bottleneck4_1/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
23069 <data auto_broadcast="numpy"/>
23070 <input>
23071 <port id="0">
23072 <dim>1</dim>
23073 <dim>64</dim>
23074 <dim>20</dim>
23075 <dim>34</dim>
23076 </port>
23077 <port id="1">
23078 <dim>1</dim>
23079 <dim>64</dim>
23080 <dim>1</dim>
23081 <dim>1</dim>
23082 </port>
23083 </input>
23084 <output>
23085 <port id="2" names="bottleneck4_1/dim_red/conv" precision="FP16">
23086 <dim>1</dim>
23087 <dim>64</dim>
23088 <dim>20</dim>
23089 <dim>34</dim>
23090 </port>
23091 </output>
23092 </layer>
23093 <layer id="1535" name="bottleneck4_1/dim_red/fn/weights31040401781250" type="Const" version="opset1">
23094 <data element_type="f32" offset="1576" shape="1" size="4"/>
23095 <output>
23096 <port id="0" precision="FP32">
23097 <dim>1</dim>
23098 </port>
23099 </output>
23100 </layer>
23101 <layer id="1536" name="bottleneck4_1/dim_red/fn" type="PReLU" version="opset1">
23102 <input>
23103 <port id="0">
23104 <dim>1</dim>
23105 <dim>64</dim>
23106 <dim>20</dim>
23107 <dim>34</dim>
23108 </port>
23109 <port id="1">
23110 <dim>1</dim>
23111 </port>
23112 </input>
23113 <output>
23114 <port id="2" names="bottleneck4_1/dim_red/conv" precision="FP16">
23115 <dim>1</dim>
23116 <dim>64</dim>
23117 <dim>20</dim>
23118 <dim>34</dim>
23119 </port>
23120 </output>
23121 </layer>
23122 <layer id="1537" name="bottleneck4_1/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
23123 <data auto_broadcast="numpy" levels="256"/>
23124 <input>
23125 <port id="0">
23126 <dim>1</dim>
23127 <dim>64</dim>
23128 <dim>20</dim>
23129 <dim>34</dim>
23130 </port>
23131 <port id="1">
23132 <dim>1</dim>
23133 <dim>64</dim>
23134 <dim>1</dim>
23135 <dim>1</dim>
23136 </port>
23137 <port id="2">
23138 <dim>1</dim>
23139 <dim>64</dim>
23140 <dim>1</dim>
23141 <dim>1</dim>
23142 </port>
23143 <port id="3">
23144 <dim>1</dim>
23145 <dim>64</dim>
23146 <dim>1</dim>
23147 <dim>1</dim>
23148 </port>
23149 <port id="4">
23150 <dim>1</dim>
23151 <dim>64</dim>
23152 <dim>1</dim>
23153 <dim>1</dim>
23154 </port>
23155 </input>
23156 <output>
23157 <port id="5" precision="FP16">
23158 <dim>1</dim>
23159 <dim>64</dim>
23160 <dim>20</dim>
23161 <dim>34</dim>
23162 </port>
23163 </output>
23164 </layer>
23165 <layer id="1538" name="16923/value1692520649" type="Const" version="opset1">
23166 <data element_type="i64" offset="189984" shape="5" size="40"/>
23167 <output>
23168 <port id="0" precision="I64">
23169 <dim>5</dim>
23170 </port>
23171 </output>
23172 </layer>
23173 <layer id="1539" name="bottleneck4_1/inner/dw1/bn/mean/Fused_Mul__copy125210316/quantized1379221036" type="Const" version="opset1">
23174 <data element_type="i8" offset="225936" shape="64,1,3,3" size="576"/>
23175 <output>
23176 <port id="0" precision="I8">
23177 <dim>64</dim>
23178 <dim>1</dim>
23179 <dim>3</dim>
23180 <dim>3</dim>
23181 </port>
23182 </output>
23183 </layer>
23184 <layer id="1540" name="bottleneck4_1/inner/dw1/bn/mean/Fused_Mul__copy125210316/quantized/to_f16" type="Convert" version="opset1">
23185 <data destination_type="f16"/>
23186 <input>
23187 <port id="0">
23188 <dim>64</dim>
23189 <dim>1</dim>
23190 <dim>3</dim>
23191 <dim>3</dim>
23192 </port>
23193 </input>
23194 <output>
23195 <port id="1" precision="FP16">
23196 <dim>64</dim>
23197 <dim>1</dim>
23198 <dim>3</dim>
23199 <dim>3</dim>
23200 </port>
23201 </output>
23202 </layer>
23203 <layer id="1541" name="bottleneck4_1/inner/dw1/conv/fq_weights_1/zero_point1380522875" type="Const" version="opset1">
23204 <data element_type="f16" offset="226512" shape="64,1,1,1" size="128"/>
23205 <output>
23206 <port id="0" precision="FP16">
23207 <dim>64</dim>
23208 <dim>1</dim>
23209 <dim>1</dim>
23210 <dim>1</dim>
23211 </port>
23212 </output>
23213 </layer>
23214 <layer id="1542" name="bottleneck4_1/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
23215 <data auto_broadcast="numpy"/>
23216 <input>
23217 <port id="0">
23218 <dim>64</dim>
23219 <dim>1</dim>
23220 <dim>3</dim>
23221 <dim>3</dim>
23222 </port>
23223 <port id="1">
23224 <dim>64</dim>
23225 <dim>1</dim>
23226 <dim>1</dim>
23227 <dim>1</dim>
23228 </port>
23229 </input>
23230 <output>
23231 <port id="2" precision="FP16">
23232 <dim>64</dim>
23233 <dim>1</dim>
23234 <dim>3</dim>
23235 <dim>3</dim>
23236 </port>
23237 </output>
23238 </layer>
23239 <layer id="1543" name="bottleneck4_1/inner/dw1/conv/fq_weights_1/scale1380021909" type="Const" version="opset1">
23240 <data element_type="f16" offset="226640" shape="64,1,1,1" size="128"/>
23241 <output>
23242 <port id="0" precision="FP16">
23243 <dim>64</dim>
23244 <dim>1</dim>
23245 <dim>1</dim>
23246 <dim>1</dim>
23247 </port>
23248 </output>
23249 </layer>
23250 <layer id="1544" name="bottleneck4_1/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
23251 <data auto_broadcast="numpy"/>
23252 <input>
23253 <port id="0">
23254 <dim>64</dim>
23255 <dim>1</dim>
23256 <dim>3</dim>
23257 <dim>3</dim>
23258 </port>
23259 <port id="1">
23260 <dim>64</dim>
23261 <dim>1</dim>
23262 <dim>1</dim>
23263 <dim>1</dim>
23264 </port>
23265 </input>
23266 <output>
23267 <port id="2" precision="FP16">
23268 <dim>64</dim>
23269 <dim>1</dim>
23270 <dim>3</dim>
23271 <dim>3</dim>
23272 </port>
23273 </output>
23274 </layer>
23275 <layer id="1545" name="16923" type="Reshape" version="opset1">
23276 <data special_zero="true"/>
23277 <input>
23278 <port id="0">
23279 <dim>64</dim>
23280 <dim>1</dim>
23281 <dim>3</dim>
23282 <dim>3</dim>
23283 </port>
23284 <port id="1">
23285 <dim>5</dim>
23286 </port>
23287 </input>
23288 <output>
23289 <port id="2" precision="FP16">
23290 <dim>64</dim>
23291 <dim>1</dim>
23292 <dim>1</dim>
23293 <dim>3</dim>
23294 <dim>3</dim>
23295 </port>
23296 </output>
23297 </layer>
23298 <layer id="1546" name="bottleneck4_1/inner/dw1/conv" type="GroupConvolution" version="opset1">
23299 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
23300 <input>
23301 <port id="0">
23302 <dim>1</dim>
23303 <dim>64</dim>
23304 <dim>20</dim>
23305 <dim>34</dim>
23306 </port>
23307 <port id="1">
23308 <dim>64</dim>
23309 <dim>1</dim>
23310 <dim>1</dim>
23311 <dim>3</dim>
23312 <dim>3</dim>
23313 </port>
23314 </input>
23315 <output>
23316 <port id="2" precision="FP16">
23317 <dim>1</dim>
23318 <dim>64</dim>
23319 <dim>20</dim>
23320 <dim>34</dim>
23321 </port>
23322 </output>
23323 </layer>
23324 <layer id="1547" name="data_add_2428924294125422251" type="Const" version="opset1">
23325 <data element_type="f16" offset="226768" shape="1,64,1,1" size="128"/>
23326 <output>
23327 <port id="0" precision="FP16">
23328 <dim>1</dim>
23329 <dim>64</dim>
23330 <dim>1</dim>
23331 <dim>1</dim>
23332 </port>
23333 </output>
23334 </layer>
23335 <layer id="1548" name="bottleneck4_1/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
23336 <data auto_broadcast="numpy"/>
23337 <input>
23338 <port id="0">
23339 <dim>1</dim>
23340 <dim>64</dim>
23341 <dim>20</dim>
23342 <dim>34</dim>
23343 </port>
23344 <port id="1">
23345 <dim>1</dim>
23346 <dim>64</dim>
23347 <dim>1</dim>
23348 <dim>1</dim>
23349 </port>
23350 </input>
23351 <output>
23352 <port id="2" names="bottleneck4_1/inner/dw1/conv" precision="FP16">
23353 <dim>1</dim>
23354 <dim>64</dim>
23355 <dim>20</dim>
23356 <dim>34</dim>
23357 </port>
23358 </output>
23359 </layer>
23360 <layer id="1549" name="bottleneck4_1/inner/dw1/fn/weights30964399921256" type="Const" version="opset1">
23361 <data element_type="f32" offset="1576" shape="1" size="4"/>
23362 <output>
23363 <port id="0" precision="FP32">
23364 <dim>1</dim>
23365 </port>
23366 </output>
23367 </layer>
23368 <layer id="1550" name="bottleneck4_1/inner/dw1/fn" type="PReLU" version="opset1">
23369 <input>
23370 <port id="0">
23371 <dim>1</dim>
23372 <dim>64</dim>
23373 <dim>20</dim>
23374 <dim>34</dim>
23375 </port>
23376 <port id="1">
23377 <dim>1</dim>
23378 </port>
23379 </input>
23380 <output>
23381 <port id="2" names="bottleneck4_1/inner/dw1/conv" precision="FP16">
23382 <dim>1</dim>
23383 <dim>64</dim>
23384 <dim>20</dim>
23385 <dim>34</dim>
23386 </port>
23387 </output>
23388 </layer>
23389 <layer id="1551" name="bottleneck4_1/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
23390 <data auto_broadcast="numpy" levels="256"/>
23391 <input>
23392 <port id="0">
23393 <dim>1</dim>
23394 <dim>64</dim>
23395 <dim>20</dim>
23396 <dim>34</dim>
23397 </port>
23398 <port id="1"/>
23399 <port id="2"/>
23400 <port id="3"/>
23401 <port id="4"/>
23402 </input>
23403 <output>
23404 <port id="5" precision="FP16">
23405 <dim>1</dim>
23406 <dim>64</dim>
23407 <dim>20</dim>
23408 <dim>34</dim>
23409 </port>
23410 </output>
23411 </layer>
23412 <layer id="1552" name="bottleneck4_1/dim_inc/bn/mean/Fused_Mul__copy125810319/quantized1249620772" type="Const" version="opset1">
23413 <data element_type="i8" offset="226896" shape="256,64,1,1" size="16384"/>
23414 <output>
23415 <port id="0" precision="I8">
23416 <dim>256</dim>
23417 <dim>64</dim>
23418 <dim>1</dim>
23419 <dim>1</dim>
23420 </port>
23421 </output>
23422 </layer>
23423 <layer id="1553" name="bottleneck4_1/dim_inc/bn/mean/Fused_Mul__copy125810319/quantized/to_f16" type="Convert" version="opset1">
23424 <data destination_type="f16"/>
23425 <input>
23426 <port id="0">
23427 <dim>256</dim>
23428 <dim>64</dim>
23429 <dim>1</dim>
23430 <dim>1</dim>
23431 </port>
23432 </input>
23433 <output>
23434 <port id="1" precision="FP16">
23435 <dim>256</dim>
23436 <dim>64</dim>
23437 <dim>1</dim>
23438 <dim>1</dim>
23439 </port>
23440 </output>
23441 </layer>
23442 <layer id="1554" name="bottleneck4_1/dim_inc/conv/fq_weights_1/zero_point1250921828" type="Const" version="opset1">
23443 <data element_type="f16" offset="243280" shape="256,1,1,1" size="512"/>
23444 <output>
23445 <port id="0" precision="FP16">
23446 <dim>256</dim>
23447 <dim>1</dim>
23448 <dim>1</dim>
23449 <dim>1</dim>
23450 </port>
23451 </output>
23452 </layer>
23453 <layer id="1555" name="bottleneck4_1/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
23454 <data auto_broadcast="numpy"/>
23455 <input>
23456 <port id="0">
23457 <dim>256</dim>
23458 <dim>64</dim>
23459 <dim>1</dim>
23460 <dim>1</dim>
23461 </port>
23462 <port id="1">
23463 <dim>256</dim>
23464 <dim>1</dim>
23465 <dim>1</dim>
23466 <dim>1</dim>
23467 </port>
23468 </input>
23469 <output>
23470 <port id="2" precision="FP16">
23471 <dim>256</dim>
23472 <dim>64</dim>
23473 <dim>1</dim>
23474 <dim>1</dim>
23475 </port>
23476 </output>
23477 </layer>
23478 <layer id="1556" name="bottleneck4_1/dim_inc/conv/fq_weights_1/scale1250422458" type="Const" version="opset1">
23479 <data element_type="f16" offset="243792" shape="256,1,1,1" size="512"/>
23480 <output>
23481 <port id="0" precision="FP16">
23482 <dim>256</dim>
23483 <dim>1</dim>
23484 <dim>1</dim>
23485 <dim>1</dim>
23486 </port>
23487 </output>
23488 </layer>
23489 <layer id="1557" name="bottleneck4_1/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
23490 <data auto_broadcast="numpy"/>
23491 <input>
23492 <port id="0">
23493 <dim>256</dim>
23494 <dim>64</dim>
23495 <dim>1</dim>
23496 <dim>1</dim>
23497 </port>
23498 <port id="1">
23499 <dim>256</dim>
23500 <dim>1</dim>
23501 <dim>1</dim>
23502 <dim>1</dim>
23503 </port>
23504 </input>
23505 <output>
23506 <port id="2" precision="FP16">
23507 <dim>256</dim>
23508 <dim>64</dim>
23509 <dim>1</dim>
23510 <dim>1</dim>
23511 </port>
23512 </output>
23513 </layer>
23514 <layer id="1558" name="bottleneck4_1/dim_inc/conv" type="Convolution" version="opset1">
23515 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
23516 <input>
23517 <port id="0">
23518 <dim>1</dim>
23519 <dim>64</dim>
23520 <dim>20</dim>
23521 <dim>34</dim>
23522 </port>
23523 <port id="1">
23524 <dim>256</dim>
23525 <dim>64</dim>
23526 <dim>1</dim>
23527 <dim>1</dim>
23528 </port>
23529 </input>
23530 <output>
23531 <port id="2" precision="FP16">
23532 <dim>1</dim>
23533 <dim>256</dim>
23534 <dim>20</dim>
23535 <dim>34</dim>
23536 </port>
23537 </output>
23538 </layer>
23539 <layer id="1559" name="data_add_2429724302126020523" type="Const" version="opset1">
23540 <data element_type="f16" offset="244304" shape="1,256,1,1" size="512"/>
23541 <output>
23542 <port id="0" precision="FP16">
23543 <dim>1</dim>
23544 <dim>256</dim>
23545 <dim>1</dim>
23546 <dim>1</dim>
23547 </port>
23548 </output>
23549 </layer>
23550 <layer id="1560" name="bottleneck4_1/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
23551 <data auto_broadcast="numpy"/>
23552 <input>
23553 <port id="0">
23554 <dim>1</dim>
23555 <dim>256</dim>
23556 <dim>20</dim>
23557 <dim>34</dim>
23558 </port>
23559 <port id="1">
23560 <dim>1</dim>
23561 <dim>256</dim>
23562 <dim>1</dim>
23563 <dim>1</dim>
23564 </port>
23565 </input>
23566 <output>
23567 <port id="2" names="bottleneck4_1/dim_inc/conv" precision="FP16">
23568 <dim>1</dim>
23569 <dim>256</dim>
23570 <dim>20</dim>
23571 <dim>34</dim>
23572 </port>
23573 </output>
23574 </layer>
23575 <layer id="1561" name="bottleneck4_1/add/fq_input_1" type="FakeQuantize" version="opset1">
23576 <data auto_broadcast="numpy" levels="256"/>
23577 <input>
23578 <port id="0">
23579 <dim>1</dim>
23580 <dim>256</dim>
23581 <dim>20</dim>
23582 <dim>34</dim>
23583 </port>
23584 <port id="1"/>
23585 <port id="2"/>
23586 <port id="3"/>
23587 <port id="4"/>
23588 </input>
23589 <output>
23590 <port id="5" precision="FP16">
23591 <dim>1</dim>
23592 <dim>256</dim>
23593 <dim>20</dim>
23594 <dim>34</dim>
23595 </port>
23596 </output>
23597 </layer>
23598 <layer id="1562" name="bottleneck4_1/add" type="Add" version="opset1">
23599 <data auto_broadcast="numpy"/>
23600 <input>
23601 <port id="0">
23602 <dim>1</dim>
23603 <dim>256</dim>
23604 <dim>20</dim>
23605 <dim>34</dim>
23606 </port>
23607 <port id="1">
23608 <dim>1</dim>
23609 <dim>256</dim>
23610 <dim>20</dim>
23611 <dim>34</dim>
23612 </port>
23613 </input>
23614 <output>
23615 <port id="2" names="bottleneck4_1/add" precision="FP16">
23616 <dim>1</dim>
23617 <dim>256</dim>
23618 <dim>20</dim>
23619 <dim>34</dim>
23620 </port>
23621 </output>
23622 </layer>
23623 <layer id="1563" name="bottleneck4_1/fn/weights31132403611263" type="Const" version="opset1">
23624 <data element_type="f32" offset="1576" shape="1" size="4"/>
23625 <output>
23626 <port id="0" precision="FP32">
23627 <dim>1</dim>
23628 </port>
23629 </output>
23630 </layer>
23631 <layer id="1564" name="bottleneck4_1/fn" type="PReLU" version="opset1">
23632 <input>
23633 <port id="0">
23634 <dim>1</dim>
23635 <dim>256</dim>
23636 <dim>20</dim>
23637 <dim>34</dim>
23638 </port>
23639 <port id="1">
23640 <dim>1</dim>
23641 </port>
23642 </input>
23643 <output>
23644 <port id="2" names="bottleneck4_1/add" precision="FP16">
23645 <dim>1</dim>
23646 <dim>256</dim>
23647 <dim>20</dim>
23648 <dim>34</dim>
23649 </port>
23650 </output>
23651 </layer>
23652 <layer id="1565" name="bottleneck4_2/add/fq_input_0" type="FakeQuantize" version="opset1">
23653 <data auto_broadcast="numpy" levels="256"/>
23654 <input>
23655 <port id="0">
23656 <dim>1</dim>
23657 <dim>256</dim>
23658 <dim>20</dim>
23659 <dim>34</dim>
23660 </port>
23661 <port id="1"/>
23662 <port id="2"/>
23663 <port id="3"/>
23664 <port id="4"/>
23665 </input>
23666 <output>
23667 <port id="5" precision="FP16">
23668 <dim>1</dim>
23669 <dim>256</dim>
23670 <dim>20</dim>
23671 <dim>34</dim>
23672 </port>
23673 </output>
23674 </layer>
23675 <layer id="1566" name="2694269819551" type="Const" version="opset1">
23676 <data element_type="f16" offset="244816" shape="" size="2"/>
23677 <output>
23678 <port id="0" precision="FP16"/>
23679 </output>
23680 </layer>
23681 <layer id="1567" name="2695269921813" type="Const" version="opset1">
23682 <data element_type="f16" offset="244818" shape="" size="2"/>
23683 <output>
23684 <port id="0" precision="FP16"/>
23685 </output>
23686 </layer>
23687 <layer id="1568" name="2696270020040" type="Const" version="opset1">
23688 <data element_type="f16" offset="244816" shape="" size="2"/>
23689 <output>
23690 <port id="0" precision="FP16"/>
23691 </output>
23692 </layer>
23693 <layer id="1569" name="2697270122179" type="Const" version="opset1">
23694 <data element_type="f16" offset="244818" shape="" size="2"/>
23695 <output>
23696 <port id="0" precision="FP16"/>
23697 </output>
23698 </layer>
23699 <layer id="1570" name="4644464821657" type="Const" version="opset1">
23700 <data element_type="f16" offset="244820" shape="" size="2"/>
23701 <output>
23702 <port id="0" precision="FP16"/>
23703 </output>
23704 </layer>
23705 <layer id="1571" name="4645464920733" type="Const" version="opset1">
23706 <data element_type="f16" offset="244822" shape="" size="2"/>
23707 <output>
23708 <port id="0" precision="FP16"/>
23709 </output>
23710 </layer>
23711 <layer id="1572" name="4646465022779" type="Const" version="opset1">
23712 <data element_type="f16" offset="244820" shape="" size="2"/>
23713 <output>
23714 <port id="0" precision="FP16"/>
23715 </output>
23716 </layer>
23717 <layer id="1573" name="4647465121354" type="Const" version="opset1">
23718 <data element_type="f16" offset="244822" shape="" size="2"/>
23719 <output>
23720 <port id="0" precision="FP16"/>
23721 </output>
23722 </layer>
23723 <layer id="1574" name="4044404822188" type="Const" version="opset1">
23724 <data element_type="f16" offset="244824" shape="1,64,1,1" size="128"/>
23725 <output>
23726 <port id="0" precision="FP16">
23727 <dim>1</dim>
23728 <dim>64</dim>
23729 <dim>1</dim>
23730 <dim>1</dim>
23731 </port>
23732 </output>
23733 </layer>
23734 <layer id="1575" name="4045404920319" type="Const" version="opset1">
23735 <data element_type="f16" offset="244952" shape="1,64,1,1" size="128"/>
23736 <output>
23737 <port id="0" precision="FP16">
23738 <dim>1</dim>
23739 <dim>64</dim>
23740 <dim>1</dim>
23741 <dim>1</dim>
23742 </port>
23743 </output>
23744 </layer>
23745 <layer id="1576" name="4046405019905" type="Const" version="opset1">
23746 <data element_type="f16" offset="244824" shape="1,64,1,1" size="128"/>
23747 <output>
23748 <port id="0" precision="FP16">
23749 <dim>1</dim>
23750 <dim>64</dim>
23751 <dim>1</dim>
23752 <dim>1</dim>
23753 </port>
23754 </output>
23755 </layer>
23756 <layer id="1577" name="4047405122782" type="Const" version="opset1">
23757 <data element_type="f16" offset="244952" shape="1,64,1,1" size="128"/>
23758 <output>
23759 <port id="0" precision="FP16">
23760 <dim>1</dim>
23761 <dim>64</dim>
23762 <dim>1</dim>
23763 <dim>1</dim>
23764 </port>
23765 </output>
23766 </layer>
23767 <layer id="1578" name="bottleneck4_2/dim_red/bn/mean/Fused_Mul__copy126510322/quantized1242421189" type="Const" version="opset1">
23768 <data element_type="i8" offset="245080" shape="64,256,1,1" size="16384"/>
23769 <output>
23770 <port id="0" precision="I8">
23771 <dim>64</dim>
23772 <dim>256</dim>
23773 <dim>1</dim>
23774 <dim>1</dim>
23775 </port>
23776 </output>
23777 </layer>
23778 <layer id="1579" name="bottleneck4_2/dim_red/bn/mean/Fused_Mul__copy126510322/quantized/to_f16" type="Convert" version="opset1">
23779 <data destination_type="f16"/>
23780 <input>
23781 <port id="0">
23782 <dim>64</dim>
23783 <dim>256</dim>
23784 <dim>1</dim>
23785 <dim>1</dim>
23786 </port>
23787 </input>
23788 <output>
23789 <port id="1" precision="FP16">
23790 <dim>64</dim>
23791 <dim>256</dim>
23792 <dim>1</dim>
23793 <dim>1</dim>
23794 </port>
23795 </output>
23796 </layer>
23797 <layer id="1580" name="bottleneck4_2/dim_red/conv/fq_weights_1/zero_point1243720139" type="Const" version="opset1">
23798 <data element_type="f16" offset="261464" shape="64,1,1,1" size="128"/>
23799 <output>
23800 <port id="0" precision="FP16">
23801 <dim>64</dim>
23802 <dim>1</dim>
23803 <dim>1</dim>
23804 <dim>1</dim>
23805 </port>
23806 </output>
23807 </layer>
23808 <layer id="1581" name="bottleneck4_2/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
23809 <data auto_broadcast="numpy"/>
23810 <input>
23811 <port id="0">
23812 <dim>64</dim>
23813 <dim>256</dim>
23814 <dim>1</dim>
23815 <dim>1</dim>
23816 </port>
23817 <port id="1">
23818 <dim>64</dim>
23819 <dim>1</dim>
23820 <dim>1</dim>
23821 <dim>1</dim>
23822 </port>
23823 </input>
23824 <output>
23825 <port id="2" precision="FP16">
23826 <dim>64</dim>
23827 <dim>256</dim>
23828 <dim>1</dim>
23829 <dim>1</dim>
23830 </port>
23831 </output>
23832 </layer>
23833 <layer id="1582" name="bottleneck4_2/dim_red/conv/fq_weights_1/scale1243221885" type="Const" version="opset1">
23834 <data element_type="f16" offset="261592" shape="64,1,1,1" size="128"/>
23835 <output>
23836 <port id="0" precision="FP16">
23837 <dim>64</dim>
23838 <dim>1</dim>
23839 <dim>1</dim>
23840 <dim>1</dim>
23841 </port>
23842 </output>
23843 </layer>
23844 <layer id="1583" name="bottleneck4_2/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
23845 <data auto_broadcast="numpy"/>
23846 <input>
23847 <port id="0">
23848 <dim>64</dim>
23849 <dim>256</dim>
23850 <dim>1</dim>
23851 <dim>1</dim>
23852 </port>
23853 <port id="1">
23854 <dim>64</dim>
23855 <dim>1</dim>
23856 <dim>1</dim>
23857 <dim>1</dim>
23858 </port>
23859 </input>
23860 <output>
23861 <port id="2" precision="FP16">
23862 <dim>64</dim>
23863 <dim>256</dim>
23864 <dim>1</dim>
23865 <dim>1</dim>
23866 </port>
23867 </output>
23868 </layer>
23869 <layer id="1584" name="bottleneck4_2/dim_red/conv" type="Convolution" version="opset1">
23870 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
23871 <input>
23872 <port id="0">
23873 <dim>1</dim>
23874 <dim>256</dim>
23875 <dim>20</dim>
23876 <dim>34</dim>
23877 </port>
23878 <port id="1">
23879 <dim>64</dim>
23880 <dim>256</dim>
23881 <dim>1</dim>
23882 <dim>1</dim>
23883 </port>
23884 </input>
23885 <output>
23886 <port id="2" precision="FP16">
23887 <dim>1</dim>
23888 <dim>64</dim>
23889 <dim>20</dim>
23890 <dim>34</dim>
23891 </port>
23892 </output>
23893 </layer>
23894 <layer id="1585" name="data_add_2430524310126719380" type="Const" version="opset1">
23895 <data element_type="f16" offset="261720" shape="1,64,1,1" size="128"/>
23896 <output>
23897 <port id="0" precision="FP16">
23898 <dim>1</dim>
23899 <dim>64</dim>
23900 <dim>1</dim>
23901 <dim>1</dim>
23902 </port>
23903 </output>
23904 </layer>
23905 <layer id="1586" name="bottleneck4_2/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
23906 <data auto_broadcast="numpy"/>
23907 <input>
23908 <port id="0">
23909 <dim>1</dim>
23910 <dim>64</dim>
23911 <dim>20</dim>
23912 <dim>34</dim>
23913 </port>
23914 <port id="1">
23915 <dim>1</dim>
23916 <dim>64</dim>
23917 <dim>1</dim>
23918 <dim>1</dim>
23919 </port>
23920 </input>
23921 <output>
23922 <port id="2" names="bottleneck4_2/dim_red/conv" precision="FP16">
23923 <dim>1</dim>
23924 <dim>64</dim>
23925 <dim>20</dim>
23926 <dim>34</dim>
23927 </port>
23928 </output>
23929 </layer>
23930 <layer id="1587" name="bottleneck4_2/dim_red/fn/weights30812398181269" type="Const" version="opset1">
23931 <data element_type="f32" offset="1576" shape="1" size="4"/>
23932 <output>
23933 <port id="0" precision="FP32">
23934 <dim>1</dim>
23935 </port>
23936 </output>
23937 </layer>
23938 <layer id="1588" name="bottleneck4_2/dim_red/fn" type="PReLU" version="opset1">
23939 <input>
23940 <port id="0">
23941 <dim>1</dim>
23942 <dim>64</dim>
23943 <dim>20</dim>
23944 <dim>34</dim>
23945 </port>
23946 <port id="1">
23947 <dim>1</dim>
23948 </port>
23949 </input>
23950 <output>
23951 <port id="2" names="bottleneck4_2/dim_red/conv" precision="FP16">
23952 <dim>1</dim>
23953 <dim>64</dim>
23954 <dim>20</dim>
23955 <dim>34</dim>
23956 </port>
23957 </output>
23958 </layer>
23959 <layer id="1589" name="bottleneck4_2/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
23960 <data auto_broadcast="numpy" levels="256"/>
23961 <input>
23962 <port id="0">
23963 <dim>1</dim>
23964 <dim>64</dim>
23965 <dim>20</dim>
23966 <dim>34</dim>
23967 </port>
23968 <port id="1">
23969 <dim>1</dim>
23970 <dim>64</dim>
23971 <dim>1</dim>
23972 <dim>1</dim>
23973 </port>
23974 <port id="2">
23975 <dim>1</dim>
23976 <dim>64</dim>
23977 <dim>1</dim>
23978 <dim>1</dim>
23979 </port>
23980 <port id="3">
23981 <dim>1</dim>
23982 <dim>64</dim>
23983 <dim>1</dim>
23984 <dim>1</dim>
23985 </port>
23986 <port id="4">
23987 <dim>1</dim>
23988 <dim>64</dim>
23989 <dim>1</dim>
23990 <dim>1</dim>
23991 </port>
23992 </input>
23993 <output>
23994 <port id="5" precision="FP16">
23995 <dim>1</dim>
23996 <dim>64</dim>
23997 <dim>20</dim>
23998 <dim>34</dim>
23999 </port>
24000 </output>
24001 </layer>
24002 <layer id="1590" name="16867/value1686920148" type="Const" version="opset1">
24003 <data element_type="i64" offset="189984" shape="5" size="40"/>
24004 <output>
24005 <port id="0" precision="I64">
24006 <dim>5</dim>
24007 </port>
24008 </output>
24009 </layer>
24010 <layer id="1591" name="bottleneck4_2/inner/dw1/bn/mean/Fused_Mul__copy127110325/quantized1175222380" type="Const" version="opset1">
24011 <data element_type="i8" offset="261848" shape="64,1,3,3" size="576"/>
24012 <output>
24013 <port id="0" precision="I8">
24014 <dim>64</dim>
24015 <dim>1</dim>
24016 <dim>3</dim>
24017 <dim>3</dim>
24018 </port>
24019 </output>
24020 </layer>
24021 <layer id="1592" name="bottleneck4_2/inner/dw1/bn/mean/Fused_Mul__copy127110325/quantized/to_f16" type="Convert" version="opset1">
24022 <data destination_type="f16"/>
24023 <input>
24024 <port id="0">
24025 <dim>64</dim>
24026 <dim>1</dim>
24027 <dim>3</dim>
24028 <dim>3</dim>
24029 </port>
24030 </input>
24031 <output>
24032 <port id="1" precision="FP16">
24033 <dim>64</dim>
24034 <dim>1</dim>
24035 <dim>3</dim>
24036 <dim>3</dim>
24037 </port>
24038 </output>
24039 </layer>
24040 <layer id="1593" name="bottleneck4_2/inner/dw1/conv/fq_weights_1/zero_point1176521537" type="Const" version="opset1">
24041 <data element_type="f16" offset="262424" shape="64,1,1,1" size="128"/>
24042 <output>
24043 <port id="0" precision="FP16">
24044 <dim>64</dim>
24045 <dim>1</dim>
24046 <dim>1</dim>
24047 <dim>1</dim>
24048 </port>
24049 </output>
24050 </layer>
24051 <layer id="1594" name="bottleneck4_2/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
24052 <data auto_broadcast="numpy"/>
24053 <input>
24054 <port id="0">
24055 <dim>64</dim>
24056 <dim>1</dim>
24057 <dim>3</dim>
24058 <dim>3</dim>
24059 </port>
24060 <port id="1">
24061 <dim>64</dim>
24062 <dim>1</dim>
24063 <dim>1</dim>
24064 <dim>1</dim>
24065 </port>
24066 </input>
24067 <output>
24068 <port id="2" precision="FP16">
24069 <dim>64</dim>
24070 <dim>1</dim>
24071 <dim>3</dim>
24072 <dim>3</dim>
24073 </port>
24074 </output>
24075 </layer>
24076 <layer id="1595" name="bottleneck4_2/inner/dw1/conv/fq_weights_1/scale1176022944" type="Const" version="opset1">
24077 <data element_type="f16" offset="262552" shape="64,1,1,1" size="128"/>
24078 <output>
24079 <port id="0" precision="FP16">
24080 <dim>64</dim>
24081 <dim>1</dim>
24082 <dim>1</dim>
24083 <dim>1</dim>
24084 </port>
24085 </output>
24086 </layer>
24087 <layer id="1596" name="bottleneck4_2/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
24088 <data auto_broadcast="numpy"/>
24089 <input>
24090 <port id="0">
24091 <dim>64</dim>
24092 <dim>1</dim>
24093 <dim>3</dim>
24094 <dim>3</dim>
24095 </port>
24096 <port id="1">
24097 <dim>64</dim>
24098 <dim>1</dim>
24099 <dim>1</dim>
24100 <dim>1</dim>
24101 </port>
24102 </input>
24103 <output>
24104 <port id="2" precision="FP16">
24105 <dim>64</dim>
24106 <dim>1</dim>
24107 <dim>3</dim>
24108 <dim>3</dim>
24109 </port>
24110 </output>
24111 </layer>
24112 <layer id="1597" name="16867" type="Reshape" version="opset1">
24113 <data special_zero="true"/>
24114 <input>
24115 <port id="0">
24116 <dim>64</dim>
24117 <dim>1</dim>
24118 <dim>3</dim>
24119 <dim>3</dim>
24120 </port>
24121 <port id="1">
24122 <dim>5</dim>
24123 </port>
24124 </input>
24125 <output>
24126 <port id="2" precision="FP16">
24127 <dim>64</dim>
24128 <dim>1</dim>
24129 <dim>1</dim>
24130 <dim>3</dim>
24131 <dim>3</dim>
24132 </port>
24133 </output>
24134 </layer>
24135 <layer id="1598" name="bottleneck4_2/inner/dw1/conv" type="GroupConvolution" version="opset1">
24136 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
24137 <input>
24138 <port id="0">
24139 <dim>1</dim>
24140 <dim>64</dim>
24141 <dim>20</dim>
24142 <dim>34</dim>
24143 </port>
24144 <port id="1">
24145 <dim>64</dim>
24146 <dim>1</dim>
24147 <dim>1</dim>
24148 <dim>3</dim>
24149 <dim>3</dim>
24150 </port>
24151 </input>
24152 <output>
24153 <port id="2" precision="FP16">
24154 <dim>1</dim>
24155 <dim>64</dim>
24156 <dim>20</dim>
24157 <dim>34</dim>
24158 </port>
24159 </output>
24160 </layer>
24161 <layer id="1599" name="data_add_2431324318127320151" type="Const" version="opset1">
24162 <data element_type="f16" offset="262680" shape="1,64,1,1" size="128"/>
24163 <output>
24164 <port id="0" precision="FP16">
24165 <dim>1</dim>
24166 <dim>64</dim>
24167 <dim>1</dim>
24168 <dim>1</dim>
24169 </port>
24170 </output>
24171 </layer>
24172 <layer id="1600" name="bottleneck4_2/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
24173 <data auto_broadcast="numpy"/>
24174 <input>
24175 <port id="0">
24176 <dim>1</dim>
24177 <dim>64</dim>
24178 <dim>20</dim>
24179 <dim>34</dim>
24180 </port>
24181 <port id="1">
24182 <dim>1</dim>
24183 <dim>64</dim>
24184 <dim>1</dim>
24185 <dim>1</dim>
24186 </port>
24187 </input>
24188 <output>
24189 <port id="2" names="bottleneck4_2/inner/dw1/conv" precision="FP16">
24190 <dim>1</dim>
24191 <dim>64</dim>
24192 <dim>20</dim>
24193 <dim>34</dim>
24194 </port>
24195 </output>
24196 </layer>
24197 <layer id="1601" name="bottleneck4_2/inner/dw1/fn/weights30960401241275" type="Const" version="opset1">
24198 <data element_type="f32" offset="1576" shape="1" size="4"/>
24199 <output>
24200 <port id="0" precision="FP32">
24201 <dim>1</dim>
24202 </port>
24203 </output>
24204 </layer>
24205 <layer id="1602" name="bottleneck4_2/inner/dw1/fn" type="PReLU" version="opset1">
24206 <input>
24207 <port id="0">
24208 <dim>1</dim>
24209 <dim>64</dim>
24210 <dim>20</dim>
24211 <dim>34</dim>
24212 </port>
24213 <port id="1">
24214 <dim>1</dim>
24215 </port>
24216 </input>
24217 <output>
24218 <port id="2" names="bottleneck4_2/inner/dw1/conv" precision="FP16">
24219 <dim>1</dim>
24220 <dim>64</dim>
24221 <dim>20</dim>
24222 <dim>34</dim>
24223 </port>
24224 </output>
24225 </layer>
24226 <layer id="1603" name="bottleneck4_2/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
24227 <data auto_broadcast="numpy" levels="256"/>
24228 <input>
24229 <port id="0">
24230 <dim>1</dim>
24231 <dim>64</dim>
24232 <dim>20</dim>
24233 <dim>34</dim>
24234 </port>
24235 <port id="1"/>
24236 <port id="2"/>
24237 <port id="3"/>
24238 <port id="4"/>
24239 </input>
24240 <output>
24241 <port id="5" precision="FP16">
24242 <dim>1</dim>
24243 <dim>64</dim>
24244 <dim>20</dim>
24245 <dim>34</dim>
24246 </port>
24247 </output>
24248 </layer>
24249 <layer id="1604" name="bottleneck4_2/dim_inc/bn/mean/Fused_Mul__copy127710328/quantized1343219902" type="Const" version="opset1">
24250 <data element_type="i8" offset="262808" shape="256,64,1,1" size="16384"/>
24251 <output>
24252 <port id="0" precision="I8">
24253 <dim>256</dim>
24254 <dim>64</dim>
24255 <dim>1</dim>
24256 <dim>1</dim>
24257 </port>
24258 </output>
24259 </layer>
24260 <layer id="1605" name="bottleneck4_2/dim_inc/bn/mean/Fused_Mul__copy127710328/quantized/to_f16" type="Convert" version="opset1">
24261 <data destination_type="f16"/>
24262 <input>
24263 <port id="0">
24264 <dim>256</dim>
24265 <dim>64</dim>
24266 <dim>1</dim>
24267 <dim>1</dim>
24268 </port>
24269 </input>
24270 <output>
24271 <port id="1" precision="FP16">
24272 <dim>256</dim>
24273 <dim>64</dim>
24274 <dim>1</dim>
24275 <dim>1</dim>
24276 </port>
24277 </output>
24278 </layer>
24279 <layer id="1606" name="bottleneck4_2/dim_inc/conv/fq_weights_1/zero_point1344521468" type="Const" version="opset1">
24280 <data element_type="f16" offset="279192" shape="256,1,1,1" size="512"/>
24281 <output>
24282 <port id="0" precision="FP16">
24283 <dim>256</dim>
24284 <dim>1</dim>
24285 <dim>1</dim>
24286 <dim>1</dim>
24287 </port>
24288 </output>
24289 </layer>
24290 <layer id="1607" name="bottleneck4_2/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
24291 <data auto_broadcast="numpy"/>
24292 <input>
24293 <port id="0">
24294 <dim>256</dim>
24295 <dim>64</dim>
24296 <dim>1</dim>
24297 <dim>1</dim>
24298 </port>
24299 <port id="1">
24300 <dim>256</dim>
24301 <dim>1</dim>
24302 <dim>1</dim>
24303 <dim>1</dim>
24304 </port>
24305 </input>
24306 <output>
24307 <port id="2" precision="FP16">
24308 <dim>256</dim>
24309 <dim>64</dim>
24310 <dim>1</dim>
24311 <dim>1</dim>
24312 </port>
24313 </output>
24314 </layer>
24315 <layer id="1608" name="bottleneck4_2/dim_inc/conv/fq_weights_1/scale1344020652" type="Const" version="opset1">
24316 <data element_type="f16" offset="279704" shape="256,1,1,1" size="512"/>
24317 <output>
24318 <port id="0" precision="FP16">
24319 <dim>256</dim>
24320 <dim>1</dim>
24321 <dim>1</dim>
24322 <dim>1</dim>
24323 </port>
24324 </output>
24325 </layer>
24326 <layer id="1609" name="bottleneck4_2/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
24327 <data auto_broadcast="numpy"/>
24328 <input>
24329 <port id="0">
24330 <dim>256</dim>
24331 <dim>64</dim>
24332 <dim>1</dim>
24333 <dim>1</dim>
24334 </port>
24335 <port id="1">
24336 <dim>256</dim>
24337 <dim>1</dim>
24338 <dim>1</dim>
24339 <dim>1</dim>
24340 </port>
24341 </input>
24342 <output>
24343 <port id="2" precision="FP16">
24344 <dim>256</dim>
24345 <dim>64</dim>
24346 <dim>1</dim>
24347 <dim>1</dim>
24348 </port>
24349 </output>
24350 </layer>
24351 <layer id="1610" name="bottleneck4_2/dim_inc/conv" type="Convolution" version="opset1">
24352 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
24353 <input>
24354 <port id="0">
24355 <dim>1</dim>
24356 <dim>64</dim>
24357 <dim>20</dim>
24358 <dim>34</dim>
24359 </port>
24360 <port id="1">
24361 <dim>256</dim>
24362 <dim>64</dim>
24363 <dim>1</dim>
24364 <dim>1</dim>
24365 </port>
24366 </input>
24367 <output>
24368 <port id="2" precision="FP16">
24369 <dim>1</dim>
24370 <dim>256</dim>
24371 <dim>20</dim>
24372 <dim>34</dim>
24373 </port>
24374 </output>
24375 </layer>
24376 <layer id="1611" name="data_add_2432124326127920925" type="Const" version="opset1">
24377 <data element_type="f16" offset="280216" shape="1,256,1,1" size="512"/>
24378 <output>
24379 <port id="0" precision="FP16">
24380 <dim>1</dim>
24381 <dim>256</dim>
24382 <dim>1</dim>
24383 <dim>1</dim>
24384 </port>
24385 </output>
24386 </layer>
24387 <layer id="1612" name="bottleneck4_2/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
24388 <data auto_broadcast="numpy"/>
24389 <input>
24390 <port id="0">
24391 <dim>1</dim>
24392 <dim>256</dim>
24393 <dim>20</dim>
24394 <dim>34</dim>
24395 </port>
24396 <port id="1">
24397 <dim>1</dim>
24398 <dim>256</dim>
24399 <dim>1</dim>
24400 <dim>1</dim>
24401 </port>
24402 </input>
24403 <output>
24404 <port id="2" names="bottleneck4_2/dim_inc/conv" precision="FP16">
24405 <dim>1</dim>
24406 <dim>256</dim>
24407 <dim>20</dim>
24408 <dim>34</dim>
24409 </port>
24410 </output>
24411 </layer>
24412 <layer id="1613" name="bottleneck4_2/add/fq_input_1" type="FakeQuantize" version="opset1">
24413 <data auto_broadcast="numpy" levels="256"/>
24414 <input>
24415 <port id="0">
24416 <dim>1</dim>
24417 <dim>256</dim>
24418 <dim>20</dim>
24419 <dim>34</dim>
24420 </port>
24421 <port id="1"/>
24422 <port id="2"/>
24423 <port id="3"/>
24424 <port id="4"/>
24425 </input>
24426 <output>
24427 <port id="5" precision="FP16">
24428 <dim>1</dim>
24429 <dim>256</dim>
24430 <dim>20</dim>
24431 <dim>34</dim>
24432 </port>
24433 </output>
24434 </layer>
24435 <layer id="1614" name="bottleneck4_2/add" type="Add" version="opset1">
24436 <data auto_broadcast="numpy"/>
24437 <input>
24438 <port id="0">
24439 <dim>1</dim>
24440 <dim>256</dim>
24441 <dim>20</dim>
24442 <dim>34</dim>
24443 </port>
24444 <port id="1">
24445 <dim>1</dim>
24446 <dim>256</dim>
24447 <dim>20</dim>
24448 <dim>34</dim>
24449 </port>
24450 </input>
24451 <output>
24452 <port id="2" names="bottleneck4_2/add" precision="FP16">
24453 <dim>1</dim>
24454 <dim>256</dim>
24455 <dim>20</dim>
24456 <dim>34</dim>
24457 </port>
24458 </output>
24459 </layer>
24460 <layer id="1615" name="bottleneck4_2/fn/weights31076404931282" type="Const" version="opset1">
24461 <data element_type="f32" offset="1576" shape="1" size="4"/>
24462 <output>
24463 <port id="0" precision="FP32">
24464 <dim>1</dim>
24465 </port>
24466 </output>
24467 </layer>
24468 <layer id="1616" name="bottleneck4_2/fn" type="PReLU" version="opset1">
24469 <input>
24470 <port id="0">
24471 <dim>1</dim>
24472 <dim>256</dim>
24473 <dim>20</dim>
24474 <dim>34</dim>
24475 </port>
24476 <port id="1">
24477 <dim>1</dim>
24478 </port>
24479 </input>
24480 <output>
24481 <port id="2" names="bottleneck4_2/add" precision="FP16">
24482 <dim>1</dim>
24483 <dim>256</dim>
24484 <dim>20</dim>
24485 <dim>34</dim>
24486 </port>
24487 </output>
24488 </layer>
24489 <layer id="1617" name="bottleneck4_3/add/fq_input_0" type="FakeQuantize" version="opset1">
24490 <data auto_broadcast="numpy" levels="256"/>
24491 <input>
24492 <port id="0">
24493 <dim>1</dim>
24494 <dim>256</dim>
24495 <dim>20</dim>
24496 <dim>34</dim>
24497 </port>
24498 <port id="1"/>
24499 <port id="2"/>
24500 <port id="3"/>
24501 <port id="4"/>
24502 </input>
24503 <output>
24504 <port id="5" precision="FP16">
24505 <dim>1</dim>
24506 <dim>256</dim>
24507 <dim>20</dim>
24508 <dim>34</dim>
24509 </port>
24510 </output>
24511 </layer>
24512 <layer id="1618" name="4194419821612" type="Const" version="opset1">
24513 <data element_type="f16" offset="280728" shape="" size="2"/>
24514 <output>
24515 <port id="0" precision="FP16"/>
24516 </output>
24517 </layer>
24518 <layer id="1619" name="4195419922626" type="Const" version="opset1">
24519 <data element_type="f16" offset="280730" shape="" size="2"/>
24520 <output>
24521 <port id="0" precision="FP16"/>
24522 </output>
24523 </layer>
24524 <layer id="1620" name="4196420022434" type="Const" version="opset1">
24525 <data element_type="f16" offset="280728" shape="" size="2"/>
24526 <output>
24527 <port id="0" precision="FP16"/>
24528 </output>
24529 </layer>
24530 <layer id="1621" name="4197420119560" type="Const" version="opset1">
24531 <data element_type="f16" offset="280730" shape="" size="2"/>
24532 <output>
24533 <port id="0" precision="FP16"/>
24534 </output>
24535 </layer>
24536 <layer id="1622" name="3664366821177" type="Const" version="opset1">
24537 <data element_type="f16" offset="280732" shape="" size="2"/>
24538 <output>
24539 <port id="0" precision="FP16"/>
24540 </output>
24541 </layer>
24542 <layer id="1623" name="3665366922953" type="Const" version="opset1">
24543 <data element_type="f16" offset="280734" shape="" size="2"/>
24544 <output>
24545 <port id="0" precision="FP16"/>
24546 </output>
24547 </layer>
24548 <layer id="1624" name="3666367021291" type="Const" version="opset1">
24549 <data element_type="f16" offset="280732" shape="" size="2"/>
24550 <output>
24551 <port id="0" precision="FP16"/>
24552 </output>
24553 </layer>
24554 <layer id="1625" name="3667367122464" type="Const" version="opset1">
24555 <data element_type="f16" offset="280734" shape="" size="2"/>
24556 <output>
24557 <port id="0" precision="FP16"/>
24558 </output>
24559 </layer>
24560 <layer id="1626" name="4804480820217" type="Const" version="opset1">
24561 <data element_type="f16" offset="280736" shape="1,64,1,1" size="128"/>
24562 <output>
24563 <port id="0" precision="FP16">
24564 <dim>1</dim>
24565 <dim>64</dim>
24566 <dim>1</dim>
24567 <dim>1</dim>
24568 </port>
24569 </output>
24570 </layer>
24571 <layer id="1627" name="4805480921045" type="Const" version="opset1">
24572 <data element_type="f16" offset="280864" shape="1,64,1,1" size="128"/>
24573 <output>
24574 <port id="0" precision="FP16">
24575 <dim>1</dim>
24576 <dim>64</dim>
24577 <dim>1</dim>
24578 <dim>1</dim>
24579 </port>
24580 </output>
24581 </layer>
24582 <layer id="1628" name="4806481021951" type="Const" version="opset1">
24583 <data element_type="f16" offset="280736" shape="1,64,1,1" size="128"/>
24584 <output>
24585 <port id="0" precision="FP16">
24586 <dim>1</dim>
24587 <dim>64</dim>
24588 <dim>1</dim>
24589 <dim>1</dim>
24590 </port>
24591 </output>
24592 </layer>
24593 <layer id="1629" name="4807481121279" type="Const" version="opset1">
24594 <data element_type="f16" offset="280864" shape="1,64,1,1" size="128"/>
24595 <output>
24596 <port id="0" precision="FP16">
24597 <dim>1</dim>
24598 <dim>64</dim>
24599 <dim>1</dim>
24600 <dim>1</dim>
24601 </port>
24602 </output>
24603 </layer>
24604 <layer id="1630" name="bottleneck4_3/dim_red/bn/mean/Fused_Mul__copy128410331/quantized1328819668" type="Const" version="opset1">
24605 <data element_type="i8" offset="280992" shape="64,256,1,1" size="16384"/>
24606 <output>
24607 <port id="0" precision="I8">
24608 <dim>64</dim>
24609 <dim>256</dim>
24610 <dim>1</dim>
24611 <dim>1</dim>
24612 </port>
24613 </output>
24614 </layer>
24615 <layer id="1631" name="bottleneck4_3/dim_red/bn/mean/Fused_Mul__copy128410331/quantized/to_f16" type="Convert" version="opset1">
24616 <data destination_type="f16"/>
24617 <input>
24618 <port id="0">
24619 <dim>64</dim>
24620 <dim>256</dim>
24621 <dim>1</dim>
24622 <dim>1</dim>
24623 </port>
24624 </input>
24625 <output>
24626 <port id="1" precision="FP16">
24627 <dim>64</dim>
24628 <dim>256</dim>
24629 <dim>1</dim>
24630 <dim>1</dim>
24631 </port>
24632 </output>
24633 </layer>
24634 <layer id="1632" name="bottleneck4_3/dim_red/conv/fq_weights_1/zero_point1330121576" type="Const" version="opset1">
24635 <data element_type="f16" offset="297376" shape="64,1,1,1" size="128"/>
24636 <output>
24637 <port id="0" precision="FP16">
24638 <dim>64</dim>
24639 <dim>1</dim>
24640 <dim>1</dim>
24641 <dim>1</dim>
24642 </port>
24643 </output>
24644 </layer>
24645 <layer id="1633" name="bottleneck4_3/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
24646 <data auto_broadcast="numpy"/>
24647 <input>
24648 <port id="0">
24649 <dim>64</dim>
24650 <dim>256</dim>
24651 <dim>1</dim>
24652 <dim>1</dim>
24653 </port>
24654 <port id="1">
24655 <dim>64</dim>
24656 <dim>1</dim>
24657 <dim>1</dim>
24658 <dim>1</dim>
24659 </port>
24660 </input>
24661 <output>
24662 <port id="2" precision="FP16">
24663 <dim>64</dim>
24664 <dim>256</dim>
24665 <dim>1</dim>
24666 <dim>1</dim>
24667 </port>
24668 </output>
24669 </layer>
24670 <layer id="1634" name="bottleneck4_3/dim_red/conv/fq_weights_1/scale1329622698" type="Const" version="opset1">
24671 <data element_type="f16" offset="297504" shape="64,1,1,1" size="128"/>
24672 <output>
24673 <port id="0" precision="FP16">
24674 <dim>64</dim>
24675 <dim>1</dim>
24676 <dim>1</dim>
24677 <dim>1</dim>
24678 </port>
24679 </output>
24680 </layer>
24681 <layer id="1635" name="bottleneck4_3/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
24682 <data auto_broadcast="numpy"/>
24683 <input>
24684 <port id="0">
24685 <dim>64</dim>
24686 <dim>256</dim>
24687 <dim>1</dim>
24688 <dim>1</dim>
24689 </port>
24690 <port id="1">
24691 <dim>64</dim>
24692 <dim>1</dim>
24693 <dim>1</dim>
24694 <dim>1</dim>
24695 </port>
24696 </input>
24697 <output>
24698 <port id="2" precision="FP16">
24699 <dim>64</dim>
24700 <dim>256</dim>
24701 <dim>1</dim>
24702 <dim>1</dim>
24703 </port>
24704 </output>
24705 </layer>
24706 <layer id="1636" name="bottleneck4_3/dim_red/conv" type="Convolution" version="opset1">
24707 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
24708 <input>
24709 <port id="0">
24710 <dim>1</dim>
24711 <dim>256</dim>
24712 <dim>20</dim>
24713 <dim>34</dim>
24714 </port>
24715 <port id="1">
24716 <dim>64</dim>
24717 <dim>256</dim>
24718 <dim>1</dim>
24719 <dim>1</dim>
24720 </port>
24721 </input>
24722 <output>
24723 <port id="2" precision="FP16">
24724 <dim>1</dim>
24725 <dim>64</dim>
24726 <dim>20</dim>
24727 <dim>34</dim>
24728 </port>
24729 </output>
24730 </layer>
24731 <layer id="1637" name="data_add_2432924334128620583" type="Const" version="opset1">
24732 <data element_type="f16" offset="297632" shape="1,64,1,1" size="128"/>
24733 <output>
24734 <port id="0" precision="FP16">
24735 <dim>1</dim>
24736 <dim>64</dim>
24737 <dim>1</dim>
24738 <dim>1</dim>
24739 </port>
24740 </output>
24741 </layer>
24742 <layer id="1638" name="bottleneck4_3/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
24743 <data auto_broadcast="numpy"/>
24744 <input>
24745 <port id="0">
24746 <dim>1</dim>
24747 <dim>64</dim>
24748 <dim>20</dim>
24749 <dim>34</dim>
24750 </port>
24751 <port id="1">
24752 <dim>1</dim>
24753 <dim>64</dim>
24754 <dim>1</dim>
24755 <dim>1</dim>
24756 </port>
24757 </input>
24758 <output>
24759 <port id="2" names="bottleneck4_3/dim_red/conv" precision="FP16">
24760 <dim>1</dim>
24761 <dim>64</dim>
24762 <dim>20</dim>
24763 <dim>34</dim>
24764 </port>
24765 </output>
24766 </layer>
24767 <layer id="1639" name="bottleneck4_3/dim_red/fn/weights30932406851288" type="Const" version="opset1">
24768 <data element_type="f32" offset="1576" shape="1" size="4"/>
24769 <output>
24770 <port id="0" precision="FP32">
24771 <dim>1</dim>
24772 </port>
24773 </output>
24774 </layer>
24775 <layer id="1640" name="bottleneck4_3/dim_red/fn" type="PReLU" version="opset1">
24776 <input>
24777 <port id="0">
24778 <dim>1</dim>
24779 <dim>64</dim>
24780 <dim>20</dim>
24781 <dim>34</dim>
24782 </port>
24783 <port id="1">
24784 <dim>1</dim>
24785 </port>
24786 </input>
24787 <output>
24788 <port id="2" names="bottleneck4_3/dim_red/conv" precision="FP16">
24789 <dim>1</dim>
24790 <dim>64</dim>
24791 <dim>20</dim>
24792 <dim>34</dim>
24793 </port>
24794 </output>
24795 </layer>
24796 <layer id="1641" name="bottleneck4_3/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
24797 <data auto_broadcast="numpy" levels="256"/>
24798 <input>
24799 <port id="0">
24800 <dim>1</dim>
24801 <dim>64</dim>
24802 <dim>20</dim>
24803 <dim>34</dim>
24804 </port>
24805 <port id="1">
24806 <dim>1</dim>
24807 <dim>64</dim>
24808 <dim>1</dim>
24809 <dim>1</dim>
24810 </port>
24811 <port id="2">
24812 <dim>1</dim>
24813 <dim>64</dim>
24814 <dim>1</dim>
24815 <dim>1</dim>
24816 </port>
24817 <port id="3">
24818 <dim>1</dim>
24819 <dim>64</dim>
24820 <dim>1</dim>
24821 <dim>1</dim>
24822 </port>
24823 <port id="4">
24824 <dim>1</dim>
24825 <dim>64</dim>
24826 <dim>1</dim>
24827 <dim>1</dim>
24828 </port>
24829 </input>
24830 <output>
24831 <port id="5" precision="FP16">
24832 <dim>1</dim>
24833 <dim>64</dim>
24834 <dim>20</dim>
24835 <dim>34</dim>
24836 </port>
24837 </output>
24838 </layer>
24839 <layer id="1642" name="16903/value1690521288" type="Const" version="opset1">
24840 <data element_type="i64" offset="189984" shape="5" size="40"/>
24841 <output>
24842 <port id="0" precision="I64">
24843 <dim>5</dim>
24844 </port>
24845 </output>
24846 </layer>
24847 <layer id="1643" name="bottleneck4_3/inner/dw1/bn/mean/Fused_Mul__copy129010334/quantized1201619842" type="Const" version="opset1">
24848 <data element_type="i8" offset="297760" shape="64,1,3,3" size="576"/>
24849 <output>
24850 <port id="0" precision="I8">
24851 <dim>64</dim>
24852 <dim>1</dim>
24853 <dim>3</dim>
24854 <dim>3</dim>
24855 </port>
24856 </output>
24857 </layer>
24858 <layer id="1644" name="bottleneck4_3/inner/dw1/bn/mean/Fused_Mul__copy129010334/quantized/to_f16" type="Convert" version="opset1">
24859 <data destination_type="f16"/>
24860 <input>
24861 <port id="0">
24862 <dim>64</dim>
24863 <dim>1</dim>
24864 <dim>3</dim>
24865 <dim>3</dim>
24866 </port>
24867 </input>
24868 <output>
24869 <port id="1" precision="FP16">
24870 <dim>64</dim>
24871 <dim>1</dim>
24872 <dim>3</dim>
24873 <dim>3</dim>
24874 </port>
24875 </output>
24876 </layer>
24877 <layer id="1645" name="bottleneck4_3/inner/dw1/conv/fq_weights_1/zero_point1202920493" type="Const" version="opset1">
24878 <data element_type="f16" offset="298336" shape="64,1,1,1" size="128"/>
24879 <output>
24880 <port id="0" precision="FP16">
24881 <dim>64</dim>
24882 <dim>1</dim>
24883 <dim>1</dim>
24884 <dim>1</dim>
24885 </port>
24886 </output>
24887 </layer>
24888 <layer id="1646" name="bottleneck4_3/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
24889 <data auto_broadcast="numpy"/>
24890 <input>
24891 <port id="0">
24892 <dim>64</dim>
24893 <dim>1</dim>
24894 <dim>3</dim>
24895 <dim>3</dim>
24896 </port>
24897 <port id="1">
24898 <dim>64</dim>
24899 <dim>1</dim>
24900 <dim>1</dim>
24901 <dim>1</dim>
24902 </port>
24903 </input>
24904 <output>
24905 <port id="2" precision="FP16">
24906 <dim>64</dim>
24907 <dim>1</dim>
24908 <dim>3</dim>
24909 <dim>3</dim>
24910 </port>
24911 </output>
24912 </layer>
24913 <layer id="1647" name="bottleneck4_3/inner/dw1/conv/fq_weights_1/scale1202421861" type="Const" version="opset1">
24914 <data element_type="f16" offset="298464" shape="64,1,1,1" size="128"/>
24915 <output>
24916 <port id="0" precision="FP16">
24917 <dim>64</dim>
24918 <dim>1</dim>
24919 <dim>1</dim>
24920 <dim>1</dim>
24921 </port>
24922 </output>
24923 </layer>
24924 <layer id="1648" name="bottleneck4_3/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
24925 <data auto_broadcast="numpy"/>
24926 <input>
24927 <port id="0">
24928 <dim>64</dim>
24929 <dim>1</dim>
24930 <dim>3</dim>
24931 <dim>3</dim>
24932 </port>
24933 <port id="1">
24934 <dim>64</dim>
24935 <dim>1</dim>
24936 <dim>1</dim>
24937 <dim>1</dim>
24938 </port>
24939 </input>
24940 <output>
24941 <port id="2" precision="FP16">
24942 <dim>64</dim>
24943 <dim>1</dim>
24944 <dim>3</dim>
24945 <dim>3</dim>
24946 </port>
24947 </output>
24948 </layer>
24949 <layer id="1649" name="16903" type="Reshape" version="opset1">
24950 <data special_zero="true"/>
24951 <input>
24952 <port id="0">
24953 <dim>64</dim>
24954 <dim>1</dim>
24955 <dim>3</dim>
24956 <dim>3</dim>
24957 </port>
24958 <port id="1">
24959 <dim>5</dim>
24960 </port>
24961 </input>
24962 <output>
24963 <port id="2" precision="FP16">
24964 <dim>64</dim>
24965 <dim>1</dim>
24966 <dim>1</dim>
24967 <dim>3</dim>
24968 <dim>3</dim>
24969 </port>
24970 </output>
24971 </layer>
24972 <layer id="1650" name="bottleneck4_3/inner/dw1/conv" type="GroupConvolution" version="opset1">
24973 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
24974 <input>
24975 <port id="0">
24976 <dim>1</dim>
24977 <dim>64</dim>
24978 <dim>20</dim>
24979 <dim>34</dim>
24980 </port>
24981 <port id="1">
24982 <dim>64</dim>
24983 <dim>1</dim>
24984 <dim>1</dim>
24985 <dim>3</dim>
24986 <dim>3</dim>
24987 </port>
24988 </input>
24989 <output>
24990 <port id="2" precision="FP16">
24991 <dim>1</dim>
24992 <dim>64</dim>
24993 <dim>20</dim>
24994 <dim>34</dim>
24995 </port>
24996 </output>
24997 </layer>
24998 <layer id="1651" name="data_add_2433724342129221663" type="Const" version="opset1">
24999 <data element_type="f16" offset="298592" shape="1,64,1,1" size="128"/>
25000 <output>
25001 <port id="0" precision="FP16">
25002 <dim>1</dim>
25003 <dim>64</dim>
25004 <dim>1</dim>
25005 <dim>1</dim>
25006 </port>
25007 </output>
25008 </layer>
25009 <layer id="1652" name="bottleneck4_3/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
25010 <data auto_broadcast="numpy"/>
25011 <input>
25012 <port id="0">
25013 <dim>1</dim>
25014 <dim>64</dim>
25015 <dim>20</dim>
25016 <dim>34</dim>
25017 </port>
25018 <port id="1">
25019 <dim>1</dim>
25020 <dim>64</dim>
25021 <dim>1</dim>
25022 <dim>1</dim>
25023 </port>
25024 </input>
25025 <output>
25026 <port id="2" names="bottleneck4_3/inner/dw1/conv" precision="FP16">
25027 <dim>1</dim>
25028 <dim>64</dim>
25029 <dim>20</dim>
25030 <dim>34</dim>
25031 </port>
25032 </output>
25033 </layer>
25034 <layer id="1653" name="bottleneck4_3/inner/dw1/fn/weights31084401301294" type="Const" version="opset1">
25035 <data element_type="f32" offset="1576" shape="1" size="4"/>
25036 <output>
25037 <port id="0" precision="FP32">
25038 <dim>1</dim>
25039 </port>
25040 </output>
25041 </layer>
25042 <layer id="1654" name="bottleneck4_3/inner/dw1/fn" type="PReLU" version="opset1">
25043 <input>
25044 <port id="0">
25045 <dim>1</dim>
25046 <dim>64</dim>
25047 <dim>20</dim>
25048 <dim>34</dim>
25049 </port>
25050 <port id="1">
25051 <dim>1</dim>
25052 </port>
25053 </input>
25054 <output>
25055 <port id="2" names="bottleneck4_3/inner/dw1/conv" precision="FP16">
25056 <dim>1</dim>
25057 <dim>64</dim>
25058 <dim>20</dim>
25059 <dim>34</dim>
25060 </port>
25061 </output>
25062 </layer>
25063 <layer id="1655" name="bottleneck4_3/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
25064 <data auto_broadcast="numpy" levels="256"/>
25065 <input>
25066 <port id="0">
25067 <dim>1</dim>
25068 <dim>64</dim>
25069 <dim>20</dim>
25070 <dim>34</dim>
25071 </port>
25072 <port id="1"/>
25073 <port id="2"/>
25074 <port id="3"/>
25075 <port id="4"/>
25076 </input>
25077 <output>
25078 <port id="5" precision="FP16">
25079 <dim>1</dim>
25080 <dim>64</dim>
25081 <dim>20</dim>
25082 <dim>34</dim>
25083 </port>
25084 </output>
25085 </layer>
25086 <layer id="1656" name="bottleneck4_3/dim_inc/bn/mean/Fused_Mul__copy129610337/quantized1345621945" type="Const" version="opset1">
25087 <data element_type="i8" offset="298720" shape="256,64,1,1" size="16384"/>
25088 <output>
25089 <port id="0" precision="I8">
25090 <dim>256</dim>
25091 <dim>64</dim>
25092 <dim>1</dim>
25093 <dim>1</dim>
25094 </port>
25095 </output>
25096 </layer>
25097 <layer id="1657" name="bottleneck4_3/dim_inc/bn/mean/Fused_Mul__copy129610337/quantized/to_f16" type="Convert" version="opset1">
25098 <data destination_type="f16"/>
25099 <input>
25100 <port id="0">
25101 <dim>256</dim>
25102 <dim>64</dim>
25103 <dim>1</dim>
25104 <dim>1</dim>
25105 </port>
25106 </input>
25107 <output>
25108 <port id="1" precision="FP16">
25109 <dim>256</dim>
25110 <dim>64</dim>
25111 <dim>1</dim>
25112 <dim>1</dim>
25113 </port>
25114 </output>
25115 </layer>
25116 <layer id="1658" name="bottleneck4_3/dim_inc/conv/fq_weights_1/zero_point1346922947" type="Const" version="opset1">
25117 <data element_type="f16" offset="315104" shape="256,1,1,1" size="512"/>
25118 <output>
25119 <port id="0" precision="FP16">
25120 <dim>256</dim>
25121 <dim>1</dim>
25122 <dim>1</dim>
25123 <dim>1</dim>
25124 </port>
25125 </output>
25126 </layer>
25127 <layer id="1659" name="bottleneck4_3/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
25128 <data auto_broadcast="numpy"/>
25129 <input>
25130 <port id="0">
25131 <dim>256</dim>
25132 <dim>64</dim>
25133 <dim>1</dim>
25134 <dim>1</dim>
25135 </port>
25136 <port id="1">
25137 <dim>256</dim>
25138 <dim>1</dim>
25139 <dim>1</dim>
25140 <dim>1</dim>
25141 </port>
25142 </input>
25143 <output>
25144 <port id="2" precision="FP16">
25145 <dim>256</dim>
25146 <dim>64</dim>
25147 <dim>1</dim>
25148 <dim>1</dim>
25149 </port>
25150 </output>
25151 </layer>
25152 <layer id="1660" name="bottleneck4_3/dim_inc/conv/fq_weights_1/scale1346421705" type="Const" version="opset1">
25153 <data element_type="f16" offset="315616" shape="256,1,1,1" size="512"/>
25154 <output>
25155 <port id="0" precision="FP16">
25156 <dim>256</dim>
25157 <dim>1</dim>
25158 <dim>1</dim>
25159 <dim>1</dim>
25160 </port>
25161 </output>
25162 </layer>
25163 <layer id="1661" name="bottleneck4_3/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
25164 <data auto_broadcast="numpy"/>
25165 <input>
25166 <port id="0">
25167 <dim>256</dim>
25168 <dim>64</dim>
25169 <dim>1</dim>
25170 <dim>1</dim>
25171 </port>
25172 <port id="1">
25173 <dim>256</dim>
25174 <dim>1</dim>
25175 <dim>1</dim>
25176 <dim>1</dim>
25177 </port>
25178 </input>
25179 <output>
25180 <port id="2" precision="FP16">
25181 <dim>256</dim>
25182 <dim>64</dim>
25183 <dim>1</dim>
25184 <dim>1</dim>
25185 </port>
25186 </output>
25187 </layer>
25188 <layer id="1662" name="bottleneck4_3/dim_inc/conv" type="Convolution" version="opset1">
25189 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
25190 <input>
25191 <port id="0">
25192 <dim>1</dim>
25193 <dim>64</dim>
25194 <dim>20</dim>
25195 <dim>34</dim>
25196 </port>
25197 <port id="1">
25198 <dim>256</dim>
25199 <dim>64</dim>
25200 <dim>1</dim>
25201 <dim>1</dim>
25202 </port>
25203 </input>
25204 <output>
25205 <port id="2" precision="FP16">
25206 <dim>1</dim>
25207 <dim>256</dim>
25208 <dim>20</dim>
25209 <dim>34</dim>
25210 </port>
25211 </output>
25212 </layer>
25213 <layer id="1663" name="data_add_2434524350129820115" type="Const" version="opset1">
25214 <data element_type="f16" offset="316128" shape="1,256,1,1" size="512"/>
25215 <output>
25216 <port id="0" precision="FP16">
25217 <dim>1</dim>
25218 <dim>256</dim>
25219 <dim>1</dim>
25220 <dim>1</dim>
25221 </port>
25222 </output>
25223 </layer>
25224 <layer id="1664" name="bottleneck4_3/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
25225 <data auto_broadcast="numpy"/>
25226 <input>
25227 <port id="0">
25228 <dim>1</dim>
25229 <dim>256</dim>
25230 <dim>20</dim>
25231 <dim>34</dim>
25232 </port>
25233 <port id="1">
25234 <dim>1</dim>
25235 <dim>256</dim>
25236 <dim>1</dim>
25237 <dim>1</dim>
25238 </port>
25239 </input>
25240 <output>
25241 <port id="2" names="bottleneck4_3/dim_inc/conv" precision="FP16">
25242 <dim>1</dim>
25243 <dim>256</dim>
25244 <dim>20</dim>
25245 <dim>34</dim>
25246 </port>
25247 </output>
25248 </layer>
25249 <layer id="1665" name="bottleneck4_3/add/fq_input_1" type="FakeQuantize" version="opset1">
25250 <data auto_broadcast="numpy" levels="256"/>
25251 <input>
25252 <port id="0">
25253 <dim>1</dim>
25254 <dim>256</dim>
25255 <dim>20</dim>
25256 <dim>34</dim>
25257 </port>
25258 <port id="1"/>
25259 <port id="2"/>
25260 <port id="3"/>
25261 <port id="4"/>
25262 </input>
25263 <output>
25264 <port id="5" precision="FP16">
25265 <dim>1</dim>
25266 <dim>256</dim>
25267 <dim>20</dim>
25268 <dim>34</dim>
25269 </port>
25270 </output>
25271 </layer>
25272 <layer id="1666" name="bottleneck4_3/add" type="Add" version="opset1">
25273 <data auto_broadcast="numpy"/>
25274 <input>
25275 <port id="0">
25276 <dim>1</dim>
25277 <dim>256</dim>
25278 <dim>20</dim>
25279 <dim>34</dim>
25280 </port>
25281 <port id="1">
25282 <dim>1</dim>
25283 <dim>256</dim>
25284 <dim>20</dim>
25285 <dim>34</dim>
25286 </port>
25287 </input>
25288 <output>
25289 <port id="2" names="bottleneck4_3/add" precision="FP16">
25290 <dim>1</dim>
25291 <dim>256</dim>
25292 <dim>20</dim>
25293 <dim>34</dim>
25294 </port>
25295 </output>
25296 </layer>
25297 <layer id="1667" name="bottleneck4_3/fn/weights30764404661301" type="Const" version="opset1">
25298 <data element_type="f32" offset="1576" shape="1" size="4"/>
25299 <output>
25300 <port id="0" precision="FP32">
25301 <dim>1</dim>
25302 </port>
25303 </output>
25304 </layer>
25305 <layer id="1668" name="bottleneck4_3/fn" type="PReLU" version="opset1">
25306 <input>
25307 <port id="0">
25308 <dim>1</dim>
25309 <dim>256</dim>
25310 <dim>20</dim>
25311 <dim>34</dim>
25312 </port>
25313 <port id="1">
25314 <dim>1</dim>
25315 </port>
25316 </input>
25317 <output>
25318 <port id="2" names="bottleneck4_3/add" precision="FP16">
25319 <dim>1</dim>
25320 <dim>256</dim>
25321 <dim>20</dim>
25322 <dim>34</dim>
25323 </port>
25324 </output>
25325 </layer>
25326 <layer id="1669" name="bottleneck4_4/add/fq_input_0" type="FakeQuantize" version="opset1">
25327 <data auto_broadcast="numpy" levels="256"/>
25328 <input>
25329 <port id="0">
25330 <dim>1</dim>
25331 <dim>256</dim>
25332 <dim>20</dim>
25333 <dim>34</dim>
25334 </port>
25335 <port id="1"/>
25336 <port id="2"/>
25337 <port id="3"/>
25338 <port id="4"/>
25339 </input>
25340 <output>
25341 <port id="5" precision="FP16">
25342 <dim>1</dim>
25343 <dim>256</dim>
25344 <dim>20</dim>
25345 <dim>34</dim>
25346 </port>
25347 </output>
25348 </layer>
25349 <layer id="1670" name="3554355822860" type="Const" version="opset1">
25350 <data element_type="f16" offset="316640" shape="" size="2"/>
25351 <output>
25352 <port id="0" precision="FP16"/>
25353 </output>
25354 </layer>
25355 <layer id="1671" name="3555355922923" type="Const" version="opset1">
25356 <data element_type="f16" offset="316642" shape="" size="2"/>
25357 <output>
25358 <port id="0" precision="FP16"/>
25359 </output>
25360 </layer>
25361 <layer id="1672" name="3556356021948" type="Const" version="opset1">
25362 <data element_type="f16" offset="316640" shape="" size="2"/>
25363 <output>
25364 <port id="0" precision="FP16"/>
25365 </output>
25366 </layer>
25367 <layer id="1673" name="3557356121156" type="Const" version="opset1">
25368 <data element_type="f16" offset="316642" shape="" size="2"/>
25369 <output>
25370 <port id="0" precision="FP16"/>
25371 </output>
25372 </layer>
25373 <layer id="1674" name="4364436821486" type="Const" version="opset1">
25374 <data element_type="f16" offset="316644" shape="" size="2"/>
25375 <output>
25376 <port id="0" precision="FP16"/>
25377 </output>
25378 </layer>
25379 <layer id="1675" name="4365436920781" type="Const" version="opset1">
25380 <data element_type="f16" offset="316646" shape="" size="2"/>
25381 <output>
25382 <port id="0" precision="FP16"/>
25383 </output>
25384 </layer>
25385 <layer id="1676" name="4366437019791" type="Const" version="opset1">
25386 <data element_type="f16" offset="316644" shape="" size="2"/>
25387 <output>
25388 <port id="0" precision="FP16"/>
25389 </output>
25390 </layer>
25391 <layer id="1677" name="4367437119374" type="Const" version="opset1">
25392 <data element_type="f16" offset="316646" shape="" size="2"/>
25393 <output>
25394 <port id="0" precision="FP16"/>
25395 </output>
25396 </layer>
25397 <layer id="1678" name="5504550820172" type="Const" version="opset1">
25398 <data element_type="f16" offset="316648" shape="1,64,1,1" size="128"/>
25399 <output>
25400 <port id="0" precision="FP16">
25401 <dim>1</dim>
25402 <dim>64</dim>
25403 <dim>1</dim>
25404 <dim>1</dim>
25405 </port>
25406 </output>
25407 </layer>
25408 <layer id="1679" name="5505550920838" type="Const" version="opset1">
25409 <data element_type="f16" offset="316776" shape="1,64,1,1" size="128"/>
25410 <output>
25411 <port id="0" precision="FP16">
25412 <dim>1</dim>
25413 <dim>64</dim>
25414 <dim>1</dim>
25415 <dim>1</dim>
25416 </port>
25417 </output>
25418 </layer>
25419 <layer id="1680" name="5506551019758" type="Const" version="opset1">
25420 <data element_type="f16" offset="316648" shape="1,64,1,1" size="128"/>
25421 <output>
25422 <port id="0" precision="FP16">
25423 <dim>1</dim>
25424 <dim>64</dim>
25425 <dim>1</dim>
25426 <dim>1</dim>
25427 </port>
25428 </output>
25429 </layer>
25430 <layer id="1681" name="5507551121759" type="Const" version="opset1">
25431 <data element_type="f16" offset="316776" shape="1,64,1,1" size="128"/>
25432 <output>
25433 <port id="0" precision="FP16">
25434 <dim>1</dim>
25435 <dim>64</dim>
25436 <dim>1</dim>
25437 <dim>1</dim>
25438 </port>
25439 </output>
25440 </layer>
25441 <layer id="1682" name="bottleneck4_4/dim_red/bn/mean/Fused_Mul__copy130310340/quantized1244820982" type="Const" version="opset1">
25442 <data element_type="i8" offset="316904" shape="64,256,1,1" size="16384"/>
25443 <output>
25444 <port id="0" precision="I8">
25445 <dim>64</dim>
25446 <dim>256</dim>
25447 <dim>1</dim>
25448 <dim>1</dim>
25449 </port>
25450 </output>
25451 </layer>
25452 <layer id="1683" name="bottleneck4_4/dim_red/bn/mean/Fused_Mul__copy130310340/quantized/to_f16" type="Convert" version="opset1">
25453 <data destination_type="f16"/>
25454 <input>
25455 <port id="0">
25456 <dim>64</dim>
25457 <dim>256</dim>
25458 <dim>1</dim>
25459 <dim>1</dim>
25460 </port>
25461 </input>
25462 <output>
25463 <port id="1" precision="FP16">
25464 <dim>64</dim>
25465 <dim>256</dim>
25466 <dim>1</dim>
25467 <dim>1</dim>
25468 </port>
25469 </output>
25470 </layer>
25471 <layer id="1684" name="bottleneck4_4/dim_red/conv/fq_weights_1/zero_point1246120223" type="Const" version="opset1">
25472 <data element_type="f16" offset="333288" shape="64,1,1,1" size="128"/>
25473 <output>
25474 <port id="0" precision="FP16">
25475 <dim>64</dim>
25476 <dim>1</dim>
25477 <dim>1</dim>
25478 <dim>1</dim>
25479 </port>
25480 </output>
25481 </layer>
25482 <layer id="1685" name="bottleneck4_4/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
25483 <data auto_broadcast="numpy"/>
25484 <input>
25485 <port id="0">
25486 <dim>64</dim>
25487 <dim>256</dim>
25488 <dim>1</dim>
25489 <dim>1</dim>
25490 </port>
25491 <port id="1">
25492 <dim>64</dim>
25493 <dim>1</dim>
25494 <dim>1</dim>
25495 <dim>1</dim>
25496 </port>
25497 </input>
25498 <output>
25499 <port id="2" precision="FP16">
25500 <dim>64</dim>
25501 <dim>256</dim>
25502 <dim>1</dim>
25503 <dim>1</dim>
25504 </port>
25505 </output>
25506 </layer>
25507 <layer id="1686" name="bottleneck4_4/dim_red/conv/fq_weights_1/scale1245622836" type="Const" version="opset1">
25508 <data element_type="f16" offset="333416" shape="64,1,1,1" size="128"/>
25509 <output>
25510 <port id="0" precision="FP16">
25511 <dim>64</dim>
25512 <dim>1</dim>
25513 <dim>1</dim>
25514 <dim>1</dim>
25515 </port>
25516 </output>
25517 </layer>
25518 <layer id="1687" name="bottleneck4_4/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
25519 <data auto_broadcast="numpy"/>
25520 <input>
25521 <port id="0">
25522 <dim>64</dim>
25523 <dim>256</dim>
25524 <dim>1</dim>
25525 <dim>1</dim>
25526 </port>
25527 <port id="1">
25528 <dim>64</dim>
25529 <dim>1</dim>
25530 <dim>1</dim>
25531 <dim>1</dim>
25532 </port>
25533 </input>
25534 <output>
25535 <port id="2" precision="FP16">
25536 <dim>64</dim>
25537 <dim>256</dim>
25538 <dim>1</dim>
25539 <dim>1</dim>
25540 </port>
25541 </output>
25542 </layer>
25543 <layer id="1688" name="bottleneck4_4/dim_red/conv" type="Convolution" version="opset1">
25544 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
25545 <input>
25546 <port id="0">
25547 <dim>1</dim>
25548 <dim>256</dim>
25549 <dim>20</dim>
25550 <dim>34</dim>
25551 </port>
25552 <port id="1">
25553 <dim>64</dim>
25554 <dim>256</dim>
25555 <dim>1</dim>
25556 <dim>1</dim>
25557 </port>
25558 </input>
25559 <output>
25560 <port id="2" precision="FP16">
25561 <dim>1</dim>
25562 <dim>64</dim>
25563 <dim>20</dim>
25564 <dim>34</dim>
25565 </port>
25566 </output>
25567 </layer>
25568 <layer id="1689" name="data_add_2435324358130520409" type="Const" version="opset1">
25569 <data element_type="f16" offset="333544" shape="1,64,1,1" size="128"/>
25570 <output>
25571 <port id="0" precision="FP16">
25572 <dim>1</dim>
25573 <dim>64</dim>
25574 <dim>1</dim>
25575 <dim>1</dim>
25576 </port>
25577 </output>
25578 </layer>
25579 <layer id="1690" name="bottleneck4_4/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
25580 <data auto_broadcast="numpy"/>
25581 <input>
25582 <port id="0">
25583 <dim>1</dim>
25584 <dim>64</dim>
25585 <dim>20</dim>
25586 <dim>34</dim>
25587 </port>
25588 <port id="1">
25589 <dim>1</dim>
25590 <dim>64</dim>
25591 <dim>1</dim>
25592 <dim>1</dim>
25593 </port>
25594 </input>
25595 <output>
25596 <port id="2" names="bottleneck4_4/dim_red/conv" precision="FP16">
25597 <dim>1</dim>
25598 <dim>64</dim>
25599 <dim>20</dim>
25600 <dim>34</dim>
25601 </port>
25602 </output>
25603 </layer>
25604 <layer id="1691" name="bottleneck4_4/dim_red/fn/weights30928400671307" type="Const" version="opset1">
25605 <data element_type="f32" offset="1576" shape="1" size="4"/>
25606 <output>
25607 <port id="0" precision="FP32">
25608 <dim>1</dim>
25609 </port>
25610 </output>
25611 </layer>
25612 <layer id="1692" name="bottleneck4_4/dim_red/fn" type="PReLU" version="opset1">
25613 <input>
25614 <port id="0">
25615 <dim>1</dim>
25616 <dim>64</dim>
25617 <dim>20</dim>
25618 <dim>34</dim>
25619 </port>
25620 <port id="1">
25621 <dim>1</dim>
25622 </port>
25623 </input>
25624 <output>
25625 <port id="2" names="bottleneck4_4/dim_red/conv" precision="FP16">
25626 <dim>1</dim>
25627 <dim>64</dim>
25628 <dim>20</dim>
25629 <dim>34</dim>
25630 </port>
25631 </output>
25632 </layer>
25633 <layer id="1693" name="bottleneck4_4/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
25634 <data auto_broadcast="numpy" levels="256"/>
25635 <input>
25636 <port id="0">
25637 <dim>1</dim>
25638 <dim>64</dim>
25639 <dim>20</dim>
25640 <dim>34</dim>
25641 </port>
25642 <port id="1">
25643 <dim>1</dim>
25644 <dim>64</dim>
25645 <dim>1</dim>
25646 <dim>1</dim>
25647 </port>
25648 <port id="2">
25649 <dim>1</dim>
25650 <dim>64</dim>
25651 <dim>1</dim>
25652 <dim>1</dim>
25653 </port>
25654 <port id="3">
25655 <dim>1</dim>
25656 <dim>64</dim>
25657 <dim>1</dim>
25658 <dim>1</dim>
25659 </port>
25660 <port id="4">
25661 <dim>1</dim>
25662 <dim>64</dim>
25663 <dim>1</dim>
25664 <dim>1</dim>
25665 </port>
25666 </input>
25667 <output>
25668 <port id="5" precision="FP16">
25669 <dim>1</dim>
25670 <dim>64</dim>
25671 <dim>20</dim>
25672 <dim>34</dim>
25673 </port>
25674 </output>
25675 </layer>
25676 <layer id="1694" name="16935/value1693722893" type="Const" version="opset1">
25677 <data element_type="i64" offset="189984" shape="5" size="40"/>
25678 <output>
25679 <port id="0" precision="I64">
25680 <dim>5</dim>
25681 </port>
25682 </output>
25683 </layer>
25684 <layer id="1695" name="bottleneck4_4/inner/dw1/bn/mean/Fused_Mul__copy130910343/quantized1225621669" type="Const" version="opset1">
25685 <data element_type="i8" offset="333672" shape="64,1,3,3" size="576"/>
25686 <output>
25687 <port id="0" precision="I8">
25688 <dim>64</dim>
25689 <dim>1</dim>
25690 <dim>3</dim>
25691 <dim>3</dim>
25692 </port>
25693 </output>
25694 </layer>
25695 <layer id="1696" name="bottleneck4_4/inner/dw1/bn/mean/Fused_Mul__copy130910343/quantized/to_f16" type="Convert" version="opset1">
25696 <data destination_type="f16"/>
25697 <input>
25698 <port id="0">
25699 <dim>64</dim>
25700 <dim>1</dim>
25701 <dim>3</dim>
25702 <dim>3</dim>
25703 </port>
25704 </input>
25705 <output>
25706 <port id="1" precision="FP16">
25707 <dim>64</dim>
25708 <dim>1</dim>
25709 <dim>3</dim>
25710 <dim>3</dim>
25711 </port>
25712 </output>
25713 </layer>
25714 <layer id="1697" name="bottleneck4_4/inner/dw1/conv/fq_weights_1/zero_point1226919734" type="Const" version="opset1">
25715 <data element_type="f16" offset="334248" shape="64,1,1,1" size="128"/>
25716 <output>
25717 <port id="0" precision="FP16">
25718 <dim>64</dim>
25719 <dim>1</dim>
25720 <dim>1</dim>
25721 <dim>1</dim>
25722 </port>
25723 </output>
25724 </layer>
25725 <layer id="1698" name="bottleneck4_4/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
25726 <data auto_broadcast="numpy"/>
25727 <input>
25728 <port id="0">
25729 <dim>64</dim>
25730 <dim>1</dim>
25731 <dim>3</dim>
25732 <dim>3</dim>
25733 </port>
25734 <port id="1">
25735 <dim>64</dim>
25736 <dim>1</dim>
25737 <dim>1</dim>
25738 <dim>1</dim>
25739 </port>
25740 </input>
25741 <output>
25742 <port id="2" precision="FP16">
25743 <dim>64</dim>
25744 <dim>1</dim>
25745 <dim>3</dim>
25746 <dim>3</dim>
25747 </port>
25748 </output>
25749 </layer>
25750 <layer id="1699" name="bottleneck4_4/inner/dw1/conv/fq_weights_1/scale1226419650" type="Const" version="opset1">
25751 <data element_type="f16" offset="334376" shape="64,1,1,1" size="128"/>
25752 <output>
25753 <port id="0" precision="FP16">
25754 <dim>64</dim>
25755 <dim>1</dim>
25756 <dim>1</dim>
25757 <dim>1</dim>
25758 </port>
25759 </output>
25760 </layer>
25761 <layer id="1700" name="bottleneck4_4/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
25762 <data auto_broadcast="numpy"/>
25763 <input>
25764 <port id="0">
25765 <dim>64</dim>
25766 <dim>1</dim>
25767 <dim>3</dim>
25768 <dim>3</dim>
25769 </port>
25770 <port id="1">
25771 <dim>64</dim>
25772 <dim>1</dim>
25773 <dim>1</dim>
25774 <dim>1</dim>
25775 </port>
25776 </input>
25777 <output>
25778 <port id="2" precision="FP16">
25779 <dim>64</dim>
25780 <dim>1</dim>
25781 <dim>3</dim>
25782 <dim>3</dim>
25783 </port>
25784 </output>
25785 </layer>
25786 <layer id="1701" name="16935" type="Reshape" version="opset1">
25787 <data special_zero="true"/>
25788 <input>
25789 <port id="0">
25790 <dim>64</dim>
25791 <dim>1</dim>
25792 <dim>3</dim>
25793 <dim>3</dim>
25794 </port>
25795 <port id="1">
25796 <dim>5</dim>
25797 </port>
25798 </input>
25799 <output>
25800 <port id="2" precision="FP16">
25801 <dim>64</dim>
25802 <dim>1</dim>
25803 <dim>1</dim>
25804 <dim>3</dim>
25805 <dim>3</dim>
25806 </port>
25807 </output>
25808 </layer>
25809 <layer id="1702" name="bottleneck4_4/inner/dw1/conv" type="GroupConvolution" version="opset1">
25810 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
25811 <input>
25812 <port id="0">
25813 <dim>1</dim>
25814 <dim>64</dim>
25815 <dim>20</dim>
25816 <dim>34</dim>
25817 </port>
25818 <port id="1">
25819 <dim>64</dim>
25820 <dim>1</dim>
25821 <dim>1</dim>
25822 <dim>3</dim>
25823 <dim>3</dim>
25824 </port>
25825 </input>
25826 <output>
25827 <port id="2" precision="FP16">
25828 <dim>1</dim>
25829 <dim>64</dim>
25830 <dim>20</dim>
25831 <dim>34</dim>
25832 </port>
25833 </output>
25834 </layer>
25835 <layer id="1703" name="data_add_2436124366131121252" type="Const" version="opset1">
25836 <data element_type="f16" offset="334504" shape="1,64,1,1" size="128"/>
25837 <output>
25838 <port id="0" precision="FP16">
25839 <dim>1</dim>
25840 <dim>64</dim>
25841 <dim>1</dim>
25842 <dim>1</dim>
25843 </port>
25844 </output>
25845 </layer>
25846 <layer id="1704" name="bottleneck4_4/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
25847 <data auto_broadcast="numpy"/>
25848 <input>
25849 <port id="0">
25850 <dim>1</dim>
25851 <dim>64</dim>
25852 <dim>20</dim>
25853 <dim>34</dim>
25854 </port>
25855 <port id="1">
25856 <dim>1</dim>
25857 <dim>64</dim>
25858 <dim>1</dim>
25859 <dim>1</dim>
25860 </port>
25861 </input>
25862 <output>
25863 <port id="2" names="bottleneck4_4/inner/dw1/conv" precision="FP16">
25864 <dim>1</dim>
25865 <dim>64</dim>
25866 <dim>20</dim>
25867 <dim>34</dim>
25868 </port>
25869 </output>
25870 </layer>
25871 <layer id="1705" name="bottleneck4_4/inner/dw1/fn/weights30992404541313" type="Const" version="opset1">
25872 <data element_type="f32" offset="1576" shape="1" size="4"/>
25873 <output>
25874 <port id="0" precision="FP32">
25875 <dim>1</dim>
25876 </port>
25877 </output>
25878 </layer>
25879 <layer id="1706" name="bottleneck4_4/inner/dw1/fn" type="PReLU" version="opset1">
25880 <input>
25881 <port id="0">
25882 <dim>1</dim>
25883 <dim>64</dim>
25884 <dim>20</dim>
25885 <dim>34</dim>
25886 </port>
25887 <port id="1">
25888 <dim>1</dim>
25889 </port>
25890 </input>
25891 <output>
25892 <port id="2" names="bottleneck4_4/inner/dw1/conv" precision="FP16">
25893 <dim>1</dim>
25894 <dim>64</dim>
25895 <dim>20</dim>
25896 <dim>34</dim>
25897 </port>
25898 </output>
25899 </layer>
25900 <layer id="1707" name="bottleneck4_4/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
25901 <data auto_broadcast="numpy" levels="256"/>
25902 <input>
25903 <port id="0">
25904 <dim>1</dim>
25905 <dim>64</dim>
25906 <dim>20</dim>
25907 <dim>34</dim>
25908 </port>
25909 <port id="1"/>
25910 <port id="2"/>
25911 <port id="3"/>
25912 <port id="4"/>
25913 </input>
25914 <output>
25915 <port id="5" precision="FP16">
25916 <dim>1</dim>
25917 <dim>64</dim>
25918 <dim>20</dim>
25919 <dim>34</dim>
25920 </port>
25921 </output>
25922 </layer>
25923 <layer id="1708" name="bottleneck4_4/dim_inc/bn/mean/Fused_Mul__copy131510346/quantized1415221723" type="Const" version="opset1">
25924 <data element_type="i8" offset="334632" shape="256,64,1,1" size="16384"/>
25925 <output>
25926 <port id="0" precision="I8">
25927 <dim>256</dim>
25928 <dim>64</dim>
25929 <dim>1</dim>
25930 <dim>1</dim>
25931 </port>
25932 </output>
25933 </layer>
25934 <layer id="1709" name="bottleneck4_4/dim_inc/bn/mean/Fused_Mul__copy131510346/quantized/to_f16" type="Convert" version="opset1">
25935 <data destination_type="f16"/>
25936 <input>
25937 <port id="0">
25938 <dim>256</dim>
25939 <dim>64</dim>
25940 <dim>1</dim>
25941 <dim>1</dim>
25942 </port>
25943 </input>
25944 <output>
25945 <port id="1" precision="FP16">
25946 <dim>256</dim>
25947 <dim>64</dim>
25948 <dim>1</dim>
25949 <dim>1</dim>
25950 </port>
25951 </output>
25952 </layer>
25953 <layer id="1710" name="bottleneck4_4/dim_inc/conv/fq_weights_1/zero_point1416521732" type="Const" version="opset1">
25954 <data element_type="f16" offset="351016" shape="256,1,1,1" size="512"/>
25955 <output>
25956 <port id="0" precision="FP16">
25957 <dim>256</dim>
25958 <dim>1</dim>
25959 <dim>1</dim>
25960 <dim>1</dim>
25961 </port>
25962 </output>
25963 </layer>
25964 <layer id="1711" name="bottleneck4_4/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
25965 <data auto_broadcast="numpy"/>
25966 <input>
25967 <port id="0">
25968 <dim>256</dim>
25969 <dim>64</dim>
25970 <dim>1</dim>
25971 <dim>1</dim>
25972 </port>
25973 <port id="1">
25974 <dim>256</dim>
25975 <dim>1</dim>
25976 <dim>1</dim>
25977 <dim>1</dim>
25978 </port>
25979 </input>
25980 <output>
25981 <port id="2" precision="FP16">
25982 <dim>256</dim>
25983 <dim>64</dim>
25984 <dim>1</dim>
25985 <dim>1</dim>
25986 </port>
25987 </output>
25988 </layer>
25989 <layer id="1712" name="bottleneck4_4/dim_inc/conv/fq_weights_1/scale1416022620" type="Const" version="opset1">
25990 <data element_type="f16" offset="351528" shape="256,1,1,1" size="512"/>
25991 <output>
25992 <port id="0" precision="FP16">
25993 <dim>256</dim>
25994 <dim>1</dim>
25995 <dim>1</dim>
25996 <dim>1</dim>
25997 </port>
25998 </output>
25999 </layer>
26000 <layer id="1713" name="bottleneck4_4/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
26001 <data auto_broadcast="numpy"/>
26002 <input>
26003 <port id="0">
26004 <dim>256</dim>
26005 <dim>64</dim>
26006 <dim>1</dim>
26007 <dim>1</dim>
26008 </port>
26009 <port id="1">
26010 <dim>256</dim>
26011 <dim>1</dim>
26012 <dim>1</dim>
26013 <dim>1</dim>
26014 </port>
26015 </input>
26016 <output>
26017 <port id="2" precision="FP16">
26018 <dim>256</dim>
26019 <dim>64</dim>
26020 <dim>1</dim>
26021 <dim>1</dim>
26022 </port>
26023 </output>
26024 </layer>
26025 <layer id="1714" name="bottleneck4_4/dim_inc/conv" type="Convolution" version="opset1">
26026 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
26027 <input>
26028 <port id="0">
26029 <dim>1</dim>
26030 <dim>64</dim>
26031 <dim>20</dim>
26032 <dim>34</dim>
26033 </port>
26034 <port id="1">
26035 <dim>256</dim>
26036 <dim>64</dim>
26037 <dim>1</dim>
26038 <dim>1</dim>
26039 </port>
26040 </input>
26041 <output>
26042 <port id="2" precision="FP16">
26043 <dim>1</dim>
26044 <dim>256</dim>
26045 <dim>20</dim>
26046 <dim>34</dim>
26047 </port>
26048 </output>
26049 </layer>
26050 <layer id="1715" name="data_add_2436924374131720943" type="Const" version="opset1">
26051 <data element_type="f16" offset="352040" shape="1,256,1,1" size="512"/>
26052 <output>
26053 <port id="0" precision="FP16">
26054 <dim>1</dim>
26055 <dim>256</dim>
26056 <dim>1</dim>
26057 <dim>1</dim>
26058 </port>
26059 </output>
26060 </layer>
26061 <layer id="1716" name="bottleneck4_4/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
26062 <data auto_broadcast="numpy"/>
26063 <input>
26064 <port id="0">
26065 <dim>1</dim>
26066 <dim>256</dim>
26067 <dim>20</dim>
26068 <dim>34</dim>
26069 </port>
26070 <port id="1">
26071 <dim>1</dim>
26072 <dim>256</dim>
26073 <dim>1</dim>
26074 <dim>1</dim>
26075 </port>
26076 </input>
26077 <output>
26078 <port id="2" names="bottleneck4_4/dim_inc/conv" precision="FP16">
26079 <dim>1</dim>
26080 <dim>256</dim>
26081 <dim>20</dim>
26082 <dim>34</dim>
26083 </port>
26084 </output>
26085 </layer>
26086 <layer id="1717" name="bottleneck4_4/add/fq_input_1" type="FakeQuantize" version="opset1">
26087 <data auto_broadcast="numpy" levels="256"/>
26088 <input>
26089 <port id="0">
26090 <dim>1</dim>
26091 <dim>256</dim>
26092 <dim>20</dim>
26093 <dim>34</dim>
26094 </port>
26095 <port id="1"/>
26096 <port id="2"/>
26097 <port id="3"/>
26098 <port id="4"/>
26099 </input>
26100 <output>
26101 <port id="5" precision="FP16">
26102 <dim>1</dim>
26103 <dim>256</dim>
26104 <dim>20</dim>
26105 <dim>34</dim>
26106 </port>
26107 </output>
26108 </layer>
26109 <layer id="1718" name="bottleneck4_4/add" type="Add" version="opset1">
26110 <data auto_broadcast="numpy"/>
26111 <input>
26112 <port id="0">
26113 <dim>1</dim>
26114 <dim>256</dim>
26115 <dim>20</dim>
26116 <dim>34</dim>
26117 </port>
26118 <port id="1">
26119 <dim>1</dim>
26120 <dim>256</dim>
26121 <dim>20</dim>
26122 <dim>34</dim>
26123 </port>
26124 </input>
26125 <output>
26126 <port id="2" names="bottleneck4_4/add" precision="FP16">
26127 <dim>1</dim>
26128 <dim>256</dim>
26129 <dim>20</dim>
26130 <dim>34</dim>
26131 </port>
26132 </output>
26133 </layer>
26134 <layer id="1719" name="bottleneck4_4/fn/weights30996399261320" type="Const" version="opset1">
26135 <data element_type="f32" offset="1576" shape="1" size="4"/>
26136 <output>
26137 <port id="0" precision="FP32">
26138 <dim>1</dim>
26139 </port>
26140 </output>
26141 </layer>
26142 <layer id="1720" name="bottleneck4_4/fn" type="PReLU" version="opset1">
26143 <input>
26144 <port id="0">
26145 <dim>1</dim>
26146 <dim>256</dim>
26147 <dim>20</dim>
26148 <dim>34</dim>
26149 </port>
26150 <port id="1">
26151 <dim>1</dim>
26152 </port>
26153 </input>
26154 <output>
26155 <port id="2" names="bottleneck4_4/add" precision="FP16">
26156 <dim>1</dim>
26157 <dim>256</dim>
26158 <dim>20</dim>
26159 <dim>34</dim>
26160 </port>
26161 </output>
26162 </layer>
26163 <layer id="1721" name="bottleneck4_5/add/fq_input_0" type="FakeQuantize" version="opset1">
26164 <data auto_broadcast="numpy" levels="256"/>
26165 <input>
26166 <port id="0">
26167 <dim>1</dim>
26168 <dim>256</dim>
26169 <dim>20</dim>
26170 <dim>34</dim>
26171 </port>
26172 <port id="1"/>
26173 <port id="2"/>
26174 <port id="3"/>
26175 <port id="4"/>
26176 </input>
26177 <output>
26178 <port id="5" precision="FP16">
26179 <dim>1</dim>
26180 <dim>256</dim>
26181 <dim>20</dim>
26182 <dim>34</dim>
26183 </port>
26184 </output>
26185 </layer>
26186 <layer id="1722" name="4274427822878" type="Const" version="opset1">
26187 <data element_type="f16" offset="352552" shape="" size="2"/>
26188 <output>
26189 <port id="0" precision="FP16"/>
26190 </output>
26191 </layer>
26192 <layer id="1723" name="4275427919563" type="Const" version="opset1">
26193 <data element_type="f16" offset="352554" shape="" size="2"/>
26194 <output>
26195 <port id="0" precision="FP16"/>
26196 </output>
26197 </layer>
26198 <layer id="1724" name="4276428021600" type="Const" version="opset1">
26199 <data element_type="f16" offset="352552" shape="" size="2"/>
26200 <output>
26201 <port id="0" precision="FP16"/>
26202 </output>
26203 </layer>
26204 <layer id="1725" name="4277428121873" type="Const" version="opset1">
26205 <data element_type="f16" offset="352554" shape="" size="2"/>
26206 <output>
26207 <port id="0" precision="FP16"/>
26208 </output>
26209 </layer>
26210 <layer id="1726" name="3884388820109" type="Const" version="opset1">
26211 <data element_type="f16" offset="352556" shape="" size="2"/>
26212 <output>
26213 <port id="0" precision="FP16"/>
26214 </output>
26215 </layer>
26216 <layer id="1727" name="3885388921066" type="Const" version="opset1">
26217 <data element_type="f16" offset="352558" shape="" size="2"/>
26218 <output>
26219 <port id="0" precision="FP16"/>
26220 </output>
26221 </layer>
26222 <layer id="1728" name="3886389022656" type="Const" version="opset1">
26223 <data element_type="f16" offset="352556" shape="" size="2"/>
26224 <output>
26225 <port id="0" precision="FP16"/>
26226 </output>
26227 </layer>
26228 <layer id="1729" name="3887389120121" type="Const" version="opset1">
26229 <data element_type="f16" offset="352558" shape="" size="2"/>
26230 <output>
26231 <port id="0" precision="FP16"/>
26232 </output>
26233 </layer>
26234 <layer id="1730" name="4424442821330" type="Const" version="opset1">
26235 <data element_type="f16" offset="352560" shape="1,64,1,1" size="128"/>
26236 <output>
26237 <port id="0" precision="FP16">
26238 <dim>1</dim>
26239 <dim>64</dim>
26240 <dim>1</dim>
26241 <dim>1</dim>
26242 </port>
26243 </output>
26244 </layer>
26245 <layer id="1731" name="4425442919746" type="Const" version="opset1">
26246 <data element_type="f16" offset="352688" shape="1,64,1,1" size="128"/>
26247 <output>
26248 <port id="0" precision="FP16">
26249 <dim>1</dim>
26250 <dim>64</dim>
26251 <dim>1</dim>
26252 <dim>1</dim>
26253 </port>
26254 </output>
26255 </layer>
26256 <layer id="1732" name="4426443020889" type="Const" version="opset1">
26257 <data element_type="f16" offset="352560" shape="1,64,1,1" size="128"/>
26258 <output>
26259 <port id="0" precision="FP16">
26260 <dim>1</dim>
26261 <dim>64</dim>
26262 <dim>1</dim>
26263 <dim>1</dim>
26264 </port>
26265 </output>
26266 </layer>
26267 <layer id="1733" name="4427443120718" type="Const" version="opset1">
26268 <data element_type="f16" offset="352688" shape="1,64,1,1" size="128"/>
26269 <output>
26270 <port id="0" precision="FP16">
26271 <dim>1</dim>
26272 <dim>64</dim>
26273 <dim>1</dim>
26274 <dim>1</dim>
26275 </port>
26276 </output>
26277 </layer>
26278 <layer id="1734" name="bottleneck4_5/dim_red/bn/mean/Fused_Mul__copy132210349/quantized1412822845" type="Const" version="opset1">
26279 <data element_type="i8" offset="352816" shape="64,256,1,1" size="16384"/>
26280 <output>
26281 <port id="0" precision="I8">
26282 <dim>64</dim>
26283 <dim>256</dim>
26284 <dim>1</dim>
26285 <dim>1</dim>
26286 </port>
26287 </output>
26288 </layer>
26289 <layer id="1735" name="bottleneck4_5/dim_red/bn/mean/Fused_Mul__copy132210349/quantized/to_f16" type="Convert" version="opset1">
26290 <data destination_type="f16"/>
26291 <input>
26292 <port id="0">
26293 <dim>64</dim>
26294 <dim>256</dim>
26295 <dim>1</dim>
26296 <dim>1</dim>
26297 </port>
26298 </input>
26299 <output>
26300 <port id="1" precision="FP16">
26301 <dim>64</dim>
26302 <dim>256</dim>
26303 <dim>1</dim>
26304 <dim>1</dim>
26305 </port>
26306 </output>
26307 </layer>
26308 <layer id="1736" name="bottleneck4_5/dim_red/conv/fq_weights_1/zero_point1414121765" type="Const" version="opset1">
26309 <data element_type="f16" offset="369200" shape="64,1,1,1" size="128"/>
26310 <output>
26311 <port id="0" precision="FP16">
26312 <dim>64</dim>
26313 <dim>1</dim>
26314 <dim>1</dim>
26315 <dim>1</dim>
26316 </port>
26317 </output>
26318 </layer>
26319 <layer id="1737" name="bottleneck4_5/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
26320 <data auto_broadcast="numpy"/>
26321 <input>
26322 <port id="0">
26323 <dim>64</dim>
26324 <dim>256</dim>
26325 <dim>1</dim>
26326 <dim>1</dim>
26327 </port>
26328 <port id="1">
26329 <dim>64</dim>
26330 <dim>1</dim>
26331 <dim>1</dim>
26332 <dim>1</dim>
26333 </port>
26334 </input>
26335 <output>
26336 <port id="2" precision="FP16">
26337 <dim>64</dim>
26338 <dim>256</dim>
26339 <dim>1</dim>
26340 <dim>1</dim>
26341 </port>
26342 </output>
26343 </layer>
26344 <layer id="1738" name="bottleneck4_5/dim_red/conv/fq_weights_1/scale1413622977" type="Const" version="opset1">
26345 <data element_type="f16" offset="369328" shape="64,1,1,1" size="128"/>
26346 <output>
26347 <port id="0" precision="FP16">
26348 <dim>64</dim>
26349 <dim>1</dim>
26350 <dim>1</dim>
26351 <dim>1</dim>
26352 </port>
26353 </output>
26354 </layer>
26355 <layer id="1739" name="bottleneck4_5/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
26356 <data auto_broadcast="numpy"/>
26357 <input>
26358 <port id="0">
26359 <dim>64</dim>
26360 <dim>256</dim>
26361 <dim>1</dim>
26362 <dim>1</dim>
26363 </port>
26364 <port id="1">
26365 <dim>64</dim>
26366 <dim>1</dim>
26367 <dim>1</dim>
26368 <dim>1</dim>
26369 </port>
26370 </input>
26371 <output>
26372 <port id="2" precision="FP16">
26373 <dim>64</dim>
26374 <dim>256</dim>
26375 <dim>1</dim>
26376 <dim>1</dim>
26377 </port>
26378 </output>
26379 </layer>
26380 <layer id="1740" name="bottleneck4_5/dim_red/conv" type="Convolution" version="opset1">
26381 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
26382 <input>
26383 <port id="0">
26384 <dim>1</dim>
26385 <dim>256</dim>
26386 <dim>20</dim>
26387 <dim>34</dim>
26388 </port>
26389 <port id="1">
26390 <dim>64</dim>
26391 <dim>256</dim>
26392 <dim>1</dim>
26393 <dim>1</dim>
26394 </port>
26395 </input>
26396 <output>
26397 <port id="2" precision="FP16">
26398 <dim>1</dim>
26399 <dim>64</dim>
26400 <dim>20</dim>
26401 <dim>34</dim>
26402 </port>
26403 </output>
26404 </layer>
26405 <layer id="1741" name="data_add_2437724382132419416" type="Const" version="opset1">
26406 <data element_type="f16" offset="369456" shape="1,64,1,1" size="128"/>
26407 <output>
26408 <port id="0" precision="FP16">
26409 <dim>1</dim>
26410 <dim>64</dim>
26411 <dim>1</dim>
26412 <dim>1</dim>
26413 </port>
26414 </output>
26415 </layer>
26416 <layer id="1742" name="bottleneck4_5/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
26417 <data auto_broadcast="numpy"/>
26418 <input>
26419 <port id="0">
26420 <dim>1</dim>
26421 <dim>64</dim>
26422 <dim>20</dim>
26423 <dim>34</dim>
26424 </port>
26425 <port id="1">
26426 <dim>1</dim>
26427 <dim>64</dim>
26428 <dim>1</dim>
26429 <dim>1</dim>
26430 </port>
26431 </input>
26432 <output>
26433 <port id="2" names="bottleneck4_5/dim_red/conv" precision="FP16">
26434 <dim>1</dim>
26435 <dim>64</dim>
26436 <dim>20</dim>
26437 <dim>34</dim>
26438 </port>
26439 </output>
26440 </layer>
26441 <layer id="1743" name="bottleneck4_5/dim_red/fn/weights30804406821326" type="Const" version="opset1">
26442 <data element_type="f32" offset="1576" shape="1" size="4"/>
26443 <output>
26444 <port id="0" precision="FP32">
26445 <dim>1</dim>
26446 </port>
26447 </output>
26448 </layer>
26449 <layer id="1744" name="bottleneck4_5/dim_red/fn" type="PReLU" version="opset1">
26450 <input>
26451 <port id="0">
26452 <dim>1</dim>
26453 <dim>64</dim>
26454 <dim>20</dim>
26455 <dim>34</dim>
26456 </port>
26457 <port id="1">
26458 <dim>1</dim>
26459 </port>
26460 </input>
26461 <output>
26462 <port id="2" names="bottleneck4_5/dim_red/conv" precision="FP16">
26463 <dim>1</dim>
26464 <dim>64</dim>
26465 <dim>20</dim>
26466 <dim>34</dim>
26467 </port>
26468 </output>
26469 </layer>
26470 <layer id="1745" name="bottleneck4_5/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
26471 <data auto_broadcast="numpy" levels="256"/>
26472 <input>
26473 <port id="0">
26474 <dim>1</dim>
26475 <dim>64</dim>
26476 <dim>20</dim>
26477 <dim>34</dim>
26478 </port>
26479 <port id="1">
26480 <dim>1</dim>
26481 <dim>64</dim>
26482 <dim>1</dim>
26483 <dim>1</dim>
26484 </port>
26485 <port id="2">
26486 <dim>1</dim>
26487 <dim>64</dim>
26488 <dim>1</dim>
26489 <dim>1</dim>
26490 </port>
26491 <port id="3">
26492 <dim>1</dim>
26493 <dim>64</dim>
26494 <dim>1</dim>
26495 <dim>1</dim>
26496 </port>
26497 <port id="4">
26498 <dim>1</dim>
26499 <dim>64</dim>
26500 <dim>1</dim>
26501 <dim>1</dim>
26502 </port>
26503 </input>
26504 <output>
26505 <port id="5" precision="FP16">
26506 <dim>1</dim>
26507 <dim>64</dim>
26508 <dim>20</dim>
26509 <dim>34</dim>
26510 </port>
26511 </output>
26512 </layer>
26513 <layer id="1746" name="16891/value1689321123" type="Const" version="opset1">
26514 <data element_type="i64" offset="189984" shape="5" size="40"/>
26515 <output>
26516 <port id="0" precision="I64">
26517 <dim>5</dim>
26518 </port>
26519 </output>
26520 </layer>
26521 <layer id="1747" name="bottleneck4_5/inner/dw1/bn/mean/Fused_Mul__copy132810352/quantized1372020865" type="Const" version="opset1">
26522 <data element_type="i8" offset="369584" shape="64,1,3,3" size="576"/>
26523 <output>
26524 <port id="0" precision="I8">
26525 <dim>64</dim>
26526 <dim>1</dim>
26527 <dim>3</dim>
26528 <dim>3</dim>
26529 </port>
26530 </output>
26531 </layer>
26532 <layer id="1748" name="bottleneck4_5/inner/dw1/bn/mean/Fused_Mul__copy132810352/quantized/to_f16" type="Convert" version="opset1">
26533 <data destination_type="f16"/>
26534 <input>
26535 <port id="0">
26536 <dim>64</dim>
26537 <dim>1</dim>
26538 <dim>3</dim>
26539 <dim>3</dim>
26540 </port>
26541 </input>
26542 <output>
26543 <port id="1" precision="FP16">
26544 <dim>64</dim>
26545 <dim>1</dim>
26546 <dim>3</dim>
26547 <dim>3</dim>
26548 </port>
26549 </output>
26550 </layer>
26551 <layer id="1749" name="bottleneck4_5/inner/dw1/conv/fq_weights_1/zero_point1373320262" type="Const" version="opset1">
26552 <data element_type="f16" offset="370160" shape="64,1,1,1" size="128"/>
26553 <output>
26554 <port id="0" precision="FP16">
26555 <dim>64</dim>
26556 <dim>1</dim>
26557 <dim>1</dim>
26558 <dim>1</dim>
26559 </port>
26560 </output>
26561 </layer>
26562 <layer id="1750" name="bottleneck4_5/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
26563 <data auto_broadcast="numpy"/>
26564 <input>
26565 <port id="0">
26566 <dim>64</dim>
26567 <dim>1</dim>
26568 <dim>3</dim>
26569 <dim>3</dim>
26570 </port>
26571 <port id="1">
26572 <dim>64</dim>
26573 <dim>1</dim>
26574 <dim>1</dim>
26575 <dim>1</dim>
26576 </port>
26577 </input>
26578 <output>
26579 <port id="2" precision="FP16">
26580 <dim>64</dim>
26581 <dim>1</dim>
26582 <dim>3</dim>
26583 <dim>3</dim>
26584 </port>
26585 </output>
26586 </layer>
26587 <layer id="1751" name="bottleneck4_5/inner/dw1/conv/fq_weights_1/scale1372821525" type="Const" version="opset1">
26588 <data element_type="f16" offset="370288" shape="64,1,1,1" size="128"/>
26589 <output>
26590 <port id="0" precision="FP16">
26591 <dim>64</dim>
26592 <dim>1</dim>
26593 <dim>1</dim>
26594 <dim>1</dim>
26595 </port>
26596 </output>
26597 </layer>
26598 <layer id="1752" name="bottleneck4_5/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
26599 <data auto_broadcast="numpy"/>
26600 <input>
26601 <port id="0">
26602 <dim>64</dim>
26603 <dim>1</dim>
26604 <dim>3</dim>
26605 <dim>3</dim>
26606 </port>
26607 <port id="1">
26608 <dim>64</dim>
26609 <dim>1</dim>
26610 <dim>1</dim>
26611 <dim>1</dim>
26612 </port>
26613 </input>
26614 <output>
26615 <port id="2" precision="FP16">
26616 <dim>64</dim>
26617 <dim>1</dim>
26618 <dim>3</dim>
26619 <dim>3</dim>
26620 </port>
26621 </output>
26622 </layer>
26623 <layer id="1753" name="16891" type="Reshape" version="opset1">
26624 <data special_zero="true"/>
26625 <input>
26626 <port id="0">
26627 <dim>64</dim>
26628 <dim>1</dim>
26629 <dim>3</dim>
26630 <dim>3</dim>
26631 </port>
26632 <port id="1">
26633 <dim>5</dim>
26634 </port>
26635 </input>
26636 <output>
26637 <port id="2" precision="FP16">
26638 <dim>64</dim>
26639 <dim>1</dim>
26640 <dim>1</dim>
26641 <dim>3</dim>
26642 <dim>3</dim>
26643 </port>
26644 </output>
26645 </layer>
26646 <layer id="1754" name="bottleneck4_5/inner/dw1/conv" type="GroupConvolution" version="opset1">
26647 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
26648 <input>
26649 <port id="0">
26650 <dim>1</dim>
26651 <dim>64</dim>
26652 <dim>20</dim>
26653 <dim>34</dim>
26654 </port>
26655 <port id="1">
26656 <dim>64</dim>
26657 <dim>1</dim>
26658 <dim>1</dim>
26659 <dim>3</dim>
26660 <dim>3</dim>
26661 </port>
26662 </input>
26663 <output>
26664 <port id="2" precision="FP16">
26665 <dim>1</dim>
26666 <dim>64</dim>
26667 <dim>20</dim>
26668 <dim>34</dim>
26669 </port>
26670 </output>
26671 </layer>
26672 <layer id="1755" name="data_add_2438524390133022614" type="Const" version="opset1">
26673 <data element_type="f16" offset="370416" shape="1,64,1,1" size="128"/>
26674 <output>
26675 <port id="0" precision="FP16">
26676 <dim>1</dim>
26677 <dim>64</dim>
26678 <dim>1</dim>
26679 <dim>1</dim>
26680 </port>
26681 </output>
26682 </layer>
26683 <layer id="1756" name="bottleneck4_5/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
26684 <data auto_broadcast="numpy"/>
26685 <input>
26686 <port id="0">
26687 <dim>1</dim>
26688 <dim>64</dim>
26689 <dim>20</dim>
26690 <dim>34</dim>
26691 </port>
26692 <port id="1">
26693 <dim>1</dim>
26694 <dim>64</dim>
26695 <dim>1</dim>
26696 <dim>1</dim>
26697 </port>
26698 </input>
26699 <output>
26700 <port id="2" names="bottleneck4_5/inner/dw1/conv" precision="FP16">
26701 <dim>1</dim>
26702 <dim>64</dim>
26703 <dim>20</dim>
26704 <dim>34</dim>
26705 </port>
26706 </output>
26707 </layer>
26708 <layer id="1757" name="bottleneck4_5/inner/dw1/fn/weights30900396801332" type="Const" version="opset1">
26709 <data element_type="f32" offset="1576" shape="1" size="4"/>
26710 <output>
26711 <port id="0" precision="FP32">
26712 <dim>1</dim>
26713 </port>
26714 </output>
26715 </layer>
26716 <layer id="1758" name="bottleneck4_5/inner/dw1/fn" type="PReLU" version="opset1">
26717 <input>
26718 <port id="0">
26719 <dim>1</dim>
26720 <dim>64</dim>
26721 <dim>20</dim>
26722 <dim>34</dim>
26723 </port>
26724 <port id="1">
26725 <dim>1</dim>
26726 </port>
26727 </input>
26728 <output>
26729 <port id="2" names="bottleneck4_5/inner/dw1/conv" precision="FP16">
26730 <dim>1</dim>
26731 <dim>64</dim>
26732 <dim>20</dim>
26733 <dim>34</dim>
26734 </port>
26735 </output>
26736 </layer>
26737 <layer id="1759" name="bottleneck4_5/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
26738 <data auto_broadcast="numpy" levels="256"/>
26739 <input>
26740 <port id="0">
26741 <dim>1</dim>
26742 <dim>64</dim>
26743 <dim>20</dim>
26744 <dim>34</dim>
26745 </port>
26746 <port id="1"/>
26747 <port id="2"/>
26748 <port id="3"/>
26749 <port id="4"/>
26750 </input>
26751 <output>
26752 <port id="5" precision="FP16">
26753 <dim>1</dim>
26754 <dim>64</dim>
26755 <dim>20</dim>
26756 <dim>34</dim>
26757 </port>
26758 </output>
26759 </layer>
26760 <layer id="1760" name="bottleneck4_5/dim_inc/bn/mean/Fused_Mul__copy133410355/quantized1340820727" type="Const" version="opset1">
26761 <data element_type="i8" offset="370544" shape="256,64,1,1" size="16384"/>
26762 <output>
26763 <port id="0" precision="I8">
26764 <dim>256</dim>
26765 <dim>64</dim>
26766 <dim>1</dim>
26767 <dim>1</dim>
26768 </port>
26769 </output>
26770 </layer>
26771 <layer id="1761" name="bottleneck4_5/dim_inc/bn/mean/Fused_Mul__copy133410355/quantized/to_f16" type="Convert" version="opset1">
26772 <data destination_type="f16"/>
26773 <input>
26774 <port id="0">
26775 <dim>256</dim>
26776 <dim>64</dim>
26777 <dim>1</dim>
26778 <dim>1</dim>
26779 </port>
26780 </input>
26781 <output>
26782 <port id="1" precision="FP16">
26783 <dim>256</dim>
26784 <dim>64</dim>
26785 <dim>1</dim>
26786 <dim>1</dim>
26787 </port>
26788 </output>
26789 </layer>
26790 <layer id="1762" name="bottleneck4_5/dim_inc/conv/fq_weights_1/zero_point1342120445" type="Const" version="opset1">
26791 <data element_type="f16" offset="386928" shape="256,1,1,1" size="512"/>
26792 <output>
26793 <port id="0" precision="FP16">
26794 <dim>256</dim>
26795 <dim>1</dim>
26796 <dim>1</dim>
26797 <dim>1</dim>
26798 </port>
26799 </output>
26800 </layer>
26801 <layer id="1763" name="bottleneck4_5/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
26802 <data auto_broadcast="numpy"/>
26803 <input>
26804 <port id="0">
26805 <dim>256</dim>
26806 <dim>64</dim>
26807 <dim>1</dim>
26808 <dim>1</dim>
26809 </port>
26810 <port id="1">
26811 <dim>256</dim>
26812 <dim>1</dim>
26813 <dim>1</dim>
26814 <dim>1</dim>
26815 </port>
26816 </input>
26817 <output>
26818 <port id="2" precision="FP16">
26819 <dim>256</dim>
26820 <dim>64</dim>
26821 <dim>1</dim>
26822 <dim>1</dim>
26823 </port>
26824 </output>
26825 </layer>
26826 <layer id="1764" name="bottleneck4_5/dim_inc/conv/fq_weights_1/scale1341620247" type="Const" version="opset1">
26827 <data element_type="f16" offset="387440" shape="256,1,1,1" size="512"/>
26828 <output>
26829 <port id="0" precision="FP16">
26830 <dim>256</dim>
26831 <dim>1</dim>
26832 <dim>1</dim>
26833 <dim>1</dim>
26834 </port>
26835 </output>
26836 </layer>
26837 <layer id="1765" name="bottleneck4_5/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
26838 <data auto_broadcast="numpy"/>
26839 <input>
26840 <port id="0">
26841 <dim>256</dim>
26842 <dim>64</dim>
26843 <dim>1</dim>
26844 <dim>1</dim>
26845 </port>
26846 <port id="1">
26847 <dim>256</dim>
26848 <dim>1</dim>
26849 <dim>1</dim>
26850 <dim>1</dim>
26851 </port>
26852 </input>
26853 <output>
26854 <port id="2" precision="FP16">
26855 <dim>256</dim>
26856 <dim>64</dim>
26857 <dim>1</dim>
26858 <dim>1</dim>
26859 </port>
26860 </output>
26861 </layer>
26862 <layer id="1766" name="bottleneck4_5/dim_inc/conv" type="Convolution" version="opset1">
26863 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
26864 <input>
26865 <port id="0">
26866 <dim>1</dim>
26867 <dim>64</dim>
26868 <dim>20</dim>
26869 <dim>34</dim>
26870 </port>
26871 <port id="1">
26872 <dim>256</dim>
26873 <dim>64</dim>
26874 <dim>1</dim>
26875 <dim>1</dim>
26876 </port>
26877 </input>
26878 <output>
26879 <port id="2" precision="FP16">
26880 <dim>1</dim>
26881 <dim>256</dim>
26882 <dim>20</dim>
26883 <dim>34</dim>
26884 </port>
26885 </output>
26886 </layer>
26887 <layer id="1767" name="data_add_2439324398133619437" type="Const" version="opset1">
26888 <data element_type="f16" offset="387952" shape="1,256,1,1" size="512"/>
26889 <output>
26890 <port id="0" precision="FP16">
26891 <dim>1</dim>
26892 <dim>256</dim>
26893 <dim>1</dim>
26894 <dim>1</dim>
26895 </port>
26896 </output>
26897 </layer>
26898 <layer id="1768" name="bottleneck4_5/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
26899 <data auto_broadcast="numpy"/>
26900 <input>
26901 <port id="0">
26902 <dim>1</dim>
26903 <dim>256</dim>
26904 <dim>20</dim>
26905 <dim>34</dim>
26906 </port>
26907 <port id="1">
26908 <dim>1</dim>
26909 <dim>256</dim>
26910 <dim>1</dim>
26911 <dim>1</dim>
26912 </port>
26913 </input>
26914 <output>
26915 <port id="2" names="bottleneck4_5/dim_inc/conv" precision="FP16">
26916 <dim>1</dim>
26917 <dim>256</dim>
26918 <dim>20</dim>
26919 <dim>34</dim>
26920 </port>
26921 </output>
26922 </layer>
26923 <layer id="1769" name="bottleneck4_5/add/fq_input_1" type="FakeQuantize" version="opset1">
26924 <data auto_broadcast="numpy" levels="256"/>
26925 <input>
26926 <port id="0">
26927 <dim>1</dim>
26928 <dim>256</dim>
26929 <dim>20</dim>
26930 <dim>34</dim>
26931 </port>
26932 <port id="1"/>
26933 <port id="2"/>
26934 <port id="3"/>
26935 <port id="4"/>
26936 </input>
26937 <output>
26938 <port id="5" precision="FP16">
26939 <dim>1</dim>
26940 <dim>256</dim>
26941 <dim>20</dim>
26942 <dim>34</dim>
26943 </port>
26944 </output>
26945 </layer>
26946 <layer id="1770" name="bottleneck4_5/add" type="Add" version="opset1">
26947 <data auto_broadcast="numpy"/>
26948 <input>
26949 <port id="0">
26950 <dim>1</dim>
26951 <dim>256</dim>
26952 <dim>20</dim>
26953 <dim>34</dim>
26954 </port>
26955 <port id="1">
26956 <dim>1</dim>
26957 <dim>256</dim>
26958 <dim>20</dim>
26959 <dim>34</dim>
26960 </port>
26961 </input>
26962 <output>
26963 <port id="2" names="bottleneck4_5/add" precision="FP16">
26964 <dim>1</dim>
26965 <dim>256</dim>
26966 <dim>20</dim>
26967 <dim>34</dim>
26968 </port>
26969 </output>
26970 </layer>
26971 <layer id="1771" name="bottleneck4_5/fn/weights30876398571339" type="Const" version="opset1">
26972 <data element_type="f32" offset="1576" shape="1" size="4"/>
26973 <output>
26974 <port id="0" precision="FP32">
26975 <dim>1</dim>
26976 </port>
26977 </output>
26978 </layer>
26979 <layer id="1772" name="bottleneck4_5/fn" type="PReLU" version="opset1">
26980 <input>
26981 <port id="0">
26982 <dim>1</dim>
26983 <dim>256</dim>
26984 <dim>20</dim>
26985 <dim>34</dim>
26986 </port>
26987 <port id="1">
26988 <dim>1</dim>
26989 </port>
26990 </input>
26991 <output>
26992 <port id="2" names="bottleneck4_5/add" precision="FP16">
26993 <dim>1</dim>
26994 <dim>256</dim>
26995 <dim>20</dim>
26996 <dim>34</dim>
26997 </port>
26998 </output>
26999 </layer>
27000 <layer id="1773" name="bottleneck4_6/add/fq_input_0" type="FakeQuantize" version="opset1">
27001 <data auto_broadcast="numpy" levels="256"/>
27002 <input>
27003 <port id="0">
27004 <dim>1</dim>
27005 <dim>256</dim>
27006 <dim>20</dim>
27007 <dim>34</dim>
27008 </port>
27009 <port id="1"/>
27010 <port id="2"/>
27011 <port id="3"/>
27012 <port id="4"/>
27013 </input>
27014 <output>
27015 <port id="5" precision="FP16">
27016 <dim>1</dim>
27017 <dim>256</dim>
27018 <dim>20</dim>
27019 <dim>34</dim>
27020 </port>
27021 </output>
27022 </layer>
27023 <layer id="1774" name="3654365819443" type="Const" version="opset1">
27024 <data element_type="f16" offset="388464" shape="" size="2"/>
27025 <output>
27026 <port id="0" precision="FP16"/>
27027 </output>
27028 </layer>
27029 <layer id="1775" name="3655365922563" type="Const" version="opset1">
27030 <data element_type="f16" offset="388466" shape="" size="2"/>
27031 <output>
27032 <port id="0" precision="FP16"/>
27033 </output>
27034 </layer>
27035 <layer id="1776" name="3656366021522" type="Const" version="opset1">
27036 <data element_type="f16" offset="388464" shape="" size="2"/>
27037 <output>
27038 <port id="0" precision="FP16"/>
27039 </output>
27040 </layer>
27041 <layer id="1777" name="3657366121603" type="Const" version="opset1">
27042 <data element_type="f16" offset="388466" shape="" size="2"/>
27043 <output>
27044 <port id="0" precision="FP16"/>
27045 </output>
27046 </layer>
27047 <layer id="1778" name="5224522821594" type="Const" version="opset1">
27048 <data element_type="f16" offset="388468" shape="" size="2"/>
27049 <output>
27050 <port id="0" precision="FP16"/>
27051 </output>
27052 </layer>
27053 <layer id="1779" name="5225522920526" type="Const" version="opset1">
27054 <data element_type="f16" offset="388470" shape="" size="2"/>
27055 <output>
27056 <port id="0" precision="FP16"/>
27057 </output>
27058 </layer>
27059 <layer id="1780" name="5226523022527" type="Const" version="opset1">
27060 <data element_type="f16" offset="388468" shape="" size="2"/>
27061 <output>
27062 <port id="0" precision="FP16"/>
27063 </output>
27064 </layer>
27065 <layer id="1781" name="5227523120157" type="Const" version="opset1">
27066 <data element_type="f16" offset="388470" shape="" size="2"/>
27067 <output>
27068 <port id="0" precision="FP16"/>
27069 </output>
27070 </layer>
27071 <layer id="1782" name="4304430822476" type="Const" version="opset1">
27072 <data element_type="f16" offset="388472" shape="1,64,1,1" size="128"/>
27073 <output>
27074 <port id="0" precision="FP16">
27075 <dim>1</dim>
27076 <dim>64</dim>
27077 <dim>1</dim>
27078 <dim>1</dim>
27079 </port>
27080 </output>
27081 </layer>
27082 <layer id="1783" name="4305430921246" type="Const" version="opset1">
27083 <data element_type="f16" offset="388600" shape="1,64,1,1" size="128"/>
27084 <output>
27085 <port id="0" precision="FP16">
27086 <dim>1</dim>
27087 <dim>64</dim>
27088 <dim>1</dim>
27089 <dim>1</dim>
27090 </port>
27091 </output>
27092 </layer>
27093 <layer id="1784" name="4306431021162" type="Const" version="opset1">
27094 <data element_type="f16" offset="388472" shape="1,64,1,1" size="128"/>
27095 <output>
27096 <port id="0" precision="FP16">
27097 <dim>1</dim>
27098 <dim>64</dim>
27099 <dim>1</dim>
27100 <dim>1</dim>
27101 </port>
27102 </output>
27103 </layer>
27104 <layer id="1785" name="4307431122680" type="Const" version="opset1">
27105 <data element_type="f16" offset="388600" shape="1,64,1,1" size="128"/>
27106 <output>
27107 <port id="0" precision="FP16">
27108 <dim>1</dim>
27109 <dim>64</dim>
27110 <dim>1</dim>
27111 <dim>1</dim>
27112 </port>
27113 </output>
27114 </layer>
27115 <layer id="1786" name="bottleneck4_6/dim_red/bn/mean/Fused_Mul__copy134110358/quantized1417620322" type="Const" version="opset1">
27116 <data element_type="i8" offset="388728" shape="64,256,1,1" size="16384"/>
27117 <output>
27118 <port id="0" precision="I8">
27119 <dim>64</dim>
27120 <dim>256</dim>
27121 <dim>1</dim>
27122 <dim>1</dim>
27123 </port>
27124 </output>
27125 </layer>
27126 <layer id="1787" name="bottleneck4_6/dim_red/bn/mean/Fused_Mul__copy134110358/quantized/to_f16" type="Convert" version="opset1">
27127 <data destination_type="f16"/>
27128 <input>
27129 <port id="0">
27130 <dim>64</dim>
27131 <dim>256</dim>
27132 <dim>1</dim>
27133 <dim>1</dim>
27134 </port>
27135 </input>
27136 <output>
27137 <port id="1" precision="FP16">
27138 <dim>64</dim>
27139 <dim>256</dim>
27140 <dim>1</dim>
27141 <dim>1</dim>
27142 </port>
27143 </output>
27144 </layer>
27145 <layer id="1788" name="bottleneck4_6/dim_red/conv/fq_weights_1/zero_point1418922356" type="Const" version="opset1">
27146 <data element_type="f16" offset="405112" shape="64,1,1,1" size="128"/>
27147 <output>
27148 <port id="0" precision="FP16">
27149 <dim>64</dim>
27150 <dim>1</dim>
27151 <dim>1</dim>
27152 <dim>1</dim>
27153 </port>
27154 </output>
27155 </layer>
27156 <layer id="1789" name="bottleneck4_6/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
27157 <data auto_broadcast="numpy"/>
27158 <input>
27159 <port id="0">
27160 <dim>64</dim>
27161 <dim>256</dim>
27162 <dim>1</dim>
27163 <dim>1</dim>
27164 </port>
27165 <port id="1">
27166 <dim>64</dim>
27167 <dim>1</dim>
27168 <dim>1</dim>
27169 <dim>1</dim>
27170 </port>
27171 </input>
27172 <output>
27173 <port id="2" precision="FP16">
27174 <dim>64</dim>
27175 <dim>256</dim>
27176 <dim>1</dim>
27177 <dim>1</dim>
27178 </port>
27179 </output>
27180 </layer>
27181 <layer id="1790" name="bottleneck4_6/dim_red/conv/fq_weights_1/scale1418421513" type="Const" version="opset1">
27182 <data element_type="f16" offset="405240" shape="64,1,1,1" size="128"/>
27183 <output>
27184 <port id="0" precision="FP16">
27185 <dim>64</dim>
27186 <dim>1</dim>
27187 <dim>1</dim>
27188 <dim>1</dim>
27189 </port>
27190 </output>
27191 </layer>
27192 <layer id="1791" name="bottleneck4_6/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
27193 <data auto_broadcast="numpy"/>
27194 <input>
27195 <port id="0">
27196 <dim>64</dim>
27197 <dim>256</dim>
27198 <dim>1</dim>
27199 <dim>1</dim>
27200 </port>
27201 <port id="1">
27202 <dim>64</dim>
27203 <dim>1</dim>
27204 <dim>1</dim>
27205 <dim>1</dim>
27206 </port>
27207 </input>
27208 <output>
27209 <port id="2" precision="FP16">
27210 <dim>64</dim>
27211 <dim>256</dim>
27212 <dim>1</dim>
27213 <dim>1</dim>
27214 </port>
27215 </output>
27216 </layer>
27217 <layer id="1792" name="bottleneck4_6/dim_red/conv" type="Convolution" version="opset1">
27218 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
27219 <input>
27220 <port id="0">
27221 <dim>1</dim>
27222 <dim>256</dim>
27223 <dim>20</dim>
27224 <dim>34</dim>
27225 </port>
27226 <port id="1">
27227 <dim>64</dim>
27228 <dim>256</dim>
27229 <dim>1</dim>
27230 <dim>1</dim>
27231 </port>
27232 </input>
27233 <output>
27234 <port id="2" precision="FP16">
27235 <dim>1</dim>
27236 <dim>64</dim>
27237 <dim>20</dim>
27238 <dim>34</dim>
27239 </port>
27240 </output>
27241 </layer>
27242 <layer id="1793" name="data_add_2440124406134320532" type="Const" version="opset1">
27243 <data element_type="f16" offset="405368" shape="1,64,1,1" size="128"/>
27244 <output>
27245 <port id="0" precision="FP16">
27246 <dim>1</dim>
27247 <dim>64</dim>
27248 <dim>1</dim>
27249 <dim>1</dim>
27250 </port>
27251 </output>
27252 </layer>
27253 <layer id="1794" name="bottleneck4_6/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
27254 <data auto_broadcast="numpy"/>
27255 <input>
27256 <port id="0">
27257 <dim>1</dim>
27258 <dim>64</dim>
27259 <dim>20</dim>
27260 <dim>34</dim>
27261 </port>
27262 <port id="1">
27263 <dim>1</dim>
27264 <dim>64</dim>
27265 <dim>1</dim>
27266 <dim>1</dim>
27267 </port>
27268 </input>
27269 <output>
27270 <port id="2" names="bottleneck4_6/dim_red/conv" precision="FP16">
27271 <dim>1</dim>
27272 <dim>64</dim>
27273 <dim>20</dim>
27274 <dim>34</dim>
27275 </port>
27276 </output>
27277 </layer>
27278 <layer id="1795" name="bottleneck4_6/dim_red/fn/weights30864401721345" type="Const" version="opset1">
27279 <data element_type="f32" offset="1576" shape="1" size="4"/>
27280 <output>
27281 <port id="0" precision="FP32">
27282 <dim>1</dim>
27283 </port>
27284 </output>
27285 </layer>
27286 <layer id="1796" name="bottleneck4_6/dim_red/fn" type="PReLU" version="opset1">
27287 <input>
27288 <port id="0">
27289 <dim>1</dim>
27290 <dim>64</dim>
27291 <dim>20</dim>
27292 <dim>34</dim>
27293 </port>
27294 <port id="1">
27295 <dim>1</dim>
27296 </port>
27297 </input>
27298 <output>
27299 <port id="2" names="bottleneck4_6/dim_red/conv" precision="FP16">
27300 <dim>1</dim>
27301 <dim>64</dim>
27302 <dim>20</dim>
27303 <dim>34</dim>
27304 </port>
27305 </output>
27306 </layer>
27307 <layer id="1797" name="bottleneck4_6/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
27308 <data auto_broadcast="numpy" levels="256"/>
27309 <input>
27310 <port id="0">
27311 <dim>1</dim>
27312 <dim>64</dim>
27313 <dim>20</dim>
27314 <dim>34</dim>
27315 </port>
27316 <port id="1">
27317 <dim>1</dim>
27318 <dim>64</dim>
27319 <dim>1</dim>
27320 <dim>1</dim>
27321 </port>
27322 <port id="2">
27323 <dim>1</dim>
27324 <dim>64</dim>
27325 <dim>1</dim>
27326 <dim>1</dim>
27327 </port>
27328 <port id="3">
27329 <dim>1</dim>
27330 <dim>64</dim>
27331 <dim>1</dim>
27332 <dim>1</dim>
27333 </port>
27334 <port id="4">
27335 <dim>1</dim>
27336 <dim>64</dim>
27337 <dim>1</dim>
27338 <dim>1</dim>
27339 </port>
27340 </input>
27341 <output>
27342 <port id="5" precision="FP16">
27343 <dim>1</dim>
27344 <dim>64</dim>
27345 <dim>20</dim>
27346 <dim>34</dim>
27347 </port>
27348 </output>
27349 </layer>
27350 <layer id="1798" name="16883/value1688519941" type="Const" version="opset1">
27351 <data element_type="i64" offset="189984" shape="5" size="40"/>
27352 <output>
27353 <port id="0" precision="I64">
27354 <dim>5</dim>
27355 </port>
27356 </output>
27357 </layer>
27358 <layer id="1799" name="bottleneck4_6/inner/dw1/bn/mean/Fused_Mul__copy134710361/quantized1331220829" type="Const" version="opset1">
27359 <data element_type="i8" offset="405496" shape="64,1,3,3" size="576"/>
27360 <output>
27361 <port id="0" precision="I8">
27362 <dim>64</dim>
27363 <dim>1</dim>
27364 <dim>3</dim>
27365 <dim>3</dim>
27366 </port>
27367 </output>
27368 </layer>
27369 <layer id="1800" name="bottleneck4_6/inner/dw1/bn/mean/Fused_Mul__copy134710361/quantized/to_f16" type="Convert" version="opset1">
27370 <data destination_type="f16"/>
27371 <input>
27372 <port id="0">
27373 <dim>64</dim>
27374 <dim>1</dim>
27375 <dim>3</dim>
27376 <dim>3</dim>
27377 </port>
27378 </input>
27379 <output>
27380 <port id="1" precision="FP16">
27381 <dim>64</dim>
27382 <dim>1</dim>
27383 <dim>3</dim>
27384 <dim>3</dim>
27385 </port>
27386 </output>
27387 </layer>
27388 <layer id="1801" name="bottleneck4_6/inner/dw1/conv/fq_weights_1/zero_point1332522011" type="Const" version="opset1">
27389 <data element_type="f16" offset="406072" shape="64,1,1,1" size="128"/>
27390 <output>
27391 <port id="0" precision="FP16">
27392 <dim>64</dim>
27393 <dim>1</dim>
27394 <dim>1</dim>
27395 <dim>1</dim>
27396 </port>
27397 </output>
27398 </layer>
27399 <layer id="1802" name="bottleneck4_6/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
27400 <data auto_broadcast="numpy"/>
27401 <input>
27402 <port id="0">
27403 <dim>64</dim>
27404 <dim>1</dim>
27405 <dim>3</dim>
27406 <dim>3</dim>
27407 </port>
27408 <port id="1">
27409 <dim>64</dim>
27410 <dim>1</dim>
27411 <dim>1</dim>
27412 <dim>1</dim>
27413 </port>
27414 </input>
27415 <output>
27416 <port id="2" precision="FP16">
27417 <dim>64</dim>
27418 <dim>1</dim>
27419 <dim>3</dim>
27420 <dim>3</dim>
27421 </port>
27422 </output>
27423 </layer>
27424 <layer id="1803" name="bottleneck4_6/inner/dw1/conv/fq_weights_1/scale1332021300" type="Const" version="opset1">
27425 <data element_type="f16" offset="406200" shape="64,1,1,1" size="128"/>
27426 <output>
27427 <port id="0" precision="FP16">
27428 <dim>64</dim>
27429 <dim>1</dim>
27430 <dim>1</dim>
27431 <dim>1</dim>
27432 </port>
27433 </output>
27434 </layer>
27435 <layer id="1804" name="bottleneck4_6/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
27436 <data auto_broadcast="numpy"/>
27437 <input>
27438 <port id="0">
27439 <dim>64</dim>
27440 <dim>1</dim>
27441 <dim>3</dim>
27442 <dim>3</dim>
27443 </port>
27444 <port id="1">
27445 <dim>64</dim>
27446 <dim>1</dim>
27447 <dim>1</dim>
27448 <dim>1</dim>
27449 </port>
27450 </input>
27451 <output>
27452 <port id="2" precision="FP16">
27453 <dim>64</dim>
27454 <dim>1</dim>
27455 <dim>3</dim>
27456 <dim>3</dim>
27457 </port>
27458 </output>
27459 </layer>
27460 <layer id="1805" name="16883" type="Reshape" version="opset1">
27461 <data special_zero="true"/>
27462 <input>
27463 <port id="0">
27464 <dim>64</dim>
27465 <dim>1</dim>
27466 <dim>3</dim>
27467 <dim>3</dim>
27468 </port>
27469 <port id="1">
27470 <dim>5</dim>
27471 </port>
27472 </input>
27473 <output>
27474 <port id="2" precision="FP16">
27475 <dim>64</dim>
27476 <dim>1</dim>
27477 <dim>1</dim>
27478 <dim>3</dim>
27479 <dim>3</dim>
27480 </port>
27481 </output>
27482 </layer>
27483 <layer id="1806" name="bottleneck4_6/inner/dw1/conv" type="GroupConvolution" version="opset1">
27484 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
27485 <input>
27486 <port id="0">
27487 <dim>1</dim>
27488 <dim>64</dim>
27489 <dim>20</dim>
27490 <dim>34</dim>
27491 </port>
27492 <port id="1">
27493 <dim>64</dim>
27494 <dim>1</dim>
27495 <dim>1</dim>
27496 <dim>3</dim>
27497 <dim>3</dim>
27498 </port>
27499 </input>
27500 <output>
27501 <port id="2" precision="FP16">
27502 <dim>1</dim>
27503 <dim>64</dim>
27504 <dim>20</dim>
27505 <dim>34</dim>
27506 </port>
27507 </output>
27508 </layer>
27509 <layer id="1807" name="data_add_2440924414134921048" type="Const" version="opset1">
27510 <data element_type="f16" offset="406328" shape="1,64,1,1" size="128"/>
27511 <output>
27512 <port id="0" precision="FP16">
27513 <dim>1</dim>
27514 <dim>64</dim>
27515 <dim>1</dim>
27516 <dim>1</dim>
27517 </port>
27518 </output>
27519 </layer>
27520 <layer id="1808" name="bottleneck4_6/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
27521 <data auto_broadcast="numpy"/>
27522 <input>
27523 <port id="0">
27524 <dim>1</dim>
27525 <dim>64</dim>
27526 <dim>20</dim>
27527 <dim>34</dim>
27528 </port>
27529 <port id="1">
27530 <dim>1</dim>
27531 <dim>64</dim>
27532 <dim>1</dim>
27533 <dim>1</dim>
27534 </port>
27535 </input>
27536 <output>
27537 <port id="2" names="bottleneck4_6/inner/dw1/conv" precision="FP16">
27538 <dim>1</dim>
27539 <dim>64</dim>
27540 <dim>20</dim>
27541 <dim>34</dim>
27542 </port>
27543 </output>
27544 </layer>
27545 <layer id="1809" name="bottleneck4_6/inner/dw1/fn/weights31116402381351" type="Const" version="opset1">
27546 <data element_type="f32" offset="1576" shape="1" size="4"/>
27547 <output>
27548 <port id="0" precision="FP32">
27549 <dim>1</dim>
27550 </port>
27551 </output>
27552 </layer>
27553 <layer id="1810" name="bottleneck4_6/inner/dw1/fn" type="PReLU" version="opset1">
27554 <input>
27555 <port id="0">
27556 <dim>1</dim>
27557 <dim>64</dim>
27558 <dim>20</dim>
27559 <dim>34</dim>
27560 </port>
27561 <port id="1">
27562 <dim>1</dim>
27563 </port>
27564 </input>
27565 <output>
27566 <port id="2" names="bottleneck4_6/inner/dw1/conv" precision="FP16">
27567 <dim>1</dim>
27568 <dim>64</dim>
27569 <dim>20</dim>
27570 <dim>34</dim>
27571 </port>
27572 </output>
27573 </layer>
27574 <layer id="1811" name="bottleneck4_6/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
27575 <data auto_broadcast="numpy" levels="256"/>
27576 <input>
27577 <port id="0">
27578 <dim>1</dim>
27579 <dim>64</dim>
27580 <dim>20</dim>
27581 <dim>34</dim>
27582 </port>
27583 <port id="1"/>
27584 <port id="2"/>
27585 <port id="3"/>
27586 <port id="4"/>
27587 </input>
27588 <output>
27589 <port id="5" precision="FP16">
27590 <dim>1</dim>
27591 <dim>64</dim>
27592 <dim>20</dim>
27593 <dim>34</dim>
27594 </port>
27595 </output>
27596 </layer>
27597 <layer id="1812" name="bottleneck4_6/dim_inc/bn/mean/Fused_Mul__copy135310364/quantized1381619932" type="Const" version="opset1">
27598 <data element_type="i8" offset="406456" shape="256,64,1,1" size="16384"/>
27599 <output>
27600 <port id="0" precision="I8">
27601 <dim>256</dim>
27602 <dim>64</dim>
27603 <dim>1</dim>
27604 <dim>1</dim>
27605 </port>
27606 </output>
27607 </layer>
27608 <layer id="1813" name="bottleneck4_6/dim_inc/bn/mean/Fused_Mul__copy135310364/quantized/to_f16" type="Convert" version="opset1">
27609 <data destination_type="f16"/>
27610 <input>
27611 <port id="0">
27612 <dim>256</dim>
27613 <dim>64</dim>
27614 <dim>1</dim>
27615 <dim>1</dim>
27616 </port>
27617 </input>
27618 <output>
27619 <port id="1" precision="FP16">
27620 <dim>256</dim>
27621 <dim>64</dim>
27622 <dim>1</dim>
27623 <dim>1</dim>
27624 </port>
27625 </output>
27626 </layer>
27627 <layer id="1814" name="bottleneck4_6/dim_inc/conv/fq_weights_1/zero_point1382919689" type="Const" version="opset1">
27628 <data element_type="f16" offset="422840" shape="256,1,1,1" size="512"/>
27629 <output>
27630 <port id="0" precision="FP16">
27631 <dim>256</dim>
27632 <dim>1</dim>
27633 <dim>1</dim>
27634 <dim>1</dim>
27635 </port>
27636 </output>
27637 </layer>
27638 <layer id="1815" name="bottleneck4_6/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
27639 <data auto_broadcast="numpy"/>
27640 <input>
27641 <port id="0">
27642 <dim>256</dim>
27643 <dim>64</dim>
27644 <dim>1</dim>
27645 <dim>1</dim>
27646 </port>
27647 <port id="1">
27648 <dim>256</dim>
27649 <dim>1</dim>
27650 <dim>1</dim>
27651 <dim>1</dim>
27652 </port>
27653 </input>
27654 <output>
27655 <port id="2" precision="FP16">
27656 <dim>256</dim>
27657 <dim>64</dim>
27658 <dim>1</dim>
27659 <dim>1</dim>
27660 </port>
27661 </output>
27662 </layer>
27663 <layer id="1816" name="bottleneck4_6/dim_inc/conv/fq_weights_1/scale1382422968" type="Const" version="opset1">
27664 <data element_type="f16" offset="423352" shape="256,1,1,1" size="512"/>
27665 <output>
27666 <port id="0" precision="FP16">
27667 <dim>256</dim>
27668 <dim>1</dim>
27669 <dim>1</dim>
27670 <dim>1</dim>
27671 </port>
27672 </output>
27673 </layer>
27674 <layer id="1817" name="bottleneck4_6/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
27675 <data auto_broadcast="numpy"/>
27676 <input>
27677 <port id="0">
27678 <dim>256</dim>
27679 <dim>64</dim>
27680 <dim>1</dim>
27681 <dim>1</dim>
27682 </port>
27683 <port id="1">
27684 <dim>256</dim>
27685 <dim>1</dim>
27686 <dim>1</dim>
27687 <dim>1</dim>
27688 </port>
27689 </input>
27690 <output>
27691 <port id="2" precision="FP16">
27692 <dim>256</dim>
27693 <dim>64</dim>
27694 <dim>1</dim>
27695 <dim>1</dim>
27696 </port>
27697 </output>
27698 </layer>
27699 <layer id="1818" name="bottleneck4_6/dim_inc/conv" type="Convolution" version="opset1">
27700 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
27701 <input>
27702 <port id="0">
27703 <dim>1</dim>
27704 <dim>64</dim>
27705 <dim>20</dim>
27706 <dim>34</dim>
27707 </port>
27708 <port id="1">
27709 <dim>256</dim>
27710 <dim>64</dim>
27711 <dim>1</dim>
27712 <dim>1</dim>
27713 </port>
27714 </input>
27715 <output>
27716 <port id="2" precision="FP16">
27717 <dim>1</dim>
27718 <dim>256</dim>
27719 <dim>20</dim>
27720 <dim>34</dim>
27721 </port>
27722 </output>
27723 </layer>
27724 <layer id="1819" name="data_add_2441724422135519569" type="Const" version="opset1">
27725 <data element_type="f16" offset="423864" shape="1,256,1,1" size="512"/>
27726 <output>
27727 <port id="0" precision="FP16">
27728 <dim>1</dim>
27729 <dim>256</dim>
27730 <dim>1</dim>
27731 <dim>1</dim>
27732 </port>
27733 </output>
27734 </layer>
27735 <layer id="1820" name="bottleneck4_6/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
27736 <data auto_broadcast="numpy"/>
27737 <input>
27738 <port id="0">
27739 <dim>1</dim>
27740 <dim>256</dim>
27741 <dim>20</dim>
27742 <dim>34</dim>
27743 </port>
27744 <port id="1">
27745 <dim>1</dim>
27746 <dim>256</dim>
27747 <dim>1</dim>
27748 <dim>1</dim>
27749 </port>
27750 </input>
27751 <output>
27752 <port id="2" names="bottleneck4_6/dim_inc/conv" precision="FP16">
27753 <dim>1</dim>
27754 <dim>256</dim>
27755 <dim>20</dim>
27756 <dim>34</dim>
27757 </port>
27758 </output>
27759 </layer>
27760 <layer id="1821" name="bottleneck4_6/add/fq_input_1" type="FakeQuantize" version="opset1">
27761 <data auto_broadcast="numpy" levels="256"/>
27762 <input>
27763 <port id="0">
27764 <dim>1</dim>
27765 <dim>256</dim>
27766 <dim>20</dim>
27767 <dim>34</dim>
27768 </port>
27769 <port id="1"/>
27770 <port id="2"/>
27771 <port id="3"/>
27772 <port id="4"/>
27773 </input>
27774 <output>
27775 <port id="5" precision="FP16">
27776 <dim>1</dim>
27777 <dim>256</dim>
27778 <dim>20</dim>
27779 <dim>34</dim>
27780 </port>
27781 </output>
27782 </layer>
27783 <layer id="1822" name="bottleneck4_6/add" type="Add" version="opset1">
27784 <data auto_broadcast="numpy"/>
27785 <input>
27786 <port id="0">
27787 <dim>1</dim>
27788 <dim>256</dim>
27789 <dim>20</dim>
27790 <dim>34</dim>
27791 </port>
27792 <port id="1">
27793 <dim>1</dim>
27794 <dim>256</dim>
27795 <dim>20</dim>
27796 <dim>34</dim>
27797 </port>
27798 </input>
27799 <output>
27800 <port id="2" names="bottleneck4_6/add" precision="FP16">
27801 <dim>1</dim>
27802 <dim>256</dim>
27803 <dim>20</dim>
27804 <dim>34</dim>
27805 </port>
27806 </output>
27807 </layer>
27808 <layer id="1823" name="bottleneck4_6/fn/weights30820397731358" type="Const" version="opset1">
27809 <data element_type="f32" offset="1576" shape="1" size="4"/>
27810 <output>
27811 <port id="0" precision="FP32">
27812 <dim>1</dim>
27813 </port>
27814 </output>
27815 </layer>
27816 <layer id="1824" name="bottleneck4_6/fn" type="PReLU" version="opset1">
27817 <input>
27818 <port id="0">
27819 <dim>1</dim>
27820 <dim>256</dim>
27821 <dim>20</dim>
27822 <dim>34</dim>
27823 </port>
27824 <port id="1">
27825 <dim>1</dim>
27826 </port>
27827 </input>
27828 <output>
27829 <port id="2" names="bottleneck4_6/add" precision="FP16">
27830 <dim>1</dim>
27831 <dim>256</dim>
27832 <dim>20</dim>
27833 <dim>34</dim>
27834 </port>
27835 </output>
27836 </layer>
27837 <layer id="1825" name="bottleneck4_7/add/fq_input_0" type="FakeQuantize" version="opset1">
27838 <data auto_broadcast="numpy" levels="256"/>
27839 <input>
27840 <port id="0">
27841 <dim>1</dim>
27842 <dim>256</dim>
27843 <dim>20</dim>
27844 <dim>34</dim>
27845 </port>
27846 <port id="1"/>
27847 <port id="2"/>
27848 <port id="3"/>
27849 <port id="4"/>
27850 </input>
27851 <output>
27852 <port id="5" precision="FP16">
27853 <dim>1</dim>
27854 <dim>256</dim>
27855 <dim>20</dim>
27856 <dim>34</dim>
27857 </port>
27858 </output>
27859 </layer>
27860 <layer id="1826" name="2714271821972" type="Const" version="opset1">
27861 <data element_type="f16" offset="424376" shape="" size="2"/>
27862 <output>
27863 <port id="0" precision="FP16"/>
27864 </output>
27865 </layer>
27866 <layer id="1827" name="2715271922662" type="Const" version="opset1">
27867 <data element_type="f16" offset="424378" shape="" size="2"/>
27868 <output>
27869 <port id="0" precision="FP16"/>
27870 </output>
27871 </layer>
27872 <layer id="1828" name="2716272020352" type="Const" version="opset1">
27873 <data element_type="f16" offset="424376" shape="" size="2"/>
27874 <output>
27875 <port id="0" precision="FP16"/>
27876 </output>
27877 </layer>
27878 <layer id="1829" name="2717272122233" type="Const" version="opset1">
27879 <data element_type="f16" offset="424378" shape="" size="2"/>
27880 <output>
27881 <port id="0" precision="FP16"/>
27882 </output>
27883 </layer>
27884 <layer id="1830" name="4924492820382" type="Const" version="opset1">
27885 <data element_type="f16" offset="424380" shape="" size="2"/>
27886 <output>
27887 <port id="0" precision="FP16"/>
27888 </output>
27889 </layer>
27890 <layer id="1831" name="4925492922206" type="Const" version="opset1">
27891 <data element_type="f16" offset="424382" shape="" size="2"/>
27892 <output>
27893 <port id="0" precision="FP16"/>
27894 </output>
27895 </layer>
27896 <layer id="1832" name="4926493020565" type="Const" version="opset1">
27897 <data element_type="f16" offset="424380" shape="" size="2"/>
27898 <output>
27899 <port id="0" precision="FP16"/>
27900 </output>
27901 </layer>
27902 <layer id="1833" name="4927493119953" type="Const" version="opset1">
27903 <data element_type="f16" offset="424382" shape="" size="2"/>
27904 <output>
27905 <port id="0" precision="FP16"/>
27906 </output>
27907 </layer>
27908 <layer id="1834" name="3264326822242" type="Const" version="opset1">
27909 <data element_type="f16" offset="424384" shape="1,64,1,1" size="128"/>
27910 <output>
27911 <port id="0" precision="FP16">
27912 <dim>1</dim>
27913 <dim>64</dim>
27914 <dim>1</dim>
27915 <dim>1</dim>
27916 </port>
27917 </output>
27918 </layer>
27919 <layer id="1835" name="3265326921876" type="Const" version="opset1">
27920 <data element_type="f16" offset="424512" shape="1,64,1,1" size="128"/>
27921 <output>
27922 <port id="0" precision="FP16">
27923 <dim>1</dim>
27924 <dim>64</dim>
27925 <dim>1</dim>
27926 <dim>1</dim>
27927 </port>
27928 </output>
27929 </layer>
27930 <layer id="1836" name="3266327022854" type="Const" version="opset1">
27931 <data element_type="f16" offset="424384" shape="1,64,1,1" size="128"/>
27932 <output>
27933 <port id="0" precision="FP16">
27934 <dim>1</dim>
27935 <dim>64</dim>
27936 <dim>1</dim>
27937 <dim>1</dim>
27938 </port>
27939 </output>
27940 </layer>
27941 <layer id="1837" name="3267327119479" type="Const" version="opset1">
27942 <data element_type="f16" offset="424512" shape="1,64,1,1" size="128"/>
27943 <output>
27944 <port id="0" precision="FP16">
27945 <dim>1</dim>
27946 <dim>64</dim>
27947 <dim>1</dim>
27948 <dim>1</dim>
27949 </port>
27950 </output>
27951 </layer>
27952 <layer id="1838" name="bottleneck4_7/dim_red/bn/mean/Fused_Mul__copy136010367/quantized1396019887" type="Const" version="opset1">
27953 <data element_type="i8" offset="424640" shape="64,256,1,1" size="16384"/>
27954 <output>
27955 <port id="0" precision="I8">
27956 <dim>64</dim>
27957 <dim>256</dim>
27958 <dim>1</dim>
27959 <dim>1</dim>
27960 </port>
27961 </output>
27962 </layer>
27963 <layer id="1839" name="bottleneck4_7/dim_red/bn/mean/Fused_Mul__copy136010367/quantized/to_f16" type="Convert" version="opset1">
27964 <data destination_type="f16"/>
27965 <input>
27966 <port id="0">
27967 <dim>64</dim>
27968 <dim>256</dim>
27969 <dim>1</dim>
27970 <dim>1</dim>
27971 </port>
27972 </input>
27973 <output>
27974 <port id="1" precision="FP16">
27975 <dim>64</dim>
27976 <dim>256</dim>
27977 <dim>1</dim>
27978 <dim>1</dim>
27979 </port>
27980 </output>
27981 </layer>
27982 <layer id="1840" name="bottleneck4_7/dim_red/conv/fq_weights_1/zero_point1397320340" type="Const" version="opset1">
27983 <data element_type="f16" offset="441024" shape="64,1,1,1" size="128"/>
27984 <output>
27985 <port id="0" precision="FP16">
27986 <dim>64</dim>
27987 <dim>1</dim>
27988 <dim>1</dim>
27989 <dim>1</dim>
27990 </port>
27991 </output>
27992 </layer>
27993 <layer id="1841" name="bottleneck4_7/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
27994 <data auto_broadcast="numpy"/>
27995 <input>
27996 <port id="0">
27997 <dim>64</dim>
27998 <dim>256</dim>
27999 <dim>1</dim>
28000 <dim>1</dim>
28001 </port>
28002 <port id="1">
28003 <dim>64</dim>
28004 <dim>1</dim>
28005 <dim>1</dim>
28006 <dim>1</dim>
28007 </port>
28008 </input>
28009 <output>
28010 <port id="2" precision="FP16">
28011 <dim>64</dim>
28012 <dim>256</dim>
28013 <dim>1</dim>
28014 <dim>1</dim>
28015 </port>
28016 </output>
28017 </layer>
28018 <layer id="1842" name="bottleneck4_7/dim_red/conv/fq_weights_1/scale1396821975" type="Const" version="opset1">
28019 <data element_type="f16" offset="441152" shape="64,1,1,1" size="128"/>
28020 <output>
28021 <port id="0" precision="FP16">
28022 <dim>64</dim>
28023 <dim>1</dim>
28024 <dim>1</dim>
28025 <dim>1</dim>
28026 </port>
28027 </output>
28028 </layer>
28029 <layer id="1843" name="bottleneck4_7/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
28030 <data auto_broadcast="numpy"/>
28031 <input>
28032 <port id="0">
28033 <dim>64</dim>
28034 <dim>256</dim>
28035 <dim>1</dim>
28036 <dim>1</dim>
28037 </port>
28038 <port id="1">
28039 <dim>64</dim>
28040 <dim>1</dim>
28041 <dim>1</dim>
28042 <dim>1</dim>
28043 </port>
28044 </input>
28045 <output>
28046 <port id="2" precision="FP16">
28047 <dim>64</dim>
28048 <dim>256</dim>
28049 <dim>1</dim>
28050 <dim>1</dim>
28051 </port>
28052 </output>
28053 </layer>
28054 <layer id="1844" name="bottleneck4_7/dim_red/conv" type="Convolution" version="opset1">
28055 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
28056 <input>
28057 <port id="0">
28058 <dim>1</dim>
28059 <dim>256</dim>
28060 <dim>20</dim>
28061 <dim>34</dim>
28062 </port>
28063 <port id="1">
28064 <dim>64</dim>
28065 <dim>256</dim>
28066 <dim>1</dim>
28067 <dim>1</dim>
28068 </port>
28069 </input>
28070 <output>
28071 <port id="2" precision="FP16">
28072 <dim>1</dim>
28073 <dim>64</dim>
28074 <dim>20</dim>
28075 <dim>34</dim>
28076 </port>
28077 </output>
28078 </layer>
28079 <layer id="1845" name="data_add_2442524430136219935" type="Const" version="opset1">
28080 <data element_type="f16" offset="441280" shape="1,64,1,1" size="128"/>
28081 <output>
28082 <port id="0" precision="FP16">
28083 <dim>1</dim>
28084 <dim>64</dim>
28085 <dim>1</dim>
28086 <dim>1</dim>
28087 </port>
28088 </output>
28089 </layer>
28090 <layer id="1846" name="bottleneck4_7/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
28091 <data auto_broadcast="numpy"/>
28092 <input>
28093 <port id="0">
28094 <dim>1</dim>
28095 <dim>64</dim>
28096 <dim>20</dim>
28097 <dim>34</dim>
28098 </port>
28099 <port id="1">
28100 <dim>1</dim>
28101 <dim>64</dim>
28102 <dim>1</dim>
28103 <dim>1</dim>
28104 </port>
28105 </input>
28106 <output>
28107 <port id="2" names="bottleneck4_7/dim_red/conv" precision="FP16">
28108 <dim>1</dim>
28109 <dim>64</dim>
28110 <dim>20</dim>
28111 <dim>34</dim>
28112 </port>
28113 </output>
28114 </layer>
28115 <layer id="1847" name="bottleneck4_7/dim_red/fn/weights30816401931364" type="Const" version="opset1">
28116 <data element_type="f32" offset="1576" shape="1" size="4"/>
28117 <output>
28118 <port id="0" precision="FP32">
28119 <dim>1</dim>
28120 </port>
28121 </output>
28122 </layer>
28123 <layer id="1848" name="bottleneck4_7/dim_red/fn" type="PReLU" version="opset1">
28124 <input>
28125 <port id="0">
28126 <dim>1</dim>
28127 <dim>64</dim>
28128 <dim>20</dim>
28129 <dim>34</dim>
28130 </port>
28131 <port id="1">
28132 <dim>1</dim>
28133 </port>
28134 </input>
28135 <output>
28136 <port id="2" names="bottleneck4_7/dim_red/conv" precision="FP16">
28137 <dim>1</dim>
28138 <dim>64</dim>
28139 <dim>20</dim>
28140 <dim>34</dim>
28141 </port>
28142 </output>
28143 </layer>
28144 <layer id="1849" name="bottleneck4_7/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
28145 <data auto_broadcast="numpy" levels="256"/>
28146 <input>
28147 <port id="0">
28148 <dim>1</dim>
28149 <dim>64</dim>
28150 <dim>20</dim>
28151 <dim>34</dim>
28152 </port>
28153 <port id="1">
28154 <dim>1</dim>
28155 <dim>64</dim>
28156 <dim>1</dim>
28157 <dim>1</dim>
28158 </port>
28159 <port id="2">
28160 <dim>1</dim>
28161 <dim>64</dim>
28162 <dim>1</dim>
28163 <dim>1</dim>
28164 </port>
28165 <port id="3">
28166 <dim>1</dim>
28167 <dim>64</dim>
28168 <dim>1</dim>
28169 <dim>1</dim>
28170 </port>
28171 <port id="4">
28172 <dim>1</dim>
28173 <dim>64</dim>
28174 <dim>1</dim>
28175 <dim>1</dim>
28176 </port>
28177 </input>
28178 <output>
28179 <port id="5" precision="FP16">
28180 <dim>1</dim>
28181 <dim>64</dim>
28182 <dim>20</dim>
28183 <dim>34</dim>
28184 </port>
28185 </output>
28186 </layer>
28187 <layer id="1850" name="16823/value1682521726" type="Const" version="opset1">
28188 <data element_type="i64" offset="189984" shape="5" size="40"/>
28189 <output>
28190 <port id="0" precision="I64">
28191 <dim>5</dim>
28192 </port>
28193 </output>
28194 </layer>
28195 <layer id="1851" name="bottleneck4_7/inner/dw1/bn/mean/Fused_Mul__copy136610370/quantized1302419512" type="Const" version="opset1">
28196 <data element_type="i8" offset="441408" shape="64,1,3,3" size="576"/>
28197 <output>
28198 <port id="0" precision="I8">
28199 <dim>64</dim>
28200 <dim>1</dim>
28201 <dim>3</dim>
28202 <dim>3</dim>
28203 </port>
28204 </output>
28205 </layer>
28206 <layer id="1852" name="bottleneck4_7/inner/dw1/bn/mean/Fused_Mul__copy136610370/quantized/to_f16" type="Convert" version="opset1">
28207 <data destination_type="f16"/>
28208 <input>
28209 <port id="0">
28210 <dim>64</dim>
28211 <dim>1</dim>
28212 <dim>3</dim>
28213 <dim>3</dim>
28214 </port>
28215 </input>
28216 <output>
28217 <port id="1" precision="FP16">
28218 <dim>64</dim>
28219 <dim>1</dim>
28220 <dim>3</dim>
28221 <dim>3</dim>
28222 </port>
28223 </output>
28224 </layer>
28225 <layer id="1853" name="bottleneck4_7/inner/dw1/conv/fq_weights_1/zero_point1303720316" type="Const" version="opset1">
28226 <data element_type="f16" offset="441984" shape="64,1,1,1" size="128"/>
28227 <output>
28228 <port id="0" precision="FP16">
28229 <dim>64</dim>
28230 <dim>1</dim>
28231 <dim>1</dim>
28232 <dim>1</dim>
28233 </port>
28234 </output>
28235 </layer>
28236 <layer id="1854" name="bottleneck4_7/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
28237 <data auto_broadcast="numpy"/>
28238 <input>
28239 <port id="0">
28240 <dim>64</dim>
28241 <dim>1</dim>
28242 <dim>3</dim>
28243 <dim>3</dim>
28244 </port>
28245 <port id="1">
28246 <dim>64</dim>
28247 <dim>1</dim>
28248 <dim>1</dim>
28249 <dim>1</dim>
28250 </port>
28251 </input>
28252 <output>
28253 <port id="2" precision="FP16">
28254 <dim>64</dim>
28255 <dim>1</dim>
28256 <dim>3</dim>
28257 <dim>3</dim>
28258 </port>
28259 </output>
28260 </layer>
28261 <layer id="1855" name="bottleneck4_7/inner/dw1/conv/fq_weights_1/scale1303222362" type="Const" version="opset1">
28262 <data element_type="f16" offset="442112" shape="64,1,1,1" size="128"/>
28263 <output>
28264 <port id="0" precision="FP16">
28265 <dim>64</dim>
28266 <dim>1</dim>
28267 <dim>1</dim>
28268 <dim>1</dim>
28269 </port>
28270 </output>
28271 </layer>
28272 <layer id="1856" name="bottleneck4_7/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
28273 <data auto_broadcast="numpy"/>
28274 <input>
28275 <port id="0">
28276 <dim>64</dim>
28277 <dim>1</dim>
28278 <dim>3</dim>
28279 <dim>3</dim>
28280 </port>
28281 <port id="1">
28282 <dim>64</dim>
28283 <dim>1</dim>
28284 <dim>1</dim>
28285 <dim>1</dim>
28286 </port>
28287 </input>
28288 <output>
28289 <port id="2" precision="FP16">
28290 <dim>64</dim>
28291 <dim>1</dim>
28292 <dim>3</dim>
28293 <dim>3</dim>
28294 </port>
28295 </output>
28296 </layer>
28297 <layer id="1857" name="16823" type="Reshape" version="opset1">
28298 <data special_zero="true"/>
28299 <input>
28300 <port id="0">
28301 <dim>64</dim>
28302 <dim>1</dim>
28303 <dim>3</dim>
28304 <dim>3</dim>
28305 </port>
28306 <port id="1">
28307 <dim>5</dim>
28308 </port>
28309 </input>
28310 <output>
28311 <port id="2" precision="FP16">
28312 <dim>64</dim>
28313 <dim>1</dim>
28314 <dim>1</dim>
28315 <dim>3</dim>
28316 <dim>3</dim>
28317 </port>
28318 </output>
28319 </layer>
28320 <layer id="1858" name="bottleneck4_7/inner/dw1/conv" type="GroupConvolution" version="opset1">
28321 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
28322 <input>
28323 <port id="0">
28324 <dim>1</dim>
28325 <dim>64</dim>
28326 <dim>20</dim>
28327 <dim>34</dim>
28328 </port>
28329 <port id="1">
28330 <dim>64</dim>
28331 <dim>1</dim>
28332 <dim>1</dim>
28333 <dim>3</dim>
28334 <dim>3</dim>
28335 </port>
28336 </input>
28337 <output>
28338 <port id="2" precision="FP16">
28339 <dim>1</dim>
28340 <dim>64</dim>
28341 <dim>20</dim>
28342 <dim>34</dim>
28343 </port>
28344 </output>
28345 </layer>
28346 <layer id="1859" name="data_add_2443324438136819881" type="Const" version="opset1">
28347 <data element_type="f16" offset="442240" shape="1,64,1,1" size="128"/>
28348 <output>
28349 <port id="0" precision="FP16">
28350 <dim>1</dim>
28351 <dim>64</dim>
28352 <dim>1</dim>
28353 <dim>1</dim>
28354 </port>
28355 </output>
28356 </layer>
28357 <layer id="1860" name="bottleneck4_7/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
28358 <data auto_broadcast="numpy"/>
28359 <input>
28360 <port id="0">
28361 <dim>1</dim>
28362 <dim>64</dim>
28363 <dim>20</dim>
28364 <dim>34</dim>
28365 </port>
28366 <port id="1">
28367 <dim>1</dim>
28368 <dim>64</dim>
28369 <dim>1</dim>
28370 <dim>1</dim>
28371 </port>
28372 </input>
28373 <output>
28374 <port id="2" names="bottleneck4_7/inner/dw1/conv" precision="FP16">
28375 <dim>1</dim>
28376 <dim>64</dim>
28377 <dim>20</dim>
28378 <dim>34</dim>
28379 </port>
28380 </output>
28381 </layer>
28382 <layer id="1861" name="bottleneck4_7/inner/dw1/fn/weights31112402261370" type="Const" version="opset1">
28383 <data element_type="f32" offset="1576" shape="1" size="4"/>
28384 <output>
28385 <port id="0" precision="FP32">
28386 <dim>1</dim>
28387 </port>
28388 </output>
28389 </layer>
28390 <layer id="1862" name="bottleneck4_7/inner/dw1/fn" type="PReLU" version="opset1">
28391 <input>
28392 <port id="0">
28393 <dim>1</dim>
28394 <dim>64</dim>
28395 <dim>20</dim>
28396 <dim>34</dim>
28397 </port>
28398 <port id="1">
28399 <dim>1</dim>
28400 </port>
28401 </input>
28402 <output>
28403 <port id="2" names="bottleneck4_7/inner/dw1/conv" precision="FP16">
28404 <dim>1</dim>
28405 <dim>64</dim>
28406 <dim>20</dim>
28407 <dim>34</dim>
28408 </port>
28409 </output>
28410 </layer>
28411 <layer id="1863" name="bottleneck4_7/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
28412 <data auto_broadcast="numpy" levels="256"/>
28413 <input>
28414 <port id="0">
28415 <dim>1</dim>
28416 <dim>64</dim>
28417 <dim>20</dim>
28418 <dim>34</dim>
28419 </port>
28420 <port id="1"/>
28421 <port id="2"/>
28422 <port id="3"/>
28423 <port id="4"/>
28424 </input>
28425 <output>
28426 <port id="5" precision="FP16">
28427 <dim>1</dim>
28428 <dim>64</dim>
28429 <dim>20</dim>
28430 <dim>34</dim>
28431 </port>
28432 </output>
28433 </layer>
28434 <layer id="1864" name="bottleneck4_7/dim_inc/bn/mean/Fused_Mul__copy137210373/quantized1271220460" type="Const" version="opset1">
28435 <data element_type="i8" offset="442368" shape="256,64,1,1" size="16384"/>
28436 <output>
28437 <port id="0" precision="I8">
28438 <dim>256</dim>
28439 <dim>64</dim>
28440 <dim>1</dim>
28441 <dim>1</dim>
28442 </port>
28443 </output>
28444 </layer>
28445 <layer id="1865" name="bottleneck4_7/dim_inc/bn/mean/Fused_Mul__copy137210373/quantized/to_f16" type="Convert" version="opset1">
28446 <data destination_type="f16"/>
28447 <input>
28448 <port id="0">
28449 <dim>256</dim>
28450 <dim>64</dim>
28451 <dim>1</dim>
28452 <dim>1</dim>
28453 </port>
28454 </input>
28455 <output>
28456 <port id="1" precision="FP16">
28457 <dim>256</dim>
28458 <dim>64</dim>
28459 <dim>1</dim>
28460 <dim>1</dim>
28461 </port>
28462 </output>
28463 </layer>
28464 <layer id="1866" name="bottleneck4_7/dim_inc/conv/fq_weights_1/zero_point1272520853" type="Const" version="opset1">
28465 <data element_type="f16" offset="458752" shape="256,1,1,1" size="512"/>
28466 <output>
28467 <port id="0" precision="FP16">
28468 <dim>256</dim>
28469 <dim>1</dim>
28470 <dim>1</dim>
28471 <dim>1</dim>
28472 </port>
28473 </output>
28474 </layer>
28475 <layer id="1867" name="bottleneck4_7/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
28476 <data auto_broadcast="numpy"/>
28477 <input>
28478 <port id="0">
28479 <dim>256</dim>
28480 <dim>64</dim>
28481 <dim>1</dim>
28482 <dim>1</dim>
28483 </port>
28484 <port id="1">
28485 <dim>256</dim>
28486 <dim>1</dim>
28487 <dim>1</dim>
28488 <dim>1</dim>
28489 </port>
28490 </input>
28491 <output>
28492 <port id="2" precision="FP16">
28493 <dim>256</dim>
28494 <dim>64</dim>
28495 <dim>1</dim>
28496 <dim>1</dim>
28497 </port>
28498 </output>
28499 </layer>
28500 <layer id="1868" name="bottleneck4_7/dim_inc/conv/fq_weights_1/scale1272022896" type="Const" version="opset1">
28501 <data element_type="f16" offset="459264" shape="256,1,1,1" size="512"/>
28502 <output>
28503 <port id="0" precision="FP16">
28504 <dim>256</dim>
28505 <dim>1</dim>
28506 <dim>1</dim>
28507 <dim>1</dim>
28508 </port>
28509 </output>
28510 </layer>
28511 <layer id="1869" name="bottleneck4_7/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
28512 <data auto_broadcast="numpy"/>
28513 <input>
28514 <port id="0">
28515 <dim>256</dim>
28516 <dim>64</dim>
28517 <dim>1</dim>
28518 <dim>1</dim>
28519 </port>
28520 <port id="1">
28521 <dim>256</dim>
28522 <dim>1</dim>
28523 <dim>1</dim>
28524 <dim>1</dim>
28525 </port>
28526 </input>
28527 <output>
28528 <port id="2" precision="FP16">
28529 <dim>256</dim>
28530 <dim>64</dim>
28531 <dim>1</dim>
28532 <dim>1</dim>
28533 </port>
28534 </output>
28535 </layer>
28536 <layer id="1870" name="bottleneck4_7/dim_inc/conv" type="Convolution" version="opset1">
28537 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
28538 <input>
28539 <port id="0">
28540 <dim>1</dim>
28541 <dim>64</dim>
28542 <dim>20</dim>
28543 <dim>34</dim>
28544 </port>
28545 <port id="1">
28546 <dim>256</dim>
28547 <dim>64</dim>
28548 <dim>1</dim>
28549 <dim>1</dim>
28550 </port>
28551 </input>
28552 <output>
28553 <port id="2" precision="FP16">
28554 <dim>1</dim>
28555 <dim>256</dim>
28556 <dim>20</dim>
28557 <dim>34</dim>
28558 </port>
28559 </output>
28560 </layer>
28561 <layer id="1871" name="data_add_2444124446137421471" type="Const" version="opset1">
28562 <data element_type="f16" offset="459776" shape="1,256,1,1" size="512"/>
28563 <output>
28564 <port id="0" precision="FP16">
28565 <dim>1</dim>
28566 <dim>256</dim>
28567 <dim>1</dim>
28568 <dim>1</dim>
28569 </port>
28570 </output>
28571 </layer>
28572 <layer id="1872" name="bottleneck4_7/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
28573 <data auto_broadcast="numpy"/>
28574 <input>
28575 <port id="0">
28576 <dim>1</dim>
28577 <dim>256</dim>
28578 <dim>20</dim>
28579 <dim>34</dim>
28580 </port>
28581 <port id="1">
28582 <dim>1</dim>
28583 <dim>256</dim>
28584 <dim>1</dim>
28585 <dim>1</dim>
28586 </port>
28587 </input>
28588 <output>
28589 <port id="2" names="bottleneck4_7/dim_inc/conv" precision="FP16">
28590 <dim>1</dim>
28591 <dim>256</dim>
28592 <dim>20</dim>
28593 <dim>34</dim>
28594 </port>
28595 </output>
28596 </layer>
28597 <layer id="1873" name="bottleneck4_7/add/fq_input_1" type="FakeQuantize" version="opset1">
28598 <data auto_broadcast="numpy" levels="256"/>
28599 <input>
28600 <port id="0">
28601 <dim>1</dim>
28602 <dim>256</dim>
28603 <dim>20</dim>
28604 <dim>34</dim>
28605 </port>
28606 <port id="1"/>
28607 <port id="2"/>
28608 <port id="3"/>
28609 <port id="4"/>
28610 </input>
28611 <output>
28612 <port id="5" precision="FP16">
28613 <dim>1</dim>
28614 <dim>256</dim>
28615 <dim>20</dim>
28616 <dim>34</dim>
28617 </port>
28618 </output>
28619 </layer>
28620 <layer id="1874" name="bottleneck4_7/add" type="Add" version="opset1">
28621 <data auto_broadcast="numpy"/>
28622 <input>
28623 <port id="0">
28624 <dim>1</dim>
28625 <dim>256</dim>
28626 <dim>20</dim>
28627 <dim>34</dim>
28628 </port>
28629 <port id="1">
28630 <dim>1</dim>
28631 <dim>256</dim>
28632 <dim>20</dim>
28633 <dim>34</dim>
28634 </port>
28635 </input>
28636 <output>
28637 <port id="2" names="bottleneck4_7/add" precision="FP16">
28638 <dim>1</dim>
28639 <dim>256</dim>
28640 <dim>20</dim>
28641 <dim>34</dim>
28642 </port>
28643 </output>
28644 </layer>
28645 <layer id="1875" name="bottleneck4_7/fn/weights31088405411377" type="Const" version="opset1">
28646 <data element_type="f32" offset="1576" shape="1" size="4"/>
28647 <output>
28648 <port id="0" precision="FP32">
28649 <dim>1</dim>
28650 </port>
28651 </output>
28652 </layer>
28653 <layer id="1876" name="bottleneck4_7/fn" type="PReLU" version="opset1">
28654 <input>
28655 <port id="0">
28656 <dim>1</dim>
28657 <dim>256</dim>
28658 <dim>20</dim>
28659 <dim>34</dim>
28660 </port>
28661 <port id="1">
28662 <dim>1</dim>
28663 </port>
28664 </input>
28665 <output>
28666 <port id="2" names="bottleneck4_7/add" precision="FP16">
28667 <dim>1</dim>
28668 <dim>256</dim>
28669 <dim>20</dim>
28670 <dim>34</dim>
28671 </port>
28672 </output>
28673 </layer>
28674 <layer id="1877" name="bottleneck4_8/add/fq_input_0" type="FakeQuantize" version="opset1">
28675 <data auto_broadcast="numpy" levels="256"/>
28676 <input>
28677 <port id="0">
28678 <dim>1</dim>
28679 <dim>256</dim>
28680 <dim>20</dim>
28681 <dim>34</dim>
28682 </port>
28683 <port id="1"/>
28684 <port id="2"/>
28685 <port id="3"/>
28686 <port id="4"/>
28687 </input>
28688 <output>
28689 <port id="5" precision="FP16">
28690 <dim>1</dim>
28691 <dim>256</dim>
28692 <dim>20</dim>
28693 <dim>34</dim>
28694 </port>
28695 </output>
28696 </layer>
28697 <layer id="1878" name="5094509822215" type="Const" version="opset1">
28698 <data element_type="f16" offset="460288" shape="" size="2"/>
28699 <output>
28700 <port id="0" precision="FP16"/>
28701 </output>
28702 </layer>
28703 <layer id="1879" name="5095509922593" type="Const" version="opset1">
28704 <data element_type="f16" offset="460290" shape="" size="2"/>
28705 <output>
28706 <port id="0" precision="FP16"/>
28707 </output>
28708 </layer>
28709 <layer id="1880" name="5096510021438" type="Const" version="opset1">
28710 <data element_type="f16" offset="460288" shape="" size="2"/>
28711 <output>
28712 <port id="0" precision="FP16"/>
28713 </output>
28714 </layer>
28715 <layer id="1881" name="5097510119770" type="Const" version="opset1">
28716 <data element_type="f16" offset="460290" shape="" size="2"/>
28717 <output>
28718 <port id="0" precision="FP16"/>
28719 </output>
28720 </layer>
28721 <layer id="1882" name="3624362821117" type="Const" version="opset1">
28722 <data element_type="f16" offset="460292" shape="" size="2"/>
28723 <output>
28724 <port id="0" precision="FP16"/>
28725 </output>
28726 </layer>
28727 <layer id="1883" name="3625362921543" type="Const" version="opset1">
28728 <data element_type="f16" offset="460294" shape="" size="2"/>
28729 <output>
28730 <port id="0" precision="FP16"/>
28731 </output>
28732 </layer>
28733 <layer id="1884" name="3626363022023" type="Const" version="opset1">
28734 <data element_type="f16" offset="460292" shape="" size="2"/>
28735 <output>
28736 <port id="0" precision="FP16"/>
28737 </output>
28738 </layer>
28739 <layer id="1885" name="3627363122647" type="Const" version="opset1">
28740 <data element_type="f16" offset="460294" shape="" size="2"/>
28741 <output>
28742 <port id="0" precision="FP16"/>
28743 </output>
28744 </layer>
28745 <layer id="1886" name="3444344820952" type="Const" version="opset1">
28746 <data element_type="f16" offset="460296" shape="1,64,1,1" size="128"/>
28747 <output>
28748 <port id="0" precision="FP16">
28749 <dim>1</dim>
28750 <dim>64</dim>
28751 <dim>1</dim>
28752 <dim>1</dim>
28753 </port>
28754 </output>
28755 </layer>
28756 <layer id="1887" name="3445344921621" type="Const" version="opset1">
28757 <data element_type="f16" offset="460424" shape="1,64,1,1" size="128"/>
28758 <output>
28759 <port id="0" precision="FP16">
28760 <dim>1</dim>
28761 <dim>64</dim>
28762 <dim>1</dim>
28763 <dim>1</dim>
28764 </port>
28765 </output>
28766 </layer>
28767 <layer id="1888" name="3446345022500" type="Const" version="opset1">
28768 <data element_type="f16" offset="460296" shape="1,64,1,1" size="128"/>
28769 <output>
28770 <port id="0" precision="FP16">
28771 <dim>1</dim>
28772 <dim>64</dim>
28773 <dim>1</dim>
28774 <dim>1</dim>
28775 </port>
28776 </output>
28777 </layer>
28778 <layer id="1889" name="3447345122788" type="Const" version="opset1">
28779 <data element_type="f16" offset="460424" shape="1,64,1,1" size="128"/>
28780 <output>
28781 <port id="0" precision="FP16">
28782 <dim>1</dim>
28783 <dim>64</dim>
28784 <dim>1</dim>
28785 <dim>1</dim>
28786 </port>
28787 </output>
28788 </layer>
28789 <layer id="1890" name="bottleneck4_8/dim_red/bn/mean/Fused_Mul__copy137910376/quantized1192020373" type="Const" version="opset1">
28790 <data element_type="i8" offset="460552" shape="64,256,1,1" size="16384"/>
28791 <output>
28792 <port id="0" precision="I8">
28793 <dim>64</dim>
28794 <dim>256</dim>
28795 <dim>1</dim>
28796 <dim>1</dim>
28797 </port>
28798 </output>
28799 </layer>
28800 <layer id="1891" name="bottleneck4_8/dim_red/bn/mean/Fused_Mul__copy137910376/quantized/to_f16" type="Convert" version="opset1">
28801 <data destination_type="f16"/>
28802 <input>
28803 <port id="0">
28804 <dim>64</dim>
28805 <dim>256</dim>
28806 <dim>1</dim>
28807 <dim>1</dim>
28808 </port>
28809 </input>
28810 <output>
28811 <port id="1" precision="FP16">
28812 <dim>64</dim>
28813 <dim>256</dim>
28814 <dim>1</dim>
28815 <dim>1</dim>
28816 </port>
28817 </output>
28818 </layer>
28819 <layer id="1892" name="bottleneck4_8/dim_red/conv/fq_weights_1/zero_point1193321264" type="Const" version="opset1">
28820 <data element_type="f16" offset="476936" shape="64,1,1,1" size="128"/>
28821 <output>
28822 <port id="0" precision="FP16">
28823 <dim>64</dim>
28824 <dim>1</dim>
28825 <dim>1</dim>
28826 <dim>1</dim>
28827 </port>
28828 </output>
28829 </layer>
28830 <layer id="1893" name="bottleneck4_8/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
28831 <data auto_broadcast="numpy"/>
28832 <input>
28833 <port id="0">
28834 <dim>64</dim>
28835 <dim>256</dim>
28836 <dim>1</dim>
28837 <dim>1</dim>
28838 </port>
28839 <port id="1">
28840 <dim>64</dim>
28841 <dim>1</dim>
28842 <dim>1</dim>
28843 <dim>1</dim>
28844 </port>
28845 </input>
28846 <output>
28847 <port id="2" precision="FP16">
28848 <dim>64</dim>
28849 <dim>256</dim>
28850 <dim>1</dim>
28851 <dim>1</dim>
28852 </port>
28853 </output>
28854 </layer>
28855 <layer id="1894" name="bottleneck4_8/dim_red/conv/fq_weights_1/scale1192820028" type="Const" version="opset1">
28856 <data element_type="f16" offset="477064" shape="64,1,1,1" size="128"/>
28857 <output>
28858 <port id="0" precision="FP16">
28859 <dim>64</dim>
28860 <dim>1</dim>
28861 <dim>1</dim>
28862 <dim>1</dim>
28863 </port>
28864 </output>
28865 </layer>
28866 <layer id="1895" name="bottleneck4_8/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
28867 <data auto_broadcast="numpy"/>
28868 <input>
28869 <port id="0">
28870 <dim>64</dim>
28871 <dim>256</dim>
28872 <dim>1</dim>
28873 <dim>1</dim>
28874 </port>
28875 <port id="1">
28876 <dim>64</dim>
28877 <dim>1</dim>
28878 <dim>1</dim>
28879 <dim>1</dim>
28880 </port>
28881 </input>
28882 <output>
28883 <port id="2" precision="FP16">
28884 <dim>64</dim>
28885 <dim>256</dim>
28886 <dim>1</dim>
28887 <dim>1</dim>
28888 </port>
28889 </output>
28890 </layer>
28891 <layer id="1896" name="bottleneck4_8/dim_red/conv" type="Convolution" version="opset1">
28892 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
28893 <input>
28894 <port id="0">
28895 <dim>1</dim>
28896 <dim>256</dim>
28897 <dim>20</dim>
28898 <dim>34</dim>
28899 </port>
28900 <port id="1">
28901 <dim>64</dim>
28902 <dim>256</dim>
28903 <dim>1</dim>
28904 <dim>1</dim>
28905 </port>
28906 </input>
28907 <output>
28908 <port id="2" precision="FP16">
28909 <dim>1</dim>
28910 <dim>64</dim>
28911 <dim>20</dim>
28912 <dim>34</dim>
28913 </port>
28914 </output>
28915 </layer>
28916 <layer id="1897" name="data_add_2444924454138121021" type="Const" version="opset1">
28917 <data element_type="f16" offset="477192" shape="1,64,1,1" size="128"/>
28918 <output>
28919 <port id="0" precision="FP16">
28920 <dim>1</dim>
28921 <dim>64</dim>
28922 <dim>1</dim>
28923 <dim>1</dim>
28924 </port>
28925 </output>
28926 </layer>
28927 <layer id="1898" name="bottleneck4_8/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
28928 <data auto_broadcast="numpy"/>
28929 <input>
28930 <port id="0">
28931 <dim>1</dim>
28932 <dim>64</dim>
28933 <dim>20</dim>
28934 <dim>34</dim>
28935 </port>
28936 <port id="1">
28937 <dim>1</dim>
28938 <dim>64</dim>
28939 <dim>1</dim>
28940 <dim>1</dim>
28941 </port>
28942 </input>
28943 <output>
28944 <port id="2" names="bottleneck4_8/dim_red/conv" precision="FP16">
28945 <dim>1</dim>
28946 <dim>64</dim>
28947 <dim>20</dim>
28948 <dim>34</dim>
28949 </port>
28950 </output>
28951 </layer>
28952 <layer id="1899" name="bottleneck4_8/dim_red/fn/weights31128404361383" type="Const" version="opset1">
28953 <data element_type="f32" offset="1576" shape="1" size="4"/>
28954 <output>
28955 <port id="0" precision="FP32">
28956 <dim>1</dim>
28957 </port>
28958 </output>
28959 </layer>
28960 <layer id="1900" name="bottleneck4_8/dim_red/fn" type="PReLU" version="opset1">
28961 <input>
28962 <port id="0">
28963 <dim>1</dim>
28964 <dim>64</dim>
28965 <dim>20</dim>
28966 <dim>34</dim>
28967 </port>
28968 <port id="1">
28969 <dim>1</dim>
28970 </port>
28971 </input>
28972 <output>
28973 <port id="2" names="bottleneck4_8/dim_red/conv" precision="FP16">
28974 <dim>1</dim>
28975 <dim>64</dim>
28976 <dim>20</dim>
28977 <dim>34</dim>
28978 </port>
28979 </output>
28980 </layer>
28981 <layer id="1901" name="bottleneck4_8/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
28982 <data auto_broadcast="numpy" levels="256"/>
28983 <input>
28984 <port id="0">
28985 <dim>1</dim>
28986 <dim>64</dim>
28987 <dim>20</dim>
28988 <dim>34</dim>
28989 </port>
28990 <port id="1">
28991 <dim>1</dim>
28992 <dim>64</dim>
28993 <dim>1</dim>
28994 <dim>1</dim>
28995 </port>
28996 <port id="2">
28997 <dim>1</dim>
28998 <dim>64</dim>
28999 <dim>1</dim>
29000 <dim>1</dim>
29001 </port>
29002 <port id="3">
29003 <dim>1</dim>
29004 <dim>64</dim>
29005 <dim>1</dim>
29006 <dim>1</dim>
29007 </port>
29008 <port id="4">
29009 <dim>1</dim>
29010 <dim>64</dim>
29011 <dim>1</dim>
29012 <dim>1</dim>
29013 </port>
29014 </input>
29015 <output>
29016 <port id="5" precision="FP16">
29017 <dim>1</dim>
29018 <dim>64</dim>
29019 <dim>20</dim>
29020 <dim>34</dim>
29021 </port>
29022 </output>
29023 </layer>
29024 <layer id="1902" name="16835/value1683721348" type="Const" version="opset1">
29025 <data element_type="i64" offset="189984" shape="5" size="40"/>
29026 <output>
29027 <port id="0" precision="I64">
29028 <dim>5</dim>
29029 </port>
29030 </output>
29031 </layer>
29032 <layer id="1903" name="bottleneck4_8/inner/dw1/bn/mean/Fused_Mul__copy138510379/quantized1199221804" type="Const" version="opset1">
29033 <data element_type="i8" offset="477320" shape="64,1,3,3" size="576"/>
29034 <output>
29035 <port id="0" precision="I8">
29036 <dim>64</dim>
29037 <dim>1</dim>
29038 <dim>3</dim>
29039 <dim>3</dim>
29040 </port>
29041 </output>
29042 </layer>
29043 <layer id="1904" name="bottleneck4_8/inner/dw1/bn/mean/Fused_Mul__copy138510379/quantized/to_f16" type="Convert" version="opset1">
29044 <data destination_type="f16"/>
29045 <input>
29046 <port id="0">
29047 <dim>64</dim>
29048 <dim>1</dim>
29049 <dim>3</dim>
29050 <dim>3</dim>
29051 </port>
29052 </input>
29053 <output>
29054 <port id="1" precision="FP16">
29055 <dim>64</dim>
29056 <dim>1</dim>
29057 <dim>3</dim>
29058 <dim>3</dim>
29059 </port>
29060 </output>
29061 </layer>
29062 <layer id="1905" name="bottleneck4_8/inner/dw1/conv/fq_weights_1/zero_point1200522617" type="Const" version="opset1">
29063 <data element_type="f16" offset="477896" shape="64,1,1,1" size="128"/>
29064 <output>
29065 <port id="0" precision="FP16">
29066 <dim>64</dim>
29067 <dim>1</dim>
29068 <dim>1</dim>
29069 <dim>1</dim>
29070 </port>
29071 </output>
29072 </layer>
29073 <layer id="1906" name="bottleneck4_8/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
29074 <data auto_broadcast="numpy"/>
29075 <input>
29076 <port id="0">
29077 <dim>64</dim>
29078 <dim>1</dim>
29079 <dim>3</dim>
29080 <dim>3</dim>
29081 </port>
29082 <port id="1">
29083 <dim>64</dim>
29084 <dim>1</dim>
29085 <dim>1</dim>
29086 <dim>1</dim>
29087 </port>
29088 </input>
29089 <output>
29090 <port id="2" precision="FP16">
29091 <dim>64</dim>
29092 <dim>1</dim>
29093 <dim>3</dim>
29094 <dim>3</dim>
29095 </port>
29096 </output>
29097 </layer>
29098 <layer id="1907" name="bottleneck4_8/inner/dw1/conv/fq_weights_1/scale1200022920" type="Const" version="opset1">
29099 <data element_type="f16" offset="478024" shape="64,1,1,1" size="128"/>
29100 <output>
29101 <port id="0" precision="FP16">
29102 <dim>64</dim>
29103 <dim>1</dim>
29104 <dim>1</dim>
29105 <dim>1</dim>
29106 </port>
29107 </output>
29108 </layer>
29109 <layer id="1908" name="bottleneck4_8/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
29110 <data auto_broadcast="numpy"/>
29111 <input>
29112 <port id="0">
29113 <dim>64</dim>
29114 <dim>1</dim>
29115 <dim>3</dim>
29116 <dim>3</dim>
29117 </port>
29118 <port id="1">
29119 <dim>64</dim>
29120 <dim>1</dim>
29121 <dim>1</dim>
29122 <dim>1</dim>
29123 </port>
29124 </input>
29125 <output>
29126 <port id="2" precision="FP16">
29127 <dim>64</dim>
29128 <dim>1</dim>
29129 <dim>3</dim>
29130 <dim>3</dim>
29131 </port>
29132 </output>
29133 </layer>
29134 <layer id="1909" name="16835" type="Reshape" version="opset1">
29135 <data special_zero="true"/>
29136 <input>
29137 <port id="0">
29138 <dim>64</dim>
29139 <dim>1</dim>
29140 <dim>3</dim>
29141 <dim>3</dim>
29142 </port>
29143 <port id="1">
29144 <dim>5</dim>
29145 </port>
29146 </input>
29147 <output>
29148 <port id="2" precision="FP16">
29149 <dim>64</dim>
29150 <dim>1</dim>
29151 <dim>1</dim>
29152 <dim>3</dim>
29153 <dim>3</dim>
29154 </port>
29155 </output>
29156 </layer>
29157 <layer id="1910" name="bottleneck4_8/inner/dw1/conv" type="GroupConvolution" version="opset1">
29158 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
29159 <input>
29160 <port id="0">
29161 <dim>1</dim>
29162 <dim>64</dim>
29163 <dim>20</dim>
29164 <dim>34</dim>
29165 </port>
29166 <port id="1">
29167 <dim>64</dim>
29168 <dim>1</dim>
29169 <dim>1</dim>
29170 <dim>3</dim>
29171 <dim>3</dim>
29172 </port>
29173 </input>
29174 <output>
29175 <port id="2" precision="FP16">
29176 <dim>1</dim>
29177 <dim>64</dim>
29178 <dim>20</dim>
29179 <dim>34</dim>
29180 </port>
29181 </output>
29182 </layer>
29183 <layer id="1911" name="data_add_2445724462138720841" type="Const" version="opset1">
29184 <data element_type="f16" offset="478152" shape="1,64,1,1" size="128"/>
29185 <output>
29186 <port id="0" precision="FP16">
29187 <dim>1</dim>
29188 <dim>64</dim>
29189 <dim>1</dim>
29190 <dim>1</dim>
29191 </port>
29192 </output>
29193 </layer>
29194 <layer id="1912" name="bottleneck4_8/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
29195 <data auto_broadcast="numpy"/>
29196 <input>
29197 <port id="0">
29198 <dim>1</dim>
29199 <dim>64</dim>
29200 <dim>20</dim>
29201 <dim>34</dim>
29202 </port>
29203 <port id="1">
29204 <dim>1</dim>
29205 <dim>64</dim>
29206 <dim>1</dim>
29207 <dim>1</dim>
29208 </port>
29209 </input>
29210 <output>
29211 <port id="2" names="bottleneck4_8/inner/dw1/conv" precision="FP16">
29212 <dim>1</dim>
29213 <dim>64</dim>
29214 <dim>20</dim>
29215 <dim>34</dim>
29216 </port>
29217 </output>
29218 </layer>
29219 <layer id="1913" name="bottleneck4_8/inner/dw1/fn/weights30888398541389" type="Const" version="opset1">
29220 <data element_type="f32" offset="1576" shape="1" size="4"/>
29221 <output>
29222 <port id="0" precision="FP32">
29223 <dim>1</dim>
29224 </port>
29225 </output>
29226 </layer>
29227 <layer id="1914" name="bottleneck4_8/inner/dw1/fn" type="PReLU" version="opset1">
29228 <input>
29229 <port id="0">
29230 <dim>1</dim>
29231 <dim>64</dim>
29232 <dim>20</dim>
29233 <dim>34</dim>
29234 </port>
29235 <port id="1">
29236 <dim>1</dim>
29237 </port>
29238 </input>
29239 <output>
29240 <port id="2" names="bottleneck4_8/inner/dw1/conv" precision="FP16">
29241 <dim>1</dim>
29242 <dim>64</dim>
29243 <dim>20</dim>
29244 <dim>34</dim>
29245 </port>
29246 </output>
29247 </layer>
29248 <layer id="1915" name="bottleneck4_8/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
29249 <data auto_broadcast="numpy" levels="256"/>
29250 <input>
29251 <port id="0">
29252 <dim>1</dim>
29253 <dim>64</dim>
29254 <dim>20</dim>
29255 <dim>34</dim>
29256 </port>
29257 <port id="1"/>
29258 <port id="2"/>
29259 <port id="3"/>
29260 <port id="4"/>
29261 </input>
29262 <output>
29263 <port id="5" precision="FP16">
29264 <dim>1</dim>
29265 <dim>64</dim>
29266 <dim>20</dim>
29267 <dim>34</dim>
29268 </port>
29269 </output>
29270 </layer>
29271 <layer id="1916" name="bottleneck4_8/dim_inc/bn/mean/Fused_Mul__copy139110382/quantized1223221762" type="Const" version="opset1">
29272 <data element_type="i8" offset="478280" shape="256,64,1,1" size="16384"/>
29273 <output>
29274 <port id="0" precision="I8">
29275 <dim>256</dim>
29276 <dim>64</dim>
29277 <dim>1</dim>
29278 <dim>1</dim>
29279 </port>
29280 </output>
29281 </layer>
29282 <layer id="1917" name="bottleneck4_8/dim_inc/bn/mean/Fused_Mul__copy139110382/quantized/to_f16" type="Convert" version="opset1">
29283 <data destination_type="f16"/>
29284 <input>
29285 <port id="0">
29286 <dim>256</dim>
29287 <dim>64</dim>
29288 <dim>1</dim>
29289 <dim>1</dim>
29290 </port>
29291 </input>
29292 <output>
29293 <port id="1" precision="FP16">
29294 <dim>256</dim>
29295 <dim>64</dim>
29296 <dim>1</dim>
29297 <dim>1</dim>
29298 </port>
29299 </output>
29300 </layer>
29301 <layer id="1918" name="bottleneck4_8/dim_inc/conv/fq_weights_1/zero_point1224522914" type="Const" version="opset1">
29302 <data element_type="f16" offset="494664" shape="256,1,1,1" size="512"/>
29303 <output>
29304 <port id="0" precision="FP16">
29305 <dim>256</dim>
29306 <dim>1</dim>
29307 <dim>1</dim>
29308 <dim>1</dim>
29309 </port>
29310 </output>
29311 </layer>
29312 <layer id="1919" name="bottleneck4_8/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
29313 <data auto_broadcast="numpy"/>
29314 <input>
29315 <port id="0">
29316 <dim>256</dim>
29317 <dim>64</dim>
29318 <dim>1</dim>
29319 <dim>1</dim>
29320 </port>
29321 <port id="1">
29322 <dim>256</dim>
29323 <dim>1</dim>
29324 <dim>1</dim>
29325 <dim>1</dim>
29326 </port>
29327 </input>
29328 <output>
29329 <port id="2" precision="FP16">
29330 <dim>256</dim>
29331 <dim>64</dim>
29332 <dim>1</dim>
29333 <dim>1</dim>
29334 </port>
29335 </output>
29336 </layer>
29337 <layer id="1920" name="bottleneck4_8/dim_inc/conv/fq_weights_1/scale1224020391" type="Const" version="opset1">
29338 <data element_type="f16" offset="495176" shape="256,1,1,1" size="512"/>
29339 <output>
29340 <port id="0" precision="FP16">
29341 <dim>256</dim>
29342 <dim>1</dim>
29343 <dim>1</dim>
29344 <dim>1</dim>
29345 </port>
29346 </output>
29347 </layer>
29348 <layer id="1921" name="bottleneck4_8/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
29349 <data auto_broadcast="numpy"/>
29350 <input>
29351 <port id="0">
29352 <dim>256</dim>
29353 <dim>64</dim>
29354 <dim>1</dim>
29355 <dim>1</dim>
29356 </port>
29357 <port id="1">
29358 <dim>256</dim>
29359 <dim>1</dim>
29360 <dim>1</dim>
29361 <dim>1</dim>
29362 </port>
29363 </input>
29364 <output>
29365 <port id="2" precision="FP16">
29366 <dim>256</dim>
29367 <dim>64</dim>
29368 <dim>1</dim>
29369 <dim>1</dim>
29370 </port>
29371 </output>
29372 </layer>
29373 <layer id="1922" name="bottleneck4_8/dim_inc/conv" type="Convolution" version="opset1">
29374 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
29375 <input>
29376 <port id="0">
29377 <dim>1</dim>
29378 <dim>64</dim>
29379 <dim>20</dim>
29380 <dim>34</dim>
29381 </port>
29382 <port id="1">
29383 <dim>256</dim>
29384 <dim>64</dim>
29385 <dim>1</dim>
29386 <dim>1</dim>
29387 </port>
29388 </input>
29389 <output>
29390 <port id="2" precision="FP16">
29391 <dim>1</dim>
29392 <dim>256</dim>
29393 <dim>20</dim>
29394 <dim>34</dim>
29395 </port>
29396 </output>
29397 </layer>
29398 <layer id="1923" name="data_add_2446524470139319638" type="Const" version="opset1">
29399 <data element_type="f16" offset="495688" shape="1,256,1,1" size="512"/>
29400 <output>
29401 <port id="0" precision="FP16">
29402 <dim>1</dim>
29403 <dim>256</dim>
29404 <dim>1</dim>
29405 <dim>1</dim>
29406 </port>
29407 </output>
29408 </layer>
29409 <layer id="1924" name="bottleneck4_8/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
29410 <data auto_broadcast="numpy"/>
29411 <input>
29412 <port id="0">
29413 <dim>1</dim>
29414 <dim>256</dim>
29415 <dim>20</dim>
29416 <dim>34</dim>
29417 </port>
29418 <port id="1">
29419 <dim>1</dim>
29420 <dim>256</dim>
29421 <dim>1</dim>
29422 <dim>1</dim>
29423 </port>
29424 </input>
29425 <output>
29426 <port id="2" names="bottleneck4_8/dim_inc/conv" precision="FP16">
29427 <dim>1</dim>
29428 <dim>256</dim>
29429 <dim>20</dim>
29430 <dim>34</dim>
29431 </port>
29432 </output>
29433 </layer>
29434 <layer id="1925" name="bottleneck4_8/add/fq_input_1" type="FakeQuantize" version="opset1">
29435 <data auto_broadcast="numpy" levels="256"/>
29436 <input>
29437 <port id="0">
29438 <dim>1</dim>
29439 <dim>256</dim>
29440 <dim>20</dim>
29441 <dim>34</dim>
29442 </port>
29443 <port id="1"/>
29444 <port id="2"/>
29445 <port id="3"/>
29446 <port id="4"/>
29447 </input>
29448 <output>
29449 <port id="5" precision="FP16">
29450 <dim>1</dim>
29451 <dim>256</dim>
29452 <dim>20</dim>
29453 <dim>34</dim>
29454 </port>
29455 </output>
29456 </layer>
29457 <layer id="1926" name="bottleneck4_8/add" type="Add" version="opset1">
29458 <data auto_broadcast="numpy"/>
29459 <input>
29460 <port id="0">
29461 <dim>1</dim>
29462 <dim>256</dim>
29463 <dim>20</dim>
29464 <dim>34</dim>
29465 </port>
29466 <port id="1">
29467 <dim>1</dim>
29468 <dim>256</dim>
29469 <dim>20</dim>
29470 <dim>34</dim>
29471 </port>
29472 </input>
29473 <output>
29474 <port id="2" names="bottleneck4_8/add" precision="FP16">
29475 <dim>1</dim>
29476 <dim>256</dim>
29477 <dim>20</dim>
29478 <dim>34</dim>
29479 </port>
29480 </output>
29481 </layer>
29482 <layer id="1927" name="bottleneck4_8/fn/weights30920401211396" type="Const" version="opset1">
29483 <data element_type="f32" offset="1576" shape="1" size="4"/>
29484 <output>
29485 <port id="0" precision="FP32">
29486 <dim>1</dim>
29487 </port>
29488 </output>
29489 </layer>
29490 <layer id="1928" name="bottleneck4_8/fn" type="PReLU" version="opset1">
29491 <input>
29492 <port id="0">
29493 <dim>1</dim>
29494 <dim>256</dim>
29495 <dim>20</dim>
29496 <dim>34</dim>
29497 </port>
29498 <port id="1">
29499 <dim>1</dim>
29500 </port>
29501 </input>
29502 <output>
29503 <port id="2" names="bottleneck4_8/add" precision="FP16">
29504 <dim>1</dim>
29505 <dim>256</dim>
29506 <dim>20</dim>
29507 <dim>34</dim>
29508 </port>
29509 </output>
29510 </layer>
29511 <layer id="1929" name="bottleneck4_9/add/fq_input_0" type="FakeQuantize" version="opset1">
29512 <data auto_broadcast="numpy" levels="256"/>
29513 <input>
29514 <port id="0">
29515 <dim>1</dim>
29516 <dim>256</dim>
29517 <dim>20</dim>
29518 <dim>34</dim>
29519 </port>
29520 <port id="1"/>
29521 <port id="2"/>
29522 <port id="3"/>
29523 <port id="4"/>
29524 </input>
29525 <output>
29526 <port id="5" precision="FP16">
29527 <dim>1</dim>
29528 <dim>256</dim>
29529 <dim>20</dim>
29530 <dim>34</dim>
29531 </port>
29532 </output>
29533 </layer>
29534 <layer id="1930" name="2974297822422" type="Const" version="opset1">
29535 <data element_type="f16" offset="496200" shape="" size="2"/>
29536 <output>
29537 <port id="0" precision="FP16"/>
29538 </output>
29539 </layer>
29540 <layer id="1931" name="2975297920355" type="Const" version="opset1">
29541 <data element_type="f16" offset="496202" shape="" size="2"/>
29542 <output>
29543 <port id="0" precision="FP16"/>
29544 </output>
29545 </layer>
29546 <layer id="1932" name="2976298020496" type="Const" version="opset1">
29547 <data element_type="f16" offset="496200" shape="" size="2"/>
29548 <output>
29549 <port id="0" precision="FP16"/>
29550 </output>
29551 </layer>
29552 <layer id="1933" name="2977298120955" type="Const" version="opset1">
29553 <data element_type="f16" offset="496202" shape="" size="2"/>
29554 <output>
29555 <port id="0" precision="FP16"/>
29556 </output>
29557 </layer>
29558 <layer id="1934" name="4904490822641" type="Const" version="opset1">
29559 <data element_type="f16" offset="496204" shape="" size="2"/>
29560 <output>
29561 <port id="0" precision="FP16"/>
29562 </output>
29563 </layer>
29564 <layer id="1935" name="4905490922407" type="Const" version="opset1">
29565 <data element_type="f16" offset="496206" shape="" size="2"/>
29566 <output>
29567 <port id="0" precision="FP16"/>
29568 </output>
29569 </layer>
29570 <layer id="1936" name="4906491019527" type="Const" version="opset1">
29571 <data element_type="f16" offset="496204" shape="" size="2"/>
29572 <output>
29573 <port id="0" precision="FP16"/>
29574 </output>
29575 </layer>
29576 <layer id="1937" name="4907491122473" type="Const" version="opset1">
29577 <data element_type="f16" offset="496206" shape="" size="2"/>
29578 <output>
29579 <port id="0" precision="FP16"/>
29580 </output>
29581 </layer>
29582 <layer id="1938" name="4984498821795" type="Const" version="opset1">
29583 <data element_type="f16" offset="496208" shape="1,64,1,1" size="128"/>
29584 <output>
29585 <port id="0" precision="FP16">
29586 <dim>1</dim>
29587 <dim>64</dim>
29588 <dim>1</dim>
29589 <dim>1</dim>
29590 </port>
29591 </output>
29592 </layer>
29593 <layer id="1939" name="4985498922278" type="Const" version="opset1">
29594 <data element_type="f16" offset="496336" shape="1,64,1,1" size="128"/>
29595 <output>
29596 <port id="0" precision="FP16">
29597 <dim>1</dim>
29598 <dim>64</dim>
29599 <dim>1</dim>
29600 <dim>1</dim>
29601 </port>
29602 </output>
29603 </layer>
29604 <layer id="1940" name="4986499021240" type="Const" version="opset1">
29605 <data element_type="f16" offset="496208" shape="1,64,1,1" size="128"/>
29606 <output>
29607 <port id="0" precision="FP16">
29608 <dim>1</dim>
29609 <dim>64</dim>
29610 <dim>1</dim>
29611 <dim>1</dim>
29612 </port>
29613 </output>
29614 </layer>
29615 <layer id="1941" name="4987499119680" type="Const" version="opset1">
29616 <data element_type="f16" offset="496336" shape="1,64,1,1" size="128"/>
29617 <output>
29618 <port id="0" precision="FP16">
29619 <dim>1</dim>
29620 <dim>64</dim>
29621 <dim>1</dim>
29622 <dim>1</dim>
29623 </port>
29624 </output>
29625 </layer>
29626 <layer id="1942" name="bottleneck4_9/dim_red/bn/mean/Fused_Mul__copy139810385/quantized1216020244" type="Const" version="opset1">
29627 <data element_type="i8" offset="496464" shape="64,256,1,1" size="16384"/>
29628 <output>
29629 <port id="0" precision="I8">
29630 <dim>64</dim>
29631 <dim>256</dim>
29632 <dim>1</dim>
29633 <dim>1</dim>
29634 </port>
29635 </output>
29636 </layer>
29637 <layer id="1943" name="bottleneck4_9/dim_red/bn/mean/Fused_Mul__copy139810385/quantized/to_f16" type="Convert" version="opset1">
29638 <data destination_type="f16"/>
29639 <input>
29640 <port id="0">
29641 <dim>64</dim>
29642 <dim>256</dim>
29643 <dim>1</dim>
29644 <dim>1</dim>
29645 </port>
29646 </input>
29647 <output>
29648 <port id="1" precision="FP16">
29649 <dim>64</dim>
29650 <dim>256</dim>
29651 <dim>1</dim>
29652 <dim>1</dim>
29653 </port>
29654 </output>
29655 </layer>
29656 <layer id="1944" name="bottleneck4_9/dim_red/conv/fq_weights_1/zero_point1217319644" type="Const" version="opset1">
29657 <data element_type="f16" offset="512848" shape="64,1,1,1" size="128"/>
29658 <output>
29659 <port id="0" precision="FP16">
29660 <dim>64</dim>
29661 <dim>1</dim>
29662 <dim>1</dim>
29663 <dim>1</dim>
29664 </port>
29665 </output>
29666 </layer>
29667 <layer id="1945" name="bottleneck4_9/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
29668 <data auto_broadcast="numpy"/>
29669 <input>
29670 <port id="0">
29671 <dim>64</dim>
29672 <dim>256</dim>
29673 <dim>1</dim>
29674 <dim>1</dim>
29675 </port>
29676 <port id="1">
29677 <dim>64</dim>
29678 <dim>1</dim>
29679 <dim>1</dim>
29680 <dim>1</dim>
29681 </port>
29682 </input>
29683 <output>
29684 <port id="2" precision="FP16">
29685 <dim>64</dim>
29686 <dim>256</dim>
29687 <dim>1</dim>
29688 <dim>1</dim>
29689 </port>
29690 </output>
29691 </layer>
29692 <layer id="1946" name="bottleneck4_9/dim_red/conv/fq_weights_1/scale1216822608" type="Const" version="opset1">
29693 <data element_type="f16" offset="512976" shape="64,1,1,1" size="128"/>
29694 <output>
29695 <port id="0" precision="FP16">
29696 <dim>64</dim>
29697 <dim>1</dim>
29698 <dim>1</dim>
29699 <dim>1</dim>
29700 </port>
29701 </output>
29702 </layer>
29703 <layer id="1947" name="bottleneck4_9/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
29704 <data auto_broadcast="numpy"/>
29705 <input>
29706 <port id="0">
29707 <dim>64</dim>
29708 <dim>256</dim>
29709 <dim>1</dim>
29710 <dim>1</dim>
29711 </port>
29712 <port id="1">
29713 <dim>64</dim>
29714 <dim>1</dim>
29715 <dim>1</dim>
29716 <dim>1</dim>
29717 </port>
29718 </input>
29719 <output>
29720 <port id="2" precision="FP16">
29721 <dim>64</dim>
29722 <dim>256</dim>
29723 <dim>1</dim>
29724 <dim>1</dim>
29725 </port>
29726 </output>
29727 </layer>
29728 <layer id="1948" name="bottleneck4_9/dim_red/conv" type="Convolution" version="opset1">
29729 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
29730 <input>
29731 <port id="0">
29732 <dim>1</dim>
29733 <dim>256</dim>
29734 <dim>20</dim>
29735 <dim>34</dim>
29736 </port>
29737 <port id="1">
29738 <dim>64</dim>
29739 <dim>256</dim>
29740 <dim>1</dim>
29741 <dim>1</dim>
29742 </port>
29743 </input>
29744 <output>
29745 <port id="2" precision="FP16">
29746 <dim>1</dim>
29747 <dim>64</dim>
29748 <dim>20</dim>
29749 <dim>34</dim>
29750 </port>
29751 </output>
29752 </layer>
29753 <layer id="1949" name="data_add_2447324478140021315" type="Const" version="opset1">
29754 <data element_type="f16" offset="513104" shape="1,64,1,1" size="128"/>
29755 <output>
29756 <port id="0" precision="FP16">
29757 <dim>1</dim>
29758 <dim>64</dim>
29759 <dim>1</dim>
29760 <dim>1</dim>
29761 </port>
29762 </output>
29763 </layer>
29764 <layer id="1950" name="bottleneck4_9/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
29765 <data auto_broadcast="numpy"/>
29766 <input>
29767 <port id="0">
29768 <dim>1</dim>
29769 <dim>64</dim>
29770 <dim>20</dim>
29771 <dim>34</dim>
29772 </port>
29773 <port id="1">
29774 <dim>1</dim>
29775 <dim>64</dim>
29776 <dim>1</dim>
29777 <dim>1</dim>
29778 </port>
29779 </input>
29780 <output>
29781 <port id="2" names="bottleneck4_9/dim_red/conv" precision="FP16">
29782 <dim>1</dim>
29783 <dim>64</dim>
29784 <dim>20</dim>
29785 <dim>34</dim>
29786 </port>
29787 </output>
29788 </layer>
29789 <layer id="1951" name="bottleneck4_9/dim_red/fn/weights30936397281402" type="Const" version="opset1">
29790 <data element_type="f32" offset="1576" shape="1" size="4"/>
29791 <output>
29792 <port id="0" precision="FP32">
29793 <dim>1</dim>
29794 </port>
29795 </output>
29796 </layer>
29797 <layer id="1952" name="bottleneck4_9/dim_red/fn" type="PReLU" version="opset1">
29798 <input>
29799 <port id="0">
29800 <dim>1</dim>
29801 <dim>64</dim>
29802 <dim>20</dim>
29803 <dim>34</dim>
29804 </port>
29805 <port id="1">
29806 <dim>1</dim>
29807 </port>
29808 </input>
29809 <output>
29810 <port id="2" names="bottleneck4_9/dim_red/conv" precision="FP16">
29811 <dim>1</dim>
29812 <dim>64</dim>
29813 <dim>20</dim>
29814 <dim>34</dim>
29815 </port>
29816 </output>
29817 </layer>
29818 <layer id="1953" name="bottleneck4_9/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
29819 <data auto_broadcast="numpy" levels="256"/>
29820 <input>
29821 <port id="0">
29822 <dim>1</dim>
29823 <dim>64</dim>
29824 <dim>20</dim>
29825 <dim>34</dim>
29826 </port>
29827 <port id="1">
29828 <dim>1</dim>
29829 <dim>64</dim>
29830 <dim>1</dim>
29831 <dim>1</dim>
29832 </port>
29833 <port id="2">
29834 <dim>1</dim>
29835 <dim>64</dim>
29836 <dim>1</dim>
29837 <dim>1</dim>
29838 </port>
29839 <port id="3">
29840 <dim>1</dim>
29841 <dim>64</dim>
29842 <dim>1</dim>
29843 <dim>1</dim>
29844 </port>
29845 <port id="4">
29846 <dim>1</dim>
29847 <dim>64</dim>
29848 <dim>1</dim>
29849 <dim>1</dim>
29850 </port>
29851 </input>
29852 <output>
29853 <port id="5" precision="FP16">
29854 <dim>1</dim>
29855 <dim>64</dim>
29856 <dim>20</dim>
29857 <dim>34</dim>
29858 </port>
29859 </output>
29860 </layer>
29861 <layer id="1954" name="16915/value1691719677" type="Const" version="opset1">
29862 <data element_type="i64" offset="189984" shape="5" size="40"/>
29863 <output>
29864 <port id="0" precision="I64">
29865 <dim>5</dim>
29866 </port>
29867 </output>
29868 </layer>
29869 <layer id="1955" name="bottleneck4_9/inner/dw1/bn/mean/Fused_Mul__copy140410388/quantized1228020043" type="Const" version="opset1">
29870 <data element_type="i8" offset="513232" shape="64,1,3,3" size="576"/>
29871 <output>
29872 <port id="0" precision="I8">
29873 <dim>64</dim>
29874 <dim>1</dim>
29875 <dim>3</dim>
29876 <dim>3</dim>
29877 </port>
29878 </output>
29879 </layer>
29880 <layer id="1956" name="bottleneck4_9/inner/dw1/bn/mean/Fused_Mul__copy140410388/quantized/to_f16" type="Convert" version="opset1">
29881 <data destination_type="f16"/>
29882 <input>
29883 <port id="0">
29884 <dim>64</dim>
29885 <dim>1</dim>
29886 <dim>3</dim>
29887 <dim>3</dim>
29888 </port>
29889 </input>
29890 <output>
29891 <port id="1" precision="FP16">
29892 <dim>64</dim>
29893 <dim>1</dim>
29894 <dim>3</dim>
29895 <dim>3</dim>
29896 </port>
29897 </output>
29898 </layer>
29899 <layer id="1957" name="bottleneck4_9/inner/dw1/conv/fq_weights_1/zero_point1229319707" type="Const" version="opset1">
29900 <data element_type="f16" offset="513808" shape="64,1,1,1" size="128"/>
29901 <output>
29902 <port id="0" precision="FP16">
29903 <dim>64</dim>
29904 <dim>1</dim>
29905 <dim>1</dim>
29906 <dim>1</dim>
29907 </port>
29908 </output>
29909 </layer>
29910 <layer id="1958" name="bottleneck4_9/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
29911 <data auto_broadcast="numpy"/>
29912 <input>
29913 <port id="0">
29914 <dim>64</dim>
29915 <dim>1</dim>
29916 <dim>3</dim>
29917 <dim>3</dim>
29918 </port>
29919 <port id="1">
29920 <dim>64</dim>
29921 <dim>1</dim>
29922 <dim>1</dim>
29923 <dim>1</dim>
29924 </port>
29925 </input>
29926 <output>
29927 <port id="2" precision="FP16">
29928 <dim>64</dim>
29929 <dim>1</dim>
29930 <dim>3</dim>
29931 <dim>3</dim>
29932 </port>
29933 </output>
29934 </layer>
29935 <layer id="1959" name="bottleneck4_9/inner/dw1/conv/fq_weights_1/scale1228820058" type="Const" version="opset1">
29936 <data element_type="f16" offset="513936" shape="64,1,1,1" size="128"/>
29937 <output>
29938 <port id="0" precision="FP16">
29939 <dim>64</dim>
29940 <dim>1</dim>
29941 <dim>1</dim>
29942 <dim>1</dim>
29943 </port>
29944 </output>
29945 </layer>
29946 <layer id="1960" name="bottleneck4_9/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
29947 <data auto_broadcast="numpy"/>
29948 <input>
29949 <port id="0">
29950 <dim>64</dim>
29951 <dim>1</dim>
29952 <dim>3</dim>
29953 <dim>3</dim>
29954 </port>
29955 <port id="1">
29956 <dim>64</dim>
29957 <dim>1</dim>
29958 <dim>1</dim>
29959 <dim>1</dim>
29960 </port>
29961 </input>
29962 <output>
29963 <port id="2" precision="FP16">
29964 <dim>64</dim>
29965 <dim>1</dim>
29966 <dim>3</dim>
29967 <dim>3</dim>
29968 </port>
29969 </output>
29970 </layer>
29971 <layer id="1961" name="16915" type="Reshape" version="opset1">
29972 <data special_zero="true"/>
29973 <input>
29974 <port id="0">
29975 <dim>64</dim>
29976 <dim>1</dim>
29977 <dim>3</dim>
29978 <dim>3</dim>
29979 </port>
29980 <port id="1">
29981 <dim>5</dim>
29982 </port>
29983 </input>
29984 <output>
29985 <port id="2" precision="FP16">
29986 <dim>64</dim>
29987 <dim>1</dim>
29988 <dim>1</dim>
29989 <dim>3</dim>
29990 <dim>3</dim>
29991 </port>
29992 </output>
29993 </layer>
29994 <layer id="1962" name="bottleneck4_9/inner/dw1/conv" type="GroupConvolution" version="opset1">
29995 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
29996 <input>
29997 <port id="0">
29998 <dim>1</dim>
29999 <dim>64</dim>
30000 <dim>20</dim>
30001 <dim>34</dim>
30002 </port>
30003 <port id="1">
30004 <dim>64</dim>
30005 <dim>1</dim>
30006 <dim>1</dim>
30007 <dim>3</dim>
30008 <dim>3</dim>
30009 </port>
30010 </input>
30011 <output>
30012 <port id="2" precision="FP16">
30013 <dim>1</dim>
30014 <dim>64</dim>
30015 <dim>20</dim>
30016 <dim>34</dim>
30017 </port>
30018 </output>
30019 </layer>
30020 <layer id="1963" name="data_add_2448124486140620325" type="Const" version="opset1">
30021 <data element_type="f16" offset="514064" shape="1,64,1,1" size="128"/>
30022 <output>
30023 <port id="0" precision="FP16">
30024 <dim>1</dim>
30025 <dim>64</dim>
30026 <dim>1</dim>
30027 <dim>1</dim>
30028 </port>
30029 </output>
30030 </layer>
30031 <layer id="1964" name="bottleneck4_9/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
30032 <data auto_broadcast="numpy"/>
30033 <input>
30034 <port id="0">
30035 <dim>1</dim>
30036 <dim>64</dim>
30037 <dim>20</dim>
30038 <dim>34</dim>
30039 </port>
30040 <port id="1">
30041 <dim>1</dim>
30042 <dim>64</dim>
30043 <dim>1</dim>
30044 <dim>1</dim>
30045 </port>
30046 </input>
30047 <output>
30048 <port id="2" names="bottleneck4_9/inner/dw1/conv" precision="FP16">
30049 <dim>1</dim>
30050 <dim>64</dim>
30051 <dim>20</dim>
30052 <dim>34</dim>
30053 </port>
30054 </output>
30055 </layer>
30056 <layer id="1965" name="bottleneck4_9/inner/dw1/fn/weights31000398631408" type="Const" version="opset1">
30057 <data element_type="f32" offset="1576" shape="1" size="4"/>
30058 <output>
30059 <port id="0" precision="FP32">
30060 <dim>1</dim>
30061 </port>
30062 </output>
30063 </layer>
30064 <layer id="1966" name="bottleneck4_9/inner/dw1/fn" type="PReLU" version="opset1">
30065 <input>
30066 <port id="0">
30067 <dim>1</dim>
30068 <dim>64</dim>
30069 <dim>20</dim>
30070 <dim>34</dim>
30071 </port>
30072 <port id="1">
30073 <dim>1</dim>
30074 </port>
30075 </input>
30076 <output>
30077 <port id="2" names="bottleneck4_9/inner/dw1/conv" precision="FP16">
30078 <dim>1</dim>
30079 <dim>64</dim>
30080 <dim>20</dim>
30081 <dim>34</dim>
30082 </port>
30083 </output>
30084 </layer>
30085 <layer id="1967" name="bottleneck4_9/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
30086 <data auto_broadcast="numpy" levels="256"/>
30087 <input>
30088 <port id="0">
30089 <dim>1</dim>
30090 <dim>64</dim>
30091 <dim>20</dim>
30092 <dim>34</dim>
30093 </port>
30094 <port id="1"/>
30095 <port id="2"/>
30096 <port id="3"/>
30097 <port id="4"/>
30098 </input>
30099 <output>
30100 <port id="5" precision="FP16">
30101 <dim>1</dim>
30102 <dim>64</dim>
30103 <dim>20</dim>
30104 <dim>34</dim>
30105 </port>
30106 </output>
30107 </layer>
30108 <layer id="1968" name="bottleneck4_9/dim_inc/bn/mean/Fused_Mul__copy141010391/quantized1304820226" type="Const" version="opset1">
30109 <data element_type="i8" offset="514192" shape="256,64,1,1" size="16384"/>
30110 <output>
30111 <port id="0" precision="I8">
30112 <dim>256</dim>
30113 <dim>64</dim>
30114 <dim>1</dim>
30115 <dim>1</dim>
30116 </port>
30117 </output>
30118 </layer>
30119 <layer id="1969" name="bottleneck4_9/dim_inc/bn/mean/Fused_Mul__copy141010391/quantized/to_f16" type="Convert" version="opset1">
30120 <data destination_type="f16"/>
30121 <input>
30122 <port id="0">
30123 <dim>256</dim>
30124 <dim>64</dim>
30125 <dim>1</dim>
30126 <dim>1</dim>
30127 </port>
30128 </input>
30129 <output>
30130 <port id="1" precision="FP16">
30131 <dim>256</dim>
30132 <dim>64</dim>
30133 <dim>1</dim>
30134 <dim>1</dim>
30135 </port>
30136 </output>
30137 </layer>
30138 <layer id="1970" name="bottleneck4_9/dim_inc/conv/fq_weights_1/zero_point1306120388" type="Const" version="opset1">
30139 <data element_type="f16" offset="530576" shape="256,1,1,1" size="512"/>
30140 <output>
30141 <port id="0" precision="FP16">
30142 <dim>256</dim>
30143 <dim>1</dim>
30144 <dim>1</dim>
30145 <dim>1</dim>
30146 </port>
30147 </output>
30148 </layer>
30149 <layer id="1971" name="bottleneck4_9/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
30150 <data auto_broadcast="numpy"/>
30151 <input>
30152 <port id="0">
30153 <dim>256</dim>
30154 <dim>64</dim>
30155 <dim>1</dim>
30156 <dim>1</dim>
30157 </port>
30158 <port id="1">
30159 <dim>256</dim>
30160 <dim>1</dim>
30161 <dim>1</dim>
30162 <dim>1</dim>
30163 </port>
30164 </input>
30165 <output>
30166 <port id="2" precision="FP16">
30167 <dim>256</dim>
30168 <dim>64</dim>
30169 <dim>1</dim>
30170 <dim>1</dim>
30171 </port>
30172 </output>
30173 </layer>
30174 <layer id="1972" name="bottleneck4_9/dim_inc/conv/fq_weights_1/scale1305622743" type="Const" version="opset1">
30175 <data element_type="f16" offset="531088" shape="256,1,1,1" size="512"/>
30176 <output>
30177 <port id="0" precision="FP16">
30178 <dim>256</dim>
30179 <dim>1</dim>
30180 <dim>1</dim>
30181 <dim>1</dim>
30182 </port>
30183 </output>
30184 </layer>
30185 <layer id="1973" name="bottleneck4_9/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
30186 <data auto_broadcast="numpy"/>
30187 <input>
30188 <port id="0">
30189 <dim>256</dim>
30190 <dim>64</dim>
30191 <dim>1</dim>
30192 <dim>1</dim>
30193 </port>
30194 <port id="1">
30195 <dim>256</dim>
30196 <dim>1</dim>
30197 <dim>1</dim>
30198 <dim>1</dim>
30199 </port>
30200 </input>
30201 <output>
30202 <port id="2" precision="FP16">
30203 <dim>256</dim>
30204 <dim>64</dim>
30205 <dim>1</dim>
30206 <dim>1</dim>
30207 </port>
30208 </output>
30209 </layer>
30210 <layer id="1974" name="bottleneck4_9/dim_inc/conv" type="Convolution" version="opset1">
30211 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
30212 <input>
30213 <port id="0">
30214 <dim>1</dim>
30215 <dim>64</dim>
30216 <dim>20</dim>
30217 <dim>34</dim>
30218 </port>
30219 <port id="1">
30220 <dim>256</dim>
30221 <dim>64</dim>
30222 <dim>1</dim>
30223 <dim>1</dim>
30224 </port>
30225 </input>
30226 <output>
30227 <port id="2" precision="FP16">
30228 <dim>1</dim>
30229 <dim>256</dim>
30230 <dim>20</dim>
30231 <dim>34</dim>
30232 </port>
30233 </output>
30234 </layer>
30235 <layer id="1975" name="data_add_2448924494141220499" type="Const" version="opset1">
30236 <data element_type="f16" offset="531600" shape="1,256,1,1" size="512"/>
30237 <output>
30238 <port id="0" precision="FP16">
30239 <dim>1</dim>
30240 <dim>256</dim>
30241 <dim>1</dim>
30242 <dim>1</dim>
30243 </port>
30244 </output>
30245 </layer>
30246 <layer id="1976" name="bottleneck4_9/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
30247 <data auto_broadcast="numpy"/>
30248 <input>
30249 <port id="0">
30250 <dim>1</dim>
30251 <dim>256</dim>
30252 <dim>20</dim>
30253 <dim>34</dim>
30254 </port>
30255 <port id="1">
30256 <dim>1</dim>
30257 <dim>256</dim>
30258 <dim>1</dim>
30259 <dim>1</dim>
30260 </port>
30261 </input>
30262 <output>
30263 <port id="2" names="bottleneck4_9/dim_inc/conv" precision="FP16">
30264 <dim>1</dim>
30265 <dim>256</dim>
30266 <dim>20</dim>
30267 <dim>34</dim>
30268 </port>
30269 </output>
30270 </layer>
30271 <layer id="1977" name="bottleneck4_9/add/fq_input_1" type="FakeQuantize" version="opset1">
30272 <data auto_broadcast="numpy" levels="256"/>
30273 <input>
30274 <port id="0">
30275 <dim>1</dim>
30276 <dim>256</dim>
30277 <dim>20</dim>
30278 <dim>34</dim>
30279 </port>
30280 <port id="1"/>
30281 <port id="2"/>
30282 <port id="3"/>
30283 <port id="4"/>
30284 </input>
30285 <output>
30286 <port id="5" precision="FP16">
30287 <dim>1</dim>
30288 <dim>256</dim>
30289 <dim>20</dim>
30290 <dim>34</dim>
30291 </port>
30292 </output>
30293 </layer>
30294 <layer id="1978" name="bottleneck4_9/add" type="Add" version="opset1">
30295 <data auto_broadcast="numpy"/>
30296 <input>
30297 <port id="0">
30298 <dim>1</dim>
30299 <dim>256</dim>
30300 <dim>20</dim>
30301 <dim>34</dim>
30302 </port>
30303 <port id="1">
30304 <dim>1</dim>
30305 <dim>256</dim>
30306 <dim>20</dim>
30307 <dim>34</dim>
30308 </port>
30309 </input>
30310 <output>
30311 <port id="2" names="bottleneck4_9/add" precision="FP16">
30312 <dim>1</dim>
30313 <dim>256</dim>
30314 <dim>20</dim>
30315 <dim>34</dim>
30316 </port>
30317 </output>
30318 </layer>
30319 <layer id="1979" name="bottleneck4_9/fn/weights30924401331415" type="Const" version="opset1">
30320 <data element_type="f32" offset="1576" shape="1" size="4"/>
30321 <output>
30322 <port id="0" precision="FP32">
30323 <dim>1</dim>
30324 </port>
30325 </output>
30326 </layer>
30327 <layer id="1980" name="bottleneck4_9/fn" type="PReLU" version="opset1">
30328 <input>
30329 <port id="0">
30330 <dim>1</dim>
30331 <dim>256</dim>
30332 <dim>20</dim>
30333 <dim>34</dim>
30334 </port>
30335 <port id="1">
30336 <dim>1</dim>
30337 </port>
30338 </input>
30339 <output>
30340 <port id="2" names="bottleneck4_9/add" precision="FP16">
30341 <dim>1</dim>
30342 <dim>256</dim>
30343 <dim>20</dim>
30344 <dim>34</dim>
30345 </port>
30346 </output>
30347 </layer>
30348 <layer id="1981" name="bottleneck4_10/add/fq_input_0" type="FakeQuantize" version="opset1">
30349 <data auto_broadcast="numpy" levels="256"/>
30350 <input>
30351 <port id="0">
30352 <dim>1</dim>
30353 <dim>256</dim>
30354 <dim>20</dim>
30355 <dim>34</dim>
30356 </port>
30357 <port id="1"/>
30358 <port id="2"/>
30359 <port id="3"/>
30360 <port id="4"/>
30361 </input>
30362 <output>
30363 <port id="5" precision="FP16">
30364 <dim>1</dim>
30365 <dim>256</dim>
30366 <dim>20</dim>
30367 <dim>34</dim>
30368 </port>
30369 </output>
30370 </layer>
30371 <layer id="1982" name="5454545819371" type="Const" version="opset1">
30372 <data element_type="f16" offset="532112" shape="" size="2"/>
30373 <output>
30374 <port id="0" precision="FP16"/>
30375 </output>
30376 </layer>
30377 <layer id="1983" name="5455545922059" type="Const" version="opset1">
30378 <data element_type="f16" offset="532114" shape="" size="2"/>
30379 <output>
30380 <port id="0" precision="FP16"/>
30381 </output>
30382 </layer>
30383 <layer id="1984" name="5456546021150" type="Const" version="opset1">
30384 <data element_type="f16" offset="532112" shape="" size="2"/>
30385 <output>
30386 <port id="0" precision="FP16"/>
30387 </output>
30388 </layer>
30389 <layer id="1985" name="5457546121081" type="Const" version="opset1">
30390 <data element_type="f16" offset="532114" shape="" size="2"/>
30391 <output>
30392 <port id="0" precision="FP16"/>
30393 </output>
30394 </layer>
30395 <layer id="1986" name="5124512821459" type="Const" version="opset1">
30396 <data element_type="f16" offset="532116" shape="" size="2"/>
30397 <output>
30398 <port id="0" precision="FP16"/>
30399 </output>
30400 </layer>
30401 <layer id="1987" name="5125512921867" type="Const" version="opset1">
30402 <data element_type="f16" offset="532118" shape="" size="2"/>
30403 <output>
30404 <port id="0" precision="FP16"/>
30405 </output>
30406 </layer>
30407 <layer id="1988" name="5126513020775" type="Const" version="opset1">
30408 <data element_type="f16" offset="532116" shape="" size="2"/>
30409 <output>
30410 <port id="0" precision="FP16"/>
30411 </output>
30412 </layer>
30413 <layer id="1989" name="5127513120931" type="Const" version="opset1">
30414 <data element_type="f16" offset="532118" shape="" size="2"/>
30415 <output>
30416 <port id="0" precision="FP16"/>
30417 </output>
30418 </layer>
30419 <layer id="1990" name="3484348822092" type="Const" version="opset1">
30420 <data element_type="f16" offset="532120" shape="1,64,1,1" size="128"/>
30421 <output>
30422 <port id="0" precision="FP16">
30423 <dim>1</dim>
30424 <dim>64</dim>
30425 <dim>1</dim>
30426 <dim>1</dim>
30427 </port>
30428 </output>
30429 </layer>
30430 <layer id="1991" name="3485348921060" type="Const" version="opset1">
30431 <data element_type="f16" offset="532248" shape="1,64,1,1" size="128"/>
30432 <output>
30433 <port id="0" precision="FP16">
30434 <dim>1</dim>
30435 <dim>64</dim>
30436 <dim>1</dim>
30437 <dim>1</dim>
30438 </port>
30439 </output>
30440 </layer>
30441 <layer id="1992" name="3486349022581" type="Const" version="opset1">
30442 <data element_type="f16" offset="532120" shape="1,64,1,1" size="128"/>
30443 <output>
30444 <port id="0" precision="FP16">
30445 <dim>1</dim>
30446 <dim>64</dim>
30447 <dim>1</dim>
30448 <dim>1</dim>
30449 </port>
30450 </output>
30451 </layer>
30452 <layer id="1993" name="3487349122227" type="Const" version="opset1">
30453 <data element_type="f16" offset="532248" shape="1,64,1,1" size="128"/>
30454 <output>
30455 <port id="0" precision="FP16">
30456 <dim>1</dim>
30457 <dim>64</dim>
30458 <dim>1</dim>
30459 <dim>1</dim>
30460 </port>
30461 </output>
30462 </layer>
30463 <layer id="1994" name="bottleneck4_10/dim_red/bn/mean/Fused_Mul__copy141710394/quantized1187219908" type="Const" version="opset1">
30464 <data element_type="i8" offset="532376" shape="64,256,1,1" size="16384"/>
30465 <output>
30466 <port id="0" precision="I8">
30467 <dim>64</dim>
30468 <dim>256</dim>
30469 <dim>1</dim>
30470 <dim>1</dim>
30471 </port>
30472 </output>
30473 </layer>
30474 <layer id="1995" name="bottleneck4_10/dim_red/bn/mean/Fused_Mul__copy141710394/quantized/to_f16" type="Convert" version="opset1">
30475 <data destination_type="f16"/>
30476 <input>
30477 <port id="0">
30478 <dim>64</dim>
30479 <dim>256</dim>
30480 <dim>1</dim>
30481 <dim>1</dim>
30482 </port>
30483 </input>
30484 <output>
30485 <port id="1" precision="FP16">
30486 <dim>64</dim>
30487 <dim>256</dim>
30488 <dim>1</dim>
30489 <dim>1</dim>
30490 </port>
30491 </output>
30492 </layer>
30493 <layer id="1996" name="bottleneck4_10/dim_red/conv/fq_weights_1/zero_point1188522767" type="Const" version="opset1">
30494 <data element_type="f16" offset="548760" shape="64,1,1,1" size="128"/>
30495 <output>
30496 <port id="0" precision="FP16">
30497 <dim>64</dim>
30498 <dim>1</dim>
30499 <dim>1</dim>
30500 <dim>1</dim>
30501 </port>
30502 </output>
30503 </layer>
30504 <layer id="1997" name="bottleneck4_10/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
30505 <data auto_broadcast="numpy"/>
30506 <input>
30507 <port id="0">
30508 <dim>64</dim>
30509 <dim>256</dim>
30510 <dim>1</dim>
30511 <dim>1</dim>
30512 </port>
30513 <port id="1">
30514 <dim>64</dim>
30515 <dim>1</dim>
30516 <dim>1</dim>
30517 <dim>1</dim>
30518 </port>
30519 </input>
30520 <output>
30521 <port id="2" precision="FP16">
30522 <dim>64</dim>
30523 <dim>256</dim>
30524 <dim>1</dim>
30525 <dim>1</dim>
30526 </port>
30527 </output>
30528 </layer>
30529 <layer id="1998" name="bottleneck4_10/dim_red/conv/fq_weights_1/scale1188021270" type="Const" version="opset1">
30530 <data element_type="f16" offset="548888" shape="64,1,1,1" size="128"/>
30531 <output>
30532 <port id="0" precision="FP16">
30533 <dim>64</dim>
30534 <dim>1</dim>
30535 <dim>1</dim>
30536 <dim>1</dim>
30537 </port>
30538 </output>
30539 </layer>
30540 <layer id="1999" name="bottleneck4_10/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
30541 <data auto_broadcast="numpy"/>
30542 <input>
30543 <port id="0">
30544 <dim>64</dim>
30545 <dim>256</dim>
30546 <dim>1</dim>
30547 <dim>1</dim>
30548 </port>
30549 <port id="1">
30550 <dim>64</dim>
30551 <dim>1</dim>
30552 <dim>1</dim>
30553 <dim>1</dim>
30554 </port>
30555 </input>
30556 <output>
30557 <port id="2" precision="FP16">
30558 <dim>64</dim>
30559 <dim>256</dim>
30560 <dim>1</dim>
30561 <dim>1</dim>
30562 </port>
30563 </output>
30564 </layer>
30565 <layer id="2000" name="bottleneck4_10/dim_red/conv" type="Convolution" version="opset1">
30566 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
30567 <input>
30568 <port id="0">
30569 <dim>1</dim>
30570 <dim>256</dim>
30571 <dim>20</dim>
30572 <dim>34</dim>
30573 </port>
30574 <port id="1">
30575 <dim>64</dim>
30576 <dim>256</dim>
30577 <dim>1</dim>
30578 <dim>1</dim>
30579 </port>
30580 </input>
30581 <output>
30582 <port id="2" precision="FP16">
30583 <dim>1</dim>
30584 <dim>64</dim>
30585 <dim>20</dim>
30586 <dim>34</dim>
30587 </port>
30588 </output>
30589 </layer>
30590 <layer id="2001" name="data_add_2449724502141920214" type="Const" version="opset1">
30591 <data element_type="f16" offset="549016" shape="1,64,1,1" size="128"/>
30592 <output>
30593 <port id="0" precision="FP16">
30594 <dim>1</dim>
30595 <dim>64</dim>
30596 <dim>1</dim>
30597 <dim>1</dim>
30598 </port>
30599 </output>
30600 </layer>
30601 <layer id="2002" name="bottleneck4_10/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
30602 <data auto_broadcast="numpy"/>
30603 <input>
30604 <port id="0">
30605 <dim>1</dim>
30606 <dim>64</dim>
30607 <dim>20</dim>
30608 <dim>34</dim>
30609 </port>
30610 <port id="1">
30611 <dim>1</dim>
30612 <dim>64</dim>
30613 <dim>1</dim>
30614 <dim>1</dim>
30615 </port>
30616 </input>
30617 <output>
30618 <port id="2" names="bottleneck4_10/dim_red/conv" precision="FP16">
30619 <dim>1</dim>
30620 <dim>64</dim>
30621 <dim>20</dim>
30622 <dim>34</dim>
30623 </port>
30624 </output>
30625 </layer>
30626 <layer id="2003" name="bottleneck4_10/dim_red/fn/weights31168400491421" type="Const" version="opset1">
30627 <data element_type="f32" offset="1576" shape="1" size="4"/>
30628 <output>
30629 <port id="0" precision="FP32">
30630 <dim>1</dim>
30631 </port>
30632 </output>
30633 </layer>
30634 <layer id="2004" name="bottleneck4_10/dim_red/fn" type="PReLU" version="opset1">
30635 <input>
30636 <port id="0">
30637 <dim>1</dim>
30638 <dim>64</dim>
30639 <dim>20</dim>
30640 <dim>34</dim>
30641 </port>
30642 <port id="1">
30643 <dim>1</dim>
30644 </port>
30645 </input>
30646 <output>
30647 <port id="2" names="bottleneck4_10/dim_red/conv" precision="FP16">
30648 <dim>1</dim>
30649 <dim>64</dim>
30650 <dim>20</dim>
30651 <dim>34</dim>
30652 </port>
30653 </output>
30654 </layer>
30655 <layer id="2005" name="bottleneck4_10/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
30656 <data auto_broadcast="numpy" levels="256"/>
30657 <input>
30658 <port id="0">
30659 <dim>1</dim>
30660 <dim>64</dim>
30661 <dim>20</dim>
30662 <dim>34</dim>
30663 </port>
30664 <port id="1">
30665 <dim>1</dim>
30666 <dim>64</dim>
30667 <dim>1</dim>
30668 <dim>1</dim>
30669 </port>
30670 <port id="2">
30671 <dim>1</dim>
30672 <dim>64</dim>
30673 <dim>1</dim>
30674 <dim>1</dim>
30675 </port>
30676 <port id="3">
30677 <dim>1</dim>
30678 <dim>64</dim>
30679 <dim>1</dim>
30680 <dim>1</dim>
30681 </port>
30682 <port id="4">
30683 <dim>1</dim>
30684 <dim>64</dim>
30685 <dim>1</dim>
30686 <dim>1</dim>
30687 </port>
30688 </input>
30689 <output>
30690 <port id="5" precision="FP16">
30691 <dim>1</dim>
30692 <dim>64</dim>
30693 <dim>20</dim>
30694 <dim>34</dim>
30695 </port>
30696 </output>
30697 </layer>
30698 <layer id="2006" name="16839/value1684122488" type="Const" version="opset1">
30699 <data element_type="i64" offset="189984" shape="5" size="40"/>
30700 <output>
30701 <port id="0" precision="I64">
30702 <dim>5</dim>
30703 </port>
30704 </output>
30705 </layer>
30706 <layer id="2007" name="bottleneck4_10/inner/dw1/bn/mean/Fused_Mul__copy142310397/quantized1357622866" type="Const" version="opset1">
30707 <data element_type="i8" offset="549144" shape="64,1,3,3" size="576"/>
30708 <output>
30709 <port id="0" precision="I8">
30710 <dim>64</dim>
30711 <dim>1</dim>
30712 <dim>3</dim>
30713 <dim>3</dim>
30714 </port>
30715 </output>
30716 </layer>
30717 <layer id="2008" name="bottleneck4_10/inner/dw1/bn/mean/Fused_Mul__copy142310397/quantized/to_f16" type="Convert" version="opset1">
30718 <data destination_type="f16"/>
30719 <input>
30720 <port id="0">
30721 <dim>64</dim>
30722 <dim>1</dim>
30723 <dim>3</dim>
30724 <dim>3</dim>
30725 </port>
30726 </input>
30727 <output>
30728 <port id="1" precision="FP16">
30729 <dim>64</dim>
30730 <dim>1</dim>
30731 <dim>3</dim>
30732 <dim>3</dim>
30733 </port>
30734 </output>
30735 </layer>
30736 <layer id="2009" name="bottleneck4_10/inner/dw1/conv/fq_weights_1/zero_point1358922440" type="Const" version="opset1">
30737 <data element_type="f16" offset="549720" shape="64,1,1,1" size="128"/>
30738 <output>
30739 <port id="0" precision="FP16">
30740 <dim>64</dim>
30741 <dim>1</dim>
30742 <dim>1</dim>
30743 <dim>1</dim>
30744 </port>
30745 </output>
30746 </layer>
30747 <layer id="2010" name="bottleneck4_10/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
30748 <data auto_broadcast="numpy"/>
30749 <input>
30750 <port id="0">
30751 <dim>64</dim>
30752 <dim>1</dim>
30753 <dim>3</dim>
30754 <dim>3</dim>
30755 </port>
30756 <port id="1">
30757 <dim>64</dim>
30758 <dim>1</dim>
30759 <dim>1</dim>
30760 <dim>1</dim>
30761 </port>
30762 </input>
30763 <output>
30764 <port id="2" precision="FP16">
30765 <dim>64</dim>
30766 <dim>1</dim>
30767 <dim>3</dim>
30768 <dim>3</dim>
30769 </port>
30770 </output>
30771 </layer>
30772 <layer id="2011" name="bottleneck4_10/inner/dw1/conv/fq_weights_1/scale1358420976" type="Const" version="opset1">
30773 <data element_type="f16" offset="549848" shape="64,1,1,1" size="128"/>
30774 <output>
30775 <port id="0" precision="FP16">
30776 <dim>64</dim>
30777 <dim>1</dim>
30778 <dim>1</dim>
30779 <dim>1</dim>
30780 </port>
30781 </output>
30782 </layer>
30783 <layer id="2012" name="bottleneck4_10/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
30784 <data auto_broadcast="numpy"/>
30785 <input>
30786 <port id="0">
30787 <dim>64</dim>
30788 <dim>1</dim>
30789 <dim>3</dim>
30790 <dim>3</dim>
30791 </port>
30792 <port id="1">
30793 <dim>64</dim>
30794 <dim>1</dim>
30795 <dim>1</dim>
30796 <dim>1</dim>
30797 </port>
30798 </input>
30799 <output>
30800 <port id="2" precision="FP16">
30801 <dim>64</dim>
30802 <dim>1</dim>
30803 <dim>3</dim>
30804 <dim>3</dim>
30805 </port>
30806 </output>
30807 </layer>
30808 <layer id="2013" name="16839" type="Reshape" version="opset1">
30809 <data special_zero="true"/>
30810 <input>
30811 <port id="0">
30812 <dim>64</dim>
30813 <dim>1</dim>
30814 <dim>3</dim>
30815 <dim>3</dim>
30816 </port>
30817 <port id="1">
30818 <dim>5</dim>
30819 </port>
30820 </input>
30821 <output>
30822 <port id="2" precision="FP16">
30823 <dim>64</dim>
30824 <dim>1</dim>
30825 <dim>1</dim>
30826 <dim>3</dim>
30827 <dim>3</dim>
30828 </port>
30829 </output>
30830 </layer>
30831 <layer id="2014" name="bottleneck4_10/inner/dw1/conv" type="GroupConvolution" version="opset1">
30832 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
30833 <input>
30834 <port id="0">
30835 <dim>1</dim>
30836 <dim>64</dim>
30837 <dim>20</dim>
30838 <dim>34</dim>
30839 </port>
30840 <port id="1">
30841 <dim>64</dim>
30842 <dim>1</dim>
30843 <dim>1</dim>
30844 <dim>3</dim>
30845 <dim>3</dim>
30846 </port>
30847 </input>
30848 <output>
30849 <port id="2" precision="FP16">
30850 <dim>1</dim>
30851 <dim>64</dim>
30852 <dim>20</dim>
30853 <dim>34</dim>
30854 </port>
30855 </output>
30856 </layer>
30857 <layer id="2015" name="data_add_2450524510142519743" type="Const" version="opset1">
30858 <data element_type="f16" offset="549976" shape="1,64,1,1" size="128"/>
30859 <output>
30860 <port id="0" precision="FP16">
30861 <dim>1</dim>
30862 <dim>64</dim>
30863 <dim>1</dim>
30864 <dim>1</dim>
30865 </port>
30866 </output>
30867 </layer>
30868 <layer id="2016" name="bottleneck4_10/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
30869 <data auto_broadcast="numpy"/>
30870 <input>
30871 <port id="0">
30872 <dim>1</dim>
30873 <dim>64</dim>
30874 <dim>20</dim>
30875 <dim>34</dim>
30876 </port>
30877 <port id="1">
30878 <dim>1</dim>
30879 <dim>64</dim>
30880 <dim>1</dim>
30881 <dim>1</dim>
30882 </port>
30883 </input>
30884 <output>
30885 <port id="2" names="bottleneck4_10/inner/dw1/conv" precision="FP16">
30886 <dim>1</dim>
30887 <dim>64</dim>
30888 <dim>20</dim>
30889 <dim>34</dim>
30890 </port>
30891 </output>
30892 </layer>
30893 <layer id="2017" name="bottleneck4_10/inner/dw1/fn/weights30944402621427" type="Const" version="opset1">
30894 <data element_type="f32" offset="1576" shape="1" size="4"/>
30895 <output>
30896 <port id="0" precision="FP32">
30897 <dim>1</dim>
30898 </port>
30899 </output>
30900 </layer>
30901 <layer id="2018" name="bottleneck4_10/inner/dw1/fn" type="PReLU" version="opset1">
30902 <input>
30903 <port id="0">
30904 <dim>1</dim>
30905 <dim>64</dim>
30906 <dim>20</dim>
30907 <dim>34</dim>
30908 </port>
30909 <port id="1">
30910 <dim>1</dim>
30911 </port>
30912 </input>
30913 <output>
30914 <port id="2" names="bottleneck4_10/inner/dw1/conv" precision="FP16">
30915 <dim>1</dim>
30916 <dim>64</dim>
30917 <dim>20</dim>
30918 <dim>34</dim>
30919 </port>
30920 </output>
30921 </layer>
30922 <layer id="2019" name="bottleneck4_10/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
30923 <data auto_broadcast="numpy" levels="256"/>
30924 <input>
30925 <port id="0">
30926 <dim>1</dim>
30927 <dim>64</dim>
30928 <dim>20</dim>
30929 <dim>34</dim>
30930 </port>
30931 <port id="1"/>
30932 <port id="2"/>
30933 <port id="3"/>
30934 <port id="4"/>
30935 </input>
30936 <output>
30937 <port id="5" precision="FP16">
30938 <dim>1</dim>
30939 <dim>64</dim>
30940 <dim>20</dim>
30941 <dim>34</dim>
30942 </port>
30943 </output>
30944 </layer>
30945 <layer id="2020" name="bottleneck4_10/dim_inc/bn/mean/Fused_Mul__copy142910400/quantized1364819974" type="Const" version="opset1">
30946 <data element_type="i8" offset="550104" shape="256,64,1,1" size="16384"/>
30947 <output>
30948 <port id="0" precision="I8">
30949 <dim>256</dim>
30950 <dim>64</dim>
30951 <dim>1</dim>
30952 <dim>1</dim>
30953 </port>
30954 </output>
30955 </layer>
30956 <layer id="2021" name="bottleneck4_10/dim_inc/bn/mean/Fused_Mul__copy142910400/quantized/to_f16" type="Convert" version="opset1">
30957 <data destination_type="f16"/>
30958 <input>
30959 <port id="0">
30960 <dim>256</dim>
30961 <dim>64</dim>
30962 <dim>1</dim>
30963 <dim>1</dim>
30964 </port>
30965 </input>
30966 <output>
30967 <port id="1" precision="FP16">
30968 <dim>256</dim>
30969 <dim>64</dim>
30970 <dim>1</dim>
30971 <dim>1</dim>
30972 </port>
30973 </output>
30974 </layer>
30975 <layer id="2022" name="bottleneck4_10/dim_inc/conv/fq_weights_1/zero_point1366122704" type="Const" version="opset1">
30976 <data element_type="f16" offset="566488" shape="256,1,1,1" size="512"/>
30977 <output>
30978 <port id="0" precision="FP16">
30979 <dim>256</dim>
30980 <dim>1</dim>
30981 <dim>1</dim>
30982 <dim>1</dim>
30983 </port>
30984 </output>
30985 </layer>
30986 <layer id="2023" name="bottleneck4_10/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
30987 <data auto_broadcast="numpy"/>
30988 <input>
30989 <port id="0">
30990 <dim>256</dim>
30991 <dim>64</dim>
30992 <dim>1</dim>
30993 <dim>1</dim>
30994 </port>
30995 <port id="1">
30996 <dim>256</dim>
30997 <dim>1</dim>
30998 <dim>1</dim>
30999 <dim>1</dim>
31000 </port>
31001 </input>
31002 <output>
31003 <port id="2" precision="FP16">
31004 <dim>256</dim>
31005 <dim>64</dim>
31006 <dim>1</dim>
31007 <dim>1</dim>
31008 </port>
31009 </output>
31010 </layer>
31011 <layer id="2024" name="bottleneck4_10/dim_inc/conv/fq_weights_1/scale1365622392" type="Const" version="opset1">
31012 <data element_type="f16" offset="567000" shape="256,1,1,1" size="512"/>
31013 <output>
31014 <port id="0" precision="FP16">
31015 <dim>256</dim>
31016 <dim>1</dim>
31017 <dim>1</dim>
31018 <dim>1</dim>
31019 </port>
31020 </output>
31021 </layer>
31022 <layer id="2025" name="bottleneck4_10/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
31023 <data auto_broadcast="numpy"/>
31024 <input>
31025 <port id="0">
31026 <dim>256</dim>
31027 <dim>64</dim>
31028 <dim>1</dim>
31029 <dim>1</dim>
31030 </port>
31031 <port id="1">
31032 <dim>256</dim>
31033 <dim>1</dim>
31034 <dim>1</dim>
31035 <dim>1</dim>
31036 </port>
31037 </input>
31038 <output>
31039 <port id="2" precision="FP16">
31040 <dim>256</dim>
31041 <dim>64</dim>
31042 <dim>1</dim>
31043 <dim>1</dim>
31044 </port>
31045 </output>
31046 </layer>
31047 <layer id="2026" name="bottleneck4_10/dim_inc/conv" type="Convolution" version="opset1">
31048 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
31049 <input>
31050 <port id="0">
31051 <dim>1</dim>
31052 <dim>64</dim>
31053 <dim>20</dim>
31054 <dim>34</dim>
31055 </port>
31056 <port id="1">
31057 <dim>256</dim>
31058 <dim>64</dim>
31059 <dim>1</dim>
31060 <dim>1</dim>
31061 </port>
31062 </input>
31063 <output>
31064 <port id="2" precision="FP16">
31065 <dim>1</dim>
31066 <dim>256</dim>
31067 <dim>20</dim>
31068 <dim>34</dim>
31069 </port>
31070 </output>
31071 </layer>
31072 <layer id="2027" name="data_add_2451324518143120667" type="Const" version="opset1">
31073 <data element_type="f16" offset="567512" shape="1,256,1,1" size="512"/>
31074 <output>
31075 <port id="0" precision="FP16">
31076 <dim>1</dim>
31077 <dim>256</dim>
31078 <dim>1</dim>
31079 <dim>1</dim>
31080 </port>
31081 </output>
31082 </layer>
31083 <layer id="2028" name="bottleneck4_10/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
31084 <data auto_broadcast="numpy"/>
31085 <input>
31086 <port id="0">
31087 <dim>1</dim>
31088 <dim>256</dim>
31089 <dim>20</dim>
31090 <dim>34</dim>
31091 </port>
31092 <port id="1">
31093 <dim>1</dim>
31094 <dim>256</dim>
31095 <dim>1</dim>
31096 <dim>1</dim>
31097 </port>
31098 </input>
31099 <output>
31100 <port id="2" names="bottleneck4_10/dim_inc/conv" precision="FP16">
31101 <dim>1</dim>
31102 <dim>256</dim>
31103 <dim>20</dim>
31104 <dim>34</dim>
31105 </port>
31106 </output>
31107 </layer>
31108 <layer id="2029" name="bottleneck4_10/add/fq_input_1" type="FakeQuantize" version="opset1">
31109 <data auto_broadcast="numpy" levels="256"/>
31110 <input>
31111 <port id="0">
31112 <dim>1</dim>
31113 <dim>256</dim>
31114 <dim>20</dim>
31115 <dim>34</dim>
31116 </port>
31117 <port id="1"/>
31118 <port id="2"/>
31119 <port id="3"/>
31120 <port id="4"/>
31121 </input>
31122 <output>
31123 <port id="5" precision="FP16">
31124 <dim>1</dim>
31125 <dim>256</dim>
31126 <dim>20</dim>
31127 <dim>34</dim>
31128 </port>
31129 </output>
31130 </layer>
31131 <layer id="2030" name="bottleneck4_10/add" type="Add" version="opset1">
31132 <data auto_broadcast="numpy"/>
31133 <input>
31134 <port id="0">
31135 <dim>1</dim>
31136 <dim>256</dim>
31137 <dim>20</dim>
31138 <dim>34</dim>
31139 </port>
31140 <port id="1">
31141 <dim>1</dim>
31142 <dim>256</dim>
31143 <dim>20</dim>
31144 <dim>34</dim>
31145 </port>
31146 </input>
31147 <output>
31148 <port id="2" names="bottleneck4_10/add" precision="FP16">
31149 <dim>1</dim>
31150 <dim>256</dim>
31151 <dim>20</dim>
31152 <dim>34</dim>
31153 </port>
31154 </output>
31155 </layer>
31156 <layer id="2031" name="bottleneck4_10/fn/weights30904403071434" type="Const" version="opset1">
31157 <data element_type="f32" offset="1576" shape="1" size="4"/>
31158 <output>
31159 <port id="0" precision="FP32">
31160 <dim>1</dim>
31161 </port>
31162 </output>
31163 </layer>
31164 <layer id="2032" name="bottleneck4_10/fn" type="PReLU" version="opset1">
31165 <input>
31166 <port id="0">
31167 <dim>1</dim>
31168 <dim>256</dim>
31169 <dim>20</dim>
31170 <dim>34</dim>
31171 </port>
31172 <port id="1">
31173 <dim>1</dim>
31174 </port>
31175 </input>
31176 <output>
31177 <port id="2" names="bottleneck4_10/add" precision="FP16">
31178 <dim>1</dim>
31179 <dim>256</dim>
31180 <dim>20</dim>
31181 <dim>34</dim>
31182 </port>
31183 </output>
31184 </layer>
31185 <layer id="2033" name="bottleneck4_11/add/fq_input_0" type="FakeQuantize" version="opset1">
31186 <data auto_broadcast="numpy" levels="256"/>
31187 <input>
31188 <port id="0">
31189 <dim>1</dim>
31190 <dim>256</dim>
31191 <dim>20</dim>
31192 <dim>34</dim>
31193 </port>
31194 <port id="1"/>
31195 <port id="2"/>
31196 <port id="3"/>
31197 <port id="4"/>
31198 </input>
31199 <output>
31200 <port id="5" precision="FP16">
31201 <dim>1</dim>
31202 <dim>256</dim>
31203 <dim>20</dim>
31204 <dim>34</dim>
31205 </port>
31206 </output>
31207 </layer>
31208 <layer id="2034" name="4294429819989" type="Const" version="opset1">
31209 <data element_type="f16" offset="568024" shape="" size="2"/>
31210 <output>
31211 <port id="0" precision="FP16"/>
31212 </output>
31213 </layer>
31214 <layer id="2035" name="4295429920127" type="Const" version="opset1">
31215 <data element_type="f16" offset="568026" shape="" size="2"/>
31216 <output>
31217 <port id="0" precision="FP16"/>
31218 </output>
31219 </layer>
31220 <layer id="2036" name="4296430021717" type="Const" version="opset1">
31221 <data element_type="f16" offset="568024" shape="" size="2"/>
31222 <output>
31223 <port id="0" precision="FP16"/>
31224 </output>
31225 </layer>
31226 <layer id="2037" name="4297430121204" type="Const" version="opset1">
31227 <data element_type="f16" offset="568026" shape="" size="2"/>
31228 <output>
31229 <port id="0" precision="FP16"/>
31230 </output>
31231 </layer>
31232 <layer id="2038" name="5324532822170" type="Const" version="opset1">
31233 <data element_type="f16" offset="568028" shape="" size="2"/>
31234 <output>
31235 <port id="0" precision="FP16"/>
31236 </output>
31237 </layer>
31238 <layer id="2039" name="5325532922719" type="Const" version="opset1">
31239 <data element_type="f16" offset="568030" shape="" size="2"/>
31240 <output>
31241 <port id="0" precision="FP16"/>
31242 </output>
31243 </layer>
31244 <layer id="2040" name="5326533020073" type="Const" version="opset1">
31245 <data element_type="f16" offset="568028" shape="" size="2"/>
31246 <output>
31247 <port id="0" precision="FP16"/>
31248 </output>
31249 </layer>
31250 <layer id="2041" name="5327533122332" type="Const" version="opset1">
31251 <data element_type="f16" offset="568030" shape="" size="2"/>
31252 <output>
31253 <port id="0" precision="FP16"/>
31254 </output>
31255 </layer>
31256 <layer id="2042" name="3304330820979" type="Const" version="opset1">
31257 <data element_type="f16" offset="568032" shape="1,64,1,1" size="128"/>
31258 <output>
31259 <port id="0" precision="FP16">
31260 <dim>1</dim>
31261 <dim>64</dim>
31262 <dim>1</dim>
31263 <dim>1</dim>
31264 </port>
31265 </output>
31266 </layer>
31267 <layer id="2043" name="3305330922551" type="Const" version="opset1">
31268 <data element_type="f16" offset="568160" shape="1,64,1,1" size="128"/>
31269 <output>
31270 <port id="0" precision="FP16">
31271 <dim>1</dim>
31272 <dim>64</dim>
31273 <dim>1</dim>
31274 <dim>1</dim>
31275 </port>
31276 </output>
31277 </layer>
31278 <layer id="2044" name="3306331022683" type="Const" version="opset1">
31279 <data element_type="f16" offset="568032" shape="1,64,1,1" size="128"/>
31280 <output>
31281 <port id="0" precision="FP16">
31282 <dim>1</dim>
31283 <dim>64</dim>
31284 <dim>1</dim>
31285 <dim>1</dim>
31286 </port>
31287 </output>
31288 </layer>
31289 <layer id="2045" name="3307331120856" type="Const" version="opset1">
31290 <data element_type="f16" offset="568160" shape="1,64,1,1" size="128"/>
31291 <output>
31292 <port id="0" precision="FP16">
31293 <dim>1</dim>
31294 <dim>64</dim>
31295 <dim>1</dim>
31296 <dim>1</dim>
31297 </port>
31298 </output>
31299 </layer>
31300 <layer id="2046" name="bottleneck4_11/dim_red/bn/mean/Fused_Mul__copy143610403/quantized1362419785" type="Const" version="opset1">
31301 <data element_type="i8" offset="568288" shape="64,256,1,1" size="16384"/>
31302 <output>
31303 <port id="0" precision="I8">
31304 <dim>64</dim>
31305 <dim>256</dim>
31306 <dim>1</dim>
31307 <dim>1</dim>
31308 </port>
31309 </output>
31310 </layer>
31311 <layer id="2047" name="bottleneck4_11/dim_red/bn/mean/Fused_Mul__copy143610403/quantized/to_f16" type="Convert" version="opset1">
31312 <data destination_type="f16"/>
31313 <input>
31314 <port id="0">
31315 <dim>64</dim>
31316 <dim>256</dim>
31317 <dim>1</dim>
31318 <dim>1</dim>
31319 </port>
31320 </input>
31321 <output>
31322 <port id="1" precision="FP16">
31323 <dim>64</dim>
31324 <dim>256</dim>
31325 <dim>1</dim>
31326 <dim>1</dim>
31327 </port>
31328 </output>
31329 </layer>
31330 <layer id="2048" name="bottleneck4_11/dim_red/conv/fq_weights_1/zero_point1363720406" type="Const" version="opset1">
31331 <data element_type="f16" offset="584672" shape="64,1,1,1" size="128"/>
31332 <output>
31333 <port id="0" precision="FP16">
31334 <dim>64</dim>
31335 <dim>1</dim>
31336 <dim>1</dim>
31337 <dim>1</dim>
31338 </port>
31339 </output>
31340 </layer>
31341 <layer id="2049" name="bottleneck4_11/dim_red/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
31342 <data auto_broadcast="numpy"/>
31343 <input>
31344 <port id="0">
31345 <dim>64</dim>
31346 <dim>256</dim>
31347 <dim>1</dim>
31348 <dim>1</dim>
31349 </port>
31350 <port id="1">
31351 <dim>64</dim>
31352 <dim>1</dim>
31353 <dim>1</dim>
31354 <dim>1</dim>
31355 </port>
31356 </input>
31357 <output>
31358 <port id="2" precision="FP16">
31359 <dim>64</dim>
31360 <dim>256</dim>
31361 <dim>1</dim>
31362 <dim>1</dim>
31363 </port>
31364 </output>
31365 </layer>
31366 <layer id="2050" name="bottleneck4_11/dim_red/conv/fq_weights_1/scale1363222524" type="Const" version="opset1">
31367 <data element_type="f16" offset="584800" shape="64,1,1,1" size="128"/>
31368 <output>
31369 <port id="0" precision="FP16">
31370 <dim>64</dim>
31371 <dim>1</dim>
31372 <dim>1</dim>
31373 <dim>1</dim>
31374 </port>
31375 </output>
31376 </layer>
31377 <layer id="2051" name="bottleneck4_11/dim_red/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
31378 <data auto_broadcast="numpy"/>
31379 <input>
31380 <port id="0">
31381 <dim>64</dim>
31382 <dim>256</dim>
31383 <dim>1</dim>
31384 <dim>1</dim>
31385 </port>
31386 <port id="1">
31387 <dim>64</dim>
31388 <dim>1</dim>
31389 <dim>1</dim>
31390 <dim>1</dim>
31391 </port>
31392 </input>
31393 <output>
31394 <port id="2" precision="FP16">
31395 <dim>64</dim>
31396 <dim>256</dim>
31397 <dim>1</dim>
31398 <dim>1</dim>
31399 </port>
31400 </output>
31401 </layer>
31402 <layer id="2052" name="bottleneck4_11/dim_red/conv" type="Convolution" version="opset1">
31403 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
31404 <input>
31405 <port id="0">
31406 <dim>1</dim>
31407 <dim>256</dim>
31408 <dim>20</dim>
31409 <dim>34</dim>
31410 </port>
31411 <port id="1">
31412 <dim>64</dim>
31413 <dim>256</dim>
31414 <dim>1</dim>
31415 <dim>1</dim>
31416 </port>
31417 </input>
31418 <output>
31419 <port id="2" precision="FP16">
31420 <dim>1</dim>
31421 <dim>64</dim>
31422 <dim>20</dim>
31423 <dim>34</dim>
31424 </port>
31425 </output>
31426 </layer>
31427 <layer id="2053" name="data_add_2452124526143821531" type="Const" version="opset1">
31428 <data element_type="f16" offset="584928" shape="1,64,1,1" size="128"/>
31429 <output>
31430 <port id="0" precision="FP16">
31431 <dim>1</dim>
31432 <dim>64</dim>
31433 <dim>1</dim>
31434 <dim>1</dim>
31435 </port>
31436 </output>
31437 </layer>
31438 <layer id="2054" name="bottleneck4_11/dim_red/bn/variance/Fused_Add_" type="Add" version="opset1">
31439 <data auto_broadcast="numpy"/>
31440 <input>
31441 <port id="0">
31442 <dim>1</dim>
31443 <dim>64</dim>
31444 <dim>20</dim>
31445 <dim>34</dim>
31446 </port>
31447 <port id="1">
31448 <dim>1</dim>
31449 <dim>64</dim>
31450 <dim>1</dim>
31451 <dim>1</dim>
31452 </port>
31453 </input>
31454 <output>
31455 <port id="2" names="bottleneck4_11/dim_red/conv" precision="FP16">
31456 <dim>1</dim>
31457 <dim>64</dim>
31458 <dim>20</dim>
31459 <dim>34</dim>
31460 </port>
31461 </output>
31462 </layer>
31463 <layer id="2055" name="bottleneck4_11/dim_red/fn/weights30956405321440" type="Const" version="opset1">
31464 <data element_type="f32" offset="1576" shape="1" size="4"/>
31465 <output>
31466 <port id="0" precision="FP32">
31467 <dim>1</dim>
31468 </port>
31469 </output>
31470 </layer>
31471 <layer id="2056" name="bottleneck4_11/dim_red/fn" type="PReLU" version="opset1">
31472 <input>
31473 <port id="0">
31474 <dim>1</dim>
31475 <dim>64</dim>
31476 <dim>20</dim>
31477 <dim>34</dim>
31478 </port>
31479 <port id="1">
31480 <dim>1</dim>
31481 </port>
31482 </input>
31483 <output>
31484 <port id="2" names="bottleneck4_11/dim_red/conv" precision="FP16">
31485 <dim>1</dim>
31486 <dim>64</dim>
31487 <dim>20</dim>
31488 <dim>34</dim>
31489 </port>
31490 </output>
31491 </layer>
31492 <layer id="2057" name="bottleneck4_11/inner/dw1/conv/fq_input_0" type="FakeQuantize" version="opset1">
31493 <data auto_broadcast="numpy" levels="256"/>
31494 <input>
31495 <port id="0">
31496 <dim>1</dim>
31497 <dim>64</dim>
31498 <dim>20</dim>
31499 <dim>34</dim>
31500 </port>
31501 <port id="1">
31502 <dim>1</dim>
31503 <dim>64</dim>
31504 <dim>1</dim>
31505 <dim>1</dim>
31506 </port>
31507 <port id="2">
31508 <dim>1</dim>
31509 <dim>64</dim>
31510 <dim>1</dim>
31511 <dim>1</dim>
31512 </port>
31513 <port id="3">
31514 <dim>1</dim>
31515 <dim>64</dim>
31516 <dim>1</dim>
31517 <dim>1</dim>
31518 </port>
31519 <port id="4">
31520 <dim>1</dim>
31521 <dim>64</dim>
31522 <dim>1</dim>
31523 <dim>1</dim>
31524 </port>
31525 </input>
31526 <output>
31527 <port id="5" precision="FP16">
31528 <dim>1</dim>
31529 <dim>64</dim>
31530 <dim>20</dim>
31531 <dim>34</dim>
31532 </port>
31533 </output>
31534 </layer>
31535 <layer id="2058" name="16827/value1682922857" type="Const" version="opset1">
31536 <data element_type="i64" offset="189984" shape="5" size="40"/>
31537 <output>
31538 <port id="0" precision="I64">
31539 <dim>5</dim>
31540 </port>
31541 </output>
31542 </layer>
31543 <layer id="2059" name="bottleneck4_11/inner/dw1/bn/mean/Fused_Mul__copy144210406/quantized1292821633" type="Const" version="opset1">
31544 <data element_type="i8" offset="585056" shape="64,1,3,3" size="576"/>
31545 <output>
31546 <port id="0" precision="I8">
31547 <dim>64</dim>
31548 <dim>1</dim>
31549 <dim>3</dim>
31550 <dim>3</dim>
31551 </port>
31552 </output>
31553 </layer>
31554 <layer id="2060" name="bottleneck4_11/inner/dw1/bn/mean/Fused_Mul__copy144210406/quantized/to_f16" type="Convert" version="opset1">
31555 <data destination_type="f16"/>
31556 <input>
31557 <port id="0">
31558 <dim>64</dim>
31559 <dim>1</dim>
31560 <dim>3</dim>
31561 <dim>3</dim>
31562 </port>
31563 </input>
31564 <output>
31565 <port id="1" precision="FP16">
31566 <dim>64</dim>
31567 <dim>1</dim>
31568 <dim>3</dim>
31569 <dim>3</dim>
31570 </port>
31571 </output>
31572 </layer>
31573 <layer id="2061" name="bottleneck4_11/inner/dw1/conv/fq_weights_1/zero_point1294120229" type="Const" version="opset1">
31574 <data element_type="f16" offset="585632" shape="64,1,1,1" size="128"/>
31575 <output>
31576 <port id="0" precision="FP16">
31577 <dim>64</dim>
31578 <dim>1</dim>
31579 <dim>1</dim>
31580 <dim>1</dim>
31581 </port>
31582 </output>
31583 </layer>
31584 <layer id="2062" name="bottleneck4_11/inner/dw1/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
31585 <data auto_broadcast="numpy"/>
31586 <input>
31587 <port id="0">
31588 <dim>64</dim>
31589 <dim>1</dim>
31590 <dim>3</dim>
31591 <dim>3</dim>
31592 </port>
31593 <port id="1">
31594 <dim>64</dim>
31595 <dim>1</dim>
31596 <dim>1</dim>
31597 <dim>1</dim>
31598 </port>
31599 </input>
31600 <output>
31601 <port id="2" precision="FP16">
31602 <dim>64</dim>
31603 <dim>1</dim>
31604 <dim>3</dim>
31605 <dim>3</dim>
31606 </port>
31607 </output>
31608 </layer>
31609 <layer id="2063" name="bottleneck4_11/inner/dw1/conv/fq_weights_1/scale1293620895" type="Const" version="opset1">
31610 <data element_type="f16" offset="585760" shape="64,1,1,1" size="128"/>
31611 <output>
31612 <port id="0" precision="FP16">
31613 <dim>64</dim>
31614 <dim>1</dim>
31615 <dim>1</dim>
31616 <dim>1</dim>
31617 </port>
31618 </output>
31619 </layer>
31620 <layer id="2064" name="bottleneck4_11/inner/dw1/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
31621 <data auto_broadcast="numpy"/>
31622 <input>
31623 <port id="0">
31624 <dim>64</dim>
31625 <dim>1</dim>
31626 <dim>3</dim>
31627 <dim>3</dim>
31628 </port>
31629 <port id="1">
31630 <dim>64</dim>
31631 <dim>1</dim>
31632 <dim>1</dim>
31633 <dim>1</dim>
31634 </port>
31635 </input>
31636 <output>
31637 <port id="2" precision="FP16">
31638 <dim>64</dim>
31639 <dim>1</dim>
31640 <dim>3</dim>
31641 <dim>3</dim>
31642 </port>
31643 </output>
31644 </layer>
31645 <layer id="2065" name="16827" type="Reshape" version="opset1">
31646 <data special_zero="true"/>
31647 <input>
31648 <port id="0">
31649 <dim>64</dim>
31650 <dim>1</dim>
31651 <dim>3</dim>
31652 <dim>3</dim>
31653 </port>
31654 <port id="1">
31655 <dim>5</dim>
31656 </port>
31657 </input>
31658 <output>
31659 <port id="2" precision="FP16">
31660 <dim>64</dim>
31661 <dim>1</dim>
31662 <dim>1</dim>
31663 <dim>3</dim>
31664 <dim>3</dim>
31665 </port>
31666 </output>
31667 </layer>
31668 <layer id="2066" name="bottleneck4_11/inner/dw1/conv" type="GroupConvolution" version="opset1">
31669 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
31670 <input>
31671 <port id="0">
31672 <dim>1</dim>
31673 <dim>64</dim>
31674 <dim>20</dim>
31675 <dim>34</dim>
31676 </port>
31677 <port id="1">
31678 <dim>64</dim>
31679 <dim>1</dim>
31680 <dim>1</dim>
31681 <dim>3</dim>
31682 <dim>3</dim>
31683 </port>
31684 </input>
31685 <output>
31686 <port id="2" precision="FP16">
31687 <dim>1</dim>
31688 <dim>64</dim>
31689 <dim>20</dim>
31690 <dim>34</dim>
31691 </port>
31692 </output>
31693 </layer>
31694 <layer id="2067" name="data_add_2452924534144422578" type="Const" version="opset1">
31695 <data element_type="f16" offset="585888" shape="1,64,1,1" size="128"/>
31696 <output>
31697 <port id="0" precision="FP16">
31698 <dim>1</dim>
31699 <dim>64</dim>
31700 <dim>1</dim>
31701 <dim>1</dim>
31702 </port>
31703 </output>
31704 </layer>
31705 <layer id="2068" name="bottleneck4_11/inner/dw1/bn/variance/Fused_Add_" type="Add" version="opset1">
31706 <data auto_broadcast="numpy"/>
31707 <input>
31708 <port id="0">
31709 <dim>1</dim>
31710 <dim>64</dim>
31711 <dim>20</dim>
31712 <dim>34</dim>
31713 </port>
31714 <port id="1">
31715 <dim>1</dim>
31716 <dim>64</dim>
31717 <dim>1</dim>
31718 <dim>1</dim>
31719 </port>
31720 </input>
31721 <output>
31722 <port id="2" names="bottleneck4_11/inner/dw1/conv" precision="FP16">
31723 <dim>1</dim>
31724 <dim>64</dim>
31725 <dim>20</dim>
31726 <dim>34</dim>
31727 </port>
31728 </output>
31729 </layer>
31730 <layer id="2069" name="bottleneck4_11/inner/dw1/fn/weights30776403011446" type="Const" version="opset1">
31731 <data element_type="f32" offset="1576" shape="1" size="4"/>
31732 <output>
31733 <port id="0" precision="FP32">
31734 <dim>1</dim>
31735 </port>
31736 </output>
31737 </layer>
31738 <layer id="2070" name="bottleneck4_11/inner/dw1/fn" type="PReLU" version="opset1">
31739 <input>
31740 <port id="0">
31741 <dim>1</dim>
31742 <dim>64</dim>
31743 <dim>20</dim>
31744 <dim>34</dim>
31745 </port>
31746 <port id="1">
31747 <dim>1</dim>
31748 </port>
31749 </input>
31750 <output>
31751 <port id="2" names="bottleneck4_11/inner/dw1/conv" precision="FP16">
31752 <dim>1</dim>
31753 <dim>64</dim>
31754 <dim>20</dim>
31755 <dim>34</dim>
31756 </port>
31757 </output>
31758 </layer>
31759 <layer id="2071" name="bottleneck4_11/dim_inc/conv/fq_input_0" type="FakeQuantize" version="opset1">
31760 <data auto_broadcast="numpy" levels="256"/>
31761 <input>
31762 <port id="0">
31763 <dim>1</dim>
31764 <dim>64</dim>
31765 <dim>20</dim>
31766 <dim>34</dim>
31767 </port>
31768 <port id="1"/>
31769 <port id="2"/>
31770 <port id="3"/>
31771 <port id="4"/>
31772 </input>
31773 <output>
31774 <port id="5" precision="FP16">
31775 <dim>1</dim>
31776 <dim>64</dim>
31777 <dim>20</dim>
31778 <dim>34</dim>
31779 </port>
31780 </output>
31781 </layer>
31782 <layer id="2072" name="bottleneck4_11/dim_inc/bn/mean/Fused_Mul__copy144810409/quantized1384021810" type="Const" version="opset1">
31783 <data element_type="i8" offset="586016" shape="256,64,1,1" size="16384"/>
31784 <output>
31785 <port id="0" precision="I8">
31786 <dim>256</dim>
31787 <dim>64</dim>
31788 <dim>1</dim>
31789 <dim>1</dim>
31790 </port>
31791 </output>
31792 </layer>
31793 <layer id="2073" name="bottleneck4_11/dim_inc/bn/mean/Fused_Mul__copy144810409/quantized/to_f16" type="Convert" version="opset1">
31794 <data destination_type="f16"/>
31795 <input>
31796 <port id="0">
31797 <dim>256</dim>
31798 <dim>64</dim>
31799 <dim>1</dim>
31800 <dim>1</dim>
31801 </port>
31802 </input>
31803 <output>
31804 <port id="1" precision="FP16">
31805 <dim>256</dim>
31806 <dim>64</dim>
31807 <dim>1</dim>
31808 <dim>1</dim>
31809 </port>
31810 </output>
31811 </layer>
31812 <layer id="2074" name="bottleneck4_11/dim_inc/conv/fq_weights_1/zero_point1385319533" type="Const" version="opset1">
31813 <data element_type="f16" offset="602400" shape="256,1,1,1" size="512"/>
31814 <output>
31815 <port id="0" precision="FP16">
31816 <dim>256</dim>
31817 <dim>1</dim>
31818 <dim>1</dim>
31819 <dim>1</dim>
31820 </port>
31821 </output>
31822 </layer>
31823 <layer id="2075" name="bottleneck4_11/dim_inc/conv/fq_weights_1/minus_zp" type="Subtract" version="opset1">
31824 <data auto_broadcast="numpy"/>
31825 <input>
31826 <port id="0">
31827 <dim>256</dim>
31828 <dim>64</dim>
31829 <dim>1</dim>
31830 <dim>1</dim>
31831 </port>
31832 <port id="1">
31833 <dim>256</dim>
31834 <dim>1</dim>
31835 <dim>1</dim>
31836 <dim>1</dim>
31837 </port>
31838 </input>
31839 <output>
31840 <port id="2" precision="FP16">
31841 <dim>256</dim>
31842 <dim>64</dim>
31843 <dim>1</dim>
31844 <dim>1</dim>
31845 </port>
31846 </output>
31847 </layer>
31848 <layer id="2076" name="bottleneck4_11/dim_inc/conv/fq_weights_1/scale1384821798" type="Const" version="opset1">
31849 <data element_type="f16" offset="602912" shape="256,1,1,1" size="512"/>
31850 <output>
31851 <port id="0" precision="FP16">
31852 <dim>256</dim>
31853 <dim>1</dim>
31854 <dim>1</dim>
31855 <dim>1</dim>
31856 </port>
31857 </output>
31858 </layer>
31859 <layer id="2077" name="bottleneck4_11/dim_inc/conv/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
31860 <data auto_broadcast="numpy"/>
31861 <input>
31862 <port id="0">
31863 <dim>256</dim>
31864 <dim>64</dim>
31865 <dim>1</dim>
31866 <dim>1</dim>
31867 </port>
31868 <port id="1">
31869 <dim>256</dim>
31870 <dim>1</dim>
31871 <dim>1</dim>
31872 <dim>1</dim>
31873 </port>
31874 </input>
31875 <output>
31876 <port id="2" precision="FP16">
31877 <dim>256</dim>
31878 <dim>64</dim>
31879 <dim>1</dim>
31880 <dim>1</dim>
31881 </port>
31882 </output>
31883 </layer>
31884 <layer id="2078" name="bottleneck4_11/dim_inc/conv" type="Convolution" version="opset1">
31885 <data auto_pad="explicit" dilations="1,1" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
31886 <input>
31887 <port id="0">
31888 <dim>1</dim>
31889 <dim>64</dim>
31890 <dim>20</dim>
31891 <dim>34</dim>
31892 </port>
31893 <port id="1">
31894 <dim>256</dim>
31895 <dim>64</dim>
31896 <dim>1</dim>
31897 <dim>1</dim>
31898 </port>
31899 </input>
31900 <output>
31901 <port id="2" precision="FP16">
31902 <dim>1</dim>
31903 <dim>256</dim>
31904 <dim>20</dim>
31905 <dim>34</dim>
31906 </port>
31907 </output>
31908 </layer>
31909 <layer id="2079" name="data_add_2453724542145022566" type="Const" version="opset1">
31910 <data element_type="f16" offset="603424" shape="1,256,1,1" size="512"/>
31911 <output>
31912 <port id="0" precision="FP16">
31913 <dim>1</dim>
31914 <dim>256</dim>
31915 <dim>1</dim>
31916 <dim>1</dim>
31917 </port>
31918 </output>
31919 </layer>
31920 <layer id="2080" name="bottleneck4_11/dim_inc/bn/variance/Fused_Add_" type="Add" version="opset1">
31921 <data auto_broadcast="numpy"/>
31922 <input>
31923 <port id="0">
31924 <dim>1</dim>
31925 <dim>256</dim>
31926 <dim>20</dim>
31927 <dim>34</dim>
31928 </port>
31929 <port id="1">
31930 <dim>1</dim>
31931 <dim>256</dim>
31932 <dim>1</dim>
31933 <dim>1</dim>
31934 </port>
31935 </input>
31936 <output>
31937 <port id="2" names="bottleneck4_11/dim_inc/conv" precision="FP16">
31938 <dim>1</dim>
31939 <dim>256</dim>
31940 <dim>20</dim>
31941 <dim>34</dim>
31942 </port>
31943 </output>
31944 </layer>
31945 <layer id="2081" name="bottleneck4_11/add/fq_input_1" type="FakeQuantize" version="opset1">
31946 <data auto_broadcast="numpy" levels="256"/>
31947 <input>
31948 <port id="0">
31949 <dim>1</dim>
31950 <dim>256</dim>
31951 <dim>20</dim>
31952 <dim>34</dim>
31953 </port>
31954 <port id="1"/>
31955 <port id="2"/>
31956 <port id="3"/>
31957 <port id="4"/>
31958 </input>
31959 <output>
31960 <port id="5" precision="FP16">
31961 <dim>1</dim>
31962 <dim>256</dim>
31963 <dim>20</dim>
31964 <dim>34</dim>
31965 </port>
31966 </output>
31967 </layer>
31968 <layer id="2082" name="bottleneck4_11/add" type="Add" version="opset1">
31969 <data auto_broadcast="numpy"/>
31970 <input>
31971 <port id="0">
31972 <dim>1</dim>
31973 <dim>256</dim>
31974 <dim>20</dim>
31975 <dim>34</dim>
31976 </port>
31977 <port id="1">
31978 <dim>1</dim>
31979 <dim>256</dim>
31980 <dim>20</dim>
31981 <dim>34</dim>
31982 </port>
31983 </input>
31984 <output>
31985 <port id="2" names="bottleneck4_11/add" precision="FP16">
31986 <dim>1</dim>
31987 <dim>256</dim>
31988 <dim>20</dim>
31989 <dim>34</dim>
31990 </port>
31991 </output>
31992 </layer>
31993 <layer id="2083" name="bottleneck4_11/fn/weights31180405651453" type="Const" version="opset1">
31994 <data element_type="f32" offset="1576" shape="1" size="4"/>
31995 <output>
31996 <port id="0" precision="FP32">
31997 <dim>1</dim>
31998 </port>
31999 </output>
32000 </layer>
32001 <layer id="2084" name="bottleneck4_11/fn" type="PReLU" version="opset1">
32002 <input>
32003 <port id="0">
32004 <dim>1</dim>
32005 <dim>256</dim>
32006 <dim>20</dim>
32007 <dim>34</dim>
32008 </port>
32009 <port id="1">
32010 <dim>1</dim>
32011 </port>
32012 </input>
32013 <output>
32014 <port id="2" names="bb_16xout_pd" precision="FP16">
32015 <dim>1</dim>
32016 <dim>256</dim>
32017 <dim>20</dim>
32018 <dim>34</dim>
32019 </port>
32020 </output>
32021 </layer>
32022 <layer id="2085" name="mbox_conf1/out/conv/WithoutBiases/fq_input_0" type="FakeQuantize" version="opset1">
32023 <data auto_broadcast="numpy" levels="256"/>
32024 <input>
32025 <port id="0">
32026 <dim>1</dim>
32027 <dim>256</dim>
32028 <dim>20</dim>
32029 <dim>34</dim>
32030 </port>
32031 <port id="1"/>
32032 <port id="2"/>
32033 <port id="3"/>
32034 <port id="4"/>
32035 </input>
32036 <output>
32037 <port id="5" precision="FP16">
32038 <dim>1</dim>
32039 <dim>256</dim>
32040 <dim>20</dim>
32041 <dim>34</dim>
32042 </port>
32043 </output>
32044 </layer>
32045 <layer id="2086" name="752145510412/quantized1300022731" type="Const" version="opset1">
32046 <data element_type="i8" offset="603936" shape="48,256,3,3" size="110592"/>
32047 <output>
32048 <port id="0" precision="I8">
32049 <dim>48</dim>
32050 <dim>256</dim>
32051 <dim>3</dim>
32052 <dim>3</dim>
32053 </port>
32054 </output>
32055 </layer>
32056 <layer id="2087" name="752145510412/quantized/to_f16" type="Convert" version="opset1">
32057 <data destination_type="f16"/>
32058 <input>
32059 <port id="0">
32060 <dim>48</dim>
32061 <dim>256</dim>
32062 <dim>3</dim>
32063 <dim>3</dim>
32064 </port>
32065 </input>
32066 <output>
32067 <port id="1" precision="FP16">
32068 <dim>48</dim>
32069 <dim>256</dim>
32070 <dim>3</dim>
32071 <dim>3</dim>
32072 </port>
32073 </output>
32074 </layer>
32075 <layer id="2088" name="mbox_loc1/out/conv/WithoutBiases/fq_weights_1/zero_point1301321714" type="Const" version="opset1">
32076 <data element_type="f16" offset="714528" shape="48,1,1,1" size="96"/>
32077 <output>
32078 <port id="0" precision="FP16">
32079 <dim>48</dim>
32080 <dim>1</dim>
32081 <dim>1</dim>
32082 <dim>1</dim>
32083 </port>
32084 </output>
32085 </layer>
32086 <layer id="2089" name="mbox_loc1/out/conv/WithoutBiases/fq_weights_1/minus_zp" type="Subtract" version="opset1">
32087 <data auto_broadcast="numpy"/>
32088 <input>
32089 <port id="0">
32090 <dim>48</dim>
32091 <dim>256</dim>
32092 <dim>3</dim>
32093 <dim>3</dim>
32094 </port>
32095 <port id="1">
32096 <dim>48</dim>
32097 <dim>1</dim>
32098 <dim>1</dim>
32099 <dim>1</dim>
32100 </port>
32101 </input>
32102 <output>
32103 <port id="2" precision="FP16">
32104 <dim>48</dim>
32105 <dim>256</dim>
32106 <dim>3</dim>
32107 <dim>3</dim>
32108 </port>
32109 </output>
32110 </layer>
32111 <layer id="2090" name="mbox_loc1/out/conv/WithoutBiases/fq_weights_1/scale1300820010" type="Const" version="opset1">
32112 <data element_type="f16" offset="714624" shape="48,1,1,1" size="96"/>
32113 <output>
32114 <port id="0" precision="FP16">
32115 <dim>48</dim>
32116 <dim>1</dim>
32117 <dim>1</dim>
32118 <dim>1</dim>
32119 </port>
32120 </output>
32121 </layer>
32122 <layer id="2091" name="mbox_loc1/out/conv/WithoutBiases/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
32123 <data auto_broadcast="numpy"/>
32124 <input>
32125 <port id="0">
32126 <dim>48</dim>
32127 <dim>256</dim>
32128 <dim>3</dim>
32129 <dim>3</dim>
32130 </port>
32131 <port id="1">
32132 <dim>48</dim>
32133 <dim>1</dim>
32134 <dim>1</dim>
32135 <dim>1</dim>
32136 </port>
32137 </input>
32138 <output>
32139 <port id="2" precision="FP16">
32140 <dim>48</dim>
32141 <dim>256</dim>
32142 <dim>3</dim>
32143 <dim>3</dim>
32144 </port>
32145 </output>
32146 </layer>
32147 <layer id="2092" name="mbox_loc1/out/conv/WithoutBiases" type="Convolution" version="opset1">
32148 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
32149 <input>
32150 <port id="0">
32151 <dim>1</dim>
32152 <dim>256</dim>
32153 <dim>20</dim>
32154 <dim>34</dim>
32155 </port>
32156 <port id="1">
32157 <dim>48</dim>
32158 <dim>256</dim>
32159 <dim>3</dim>
32160 <dim>3</dim>
32161 </port>
32162 </input>
32163 <output>
32164 <port id="2" precision="FP16">
32165 <dim>1</dim>
32166 <dim>48</dim>
32167 <dim>20</dim>
32168 <dim>34</dim>
32169 </port>
32170 </output>
32171 </layer>
32172 <layer id="2093" name="mbox_loc1/out/conv/Dims13831145721933" type="Const" version="opset1">
32173 <data element_type="f16" offset="714720" shape="1,48,1,1" size="96"/>
32174 <output>
32175 <port id="0" precision="FP16">
32176 <dim>1</dim>
32177 <dim>48</dim>
32178 <dim>1</dim>
32179 <dim>1</dim>
32180 </port>
32181 </output>
32182 </layer>
32183 <layer id="2094" name="mbox_loc1/out/conv" type="Add" version="opset1">
32184 <data auto_broadcast="numpy"/>
32185 <input>
32186 <port id="0">
32187 <dim>1</dim>
32188 <dim>48</dim>
32189 <dim>20</dim>
32190 <dim>34</dim>
32191 </port>
32192 <port id="1">
32193 <dim>1</dim>
32194 <dim>48</dim>
32195 <dim>1</dim>
32196 <dim>1</dim>
32197 </port>
32198 </input>
32199 <output>
32200 <port id="2" names="mbox_loc1/out/conv" precision="FP16">
32201 <dim>1</dim>
32202 <dim>48</dim>
32203 <dim>20</dim>
32204 <dim>34</dim>
32205 </port>
32206 </output>
32207 </layer>
32208 <layer id="2095" name="12961459" type="Const" version="opset1">
32209 <data element_type="i64" offset="714816" shape="4" size="32"/>
32210 <output>
32211 <port id="0" precision="I64">
32212 <dim>4</dim>
32213 </port>
32214 </output>
32215 </layer>
32216 <layer id="2096" name="mbox_loc1/out/conv/perm" type="Transpose" version="opset1">
32217 <input>
32218 <port id="0">
32219 <dim>1</dim>
32220 <dim>48</dim>
32221 <dim>20</dim>
32222 <dim>34</dim>
32223 </port>
32224 <port id="1">
32225 <dim>4</dim>
32226 </port>
32227 </input>
32228 <output>
32229 <port id="2" names="mbox_loc1/out/conv/perm" precision="FP16">
32230 <dim>1</dim>
32231 <dim>20</dim>
32232 <dim>34</dim>
32233 <dim>48</dim>
32234 </port>
32235 </output>
32236 </layer>
32237 <layer id="2097" name="1303/shapes_concat146120100" type="Const" version="opset1">
32238 <data element_type="i64" offset="714848" shape="2" size="16"/>
32239 <output>
32240 <port id="0" precision="I64">
32241 <dim>2</dim>
32242 </port>
32243 </output>
32244 </layer>
32245 <layer id="2098" name="mbox_loc1/out/conv/flat" type="Reshape" version="opset1">
32246 <data special_zero="true"/>
32247 <input>
32248 <port id="0">
32249 <dim>1</dim>
32250 <dim>20</dim>
32251 <dim>34</dim>
32252 <dim>48</dim>
32253 </port>
32254 <port id="1">
32255 <dim>2</dim>
32256 </port>
32257 </input>
32258 <output>
32259 <port id="2" names="mbox_loc1/out/conv/flat" precision="FP16">
32260 <dim>1</dim>
32261 <dim>32640</dim>
32262 </port>
32263 </output>
32264 </layer>
32265 <layer id="2099" name="811146310416/quantized1261621258" type="Const" version="opset1">
32266 <data element_type="i8" offset="714864" shape="24,256,3,3" size="55296"/>
32267 <output>
32268 <port id="0" precision="I8">
32269 <dim>24</dim>
32270 <dim>256</dim>
32271 <dim>3</dim>
32272 <dim>3</dim>
32273 </port>
32274 </output>
32275 </layer>
32276 <layer id="2100" name="811146310416/quantized/to_f16" type="Convert" version="opset1">
32277 <data destination_type="f16"/>
32278 <input>
32279 <port id="0">
32280 <dim>24</dim>
32281 <dim>256</dim>
32282 <dim>3</dim>
32283 <dim>3</dim>
32284 </port>
32285 </input>
32286 <output>
32287 <port id="1" precision="FP16">
32288 <dim>24</dim>
32289 <dim>256</dim>
32290 <dim>3</dim>
32291 <dim>3</dim>
32292 </port>
32293 </output>
32294 </layer>
32295 <layer id="2101" name="mbox_conf1/out/conv/WithoutBiases/fq_weights_1/zero_point1262920310" type="Const" version="opset1">
32296 <data element_type="f16" offset="770160" shape="24,1,1,1" size="48"/>
32297 <output>
32298 <port id="0" precision="FP16">
32299 <dim>24</dim>
32300 <dim>1</dim>
32301 <dim>1</dim>
32302 <dim>1</dim>
32303 </port>
32304 </output>
32305 </layer>
32306 <layer id="2102" name="mbox_conf1/out/conv/WithoutBiases/fq_weights_1/minus_zp" type="Subtract" version="opset1">
32307 <data auto_broadcast="numpy"/>
32308 <input>
32309 <port id="0">
32310 <dim>24</dim>
32311 <dim>256</dim>
32312 <dim>3</dim>
32313 <dim>3</dim>
32314 </port>
32315 <port id="1">
32316 <dim>24</dim>
32317 <dim>1</dim>
32318 <dim>1</dim>
32319 <dim>1</dim>
32320 </port>
32321 </input>
32322 <output>
32323 <port id="2" precision="FP16">
32324 <dim>24</dim>
32325 <dim>256</dim>
32326 <dim>3</dim>
32327 <dim>3</dim>
32328 </port>
32329 </output>
32330 </layer>
32331 <layer id="2103" name="mbox_conf1/out/conv/WithoutBiases/fq_weights_1/scale1262419446" type="Const" version="opset1">
32332 <data element_type="f16" offset="770208" shape="24,1,1,1" size="48"/>
32333 <output>
32334 <port id="0" precision="FP16">
32335 <dim>24</dim>
32336 <dim>1</dim>
32337 <dim>1</dim>
32338 <dim>1</dim>
32339 </port>
32340 </output>
32341 </layer>
32342 <layer id="2104" name="mbox_conf1/out/conv/WithoutBiases/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
32343 <data auto_broadcast="numpy"/>
32344 <input>
32345 <port id="0">
32346 <dim>24</dim>
32347 <dim>256</dim>
32348 <dim>3</dim>
32349 <dim>3</dim>
32350 </port>
32351 <port id="1">
32352 <dim>24</dim>
32353 <dim>1</dim>
32354 <dim>1</dim>
32355 <dim>1</dim>
32356 </port>
32357 </input>
32358 <output>
32359 <port id="2" precision="FP16">
32360 <dim>24</dim>
32361 <dim>256</dim>
32362 <dim>3</dim>
32363 <dim>3</dim>
32364 </port>
32365 </output>
32366 </layer>
32367 <layer id="2105" name="mbox_conf1/out/conv/WithoutBiases" type="Convolution" version="opset1">
32368 <data auto_pad="explicit" dilations="1,1" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
32369 <input>
32370 <port id="0">
32371 <dim>1</dim>
32372 <dim>256</dim>
32373 <dim>20</dim>
32374 <dim>34</dim>
32375 </port>
32376 <port id="1">
32377 <dim>24</dim>
32378 <dim>256</dim>
32379 <dim>3</dim>
32380 <dim>3</dim>
32381 </port>
32382 </input>
32383 <output>
32384 <port id="2" precision="FP16">
32385 <dim>1</dim>
32386 <dim>24</dim>
32387 <dim>20</dim>
32388 <dim>34</dim>
32389 </port>
32390 </output>
32391 </layer>
32392 <layer id="2106" name="mbox_conf1/out/conv/Dims13825146521111" type="Const" version="opset1">
32393 <data element_type="f16" offset="770256" shape="1,24,1,1" size="48"/>
32394 <output>
32395 <port id="0" precision="FP16">
32396 <dim>1</dim>
32397 <dim>24</dim>
32398 <dim>1</dim>
32399 <dim>1</dim>
32400 </port>
32401 </output>
32402 </layer>
32403 <layer id="2107" name="mbox_conf1/out/conv" type="Add" version="opset1">
32404 <data auto_broadcast="numpy"/>
32405 <input>
32406 <port id="0">
32407 <dim>1</dim>
32408 <dim>24</dim>
32409 <dim>20</dim>
32410 <dim>34</dim>
32411 </port>
32412 <port id="1">
32413 <dim>1</dim>
32414 <dim>24</dim>
32415 <dim>1</dim>
32416 <dim>1</dim>
32417 </port>
32418 </input>
32419 <output>
32420 <port id="2" names="mbox_conf1/out/conv" precision="FP16">
32421 <dim>1</dim>
32422 <dim>24</dim>
32423 <dim>20</dim>
32424 <dim>34</dim>
32425 </port>
32426 </output>
32427 </layer>
32428 <layer id="2108" name="12971467" type="Const" version="opset1">
32429 <data element_type="i64" offset="714816" shape="4" size="32"/>
32430 <output>
32431 <port id="0" precision="I64">
32432 <dim>4</dim>
32433 </port>
32434 </output>
32435 </layer>
32436 <layer id="2109" name="mbox_conf1/out/conv/perm" type="Transpose" version="opset1">
32437 <input>
32438 <port id="0">
32439 <dim>1</dim>
32440 <dim>24</dim>
32441 <dim>20</dim>
32442 <dim>34</dim>
32443 </port>
32444 <port id="1">
32445 <dim>4</dim>
32446 </port>
32447 </input>
32448 <output>
32449 <port id="2" names="mbox_conf1/out/conv/perm" precision="FP16">
32450 <dim>1</dim>
32451 <dim>20</dim>
32452 <dim>34</dim>
32453 <dim>24</dim>
32454 </port>
32455 </output>
32456 </layer>
32457 <layer id="2110" name="1308/shapes_concat146920376" type="Const" version="opset1">
32458 <data element_type="i64" offset="714848" shape="2" size="16"/>
32459 <output>
32460 <port id="0" precision="I64">
32461 <dim>2</dim>
32462 </port>
32463 </output>
32464 </layer>
32465 <layer id="2111" name="mbox_conf1/out/conv/flat" type="Reshape" version="opset1">
32466 <data special_zero="true"/>
32467 <input>
32468 <port id="0">
32469 <dim>1</dim>
32470 <dim>20</dim>
32471 <dim>34</dim>
32472 <dim>24</dim>
32473 </port>
32474 <port id="1">
32475 <dim>2</dim>
32476 </port>
32477 </input>
32478 <output>
32479 <port id="2" names="mbox_conf1/out/conv/flat" precision="FP16">
32480 <dim>1</dim>
32481 <dim>16320</dim>
32482 </port>
32483 </output>
32484 </layer>
32485 <layer id="2112" name="1295147119821" type="Const" version="opset1">
32486 <data element_type="i64" offset="770304" shape="3" size="24"/>
32487 <output>
32488 <port id="0" precision="I64">
32489 <dim>3</dim>
32490 </port>
32491 </output>
32492 </layer>
32493 <layer id="2113" name="mbox_conf1/out/conv/flat/reshape" type="Reshape" version="opset1">
32494 <data special_zero="true"/>
32495 <input>
32496 <port id="0">
32497 <dim>1</dim>
32498 <dim>16320</dim>
32499 </port>
32500 <port id="1">
32501 <dim>3</dim>
32502 </port>
32503 </input>
32504 <output>
32505 <port id="2" names="mbox_conf1/out/conv/flat/reshape" precision="FP16">
32506 <dim>1</dim>
32507 <dim>8160</dim>
32508 <dim>2</dim>
32509 </port>
32510 </output>
32511 </layer>
32512 <layer id="2114" name="mbox_conf1/out/conv/flat/softmax" type="SoftMax" version="opset1">
32513 <data axis="2"/>
32514 <input>
32515 <port id="0">
32516 <dim>1</dim>
32517 <dim>8160</dim>
32518 <dim>2</dim>
32519 </port>
32520 </input>
32521 <output>
32522 <port id="1" names="mbox_conf1/out/conv/flat/softmax" precision="FP16">
32523 <dim>1</dim>
32524 <dim>8160</dim>
32525 <dim>2</dim>
32526 </port>
32527 </output>
32528 </layer>
32529 <layer id="2115" name="1298/shapes_concat147422509" type="Const" version="opset1">
32530 <data element_type="i64" offset="714848" shape="2" size="16"/>
32531 <output>
32532 <port id="0" precision="I64">
32533 <dim>2</dim>
32534 </port>
32535 </output>
32536 </layer>
32537 <layer id="2116" name="mbox_conf1/out/conv/flat/softmax/flat" type="Reshape" version="opset1">
32538 <data special_zero="true"/>
32539 <input>
32540 <port id="0">
32541 <dim>1</dim>
32542 <dim>8160</dim>
32543 <dim>2</dim>
32544 </port>
32545 <port id="1">
32546 <dim>2</dim>
32547 </port>
32548 </input>
32549 <output>
32550 <port id="2" names="mbox_conf1/out/conv/flat/softmax/flat" precision="FP16">
32551 <dim>1</dim>
32552 <dim>16320</dim>
32553 </port>
32554 </output>
32555 </layer>
32556 <layer id="2117" name="mbox1/priorbox/0_port" type="ShapeOf" version="opset3">
32557 <data output_type="i64"/>
32558 <input>
32559 <port id="0">
32560 <dim>1</dim>
32561 <dim>256</dim>
32562 <dim>20</dim>
32563 <dim>34</dim>
32564 </port>
32565 </input>
32566 <output>
32567 <port id="1" precision="I64">
32568 <dim>4</dim>
32569 </port>
32570 </output>
32571 </layer>
32572 <layer id="2118" name="mbox1/priorbox/ss_begin29786405591477" type="Const" version="opset1">
32573 <data element_type="i64" offset="770328" shape="1" size="8"/>
32574 <output>
32575 <port id="0" precision="I64">
32576 <dim>1</dim>
32577 </port>
32578 </output>
32579 </layer>
32580 <layer id="2119" name="mbox1/priorbox/ss_end29787403131478" type="Const" version="opset1">
32581 <data element_type="i64" offset="770336" shape="1" size="8"/>
32582 <output>
32583 <port id="0" precision="I64">
32584 <dim>1</dim>
32585 </port>
32586 </output>
32587 </layer>
32588 <layer id="2120" name="mbox1/priorbox/ss_stride29788398871479" type="Const" version="opset1">
32589 <data element_type="i64" offset="770344" shape="1" size="8"/>
32590 <output>
32591 <port id="0" precision="I64">
32592 <dim>1</dim>
32593 </port>
32594 </output>
32595 </layer>
32596 <layer id="2121" name="mbox1/priorbox/ss_0_port" type="StridedSlice" version="opset1">
32597 <data begin_mask="0" ellipsis_mask="0" end_mask="1" new_axis_mask="0" shrink_axis_mask="0"/>
32598 <input>
32599 <port id="0">
32600 <dim>4</dim>
32601 </port>
32602 <port id="1">
32603 <dim>1</dim>
32604 </port>
32605 <port id="2">
32606 <dim>1</dim>
32607 </port>
32608 <port id="3">
32609 <dim>1</dim>
32610 </port>
32611 </input>
32612 <output>
32613 <port id="4" precision="I64">
32614 <dim>2</dim>
32615 </port>
32616 </output>
32617 </layer>
32618 <layer id="2122" name="mbox1/priorbox/1_port" type="ShapeOf" version="opset3">
32619 <data output_type="i64"/>
32620 <input>
32621 <port id="0">
32622 <dim>1</dim>
32623 <dim>3</dim>
32624 <dim>320</dim>
32625 <dim>544</dim>
32626 </port>
32627 </input>
32628 <output>
32629 <port id="1" precision="I64">
32630 <dim>4</dim>
32631 </port>
32632 </output>
32633 </layer>
32634 <layer id="2123" name="mbox1/priorbox/ss_begin29786405861482" type="Const" version="opset1">
32635 <data element_type="i64" offset="770328" shape="1" size="8"/>
32636 <output>
32637 <port id="0" precision="I64">
32638 <dim>1</dim>
32639 </port>
32640 </output>
32641 </layer>
32642 <layer id="2124" name="mbox1/priorbox/ss_end29787398301483" type="Const" version="opset1">
32643 <data element_type="i64" offset="770336" shape="1" size="8"/>
32644 <output>
32645 <port id="0" precision="I64">
32646 <dim>1</dim>
32647 </port>
32648 </output>
32649 </layer>
32650 <layer id="2125" name="mbox1/priorbox/ss_stride29788405201484" type="Const" version="opset1">
32651 <data element_type="i64" offset="770344" shape="1" size="8"/>
32652 <output>
32653 <port id="0" precision="I64">
32654 <dim>1</dim>
32655 </port>
32656 </output>
32657 </layer>
32658 <layer id="2126" name="mbox1/priorbox/ss_1_port" type="StridedSlice" version="opset1">
32659 <data begin_mask="0" ellipsis_mask="0" end_mask="1" new_axis_mask="0" shrink_axis_mask="0"/>
32660 <input>
32661 <port id="0">
32662 <dim>4</dim>
32663 </port>
32664 <port id="1">
32665 <dim>1</dim>
32666 </port>
32667 <port id="2">
32668 <dim>1</dim>
32669 </port>
32670 <port id="3">
32671 <dim>1</dim>
32672 </port>
32673 </input>
32674 <output>
32675 <port id="4" precision="I64">
32676 <dim>2</dim>
32677 </port>
32678 </output>
32679 </layer>
32680 <layer id="2127" name="mbox1/priorbox/naked_not_unsqueezed" type="PriorBoxClustered" version="opset1">
32681 <data clip="0" height="34.07,47.11,54.22,65.78,75.56,80.89,89.78,99.26,115.56,163.26,194.07,197.33" offset="0.5" step="0" step_h="16" step_w="16" variance="0.1,0.1,0.2,0.2" width="11.33,17,20.68,23.52,28.05,37.4,30.03,35.7,44.2,55.25,78.12,135.15"/>
32682 <input>
32683 <port id="0">
32684 <dim>2</dim>
32685 </port>
32686 <port id="1">
32687 <dim>2</dim>
32688 </port>
32689 </input>
32690 <output>
32691 <port id="2" precision="FP32">
32692 <dim>2</dim>
32693 <dim>32640</dim>
32694 </port>
32695 </output>
32696 </layer>
32697 <layer id="2128" name="mbox1/priorbox/unsqueeze/value2979640538148720766" type="Const" version="opset1">
32698 <data element_type="i64" offset="770352" shape="1" size="8"/>
32699 <output>
32700 <port id="0" precision="I64">
32701 <dim>1</dim>
32702 </port>
32703 </output>
32704 </layer>
32705 <layer id="2129" name="mbox1/priorbox" type="Unsqueeze" version="opset1">
32706 <input>
32707 <port id="0">
32708 <dim>2</dim>
32709 <dim>32640</dim>
32710 </port>
32711 <port id="1">
32712 <dim>1</dim>
32713 </port>
32714 </input>
32715 <output>
32716 <port id="2" names="mbox1/priorbox" precision="FP32">
32717 <dim>1</dim>
32718 <dim>2</dim>
32719 <dim>32640</dim>
32720 </port>
32721 </output>
32722 </layer>
32723 <layer id="2130" name="detection_out" type="DetectionOutput" version="opset1">
32724 <data background_label_id="0" clip_after_nms="false" clip_before_nms="false" code_type="caffe.PriorBoxParameter.CENTER_SIZE" confidence_threshold="0.009999999776482582" decrease_label_id="false" input_height="1" input_width="1" keep_top_k="200" nms_threshold="0.44999998807907104" normalized="true" num_classes="2" objectness_score="0" share_location="true" top_k="400" variance_encoded_in_target="false"/>
32725 <input>
32726 <port id="0">
32727 <dim>1</dim>
32728 <dim>32640</dim>
32729 </port>
32730 <port id="1">
32731 <dim>1</dim>
32732 <dim>16320</dim>
32733 </port>
32734 <port id="2">
32735 <dim>1</dim>
32736 <dim>2</dim>
32737 <dim>32640</dim>
32738 </port>
32739 </input>
32740 <output>
32741 <port id="3" names="detection_out" precision="FP32">
32742 <dim>1</dim>
32743 <dim>1</dim>
32744 <dim>200</dim>
32745 <dim>7</dim>
32746 </port>
32747 </output>
32748 </layer>
32749 <layer id="2131" name="detection_out/sink_port_0" type="Result" version="opset1">
32750 <input>
32751 <port id="0">
32752 <dim>1</dim>
32753 <dim>1</dim>
32754 <dim>200</dim>
32755 <dim>7</dim>
32756 </port>
32757 </input>
32758 </layer>
32759 </layers>
32760 <edges>
32761 <edge from-layer="164" from-port="0" to-layer="166" to-port="0"/>
32762 <edge from-layer="165" from-port="0" to-layer="166" to-port="1"/>
32763 <edge from-layer="166" from-port="2" to-layer="168" to-port="0"/>
32764 <edge from-layer="167" from-port="0" to-layer="168" to-port="1"/>
32765 <edge from-layer="168" from-port="2" to-layer="169" to-port="0"/>
32766 <edge from-layer="160" from-port="0" to-layer="169" to-port="1"/>
32767 <edge from-layer="161" from-port="0" to-layer="169" to-port="2"/>
32768 <edge from-layer="162" from-port="0" to-layer="169" to-port="3"/>
32769 <edge from-layer="163" from-port="0" to-layer="169" to-port="4"/>
32770 <edge from-layer="170" from-port="0" to-layer="171" to-port="0"/>
32771 <edge from-layer="171" from-port="1" to-layer="173" to-port="0"/>
32772 <edge from-layer="172" from-port="0" to-layer="173" to-port="1"/>
32773 <edge from-layer="173" from-port="2" to-layer="175" to-port="0"/>
32774 <edge from-layer="174" from-port="0" to-layer="175" to-port="1"/>
32775 <edge from-layer="169" from-port="5" to-layer="176" to-port="0"/>
32776 <edge from-layer="175" from-port="2" to-layer="176" to-port="1"/>
32777 <edge from-layer="176" from-port="2" to-layer="178" to-port="0"/>
32778 <edge from-layer="177" from-port="0" to-layer="178" to-port="1"/>
32779 <edge from-layer="178" from-port="2" to-layer="179" to-port="0"/>
32780 <edge from-layer="179" from-port="1" to-layer="180" to-port="0"/>
32781 <edge from-layer="156" from-port="0" to-layer="180" to-port="1"/>
32782 <edge from-layer="157" from-port="0" to-layer="180" to-port="2"/>
32783 <edge from-layer="158" from-port="0" to-layer="180" to-port="3"/>
32784 <edge from-layer="159" from-port="0" to-layer="180" to-port="4"/>
32785 <edge from-layer="193" from-port="0" to-layer="194" to-port="0"/>
32786 <edge from-layer="194" from-port="1" to-layer="196" to-port="0"/>
32787 <edge from-layer="195" from-port="0" to-layer="196" to-port="1"/>
32788 <edge from-layer="196" from-port="2" to-layer="198" to-port="0"/>
32789 <edge from-layer="197" from-port="0" to-layer="198" to-port="1"/>
32790 <edge from-layer="180" from-port="5" to-layer="199" to-port="0"/>
32791 <edge from-layer="198" from-port="2" to-layer="199" to-port="1"/>
32792 <edge from-layer="199" from-port="2" to-layer="201" to-port="0"/>
32793 <edge from-layer="200" from-port="0" to-layer="201" to-port="1"/>
32794 <edge from-layer="201" from-port="2" to-layer="203" to-port="0"/>
32795 <edge from-layer="202" from-port="0" to-layer="203" to-port="1"/>
32796 <edge from-layer="203" from-port="2" to-layer="204" to-port="0"/>
32797 <edge from-layer="189" from-port="0" to-layer="204" to-port="1"/>
32798 <edge from-layer="190" from-port="0" to-layer="204" to-port="2"/>
32799 <edge from-layer="191" from-port="0" to-layer="204" to-port="3"/>
32800 <edge from-layer="192" from-port="0" to-layer="204" to-port="4"/>
32801 <edge from-layer="206" from-port="0" to-layer="207" to-port="0"/>
32802 <edge from-layer="207" from-port="1" to-layer="209" to-port="0"/>
32803 <edge from-layer="208" from-port="0" to-layer="209" to-port="1"/>
32804 <edge from-layer="209" from-port="2" to-layer="211" to-port="0"/>
32805 <edge from-layer="210" from-port="0" to-layer="211" to-port="1"/>
32806 <edge from-layer="211" from-port="2" to-layer="212" to-port="0"/>
32807 <edge from-layer="205" from-port="0" to-layer="212" to-port="1"/>
32808 <edge from-layer="204" from-port="5" to-layer="213" to-port="0"/>
32809 <edge from-layer="212" from-port="2" to-layer="213" to-port="1"/>
32810 <edge from-layer="213" from-port="2" to-layer="215" to-port="0"/>
32811 <edge from-layer="214" from-port="0" to-layer="215" to-port="1"/>
32812 <edge from-layer="215" from-port="2" to-layer="217" to-port="0"/>
32813 <edge from-layer="216" from-port="0" to-layer="217" to-port="1"/>
32814 <edge from-layer="217" from-port="2" to-layer="218" to-port="0"/>
32815 <edge from-layer="185" from-port="0" to-layer="218" to-port="1"/>
32816 <edge from-layer="186" from-port="0" to-layer="218" to-port="2"/>
32817 <edge from-layer="187" from-port="0" to-layer="218" to-port="3"/>
32818 <edge from-layer="188" from-port="0" to-layer="218" to-port="4"/>
32819 <edge from-layer="219" from-port="0" to-layer="220" to-port="0"/>
32820 <edge from-layer="220" from-port="1" to-layer="222" to-port="0"/>
32821 <edge from-layer="221" from-port="0" to-layer="222" to-port="1"/>
32822 <edge from-layer="222" from-port="2" to-layer="224" to-port="0"/>
32823 <edge from-layer="223" from-port="0" to-layer="224" to-port="1"/>
32824 <edge from-layer="218" from-port="5" to-layer="225" to-port="0"/>
32825 <edge from-layer="224" from-port="2" to-layer="225" to-port="1"/>
32826 <edge from-layer="225" from-port="2" to-layer="227" to-port="0"/>
32827 <edge from-layer="226" from-port="0" to-layer="227" to-port="1"/>
32828 <edge from-layer="227" from-port="2" to-layer="228" to-port="0"/>
32829 <edge from-layer="181" from-port="0" to-layer="228" to-port="1"/>
32830 <edge from-layer="182" from-port="0" to-layer="228" to-port="2"/>
32831 <edge from-layer="183" from-port="0" to-layer="228" to-port="3"/>
32832 <edge from-layer="184" from-port="0" to-layer="228" to-port="4"/>
32833 <edge from-layer="180" from-port="5" to-layer="229" to-port="0"/>
32834 <edge from-layer="228" from-port="5" to-layer="229" to-port="1"/>
32835 <edge from-layer="229" from-port="2" to-layer="231" to-port="0"/>
32836 <edge from-layer="230" from-port="0" to-layer="231" to-port="1"/>
32837 <edge from-layer="231" from-port="2" to-layer="232" to-port="0"/>
32838 <edge from-layer="152" from-port="0" to-layer="232" to-port="1"/>
32839 <edge from-layer="153" from-port="0" to-layer="232" to-port="2"/>
32840 <edge from-layer="154" from-port="0" to-layer="232" to-port="3"/>
32841 <edge from-layer="155" from-port="0" to-layer="232" to-port="4"/>
32842 <edge from-layer="245" from-port="0" to-layer="246" to-port="0"/>
32843 <edge from-layer="246" from-port="1" to-layer="248" to-port="0"/>
32844 <edge from-layer="247" from-port="0" to-layer="248" to-port="1"/>
32845 <edge from-layer="248" from-port="2" to-layer="250" to-port="0"/>
32846 <edge from-layer="249" from-port="0" to-layer="250" to-port="1"/>
32847 <edge from-layer="232" from-port="5" to-layer="251" to-port="0"/>
32848 <edge from-layer="250" from-port="2" to-layer="251" to-port="1"/>
32849 <edge from-layer="251" from-port="2" to-layer="253" to-port="0"/>
32850 <edge from-layer="252" from-port="0" to-layer="253" to-port="1"/>
32851 <edge from-layer="253" from-port="2" to-layer="255" to-port="0"/>
32852 <edge from-layer="254" from-port="0" to-layer="255" to-port="1"/>
32853 <edge from-layer="255" from-port="2" to-layer="256" to-port="0"/>
32854 <edge from-layer="241" from-port="0" to-layer="256" to-port="1"/>
32855 <edge from-layer="242" from-port="0" to-layer="256" to-port="2"/>
32856 <edge from-layer="243" from-port="0" to-layer="256" to-port="3"/>
32857 <edge from-layer="244" from-port="0" to-layer="256" to-port="4"/>
32858 <edge from-layer="258" from-port="0" to-layer="259" to-port="0"/>
32859 <edge from-layer="259" from-port="1" to-layer="261" to-port="0"/>
32860 <edge from-layer="260" from-port="0" to-layer="261" to-port="1"/>
32861 <edge from-layer="261" from-port="2" to-layer="263" to-port="0"/>
32862 <edge from-layer="262" from-port="0" to-layer="263" to-port="1"/>
32863 <edge from-layer="263" from-port="2" to-layer="264" to-port="0"/>
32864 <edge from-layer="257" from-port="0" to-layer="264" to-port="1"/>
32865 <edge from-layer="256" from-port="5" to-layer="265" to-port="0"/>
32866 <edge from-layer="264" from-port="2" to-layer="265" to-port="1"/>
32867 <edge from-layer="265" from-port="2" to-layer="267" to-port="0"/>
32868 <edge from-layer="266" from-port="0" to-layer="267" to-port="1"/>
32869 <edge from-layer="267" from-port="2" to-layer="269" to-port="0"/>
32870 <edge from-layer="268" from-port="0" to-layer="269" to-port="1"/>
32871 <edge from-layer="269" from-port="2" to-layer="270" to-port="0"/>
32872 <edge from-layer="237" from-port="0" to-layer="270" to-port="1"/>
32873 <edge from-layer="238" from-port="0" to-layer="270" to-port="2"/>
32874 <edge from-layer="239" from-port="0" to-layer="270" to-port="3"/>
32875 <edge from-layer="240" from-port="0" to-layer="270" to-port="4"/>
32876 <edge from-layer="271" from-port="0" to-layer="272" to-port="0"/>
32877 <edge from-layer="272" from-port="1" to-layer="274" to-port="0"/>
32878 <edge from-layer="273" from-port="0" to-layer="274" to-port="1"/>
32879 <edge from-layer="274" from-port="2" to-layer="276" to-port="0"/>
32880 <edge from-layer="275" from-port="0" to-layer="276" to-port="1"/>
32881 <edge from-layer="270" from-port="5" to-layer="277" to-port="0"/>
32882 <edge from-layer="276" from-port="2" to-layer="277" to-port="1"/>
32883 <edge from-layer="277" from-port="2" to-layer="279" to-port="0"/>
32884 <edge from-layer="278" from-port="0" to-layer="279" to-port="1"/>
32885 <edge from-layer="279" from-port="2" to-layer="280" to-port="0"/>
32886 <edge from-layer="233" from-port="0" to-layer="280" to-port="1"/>
32887 <edge from-layer="234" from-port="0" to-layer="280" to-port="2"/>
32888 <edge from-layer="235" from-port="0" to-layer="280" to-port="3"/>
32889 <edge from-layer="236" from-port="0" to-layer="280" to-port="4"/>
32890 <edge from-layer="232" from-port="5" to-layer="281" to-port="0"/>
32891 <edge from-layer="280" from-port="5" to-layer="281" to-port="1"/>
32892 <edge from-layer="281" from-port="2" to-layer="283" to-port="0"/>
32893 <edge from-layer="282" from-port="0" to-layer="283" to-port="1"/>
32894 <edge from-layer="283" from-port="2" to-layer="284" to-port="0"/>
32895 <edge from-layer="148" from-port="0" to-layer="284" to-port="1"/>
32896 <edge from-layer="149" from-port="0" to-layer="284" to-port="2"/>
32897 <edge from-layer="150" from-port="0" to-layer="284" to-port="3"/>
32898 <edge from-layer="151" from-port="0" to-layer="284" to-port="4"/>
32899 <edge from-layer="297" from-port="0" to-layer="298" to-port="0"/>
32900 <edge from-layer="298" from-port="1" to-layer="300" to-port="0"/>
32901 <edge from-layer="299" from-port="0" to-layer="300" to-port="1"/>
32902 <edge from-layer="300" from-port="2" to-layer="302" to-port="0"/>
32903 <edge from-layer="301" from-port="0" to-layer="302" to-port="1"/>
32904 <edge from-layer="284" from-port="5" to-layer="303" to-port="0"/>
32905 <edge from-layer="302" from-port="2" to-layer="303" to-port="1"/>
32906 <edge from-layer="303" from-port="2" to-layer="305" to-port="0"/>
32907 <edge from-layer="304" from-port="0" to-layer="305" to-port="1"/>
32908 <edge from-layer="305" from-port="2" to-layer="307" to-port="0"/>
32909 <edge from-layer="306" from-port="0" to-layer="307" to-port="1"/>
32910 <edge from-layer="307" from-port="2" to-layer="308" to-port="0"/>
32911 <edge from-layer="293" from-port="0" to-layer="308" to-port="1"/>
32912 <edge from-layer="294" from-port="0" to-layer="308" to-port="2"/>
32913 <edge from-layer="295" from-port="0" to-layer="308" to-port="3"/>
32914 <edge from-layer="296" from-port="0" to-layer="308" to-port="4"/>
32915 <edge from-layer="310" from-port="0" to-layer="311" to-port="0"/>
32916 <edge from-layer="311" from-port="1" to-layer="313" to-port="0"/>
32917 <edge from-layer="312" from-port="0" to-layer="313" to-port="1"/>
32918 <edge from-layer="313" from-port="2" to-layer="315" to-port="0"/>
32919 <edge from-layer="314" from-port="0" to-layer="315" to-port="1"/>
32920 <edge from-layer="315" from-port="2" to-layer="316" to-port="0"/>
32921 <edge from-layer="309" from-port="0" to-layer="316" to-port="1"/>
32922 <edge from-layer="308" from-port="5" to-layer="317" to-port="0"/>
32923 <edge from-layer="316" from-port="2" to-layer="317" to-port="1"/>
32924 <edge from-layer="317" from-port="2" to-layer="319" to-port="0"/>
32925 <edge from-layer="318" from-port="0" to-layer="319" to-port="1"/>
32926 <edge from-layer="319" from-port="2" to-layer="321" to-port="0"/>
32927 <edge from-layer="320" from-port="0" to-layer="321" to-port="1"/>
32928 <edge from-layer="321" from-port="2" to-layer="322" to-port="0"/>
32929 <edge from-layer="289" from-port="0" to-layer="322" to-port="1"/>
32930 <edge from-layer="290" from-port="0" to-layer="322" to-port="2"/>
32931 <edge from-layer="291" from-port="0" to-layer="322" to-port="3"/>
32932 <edge from-layer="292" from-port="0" to-layer="322" to-port="4"/>
32933 <edge from-layer="323" from-port="0" to-layer="324" to-port="0"/>
32934 <edge from-layer="324" from-port="1" to-layer="326" to-port="0"/>
32935 <edge from-layer="325" from-port="0" to-layer="326" to-port="1"/>
32936 <edge from-layer="326" from-port="2" to-layer="328" to-port="0"/>
32937 <edge from-layer="327" from-port="0" to-layer="328" to-port="1"/>
32938 <edge from-layer="322" from-port="5" to-layer="329" to-port="0"/>
32939 <edge from-layer="328" from-port="2" to-layer="329" to-port="1"/>
32940 <edge from-layer="329" from-port="2" to-layer="331" to-port="0"/>
32941 <edge from-layer="330" from-port="0" to-layer="331" to-port="1"/>
32942 <edge from-layer="331" from-port="2" to-layer="332" to-port="0"/>
32943 <edge from-layer="285" from-port="0" to-layer="332" to-port="1"/>
32944 <edge from-layer="286" from-port="0" to-layer="332" to-port="2"/>
32945 <edge from-layer="287" from-port="0" to-layer="332" to-port="3"/>
32946 <edge from-layer="288" from-port="0" to-layer="332" to-port="4"/>
32947 <edge from-layer="284" from-port="5" to-layer="333" to-port="0"/>
32948 <edge from-layer="332" from-port="5" to-layer="333" to-port="1"/>
32949 <edge from-layer="333" from-port="2" to-layer="335" to-port="0"/>
32950 <edge from-layer="334" from-port="0" to-layer="335" to-port="1"/>
32951 <edge from-layer="335" from-port="2" to-layer="336" to-port="0"/>
32952 <edge from-layer="144" from-port="0" to-layer="336" to-port="1"/>
32953 <edge from-layer="145" from-port="0" to-layer="336" to-port="2"/>
32954 <edge from-layer="146" from-port="0" to-layer="336" to-port="3"/>
32955 <edge from-layer="147" from-port="0" to-layer="336" to-port="4"/>
32956 <edge from-layer="349" from-port="0" to-layer="350" to-port="0"/>
32957 <edge from-layer="350" from-port="1" to-layer="352" to-port="0"/>
32958 <edge from-layer="351" from-port="0" to-layer="352" to-port="1"/>
32959 <edge from-layer="352" from-port="2" to-layer="354" to-port="0"/>
32960 <edge from-layer="353" from-port="0" to-layer="354" to-port="1"/>
32961 <edge from-layer="336" from-port="5" to-layer="355" to-port="0"/>
32962 <edge from-layer="354" from-port="2" to-layer="355" to-port="1"/>
32963 <edge from-layer="355" from-port="2" to-layer="357" to-port="0"/>
32964 <edge from-layer="356" from-port="0" to-layer="357" to-port="1"/>
32965 <edge from-layer="357" from-port="2" to-layer="359" to-port="0"/>
32966 <edge from-layer="358" from-port="0" to-layer="359" to-port="1"/>
32967 <edge from-layer="359" from-port="2" to-layer="360" to-port="0"/>
32968 <edge from-layer="345" from-port="0" to-layer="360" to-port="1"/>
32969 <edge from-layer="346" from-port="0" to-layer="360" to-port="2"/>
32970 <edge from-layer="347" from-port="0" to-layer="360" to-port="3"/>
32971 <edge from-layer="348" from-port="0" to-layer="360" to-port="4"/>
32972 <edge from-layer="362" from-port="0" to-layer="363" to-port="0"/>
32973 <edge from-layer="363" from-port="1" to-layer="365" to-port="0"/>
32974 <edge from-layer="364" from-port="0" to-layer="365" to-port="1"/>
32975 <edge from-layer="365" from-port="2" to-layer="367" to-port="0"/>
32976 <edge from-layer="366" from-port="0" to-layer="367" to-port="1"/>
32977 <edge from-layer="367" from-port="2" to-layer="368" to-port="0"/>
32978 <edge from-layer="361" from-port="0" to-layer="368" to-port="1"/>
32979 <edge from-layer="360" from-port="5" to-layer="369" to-port="0"/>
32980 <edge from-layer="368" from-port="2" to-layer="369" to-port="1"/>
32981 <edge from-layer="369" from-port="2" to-layer="371" to-port="0"/>
32982 <edge from-layer="370" from-port="0" to-layer="371" to-port="1"/>
32983 <edge from-layer="371" from-port="2" to-layer="373" to-port="0"/>
32984 <edge from-layer="372" from-port="0" to-layer="373" to-port="1"/>
32985 <edge from-layer="373" from-port="2" to-layer="374" to-port="0"/>
32986 <edge from-layer="341" from-port="0" to-layer="374" to-port="1"/>
32987 <edge from-layer="342" from-port="0" to-layer="374" to-port="2"/>
32988 <edge from-layer="343" from-port="0" to-layer="374" to-port="3"/>
32989 <edge from-layer="344" from-port="0" to-layer="374" to-port="4"/>
32990 <edge from-layer="375" from-port="0" to-layer="376" to-port="0"/>
32991 <edge from-layer="376" from-port="1" to-layer="378" to-port="0"/>
32992 <edge from-layer="377" from-port="0" to-layer="378" to-port="1"/>
32993 <edge from-layer="378" from-port="2" to-layer="380" to-port="0"/>
32994 <edge from-layer="379" from-port="0" to-layer="380" to-port="1"/>
32995 <edge from-layer="374" from-port="5" to-layer="381" to-port="0"/>
32996 <edge from-layer="380" from-port="2" to-layer="381" to-port="1"/>
32997 <edge from-layer="381" from-port="2" to-layer="383" to-port="0"/>
32998 <edge from-layer="382" from-port="0" to-layer="383" to-port="1"/>
32999 <edge from-layer="383" from-port="2" to-layer="384" to-port="0"/>
33000 <edge from-layer="337" from-port="0" to-layer="384" to-port="1"/>
33001 <edge from-layer="338" from-port="0" to-layer="384" to-port="2"/>
33002 <edge from-layer="339" from-port="0" to-layer="384" to-port="3"/>
33003 <edge from-layer="340" from-port="0" to-layer="384" to-port="4"/>
33004 <edge from-layer="336" from-port="5" to-layer="385" to-port="0"/>
33005 <edge from-layer="384" from-port="5" to-layer="385" to-port="1"/>
33006 <edge from-layer="385" from-port="2" to-layer="387" to-port="0"/>
33007 <edge from-layer="386" from-port="0" to-layer="387" to-port="1"/>
33008 <edge from-layer="387" from-port="2" to-layer="388" to-port="0"/>
33009 <edge from-layer="140" from-port="0" to-layer="388" to-port="1"/>
33010 <edge from-layer="141" from-port="0" to-layer="388" to-port="2"/>
33011 <edge from-layer="142" from-port="0" to-layer="388" to-port="3"/>
33012 <edge from-layer="143" from-port="0" to-layer="388" to-port="4"/>
33013 <edge from-layer="388" from-port="5" to-layer="389" to-port="0"/>
33014 <edge from-layer="390" from-port="0" to-layer="391" to-port="0"/>
33015 <edge from-layer="391" from-port="1" to-layer="393" to-port="0"/>
33016 <edge from-layer="392" from-port="0" to-layer="393" to-port="1"/>
33017 <edge from-layer="393" from-port="2" to-layer="395" to-port="0"/>
33018 <edge from-layer="394" from-port="0" to-layer="395" to-port="1"/>
33019 <edge from-layer="389" from-port="1" to-layer="396" to-port="0"/>
33020 <edge from-layer="395" from-port="2" to-layer="396" to-port="1"/>
33021 <edge from-layer="396" from-port="2" to-layer="398" to-port="0"/>
33022 <edge from-layer="397" from-port="0" to-layer="398" to-port="1"/>
33023 <edge from-layer="398" from-port="2" to-layer="399" to-port="0"/>
33024 <edge from-layer="136" from-port="0" to-layer="399" to-port="1"/>
33025 <edge from-layer="137" from-port="0" to-layer="399" to-port="2"/>
33026 <edge from-layer="138" from-port="0" to-layer="399" to-port="3"/>
33027 <edge from-layer="139" from-port="0" to-layer="399" to-port="4"/>
33028 <edge from-layer="412" from-port="0" to-layer="413" to-port="0"/>
33029 <edge from-layer="413" from-port="1" to-layer="415" to-port="0"/>
33030 <edge from-layer="414" from-port="0" to-layer="415" to-port="1"/>
33031 <edge from-layer="415" from-port="2" to-layer="417" to-port="0"/>
33032 <edge from-layer="416" from-port="0" to-layer="417" to-port="1"/>
33033 <edge from-layer="388" from-port="5" to-layer="418" to-port="0"/>
33034 <edge from-layer="417" from-port="2" to-layer="418" to-port="1"/>
33035 <edge from-layer="418" from-port="2" to-layer="420" to-port="0"/>
33036 <edge from-layer="419" from-port="0" to-layer="420" to-port="1"/>
33037 <edge from-layer="420" from-port="2" to-layer="422" to-port="0"/>
33038 <edge from-layer="421" from-port="0" to-layer="422" to-port="1"/>
33039 <edge from-layer="422" from-port="2" to-layer="423" to-port="0"/>
33040 <edge from-layer="408" from-port="0" to-layer="423" to-port="1"/>
33041 <edge from-layer="409" from-port="0" to-layer="423" to-port="2"/>
33042 <edge from-layer="410" from-port="0" to-layer="423" to-port="3"/>
33043 <edge from-layer="411" from-port="0" to-layer="423" to-port="4"/>
33044 <edge from-layer="425" from-port="0" to-layer="426" to-port="0"/>
33045 <edge from-layer="426" from-port="1" to-layer="428" to-port="0"/>
33046 <edge from-layer="427" from-port="0" to-layer="428" to-port="1"/>
33047 <edge from-layer="428" from-port="2" to-layer="430" to-port="0"/>
33048 <edge from-layer="429" from-port="0" to-layer="430" to-port="1"/>
33049 <edge from-layer="430" from-port="2" to-layer="431" to-port="0"/>
33050 <edge from-layer="424" from-port="0" to-layer="431" to-port="1"/>
33051 <edge from-layer="423" from-port="5" to-layer="432" to-port="0"/>
33052 <edge from-layer="431" from-port="2" to-layer="432" to-port="1"/>
33053 <edge from-layer="432" from-port="2" to-layer="434" to-port="0"/>
33054 <edge from-layer="433" from-port="0" to-layer="434" to-port="1"/>
33055 <edge from-layer="434" from-port="2" to-layer="436" to-port="0"/>
33056 <edge from-layer="435" from-port="0" to-layer="436" to-port="1"/>
33057 <edge from-layer="436" from-port="2" to-layer="437" to-port="0"/>
33058 <edge from-layer="404" from-port="0" to-layer="437" to-port="1"/>
33059 <edge from-layer="405" from-port="0" to-layer="437" to-port="2"/>
33060 <edge from-layer="406" from-port="0" to-layer="437" to-port="3"/>
33061 <edge from-layer="407" from-port="0" to-layer="437" to-port="4"/>
33062 <edge from-layer="438" from-port="0" to-layer="439" to-port="0"/>
33063 <edge from-layer="439" from-port="1" to-layer="441" to-port="0"/>
33064 <edge from-layer="440" from-port="0" to-layer="441" to-port="1"/>
33065 <edge from-layer="441" from-port="2" to-layer="443" to-port="0"/>
33066 <edge from-layer="442" from-port="0" to-layer="443" to-port="1"/>
33067 <edge from-layer="437" from-port="5" to-layer="444" to-port="0"/>
33068 <edge from-layer="443" from-port="2" to-layer="444" to-port="1"/>
33069 <edge from-layer="444" from-port="2" to-layer="446" to-port="0"/>
33070 <edge from-layer="445" from-port="0" to-layer="446" to-port="1"/>
33071 <edge from-layer="446" from-port="2" to-layer="447" to-port="0"/>
33072 <edge from-layer="400" from-port="0" to-layer="447" to-port="1"/>
33073 <edge from-layer="401" from-port="0" to-layer="447" to-port="2"/>
33074 <edge from-layer="402" from-port="0" to-layer="447" to-port="3"/>
33075 <edge from-layer="403" from-port="0" to-layer="447" to-port="4"/>
33076 <edge from-layer="399" from-port="5" to-layer="448" to-port="0"/>
33077 <edge from-layer="447" from-port="5" to-layer="448" to-port="1"/>
33078 <edge from-layer="448" from-port="2" to-layer="450" to-port="0"/>
33079 <edge from-layer="449" from-port="0" to-layer="450" to-port="1"/>
33080 <edge from-layer="450" from-port="2" to-layer="451" to-port="0"/>
33081 <edge from-layer="132" from-port="0" to-layer="451" to-port="1"/>
33082 <edge from-layer="133" from-port="0" to-layer="451" to-port="2"/>
33083 <edge from-layer="134" from-port="0" to-layer="451" to-port="3"/>
33084 <edge from-layer="135" from-port="0" to-layer="451" to-port="4"/>
33085 <edge from-layer="464" from-port="0" to-layer="465" to-port="0"/>
33086 <edge from-layer="465" from-port="1" to-layer="467" to-port="0"/>
33087 <edge from-layer="466" from-port="0" to-layer="467" to-port="1"/>
33088 <edge from-layer="467" from-port="2" to-layer="469" to-port="0"/>
33089 <edge from-layer="468" from-port="0" to-layer="469" to-port="1"/>
33090 <edge from-layer="451" from-port="5" to-layer="470" to-port="0"/>
33091 <edge from-layer="469" from-port="2" to-layer="470" to-port="1"/>
33092 <edge from-layer="470" from-port="2" to-layer="472" to-port="0"/>
33093 <edge from-layer="471" from-port="0" to-layer="472" to-port="1"/>
33094 <edge from-layer="472" from-port="2" to-layer="474" to-port="0"/>
33095 <edge from-layer="473" from-port="0" to-layer="474" to-port="1"/>
33096 <edge from-layer="474" from-port="2" to-layer="475" to-port="0"/>
33097 <edge from-layer="460" from-port="0" to-layer="475" to-port="1"/>
33098 <edge from-layer="461" from-port="0" to-layer="475" to-port="2"/>
33099 <edge from-layer="462" from-port="0" to-layer="475" to-port="3"/>
33100 <edge from-layer="463" from-port="0" to-layer="475" to-port="4"/>
33101 <edge from-layer="477" from-port="0" to-layer="478" to-port="0"/>
33102 <edge from-layer="478" from-port="1" to-layer="480" to-port="0"/>
33103 <edge from-layer="479" from-port="0" to-layer="480" to-port="1"/>
33104 <edge from-layer="480" from-port="2" to-layer="482" to-port="0"/>
33105 <edge from-layer="481" from-port="0" to-layer="482" to-port="1"/>
33106 <edge from-layer="482" from-port="2" to-layer="483" to-port="0"/>
33107 <edge from-layer="476" from-port="0" to-layer="483" to-port="1"/>
33108 <edge from-layer="475" from-port="5" to-layer="484" to-port="0"/>
33109 <edge from-layer="483" from-port="2" to-layer="484" to-port="1"/>
33110 <edge from-layer="484" from-port="2" to-layer="486" to-port="0"/>
33111 <edge from-layer="485" from-port="0" to-layer="486" to-port="1"/>
33112 <edge from-layer="486" from-port="2" to-layer="488" to-port="0"/>
33113 <edge from-layer="487" from-port="0" to-layer="488" to-port="1"/>
33114 <edge from-layer="488" from-port="2" to-layer="489" to-port="0"/>
33115 <edge from-layer="456" from-port="0" to-layer="489" to-port="1"/>
33116 <edge from-layer="457" from-port="0" to-layer="489" to-port="2"/>
33117 <edge from-layer="458" from-port="0" to-layer="489" to-port="3"/>
33118 <edge from-layer="459" from-port="0" to-layer="489" to-port="4"/>
33119 <edge from-layer="490" from-port="0" to-layer="491" to-port="0"/>
33120 <edge from-layer="491" from-port="1" to-layer="493" to-port="0"/>
33121 <edge from-layer="492" from-port="0" to-layer="493" to-port="1"/>
33122 <edge from-layer="493" from-port="2" to-layer="495" to-port="0"/>
33123 <edge from-layer="494" from-port="0" to-layer="495" to-port="1"/>
33124 <edge from-layer="489" from-port="5" to-layer="496" to-port="0"/>
33125 <edge from-layer="495" from-port="2" to-layer="496" to-port="1"/>
33126 <edge from-layer="496" from-port="2" to-layer="498" to-port="0"/>
33127 <edge from-layer="497" from-port="0" to-layer="498" to-port="1"/>
33128 <edge from-layer="498" from-port="2" to-layer="499" to-port="0"/>
33129 <edge from-layer="452" from-port="0" to-layer="499" to-port="1"/>
33130 <edge from-layer="453" from-port="0" to-layer="499" to-port="2"/>
33131 <edge from-layer="454" from-port="0" to-layer="499" to-port="3"/>
33132 <edge from-layer="455" from-port="0" to-layer="499" to-port="4"/>
33133 <edge from-layer="451" from-port="5" to-layer="500" to-port="0"/>
33134 <edge from-layer="499" from-port="5" to-layer="500" to-port="1"/>
33135 <edge from-layer="500" from-port="2" to-layer="502" to-port="0"/>
33136 <edge from-layer="501" from-port="0" to-layer="502" to-port="1"/>
33137 <edge from-layer="502" from-port="2" to-layer="503" to-port="0"/>
33138 <edge from-layer="128" from-port="0" to-layer="503" to-port="1"/>
33139 <edge from-layer="129" from-port="0" to-layer="503" to-port="2"/>
33140 <edge from-layer="130" from-port="0" to-layer="503" to-port="3"/>
33141 <edge from-layer="131" from-port="0" to-layer="503" to-port="4"/>
33142 <edge from-layer="516" from-port="0" to-layer="517" to-port="0"/>
33143 <edge from-layer="517" from-port="1" to-layer="519" to-port="0"/>
33144 <edge from-layer="518" from-port="0" to-layer="519" to-port="1"/>
33145 <edge from-layer="519" from-port="2" to-layer="521" to-port="0"/>
33146 <edge from-layer="520" from-port="0" to-layer="521" to-port="1"/>
33147 <edge from-layer="503" from-port="5" to-layer="522" to-port="0"/>
33148 <edge from-layer="521" from-port="2" to-layer="522" to-port="1"/>
33149 <edge from-layer="522" from-port="2" to-layer="524" to-port="0"/>
33150 <edge from-layer="523" from-port="0" to-layer="524" to-port="1"/>
33151 <edge from-layer="524" from-port="2" to-layer="526" to-port="0"/>
33152 <edge from-layer="525" from-port="0" to-layer="526" to-port="1"/>
33153 <edge from-layer="526" from-port="2" to-layer="527" to-port="0"/>
33154 <edge from-layer="512" from-port="0" to-layer="527" to-port="1"/>
33155 <edge from-layer="513" from-port="0" to-layer="527" to-port="2"/>
33156 <edge from-layer="514" from-port="0" to-layer="527" to-port="3"/>
33157 <edge from-layer="515" from-port="0" to-layer="527" to-port="4"/>
33158 <edge from-layer="529" from-port="0" to-layer="530" to-port="0"/>
33159 <edge from-layer="530" from-port="1" to-layer="532" to-port="0"/>
33160 <edge from-layer="531" from-port="0" to-layer="532" to-port="1"/>
33161 <edge from-layer="532" from-port="2" to-layer="534" to-port="0"/>
33162 <edge from-layer="533" from-port="0" to-layer="534" to-port="1"/>
33163 <edge from-layer="534" from-port="2" to-layer="535" to-port="0"/>
33164 <edge from-layer="528" from-port="0" to-layer="535" to-port="1"/>
33165 <edge from-layer="527" from-port="5" to-layer="536" to-port="0"/>
33166 <edge from-layer="535" from-port="2" to-layer="536" to-port="1"/>
33167 <edge from-layer="536" from-port="2" to-layer="538" to-port="0"/>
33168 <edge from-layer="537" from-port="0" to-layer="538" to-port="1"/>
33169 <edge from-layer="538" from-port="2" to-layer="540" to-port="0"/>
33170 <edge from-layer="539" from-port="0" to-layer="540" to-port="1"/>
33171 <edge from-layer="540" from-port="2" to-layer="541" to-port="0"/>
33172 <edge from-layer="508" from-port="0" to-layer="541" to-port="1"/>
33173 <edge from-layer="509" from-port="0" to-layer="541" to-port="2"/>
33174 <edge from-layer="510" from-port="0" to-layer="541" to-port="3"/>
33175 <edge from-layer="511" from-port="0" to-layer="541" to-port="4"/>
33176 <edge from-layer="542" from-port="0" to-layer="543" to-port="0"/>
33177 <edge from-layer="543" from-port="1" to-layer="545" to-port="0"/>
33178 <edge from-layer="544" from-port="0" to-layer="545" to-port="1"/>
33179 <edge from-layer="545" from-port="2" to-layer="547" to-port="0"/>
33180 <edge from-layer="546" from-port="0" to-layer="547" to-port="1"/>
33181 <edge from-layer="541" from-port="5" to-layer="548" to-port="0"/>
33182 <edge from-layer="547" from-port="2" to-layer="548" to-port="1"/>
33183 <edge from-layer="548" from-port="2" to-layer="550" to-port="0"/>
33184 <edge from-layer="549" from-port="0" to-layer="550" to-port="1"/>
33185 <edge from-layer="550" from-port="2" to-layer="551" to-port="0"/>
33186 <edge from-layer="504" from-port="0" to-layer="551" to-port="1"/>
33187 <edge from-layer="505" from-port="0" to-layer="551" to-port="2"/>
33188 <edge from-layer="506" from-port="0" to-layer="551" to-port="3"/>
33189 <edge from-layer="507" from-port="0" to-layer="551" to-port="4"/>
33190 <edge from-layer="503" from-port="5" to-layer="552" to-port="0"/>
33191 <edge from-layer="551" from-port="5" to-layer="552" to-port="1"/>
33192 <edge from-layer="552" from-port="2" to-layer="554" to-port="0"/>
33193 <edge from-layer="553" from-port="0" to-layer="554" to-port="1"/>
33194 <edge from-layer="554" from-port="2" to-layer="555" to-port="0"/>
33195 <edge from-layer="124" from-port="0" to-layer="555" to-port="1"/>
33196 <edge from-layer="125" from-port="0" to-layer="555" to-port="2"/>
33197 <edge from-layer="126" from-port="0" to-layer="555" to-port="3"/>
33198 <edge from-layer="127" from-port="0" to-layer="555" to-port="4"/>
33199 <edge from-layer="568" from-port="0" to-layer="569" to-port="0"/>
33200 <edge from-layer="569" from-port="1" to-layer="571" to-port="0"/>
33201 <edge from-layer="570" from-port="0" to-layer="571" to-port="1"/>
33202 <edge from-layer="571" from-port="2" to-layer="573" to-port="0"/>
33203 <edge from-layer="572" from-port="0" to-layer="573" to-port="1"/>
33204 <edge from-layer="555" from-port="5" to-layer="574" to-port="0"/>
33205 <edge from-layer="573" from-port="2" to-layer="574" to-port="1"/>
33206 <edge from-layer="574" from-port="2" to-layer="576" to-port="0"/>
33207 <edge from-layer="575" from-port="0" to-layer="576" to-port="1"/>
33208 <edge from-layer="576" from-port="2" to-layer="578" to-port="0"/>
33209 <edge from-layer="577" from-port="0" to-layer="578" to-port="1"/>
33210 <edge from-layer="578" from-port="2" to-layer="579" to-port="0"/>
33211 <edge from-layer="564" from-port="0" to-layer="579" to-port="1"/>
33212 <edge from-layer="565" from-port="0" to-layer="579" to-port="2"/>
33213 <edge from-layer="566" from-port="0" to-layer="579" to-port="3"/>
33214 <edge from-layer="567" from-port="0" to-layer="579" to-port="4"/>
33215 <edge from-layer="581" from-port="0" to-layer="582" to-port="0"/>
33216 <edge from-layer="582" from-port="1" to-layer="584" to-port="0"/>
33217 <edge from-layer="583" from-port="0" to-layer="584" to-port="1"/>
33218 <edge from-layer="584" from-port="2" to-layer="586" to-port="0"/>
33219 <edge from-layer="585" from-port="0" to-layer="586" to-port="1"/>
33220 <edge from-layer="586" from-port="2" to-layer="587" to-port="0"/>
33221 <edge from-layer="580" from-port="0" to-layer="587" to-port="1"/>
33222 <edge from-layer="579" from-port="5" to-layer="588" to-port="0"/>
33223 <edge from-layer="587" from-port="2" to-layer="588" to-port="1"/>
33224 <edge from-layer="588" from-port="2" to-layer="590" to-port="0"/>
33225 <edge from-layer="589" from-port="0" to-layer="590" to-port="1"/>
33226 <edge from-layer="590" from-port="2" to-layer="592" to-port="0"/>
33227 <edge from-layer="591" from-port="0" to-layer="592" to-port="1"/>
33228 <edge from-layer="592" from-port="2" to-layer="593" to-port="0"/>
33229 <edge from-layer="560" from-port="0" to-layer="593" to-port="1"/>
33230 <edge from-layer="561" from-port="0" to-layer="593" to-port="2"/>
33231 <edge from-layer="562" from-port="0" to-layer="593" to-port="3"/>
33232 <edge from-layer="563" from-port="0" to-layer="593" to-port="4"/>
33233 <edge from-layer="594" from-port="0" to-layer="595" to-port="0"/>
33234 <edge from-layer="595" from-port="1" to-layer="597" to-port="0"/>
33235 <edge from-layer="596" from-port="0" to-layer="597" to-port="1"/>
33236 <edge from-layer="597" from-port="2" to-layer="599" to-port="0"/>
33237 <edge from-layer="598" from-port="0" to-layer="599" to-port="1"/>
33238 <edge from-layer="593" from-port="5" to-layer="600" to-port="0"/>
33239 <edge from-layer="599" from-port="2" to-layer="600" to-port="1"/>
33240 <edge from-layer="600" from-port="2" to-layer="602" to-port="0"/>
33241 <edge from-layer="601" from-port="0" to-layer="602" to-port="1"/>
33242 <edge from-layer="602" from-port="2" to-layer="603" to-port="0"/>
33243 <edge from-layer="556" from-port="0" to-layer="603" to-port="1"/>
33244 <edge from-layer="557" from-port="0" to-layer="603" to-port="2"/>
33245 <edge from-layer="558" from-port="0" to-layer="603" to-port="3"/>
33246 <edge from-layer="559" from-port="0" to-layer="603" to-port="4"/>
33247 <edge from-layer="555" from-port="5" to-layer="604" to-port="0"/>
33248 <edge from-layer="603" from-port="5" to-layer="604" to-port="1"/>
33249 <edge from-layer="604" from-port="2" to-layer="606" to-port="0"/>
33250 <edge from-layer="605" from-port="0" to-layer="606" to-port="1"/>
33251 <edge from-layer="606" from-port="2" to-layer="607" to-port="0"/>
33252 <edge from-layer="120" from-port="0" to-layer="607" to-port="1"/>
33253 <edge from-layer="121" from-port="0" to-layer="607" to-port="2"/>
33254 <edge from-layer="122" from-port="0" to-layer="607" to-port="3"/>
33255 <edge from-layer="123" from-port="0" to-layer="607" to-port="4"/>
33256 <edge from-layer="620" from-port="0" to-layer="621" to-port="0"/>
33257 <edge from-layer="621" from-port="1" to-layer="623" to-port="0"/>
33258 <edge from-layer="622" from-port="0" to-layer="623" to-port="1"/>
33259 <edge from-layer="623" from-port="2" to-layer="625" to-port="0"/>
33260 <edge from-layer="624" from-port="0" to-layer="625" to-port="1"/>
33261 <edge from-layer="607" from-port="5" to-layer="626" to-port="0"/>
33262 <edge from-layer="625" from-port="2" to-layer="626" to-port="1"/>
33263 <edge from-layer="626" from-port="2" to-layer="628" to-port="0"/>
33264 <edge from-layer="627" from-port="0" to-layer="628" to-port="1"/>
33265 <edge from-layer="628" from-port="2" to-layer="630" to-port="0"/>
33266 <edge from-layer="629" from-port="0" to-layer="630" to-port="1"/>
33267 <edge from-layer="630" from-port="2" to-layer="631" to-port="0"/>
33268 <edge from-layer="616" from-port="0" to-layer="631" to-port="1"/>
33269 <edge from-layer="617" from-port="0" to-layer="631" to-port="2"/>
33270 <edge from-layer="618" from-port="0" to-layer="631" to-port="3"/>
33271 <edge from-layer="619" from-port="0" to-layer="631" to-port="4"/>
33272 <edge from-layer="633" from-port="0" to-layer="634" to-port="0"/>
33273 <edge from-layer="634" from-port="1" to-layer="636" to-port="0"/>
33274 <edge from-layer="635" from-port="0" to-layer="636" to-port="1"/>
33275 <edge from-layer="636" from-port="2" to-layer="638" to-port="0"/>
33276 <edge from-layer="637" from-port="0" to-layer="638" to-port="1"/>
33277 <edge from-layer="638" from-port="2" to-layer="639" to-port="0"/>
33278 <edge from-layer="632" from-port="0" to-layer="639" to-port="1"/>
33279 <edge from-layer="631" from-port="5" to-layer="640" to-port="0"/>
33280 <edge from-layer="639" from-port="2" to-layer="640" to-port="1"/>
33281 <edge from-layer="640" from-port="2" to-layer="642" to-port="0"/>
33282 <edge from-layer="641" from-port="0" to-layer="642" to-port="1"/>
33283 <edge from-layer="642" from-port="2" to-layer="644" to-port="0"/>
33284 <edge from-layer="643" from-port="0" to-layer="644" to-port="1"/>
33285 <edge from-layer="644" from-port="2" to-layer="645" to-port="0"/>
33286 <edge from-layer="612" from-port="0" to-layer="645" to-port="1"/>
33287 <edge from-layer="613" from-port="0" to-layer="645" to-port="2"/>
33288 <edge from-layer="614" from-port="0" to-layer="645" to-port="3"/>
33289 <edge from-layer="615" from-port="0" to-layer="645" to-port="4"/>
33290 <edge from-layer="646" from-port="0" to-layer="647" to-port="0"/>
33291 <edge from-layer="647" from-port="1" to-layer="649" to-port="0"/>
33292 <edge from-layer="648" from-port="0" to-layer="649" to-port="1"/>
33293 <edge from-layer="649" from-port="2" to-layer="651" to-port="0"/>
33294 <edge from-layer="650" from-port="0" to-layer="651" to-port="1"/>
33295 <edge from-layer="645" from-port="5" to-layer="652" to-port="0"/>
33296 <edge from-layer="651" from-port="2" to-layer="652" to-port="1"/>
33297 <edge from-layer="652" from-port="2" to-layer="654" to-port="0"/>
33298 <edge from-layer="653" from-port="0" to-layer="654" to-port="1"/>
33299 <edge from-layer="654" from-port="2" to-layer="655" to-port="0"/>
33300 <edge from-layer="608" from-port="0" to-layer="655" to-port="1"/>
33301 <edge from-layer="609" from-port="0" to-layer="655" to-port="2"/>
33302 <edge from-layer="610" from-port="0" to-layer="655" to-port="3"/>
33303 <edge from-layer="611" from-port="0" to-layer="655" to-port="4"/>
33304 <edge from-layer="607" from-port="5" to-layer="656" to-port="0"/>
33305 <edge from-layer="655" from-port="5" to-layer="656" to-port="1"/>
33306 <edge from-layer="656" from-port="2" to-layer="658" to-port="0"/>
33307 <edge from-layer="657" from-port="0" to-layer="658" to-port="1"/>
33308 <edge from-layer="658" from-port="2" to-layer="659" to-port="0"/>
33309 <edge from-layer="116" from-port="0" to-layer="659" to-port="1"/>
33310 <edge from-layer="117" from-port="0" to-layer="659" to-port="2"/>
33311 <edge from-layer="118" from-port="0" to-layer="659" to-port="3"/>
33312 <edge from-layer="119" from-port="0" to-layer="659" to-port="4"/>
33313 <edge from-layer="672" from-port="0" to-layer="673" to-port="0"/>
33314 <edge from-layer="673" from-port="1" to-layer="675" to-port="0"/>
33315 <edge from-layer="674" from-port="0" to-layer="675" to-port="1"/>
33316 <edge from-layer="675" from-port="2" to-layer="677" to-port="0"/>
33317 <edge from-layer="676" from-port="0" to-layer="677" to-port="1"/>
33318 <edge from-layer="659" from-port="5" to-layer="678" to-port="0"/>
33319 <edge from-layer="677" from-port="2" to-layer="678" to-port="1"/>
33320 <edge from-layer="678" from-port="2" to-layer="680" to-port="0"/>
33321 <edge from-layer="679" from-port="0" to-layer="680" to-port="1"/>
33322 <edge from-layer="680" from-port="2" to-layer="682" to-port="0"/>
33323 <edge from-layer="681" from-port="0" to-layer="682" to-port="1"/>
33324 <edge from-layer="682" from-port="2" to-layer="683" to-port="0"/>
33325 <edge from-layer="668" from-port="0" to-layer="683" to-port="1"/>
33326 <edge from-layer="669" from-port="0" to-layer="683" to-port="2"/>
33327 <edge from-layer="670" from-port="0" to-layer="683" to-port="3"/>
33328 <edge from-layer="671" from-port="0" to-layer="683" to-port="4"/>
33329 <edge from-layer="685" from-port="0" to-layer="686" to-port="0"/>
33330 <edge from-layer="686" from-port="1" to-layer="688" to-port="0"/>
33331 <edge from-layer="687" from-port="0" to-layer="688" to-port="1"/>
33332 <edge from-layer="688" from-port="2" to-layer="690" to-port="0"/>
33333 <edge from-layer="689" from-port="0" to-layer="690" to-port="1"/>
33334 <edge from-layer="690" from-port="2" to-layer="691" to-port="0"/>
33335 <edge from-layer="684" from-port="0" to-layer="691" to-port="1"/>
33336 <edge from-layer="683" from-port="5" to-layer="692" to-port="0"/>
33337 <edge from-layer="691" from-port="2" to-layer="692" to-port="1"/>
33338 <edge from-layer="692" from-port="2" to-layer="694" to-port="0"/>
33339 <edge from-layer="693" from-port="0" to-layer="694" to-port="1"/>
33340 <edge from-layer="694" from-port="2" to-layer="696" to-port="0"/>
33341 <edge from-layer="695" from-port="0" to-layer="696" to-port="1"/>
33342 <edge from-layer="696" from-port="2" to-layer="697" to-port="0"/>
33343 <edge from-layer="664" from-port="0" to-layer="697" to-port="1"/>
33344 <edge from-layer="665" from-port="0" to-layer="697" to-port="2"/>
33345 <edge from-layer="666" from-port="0" to-layer="697" to-port="3"/>
33346 <edge from-layer="667" from-port="0" to-layer="697" to-port="4"/>
33347 <edge from-layer="698" from-port="0" to-layer="699" to-port="0"/>
33348 <edge from-layer="699" from-port="1" to-layer="701" to-port="0"/>
33349 <edge from-layer="700" from-port="0" to-layer="701" to-port="1"/>
33350 <edge from-layer="701" from-port="2" to-layer="703" to-port="0"/>
33351 <edge from-layer="702" from-port="0" to-layer="703" to-port="1"/>
33352 <edge from-layer="697" from-port="5" to-layer="704" to-port="0"/>
33353 <edge from-layer="703" from-port="2" to-layer="704" to-port="1"/>
33354 <edge from-layer="704" from-port="2" to-layer="706" to-port="0"/>
33355 <edge from-layer="705" from-port="0" to-layer="706" to-port="1"/>
33356 <edge from-layer="706" from-port="2" to-layer="707" to-port="0"/>
33357 <edge from-layer="660" from-port="0" to-layer="707" to-port="1"/>
33358 <edge from-layer="661" from-port="0" to-layer="707" to-port="2"/>
33359 <edge from-layer="662" from-port="0" to-layer="707" to-port="3"/>
33360 <edge from-layer="663" from-port="0" to-layer="707" to-port="4"/>
33361 <edge from-layer="659" from-port="5" to-layer="708" to-port="0"/>
33362 <edge from-layer="707" from-port="5" to-layer="708" to-port="1"/>
33363 <edge from-layer="708" from-port="2" to-layer="710" to-port="0"/>
33364 <edge from-layer="709" from-port="0" to-layer="710" to-port="1"/>
33365 <edge from-layer="710" from-port="2" to-layer="711" to-port="0"/>
33366 <edge from-layer="112" from-port="0" to-layer="711" to-port="1"/>
33367 <edge from-layer="113" from-port="0" to-layer="711" to-port="2"/>
33368 <edge from-layer="114" from-port="0" to-layer="711" to-port="3"/>
33369 <edge from-layer="115" from-port="0" to-layer="711" to-port="4"/>
33370 <edge from-layer="724" from-port="0" to-layer="725" to-port="0"/>
33371 <edge from-layer="725" from-port="1" to-layer="727" to-port="0"/>
33372 <edge from-layer="726" from-port="0" to-layer="727" to-port="1"/>
33373 <edge from-layer="727" from-port="2" to-layer="729" to-port="0"/>
33374 <edge from-layer="728" from-port="0" to-layer="729" to-port="1"/>
33375 <edge from-layer="711" from-port="5" to-layer="730" to-port="0"/>
33376 <edge from-layer="729" from-port="2" to-layer="730" to-port="1"/>
33377 <edge from-layer="730" from-port="2" to-layer="732" to-port="0"/>
33378 <edge from-layer="731" from-port="0" to-layer="732" to-port="1"/>
33379 <edge from-layer="732" from-port="2" to-layer="734" to-port="0"/>
33380 <edge from-layer="733" from-port="0" to-layer="734" to-port="1"/>
33381 <edge from-layer="734" from-port="2" to-layer="735" to-port="0"/>
33382 <edge from-layer="720" from-port="0" to-layer="735" to-port="1"/>
33383 <edge from-layer="721" from-port="0" to-layer="735" to-port="2"/>
33384 <edge from-layer="722" from-port="0" to-layer="735" to-port="3"/>
33385 <edge from-layer="723" from-port="0" to-layer="735" to-port="4"/>
33386 <edge from-layer="737" from-port="0" to-layer="738" to-port="0"/>
33387 <edge from-layer="738" from-port="1" to-layer="740" to-port="0"/>
33388 <edge from-layer="739" from-port="0" to-layer="740" to-port="1"/>
33389 <edge from-layer="740" from-port="2" to-layer="742" to-port="0"/>
33390 <edge from-layer="741" from-port="0" to-layer="742" to-port="1"/>
33391 <edge from-layer="742" from-port="2" to-layer="743" to-port="0"/>
33392 <edge from-layer="736" from-port="0" to-layer="743" to-port="1"/>
33393 <edge from-layer="735" from-port="5" to-layer="744" to-port="0"/>
33394 <edge from-layer="743" from-port="2" to-layer="744" to-port="1"/>
33395 <edge from-layer="744" from-port="2" to-layer="746" to-port="0"/>
33396 <edge from-layer="745" from-port="0" to-layer="746" to-port="1"/>
33397 <edge from-layer="746" from-port="2" to-layer="748" to-port="0"/>
33398 <edge from-layer="747" from-port="0" to-layer="748" to-port="1"/>
33399 <edge from-layer="748" from-port="2" to-layer="749" to-port="0"/>
33400 <edge from-layer="716" from-port="0" to-layer="749" to-port="1"/>
33401 <edge from-layer="717" from-port="0" to-layer="749" to-port="2"/>
33402 <edge from-layer="718" from-port="0" to-layer="749" to-port="3"/>
33403 <edge from-layer="719" from-port="0" to-layer="749" to-port="4"/>
33404 <edge from-layer="750" from-port="0" to-layer="751" to-port="0"/>
33405 <edge from-layer="751" from-port="1" to-layer="753" to-port="0"/>
33406 <edge from-layer="752" from-port="0" to-layer="753" to-port="1"/>
33407 <edge from-layer="753" from-port="2" to-layer="755" to-port="0"/>
33408 <edge from-layer="754" from-port="0" to-layer="755" to-port="1"/>
33409 <edge from-layer="749" from-port="5" to-layer="756" to-port="0"/>
33410 <edge from-layer="755" from-port="2" to-layer="756" to-port="1"/>
33411 <edge from-layer="756" from-port="2" to-layer="758" to-port="0"/>
33412 <edge from-layer="757" from-port="0" to-layer="758" to-port="1"/>
33413 <edge from-layer="758" from-port="2" to-layer="759" to-port="0"/>
33414 <edge from-layer="712" from-port="0" to-layer="759" to-port="1"/>
33415 <edge from-layer="713" from-port="0" to-layer="759" to-port="2"/>
33416 <edge from-layer="714" from-port="0" to-layer="759" to-port="3"/>
33417 <edge from-layer="715" from-port="0" to-layer="759" to-port="4"/>
33418 <edge from-layer="711" from-port="5" to-layer="760" to-port="0"/>
33419 <edge from-layer="759" from-port="5" to-layer="760" to-port="1"/>
33420 <edge from-layer="760" from-port="2" to-layer="762" to-port="0"/>
33421 <edge from-layer="761" from-port="0" to-layer="762" to-port="1"/>
33422 <edge from-layer="762" from-port="2" to-layer="763" to-port="0"/>
33423 <edge from-layer="108" from-port="0" to-layer="763" to-port="1"/>
33424 <edge from-layer="109" from-port="0" to-layer="763" to-port="2"/>
33425 <edge from-layer="110" from-port="0" to-layer="763" to-port="3"/>
33426 <edge from-layer="111" from-port="0" to-layer="763" to-port="4"/>
33427 <edge from-layer="776" from-port="0" to-layer="777" to-port="0"/>
33428 <edge from-layer="777" from-port="1" to-layer="779" to-port="0"/>
33429 <edge from-layer="778" from-port="0" to-layer="779" to-port="1"/>
33430 <edge from-layer="779" from-port="2" to-layer="781" to-port="0"/>
33431 <edge from-layer="780" from-port="0" to-layer="781" to-port="1"/>
33432 <edge from-layer="763" from-port="5" to-layer="782" to-port="0"/>
33433 <edge from-layer="781" from-port="2" to-layer="782" to-port="1"/>
33434 <edge from-layer="782" from-port="2" to-layer="784" to-port="0"/>
33435 <edge from-layer="783" from-port="0" to-layer="784" to-port="1"/>
33436 <edge from-layer="784" from-port="2" to-layer="786" to-port="0"/>
33437 <edge from-layer="785" from-port="0" to-layer="786" to-port="1"/>
33438 <edge from-layer="786" from-port="2" to-layer="787" to-port="0"/>
33439 <edge from-layer="772" from-port="0" to-layer="787" to-port="1"/>
33440 <edge from-layer="773" from-port="0" to-layer="787" to-port="2"/>
33441 <edge from-layer="774" from-port="0" to-layer="787" to-port="3"/>
33442 <edge from-layer="775" from-port="0" to-layer="787" to-port="4"/>
33443 <edge from-layer="789" from-port="0" to-layer="790" to-port="0"/>
33444 <edge from-layer="790" from-port="1" to-layer="792" to-port="0"/>
33445 <edge from-layer="791" from-port="0" to-layer="792" to-port="1"/>
33446 <edge from-layer="792" from-port="2" to-layer="794" to-port="0"/>
33447 <edge from-layer="793" from-port="0" to-layer="794" to-port="1"/>
33448 <edge from-layer="794" from-port="2" to-layer="795" to-port="0"/>
33449 <edge from-layer="788" from-port="0" to-layer="795" to-port="1"/>
33450 <edge from-layer="787" from-port="5" to-layer="796" to-port="0"/>
33451 <edge from-layer="795" from-port="2" to-layer="796" to-port="1"/>
33452 <edge from-layer="796" from-port="2" to-layer="798" to-port="0"/>
33453 <edge from-layer="797" from-port="0" to-layer="798" to-port="1"/>
33454 <edge from-layer="798" from-port="2" to-layer="800" to-port="0"/>
33455 <edge from-layer="799" from-port="0" to-layer="800" to-port="1"/>
33456 <edge from-layer="800" from-port="2" to-layer="801" to-port="0"/>
33457 <edge from-layer="768" from-port="0" to-layer="801" to-port="1"/>
33458 <edge from-layer="769" from-port="0" to-layer="801" to-port="2"/>
33459 <edge from-layer="770" from-port="0" to-layer="801" to-port="3"/>
33460 <edge from-layer="771" from-port="0" to-layer="801" to-port="4"/>
33461 <edge from-layer="802" from-port="0" to-layer="803" to-port="0"/>
33462 <edge from-layer="803" from-port="1" to-layer="805" to-port="0"/>
33463 <edge from-layer="804" from-port="0" to-layer="805" to-port="1"/>
33464 <edge from-layer="805" from-port="2" to-layer="807" to-port="0"/>
33465 <edge from-layer="806" from-port="0" to-layer="807" to-port="1"/>
33466 <edge from-layer="801" from-port="5" to-layer="808" to-port="0"/>
33467 <edge from-layer="807" from-port="2" to-layer="808" to-port="1"/>
33468 <edge from-layer="808" from-port="2" to-layer="810" to-port="0"/>
33469 <edge from-layer="809" from-port="0" to-layer="810" to-port="1"/>
33470 <edge from-layer="810" from-port="2" to-layer="811" to-port="0"/>
33471 <edge from-layer="764" from-port="0" to-layer="811" to-port="1"/>
33472 <edge from-layer="765" from-port="0" to-layer="811" to-port="2"/>
33473 <edge from-layer="766" from-port="0" to-layer="811" to-port="3"/>
33474 <edge from-layer="767" from-port="0" to-layer="811" to-port="4"/>
33475 <edge from-layer="763" from-port="5" to-layer="812" to-port="0"/>
33476 <edge from-layer="811" from-port="5" to-layer="812" to-port="1"/>
33477 <edge from-layer="812" from-port="2" to-layer="814" to-port="0"/>
33478 <edge from-layer="813" from-port="0" to-layer="814" to-port="1"/>
33479 <edge from-layer="814" from-port="2" to-layer="815" to-port="0"/>
33480 <edge from-layer="104" from-port="0" to-layer="815" to-port="1"/>
33481 <edge from-layer="105" from-port="0" to-layer="815" to-port="2"/>
33482 <edge from-layer="106" from-port="0" to-layer="815" to-port="3"/>
33483 <edge from-layer="107" from-port="0" to-layer="815" to-port="4"/>
33484 <edge from-layer="828" from-port="0" to-layer="829" to-port="0"/>
33485 <edge from-layer="829" from-port="1" to-layer="831" to-port="0"/>
33486 <edge from-layer="830" from-port="0" to-layer="831" to-port="1"/>
33487 <edge from-layer="831" from-port="2" to-layer="833" to-port="0"/>
33488 <edge from-layer="832" from-port="0" to-layer="833" to-port="1"/>
33489 <edge from-layer="815" from-port="5" to-layer="834" to-port="0"/>
33490 <edge from-layer="833" from-port="2" to-layer="834" to-port="1"/>
33491 <edge from-layer="834" from-port="2" to-layer="836" to-port="0"/>
33492 <edge from-layer="835" from-port="0" to-layer="836" to-port="1"/>
33493 <edge from-layer="836" from-port="2" to-layer="838" to-port="0"/>
33494 <edge from-layer="837" from-port="0" to-layer="838" to-port="1"/>
33495 <edge from-layer="838" from-port="2" to-layer="839" to-port="0"/>
33496 <edge from-layer="824" from-port="0" to-layer="839" to-port="1"/>
33497 <edge from-layer="825" from-port="0" to-layer="839" to-port="2"/>
33498 <edge from-layer="826" from-port="0" to-layer="839" to-port="3"/>
33499 <edge from-layer="827" from-port="0" to-layer="839" to-port="4"/>
33500 <edge from-layer="841" from-port="0" to-layer="842" to-port="0"/>
33501 <edge from-layer="842" from-port="1" to-layer="844" to-port="0"/>
33502 <edge from-layer="843" from-port="0" to-layer="844" to-port="1"/>
33503 <edge from-layer="844" from-port="2" to-layer="846" to-port="0"/>
33504 <edge from-layer="845" from-port="0" to-layer="846" to-port="1"/>
33505 <edge from-layer="846" from-port="2" to-layer="847" to-port="0"/>
33506 <edge from-layer="840" from-port="0" to-layer="847" to-port="1"/>
33507 <edge from-layer="839" from-port="5" to-layer="848" to-port="0"/>
33508 <edge from-layer="847" from-port="2" to-layer="848" to-port="1"/>
33509 <edge from-layer="848" from-port="2" to-layer="850" to-port="0"/>
33510 <edge from-layer="849" from-port="0" to-layer="850" to-port="1"/>
33511 <edge from-layer="850" from-port="2" to-layer="852" to-port="0"/>
33512 <edge from-layer="851" from-port="0" to-layer="852" to-port="1"/>
33513 <edge from-layer="852" from-port="2" to-layer="853" to-port="0"/>
33514 <edge from-layer="820" from-port="0" to-layer="853" to-port="1"/>
33515 <edge from-layer="821" from-port="0" to-layer="853" to-port="2"/>
33516 <edge from-layer="822" from-port="0" to-layer="853" to-port="3"/>
33517 <edge from-layer="823" from-port="0" to-layer="853" to-port="4"/>
33518 <edge from-layer="854" from-port="0" to-layer="855" to-port="0"/>
33519 <edge from-layer="855" from-port="1" to-layer="857" to-port="0"/>
33520 <edge from-layer="856" from-port="0" to-layer="857" to-port="1"/>
33521 <edge from-layer="857" from-port="2" to-layer="859" to-port="0"/>
33522 <edge from-layer="858" from-port="0" to-layer="859" to-port="1"/>
33523 <edge from-layer="853" from-port="5" to-layer="860" to-port="0"/>
33524 <edge from-layer="859" from-port="2" to-layer="860" to-port="1"/>
33525 <edge from-layer="860" from-port="2" to-layer="862" to-port="0"/>
33526 <edge from-layer="861" from-port="0" to-layer="862" to-port="1"/>
33527 <edge from-layer="862" from-port="2" to-layer="863" to-port="0"/>
33528 <edge from-layer="816" from-port="0" to-layer="863" to-port="1"/>
33529 <edge from-layer="817" from-port="0" to-layer="863" to-port="2"/>
33530 <edge from-layer="818" from-port="0" to-layer="863" to-port="3"/>
33531 <edge from-layer="819" from-port="0" to-layer="863" to-port="4"/>
33532 <edge from-layer="815" from-port="5" to-layer="864" to-port="0"/>
33533 <edge from-layer="863" from-port="5" to-layer="864" to-port="1"/>
33534 <edge from-layer="864" from-port="2" to-layer="866" to-port="0"/>
33535 <edge from-layer="865" from-port="0" to-layer="866" to-port="1"/>
33536 <edge from-layer="866" from-port="2" to-layer="867" to-port="0"/>
33537 <edge from-layer="100" from-port="0" to-layer="867" to-port="1"/>
33538 <edge from-layer="101" from-port="0" to-layer="867" to-port="2"/>
33539 <edge from-layer="102" from-port="0" to-layer="867" to-port="3"/>
33540 <edge from-layer="103" from-port="0" to-layer="867" to-port="4"/>
33541 <edge from-layer="867" from-port="5" to-layer="868" to-port="0"/>
33542 <edge from-layer="869" from-port="0" to-layer="870" to-port="0"/>
33543 <edge from-layer="870" from-port="1" to-layer="872" to-port="0"/>
33544 <edge from-layer="871" from-port="0" to-layer="872" to-port="1"/>
33545 <edge from-layer="872" from-port="2" to-layer="874" to-port="0"/>
33546 <edge from-layer="873" from-port="0" to-layer="874" to-port="1"/>
33547 <edge from-layer="868" from-port="1" to-layer="875" to-port="0"/>
33548 <edge from-layer="874" from-port="2" to-layer="875" to-port="1"/>
33549 <edge from-layer="875" from-port="2" to-layer="877" to-port="0"/>
33550 <edge from-layer="876" from-port="0" to-layer="877" to-port="1"/>
33551 <edge from-layer="877" from-port="2" to-layer="878" to-port="0"/>
33552 <edge from-layer="96" from-port="0" to-layer="878" to-port="1"/>
33553 <edge from-layer="97" from-port="0" to-layer="878" to-port="2"/>
33554 <edge from-layer="98" from-port="0" to-layer="878" to-port="3"/>
33555 <edge from-layer="99" from-port="0" to-layer="878" to-port="4"/>
33556 <edge from-layer="891" from-port="0" to-layer="892" to-port="0"/>
33557 <edge from-layer="892" from-port="1" to-layer="894" to-port="0"/>
33558 <edge from-layer="893" from-port="0" to-layer="894" to-port="1"/>
33559 <edge from-layer="894" from-port="2" to-layer="896" to-port="0"/>
33560 <edge from-layer="895" from-port="0" to-layer="896" to-port="1"/>
33561 <edge from-layer="867" from-port="5" to-layer="897" to-port="0"/>
33562 <edge from-layer="896" from-port="2" to-layer="897" to-port="1"/>
33563 <edge from-layer="897" from-port="2" to-layer="899" to-port="0"/>
33564 <edge from-layer="898" from-port="0" to-layer="899" to-port="1"/>
33565 <edge from-layer="899" from-port="2" to-layer="901" to-port="0"/>
33566 <edge from-layer="900" from-port="0" to-layer="901" to-port="1"/>
33567 <edge from-layer="901" from-port="2" to-layer="902" to-port="0"/>
33568 <edge from-layer="887" from-port="0" to-layer="902" to-port="1"/>
33569 <edge from-layer="888" from-port="0" to-layer="902" to-port="2"/>
33570 <edge from-layer="889" from-port="0" to-layer="902" to-port="3"/>
33571 <edge from-layer="890" from-port="0" to-layer="902" to-port="4"/>
33572 <edge from-layer="904" from-port="0" to-layer="905" to-port="0"/>
33573 <edge from-layer="905" from-port="1" to-layer="907" to-port="0"/>
33574 <edge from-layer="906" from-port="0" to-layer="907" to-port="1"/>
33575 <edge from-layer="907" from-port="2" to-layer="909" to-port="0"/>
33576 <edge from-layer="908" from-port="0" to-layer="909" to-port="1"/>
33577 <edge from-layer="909" from-port="2" to-layer="910" to-port="0"/>
33578 <edge from-layer="903" from-port="0" to-layer="910" to-port="1"/>
33579 <edge from-layer="902" from-port="5" to-layer="911" to-port="0"/>
33580 <edge from-layer="910" from-port="2" to-layer="911" to-port="1"/>
33581 <edge from-layer="911" from-port="2" to-layer="913" to-port="0"/>
33582 <edge from-layer="912" from-port="0" to-layer="913" to-port="1"/>
33583 <edge from-layer="913" from-port="2" to-layer="915" to-port="0"/>
33584 <edge from-layer="914" from-port="0" to-layer="915" to-port="1"/>
33585 <edge from-layer="915" from-port="2" to-layer="916" to-port="0"/>
33586 <edge from-layer="883" from-port="0" to-layer="916" to-port="1"/>
33587 <edge from-layer="884" from-port="0" to-layer="916" to-port="2"/>
33588 <edge from-layer="885" from-port="0" to-layer="916" to-port="3"/>
33589 <edge from-layer="886" from-port="0" to-layer="916" to-port="4"/>
33590 <edge from-layer="917" from-port="0" to-layer="918" to-port="0"/>
33591 <edge from-layer="918" from-port="1" to-layer="920" to-port="0"/>
33592 <edge from-layer="919" from-port="0" to-layer="920" to-port="1"/>
33593 <edge from-layer="920" from-port="2" to-layer="922" to-port="0"/>
33594 <edge from-layer="921" from-port="0" to-layer="922" to-port="1"/>
33595 <edge from-layer="916" from-port="5" to-layer="923" to-port="0"/>
33596 <edge from-layer="922" from-port="2" to-layer="923" to-port="1"/>
33597 <edge from-layer="923" from-port="2" to-layer="925" to-port="0"/>
33598 <edge from-layer="924" from-port="0" to-layer="925" to-port="1"/>
33599 <edge from-layer="925" from-port="2" to-layer="926" to-port="0"/>
33600 <edge from-layer="879" from-port="0" to-layer="926" to-port="1"/>
33601 <edge from-layer="880" from-port="0" to-layer="926" to-port="2"/>
33602 <edge from-layer="881" from-port="0" to-layer="926" to-port="3"/>
33603 <edge from-layer="882" from-port="0" to-layer="926" to-port="4"/>
33604 <edge from-layer="878" from-port="5" to-layer="927" to-port="0"/>
33605 <edge from-layer="926" from-port="5" to-layer="927" to-port="1"/>
33606 <edge from-layer="927" from-port="2" to-layer="929" to-port="0"/>
33607 <edge from-layer="928" from-port="0" to-layer="929" to-port="1"/>
33608 <edge from-layer="929" from-port="2" to-layer="930" to-port="0"/>
33609 <edge from-layer="92" from-port="0" to-layer="930" to-port="1"/>
33610 <edge from-layer="93" from-port="0" to-layer="930" to-port="2"/>
33611 <edge from-layer="94" from-port="0" to-layer="930" to-port="3"/>
33612 <edge from-layer="95" from-port="0" to-layer="930" to-port="4"/>
33613 <edge from-layer="943" from-port="0" to-layer="944" to-port="0"/>
33614 <edge from-layer="944" from-port="1" to-layer="946" to-port="0"/>
33615 <edge from-layer="945" from-port="0" to-layer="946" to-port="1"/>
33616 <edge from-layer="946" from-port="2" to-layer="948" to-port="0"/>
33617 <edge from-layer="947" from-port="0" to-layer="948" to-port="1"/>
33618 <edge from-layer="930" from-port="5" to-layer="949" to-port="0"/>
33619 <edge from-layer="948" from-port="2" to-layer="949" to-port="1"/>
33620 <edge from-layer="949" from-port="2" to-layer="951" to-port="0"/>
33621 <edge from-layer="950" from-port="0" to-layer="951" to-port="1"/>
33622 <edge from-layer="951" from-port="2" to-layer="953" to-port="0"/>
33623 <edge from-layer="952" from-port="0" to-layer="953" to-port="1"/>
33624 <edge from-layer="953" from-port="2" to-layer="954" to-port="0"/>
33625 <edge from-layer="939" from-port="0" to-layer="954" to-port="1"/>
33626 <edge from-layer="940" from-port="0" to-layer="954" to-port="2"/>
33627 <edge from-layer="941" from-port="0" to-layer="954" to-port="3"/>
33628 <edge from-layer="942" from-port="0" to-layer="954" to-port="4"/>
33629 <edge from-layer="956" from-port="0" to-layer="957" to-port="0"/>
33630 <edge from-layer="957" from-port="1" to-layer="959" to-port="0"/>
33631 <edge from-layer="958" from-port="0" to-layer="959" to-port="1"/>
33632 <edge from-layer="959" from-port="2" to-layer="961" to-port="0"/>
33633 <edge from-layer="960" from-port="0" to-layer="961" to-port="1"/>
33634 <edge from-layer="961" from-port="2" to-layer="962" to-port="0"/>
33635 <edge from-layer="955" from-port="0" to-layer="962" to-port="1"/>
33636 <edge from-layer="954" from-port="5" to-layer="963" to-port="0"/>
33637 <edge from-layer="962" from-port="2" to-layer="963" to-port="1"/>
33638 <edge from-layer="963" from-port="2" to-layer="965" to-port="0"/>
33639 <edge from-layer="964" from-port="0" to-layer="965" to-port="1"/>
33640 <edge from-layer="965" from-port="2" to-layer="967" to-port="0"/>
33641 <edge from-layer="966" from-port="0" to-layer="967" to-port="1"/>
33642 <edge from-layer="967" from-port="2" to-layer="968" to-port="0"/>
33643 <edge from-layer="935" from-port="0" to-layer="968" to-port="1"/>
33644 <edge from-layer="936" from-port="0" to-layer="968" to-port="2"/>
33645 <edge from-layer="937" from-port="0" to-layer="968" to-port="3"/>
33646 <edge from-layer="938" from-port="0" to-layer="968" to-port="4"/>
33647 <edge from-layer="969" from-port="0" to-layer="970" to-port="0"/>
33648 <edge from-layer="970" from-port="1" to-layer="972" to-port="0"/>
33649 <edge from-layer="971" from-port="0" to-layer="972" to-port="1"/>
33650 <edge from-layer="972" from-port="2" to-layer="974" to-port="0"/>
33651 <edge from-layer="973" from-port="0" to-layer="974" to-port="1"/>
33652 <edge from-layer="968" from-port="5" to-layer="975" to-port="0"/>
33653 <edge from-layer="974" from-port="2" to-layer="975" to-port="1"/>
33654 <edge from-layer="975" from-port="2" to-layer="977" to-port="0"/>
33655 <edge from-layer="976" from-port="0" to-layer="977" to-port="1"/>
33656 <edge from-layer="977" from-port="2" to-layer="978" to-port="0"/>
33657 <edge from-layer="931" from-port="0" to-layer="978" to-port="1"/>
33658 <edge from-layer="932" from-port="0" to-layer="978" to-port="2"/>
33659 <edge from-layer="933" from-port="0" to-layer="978" to-port="3"/>
33660 <edge from-layer="934" from-port="0" to-layer="978" to-port="4"/>
33661 <edge from-layer="930" from-port="5" to-layer="979" to-port="0"/>
33662 <edge from-layer="978" from-port="5" to-layer="979" to-port="1"/>
33663 <edge from-layer="979" from-port="2" to-layer="981" to-port="0"/>
33664 <edge from-layer="980" from-port="0" to-layer="981" to-port="1"/>
33665 <edge from-layer="981" from-port="2" to-layer="982" to-port="0"/>
33666 <edge from-layer="88" from-port="0" to-layer="982" to-port="1"/>
33667 <edge from-layer="89" from-port="0" to-layer="982" to-port="2"/>
33668 <edge from-layer="90" from-port="0" to-layer="982" to-port="3"/>
33669 <edge from-layer="91" from-port="0" to-layer="982" to-port="4"/>
33670 <edge from-layer="995" from-port="0" to-layer="996" to-port="0"/>
33671 <edge from-layer="996" from-port="1" to-layer="998" to-port="0"/>
33672 <edge from-layer="997" from-port="0" to-layer="998" to-port="1"/>
33673 <edge from-layer="998" from-port="2" to-layer="1000" to-port="0"/>
33674 <edge from-layer="999" from-port="0" to-layer="1000" to-port="1"/>
33675 <edge from-layer="982" from-port="5" to-layer="1001" to-port="0"/>
33676 <edge from-layer="1000" from-port="2" to-layer="1001" to-port="1"/>
33677 <edge from-layer="1001" from-port="2" to-layer="1003" to-port="0"/>
33678 <edge from-layer="1002" from-port="0" to-layer="1003" to-port="1"/>
33679 <edge from-layer="1003" from-port="2" to-layer="1005" to-port="0"/>
33680 <edge from-layer="1004" from-port="0" to-layer="1005" to-port="1"/>
33681 <edge from-layer="1005" from-port="2" to-layer="1006" to-port="0"/>
33682 <edge from-layer="991" from-port="0" to-layer="1006" to-port="1"/>
33683 <edge from-layer="992" from-port="0" to-layer="1006" to-port="2"/>
33684 <edge from-layer="993" from-port="0" to-layer="1006" to-port="3"/>
33685 <edge from-layer="994" from-port="0" to-layer="1006" to-port="4"/>
33686 <edge from-layer="1008" from-port="0" to-layer="1009" to-port="0"/>
33687 <edge from-layer="1009" from-port="1" to-layer="1011" to-port="0"/>
33688 <edge from-layer="1010" from-port="0" to-layer="1011" to-port="1"/>
33689 <edge from-layer="1011" from-port="2" to-layer="1013" to-port="0"/>
33690 <edge from-layer="1012" from-port="0" to-layer="1013" to-port="1"/>
33691 <edge from-layer="1013" from-port="2" to-layer="1014" to-port="0"/>
33692 <edge from-layer="1007" from-port="0" to-layer="1014" to-port="1"/>
33693 <edge from-layer="1006" from-port="5" to-layer="1015" to-port="0"/>
33694 <edge from-layer="1014" from-port="2" to-layer="1015" to-port="1"/>
33695 <edge from-layer="1015" from-port="2" to-layer="1017" to-port="0"/>
33696 <edge from-layer="1016" from-port="0" to-layer="1017" to-port="1"/>
33697 <edge from-layer="1017" from-port="2" to-layer="1019" to-port="0"/>
33698 <edge from-layer="1018" from-port="0" to-layer="1019" to-port="1"/>
33699 <edge from-layer="1019" from-port="2" to-layer="1020" to-port="0"/>
33700 <edge from-layer="987" from-port="0" to-layer="1020" to-port="1"/>
33701 <edge from-layer="988" from-port="0" to-layer="1020" to-port="2"/>
33702 <edge from-layer="989" from-port="0" to-layer="1020" to-port="3"/>
33703 <edge from-layer="990" from-port="0" to-layer="1020" to-port="4"/>
33704 <edge from-layer="1021" from-port="0" to-layer="1022" to-port="0"/>
33705 <edge from-layer="1022" from-port="1" to-layer="1024" to-port="0"/>
33706 <edge from-layer="1023" from-port="0" to-layer="1024" to-port="1"/>
33707 <edge from-layer="1024" from-port="2" to-layer="1026" to-port="0"/>
33708 <edge from-layer="1025" from-port="0" to-layer="1026" to-port="1"/>
33709 <edge from-layer="1020" from-port="5" to-layer="1027" to-port="0"/>
33710 <edge from-layer="1026" from-port="2" to-layer="1027" to-port="1"/>
33711 <edge from-layer="1027" from-port="2" to-layer="1029" to-port="0"/>
33712 <edge from-layer="1028" from-port="0" to-layer="1029" to-port="1"/>
33713 <edge from-layer="1029" from-port="2" to-layer="1030" to-port="0"/>
33714 <edge from-layer="983" from-port="0" to-layer="1030" to-port="1"/>
33715 <edge from-layer="984" from-port="0" to-layer="1030" to-port="2"/>
33716 <edge from-layer="985" from-port="0" to-layer="1030" to-port="3"/>
33717 <edge from-layer="986" from-port="0" to-layer="1030" to-port="4"/>
33718 <edge from-layer="982" from-port="5" to-layer="1031" to-port="0"/>
33719 <edge from-layer="1030" from-port="5" to-layer="1031" to-port="1"/>
33720 <edge from-layer="1031" from-port="2" to-layer="1033" to-port="0"/>
33721 <edge from-layer="1032" from-port="0" to-layer="1033" to-port="1"/>
33722 <edge from-layer="1033" from-port="2" to-layer="1034" to-port="0"/>
33723 <edge from-layer="84" from-port="0" to-layer="1034" to-port="1"/>
33724 <edge from-layer="85" from-port="0" to-layer="1034" to-port="2"/>
33725 <edge from-layer="86" from-port="0" to-layer="1034" to-port="3"/>
33726 <edge from-layer="87" from-port="0" to-layer="1034" to-port="4"/>
33727 <edge from-layer="1047" from-port="0" to-layer="1048" to-port="0"/>
33728 <edge from-layer="1048" from-port="1" to-layer="1050" to-port="0"/>
33729 <edge from-layer="1049" from-port="0" to-layer="1050" to-port="1"/>
33730 <edge from-layer="1050" from-port="2" to-layer="1052" to-port="0"/>
33731 <edge from-layer="1051" from-port="0" to-layer="1052" to-port="1"/>
33732 <edge from-layer="1034" from-port="5" to-layer="1053" to-port="0"/>
33733 <edge from-layer="1052" from-port="2" to-layer="1053" to-port="1"/>
33734 <edge from-layer="1053" from-port="2" to-layer="1055" to-port="0"/>
33735 <edge from-layer="1054" from-port="0" to-layer="1055" to-port="1"/>
33736 <edge from-layer="1055" from-port="2" to-layer="1057" to-port="0"/>
33737 <edge from-layer="1056" from-port="0" to-layer="1057" to-port="1"/>
33738 <edge from-layer="1057" from-port="2" to-layer="1058" to-port="0"/>
33739 <edge from-layer="1043" from-port="0" to-layer="1058" to-port="1"/>
33740 <edge from-layer="1044" from-port="0" to-layer="1058" to-port="2"/>
33741 <edge from-layer="1045" from-port="0" to-layer="1058" to-port="3"/>
33742 <edge from-layer="1046" from-port="0" to-layer="1058" to-port="4"/>
33743 <edge from-layer="1060" from-port="0" to-layer="1061" to-port="0"/>
33744 <edge from-layer="1061" from-port="1" to-layer="1063" to-port="0"/>
33745 <edge from-layer="1062" from-port="0" to-layer="1063" to-port="1"/>
33746 <edge from-layer="1063" from-port="2" to-layer="1065" to-port="0"/>
33747 <edge from-layer="1064" from-port="0" to-layer="1065" to-port="1"/>
33748 <edge from-layer="1065" from-port="2" to-layer="1066" to-port="0"/>
33749 <edge from-layer="1059" from-port="0" to-layer="1066" to-port="1"/>
33750 <edge from-layer="1058" from-port="5" to-layer="1067" to-port="0"/>
33751 <edge from-layer="1066" from-port="2" to-layer="1067" to-port="1"/>
33752 <edge from-layer="1067" from-port="2" to-layer="1069" to-port="0"/>
33753 <edge from-layer="1068" from-port="0" to-layer="1069" to-port="1"/>
33754 <edge from-layer="1069" from-port="2" to-layer="1071" to-port="0"/>
33755 <edge from-layer="1070" from-port="0" to-layer="1071" to-port="1"/>
33756 <edge from-layer="1071" from-port="2" to-layer="1072" to-port="0"/>
33757 <edge from-layer="1039" from-port="0" to-layer="1072" to-port="1"/>
33758 <edge from-layer="1040" from-port="0" to-layer="1072" to-port="2"/>
33759 <edge from-layer="1041" from-port="0" to-layer="1072" to-port="3"/>
33760 <edge from-layer="1042" from-port="0" to-layer="1072" to-port="4"/>
33761 <edge from-layer="1073" from-port="0" to-layer="1074" to-port="0"/>
33762 <edge from-layer="1074" from-port="1" to-layer="1076" to-port="0"/>
33763 <edge from-layer="1075" from-port="0" to-layer="1076" to-port="1"/>
33764 <edge from-layer="1076" from-port="2" to-layer="1078" to-port="0"/>
33765 <edge from-layer="1077" from-port="0" to-layer="1078" to-port="1"/>
33766 <edge from-layer="1072" from-port="5" to-layer="1079" to-port="0"/>
33767 <edge from-layer="1078" from-port="2" to-layer="1079" to-port="1"/>
33768 <edge from-layer="1079" from-port="2" to-layer="1081" to-port="0"/>
33769 <edge from-layer="1080" from-port="0" to-layer="1081" to-port="1"/>
33770 <edge from-layer="1081" from-port="2" to-layer="1082" to-port="0"/>
33771 <edge from-layer="1035" from-port="0" to-layer="1082" to-port="1"/>
33772 <edge from-layer="1036" from-port="0" to-layer="1082" to-port="2"/>
33773 <edge from-layer="1037" from-port="0" to-layer="1082" to-port="3"/>
33774 <edge from-layer="1038" from-port="0" to-layer="1082" to-port="4"/>
33775 <edge from-layer="1034" from-port="5" to-layer="1083" to-port="0"/>
33776 <edge from-layer="1082" from-port="5" to-layer="1083" to-port="1"/>
33777 <edge from-layer="1083" from-port="2" to-layer="1085" to-port="0"/>
33778 <edge from-layer="1084" from-port="0" to-layer="1085" to-port="1"/>
33779 <edge from-layer="1085" from-port="2" to-layer="1086" to-port="0"/>
33780 <edge from-layer="80" from-port="0" to-layer="1086" to-port="1"/>
33781 <edge from-layer="81" from-port="0" to-layer="1086" to-port="2"/>
33782 <edge from-layer="82" from-port="0" to-layer="1086" to-port="3"/>
33783 <edge from-layer="83" from-port="0" to-layer="1086" to-port="4"/>
33784 <edge from-layer="1099" from-port="0" to-layer="1100" to-port="0"/>
33785 <edge from-layer="1100" from-port="1" to-layer="1102" to-port="0"/>
33786 <edge from-layer="1101" from-port="0" to-layer="1102" to-port="1"/>
33787 <edge from-layer="1102" from-port="2" to-layer="1104" to-port="0"/>
33788 <edge from-layer="1103" from-port="0" to-layer="1104" to-port="1"/>
33789 <edge from-layer="1086" from-port="5" to-layer="1105" to-port="0"/>
33790 <edge from-layer="1104" from-port="2" to-layer="1105" to-port="1"/>
33791 <edge from-layer="1105" from-port="2" to-layer="1107" to-port="0"/>
33792 <edge from-layer="1106" from-port="0" to-layer="1107" to-port="1"/>
33793 <edge from-layer="1107" from-port="2" to-layer="1109" to-port="0"/>
33794 <edge from-layer="1108" from-port="0" to-layer="1109" to-port="1"/>
33795 <edge from-layer="1109" from-port="2" to-layer="1110" to-port="0"/>
33796 <edge from-layer="1095" from-port="0" to-layer="1110" to-port="1"/>
33797 <edge from-layer="1096" from-port="0" to-layer="1110" to-port="2"/>
33798 <edge from-layer="1097" from-port="0" to-layer="1110" to-port="3"/>
33799 <edge from-layer="1098" from-port="0" to-layer="1110" to-port="4"/>
33800 <edge from-layer="1112" from-port="0" to-layer="1113" to-port="0"/>
33801 <edge from-layer="1113" from-port="1" to-layer="1115" to-port="0"/>
33802 <edge from-layer="1114" from-port="0" to-layer="1115" to-port="1"/>
33803 <edge from-layer="1115" from-port="2" to-layer="1117" to-port="0"/>
33804 <edge from-layer="1116" from-port="0" to-layer="1117" to-port="1"/>
33805 <edge from-layer="1117" from-port="2" to-layer="1118" to-port="0"/>
33806 <edge from-layer="1111" from-port="0" to-layer="1118" to-port="1"/>
33807 <edge from-layer="1110" from-port="5" to-layer="1119" to-port="0"/>
33808 <edge from-layer="1118" from-port="2" to-layer="1119" to-port="1"/>
33809 <edge from-layer="1119" from-port="2" to-layer="1121" to-port="0"/>
33810 <edge from-layer="1120" from-port="0" to-layer="1121" to-port="1"/>
33811 <edge from-layer="1121" from-port="2" to-layer="1123" to-port="0"/>
33812 <edge from-layer="1122" from-port="0" to-layer="1123" to-port="1"/>
33813 <edge from-layer="1123" from-port="2" to-layer="1124" to-port="0"/>
33814 <edge from-layer="1091" from-port="0" to-layer="1124" to-port="1"/>
33815 <edge from-layer="1092" from-port="0" to-layer="1124" to-port="2"/>
33816 <edge from-layer="1093" from-port="0" to-layer="1124" to-port="3"/>
33817 <edge from-layer="1094" from-port="0" to-layer="1124" to-port="4"/>
33818 <edge from-layer="1125" from-port="0" to-layer="1126" to-port="0"/>
33819 <edge from-layer="1126" from-port="1" to-layer="1128" to-port="0"/>
33820 <edge from-layer="1127" from-port="0" to-layer="1128" to-port="1"/>
33821 <edge from-layer="1128" from-port="2" to-layer="1130" to-port="0"/>
33822 <edge from-layer="1129" from-port="0" to-layer="1130" to-port="1"/>
33823 <edge from-layer="1124" from-port="5" to-layer="1131" to-port="0"/>
33824 <edge from-layer="1130" from-port="2" to-layer="1131" to-port="1"/>
33825 <edge from-layer="1131" from-port="2" to-layer="1133" to-port="0"/>
33826 <edge from-layer="1132" from-port="0" to-layer="1133" to-port="1"/>
33827 <edge from-layer="1133" from-port="2" to-layer="1134" to-port="0"/>
33828 <edge from-layer="1087" from-port="0" to-layer="1134" to-port="1"/>
33829 <edge from-layer="1088" from-port="0" to-layer="1134" to-port="2"/>
33830 <edge from-layer="1089" from-port="0" to-layer="1134" to-port="3"/>
33831 <edge from-layer="1090" from-port="0" to-layer="1134" to-port="4"/>
33832 <edge from-layer="1086" from-port="5" to-layer="1135" to-port="0"/>
33833 <edge from-layer="1134" from-port="5" to-layer="1135" to-port="1"/>
33834 <edge from-layer="1135" from-port="2" to-layer="1137" to-port="0"/>
33835 <edge from-layer="1136" from-port="0" to-layer="1137" to-port="1"/>
33836 <edge from-layer="1137" from-port="2" to-layer="1138" to-port="0"/>
33837 <edge from-layer="76" from-port="0" to-layer="1138" to-port="1"/>
33838 <edge from-layer="77" from-port="0" to-layer="1138" to-port="2"/>
33839 <edge from-layer="78" from-port="0" to-layer="1138" to-port="3"/>
33840 <edge from-layer="79" from-port="0" to-layer="1138" to-port="4"/>
33841 <edge from-layer="1151" from-port="0" to-layer="1152" to-port="0"/>
33842 <edge from-layer="1152" from-port="1" to-layer="1154" to-port="0"/>
33843 <edge from-layer="1153" from-port="0" to-layer="1154" to-port="1"/>
33844 <edge from-layer="1154" from-port="2" to-layer="1156" to-port="0"/>
33845 <edge from-layer="1155" from-port="0" to-layer="1156" to-port="1"/>
33846 <edge from-layer="1138" from-port="5" to-layer="1157" to-port="0"/>
33847 <edge from-layer="1156" from-port="2" to-layer="1157" to-port="1"/>
33848 <edge from-layer="1157" from-port="2" to-layer="1159" to-port="0"/>
33849 <edge from-layer="1158" from-port="0" to-layer="1159" to-port="1"/>
33850 <edge from-layer="1159" from-port="2" to-layer="1161" to-port="0"/>
33851 <edge from-layer="1160" from-port="0" to-layer="1161" to-port="1"/>
33852 <edge from-layer="1161" from-port="2" to-layer="1162" to-port="0"/>
33853 <edge from-layer="1147" from-port="0" to-layer="1162" to-port="1"/>
33854 <edge from-layer="1148" from-port="0" to-layer="1162" to-port="2"/>
33855 <edge from-layer="1149" from-port="0" to-layer="1162" to-port="3"/>
33856 <edge from-layer="1150" from-port="0" to-layer="1162" to-port="4"/>
33857 <edge from-layer="1164" from-port="0" to-layer="1165" to-port="0"/>
33858 <edge from-layer="1165" from-port="1" to-layer="1167" to-port="0"/>
33859 <edge from-layer="1166" from-port="0" to-layer="1167" to-port="1"/>
33860 <edge from-layer="1167" from-port="2" to-layer="1169" to-port="0"/>
33861 <edge from-layer="1168" from-port="0" to-layer="1169" to-port="1"/>
33862 <edge from-layer="1169" from-port="2" to-layer="1170" to-port="0"/>
33863 <edge from-layer="1163" from-port="0" to-layer="1170" to-port="1"/>
33864 <edge from-layer="1162" from-port="5" to-layer="1171" to-port="0"/>
33865 <edge from-layer="1170" from-port="2" to-layer="1171" to-port="1"/>
33866 <edge from-layer="1171" from-port="2" to-layer="1173" to-port="0"/>
33867 <edge from-layer="1172" from-port="0" to-layer="1173" to-port="1"/>
33868 <edge from-layer="1173" from-port="2" to-layer="1175" to-port="0"/>
33869 <edge from-layer="1174" from-port="0" to-layer="1175" to-port="1"/>
33870 <edge from-layer="1175" from-port="2" to-layer="1176" to-port="0"/>
33871 <edge from-layer="1143" from-port="0" to-layer="1176" to-port="1"/>
33872 <edge from-layer="1144" from-port="0" to-layer="1176" to-port="2"/>
33873 <edge from-layer="1145" from-port="0" to-layer="1176" to-port="3"/>
33874 <edge from-layer="1146" from-port="0" to-layer="1176" to-port="4"/>
33875 <edge from-layer="1177" from-port="0" to-layer="1178" to-port="0"/>
33876 <edge from-layer="1178" from-port="1" to-layer="1180" to-port="0"/>
33877 <edge from-layer="1179" from-port="0" to-layer="1180" to-port="1"/>
33878 <edge from-layer="1180" from-port="2" to-layer="1182" to-port="0"/>
33879 <edge from-layer="1181" from-port="0" to-layer="1182" to-port="1"/>
33880 <edge from-layer="1176" from-port="5" to-layer="1183" to-port="0"/>
33881 <edge from-layer="1182" from-port="2" to-layer="1183" to-port="1"/>
33882 <edge from-layer="1183" from-port="2" to-layer="1185" to-port="0"/>
33883 <edge from-layer="1184" from-port="0" to-layer="1185" to-port="1"/>
33884 <edge from-layer="1185" from-port="2" to-layer="1186" to-port="0"/>
33885 <edge from-layer="1139" from-port="0" to-layer="1186" to-port="1"/>
33886 <edge from-layer="1140" from-port="0" to-layer="1186" to-port="2"/>
33887 <edge from-layer="1141" from-port="0" to-layer="1186" to-port="3"/>
33888 <edge from-layer="1142" from-port="0" to-layer="1186" to-port="4"/>
33889 <edge from-layer="1138" from-port="5" to-layer="1187" to-port="0"/>
33890 <edge from-layer="1186" from-port="5" to-layer="1187" to-port="1"/>
33891 <edge from-layer="1187" from-port="2" to-layer="1189" to-port="0"/>
33892 <edge from-layer="1188" from-port="0" to-layer="1189" to-port="1"/>
33893 <edge from-layer="1189" from-port="2" to-layer="1190" to-port="0"/>
33894 <edge from-layer="72" from-port="0" to-layer="1190" to-port="1"/>
33895 <edge from-layer="73" from-port="0" to-layer="1190" to-port="2"/>
33896 <edge from-layer="74" from-port="0" to-layer="1190" to-port="3"/>
33897 <edge from-layer="75" from-port="0" to-layer="1190" to-port="4"/>
33898 <edge from-layer="1203" from-port="0" to-layer="1204" to-port="0"/>
33899 <edge from-layer="1204" from-port="1" to-layer="1206" to-port="0"/>
33900 <edge from-layer="1205" from-port="0" to-layer="1206" to-port="1"/>
33901 <edge from-layer="1206" from-port="2" to-layer="1208" to-port="0"/>
33902 <edge from-layer="1207" from-port="0" to-layer="1208" to-port="1"/>
33903 <edge from-layer="1190" from-port="5" to-layer="1209" to-port="0"/>
33904 <edge from-layer="1208" from-port="2" to-layer="1209" to-port="1"/>
33905 <edge from-layer="1209" from-port="2" to-layer="1211" to-port="0"/>
33906 <edge from-layer="1210" from-port="0" to-layer="1211" to-port="1"/>
33907 <edge from-layer="1211" from-port="2" to-layer="1213" to-port="0"/>
33908 <edge from-layer="1212" from-port="0" to-layer="1213" to-port="1"/>
33909 <edge from-layer="1213" from-port="2" to-layer="1214" to-port="0"/>
33910 <edge from-layer="1199" from-port="0" to-layer="1214" to-port="1"/>
33911 <edge from-layer="1200" from-port="0" to-layer="1214" to-port="2"/>
33912 <edge from-layer="1201" from-port="0" to-layer="1214" to-port="3"/>
33913 <edge from-layer="1202" from-port="0" to-layer="1214" to-port="4"/>
33914 <edge from-layer="1216" from-port="0" to-layer="1217" to-port="0"/>
33915 <edge from-layer="1217" from-port="1" to-layer="1219" to-port="0"/>
33916 <edge from-layer="1218" from-port="0" to-layer="1219" to-port="1"/>
33917 <edge from-layer="1219" from-port="2" to-layer="1221" to-port="0"/>
33918 <edge from-layer="1220" from-port="0" to-layer="1221" to-port="1"/>
33919 <edge from-layer="1221" from-port="2" to-layer="1222" to-port="0"/>
33920 <edge from-layer="1215" from-port="0" to-layer="1222" to-port="1"/>
33921 <edge from-layer="1214" from-port="5" to-layer="1223" to-port="0"/>
33922 <edge from-layer="1222" from-port="2" to-layer="1223" to-port="1"/>
33923 <edge from-layer="1223" from-port="2" to-layer="1225" to-port="0"/>
33924 <edge from-layer="1224" from-port="0" to-layer="1225" to-port="1"/>
33925 <edge from-layer="1225" from-port="2" to-layer="1227" to-port="0"/>
33926 <edge from-layer="1226" from-port="0" to-layer="1227" to-port="1"/>
33927 <edge from-layer="1227" from-port="2" to-layer="1228" to-port="0"/>
33928 <edge from-layer="1195" from-port="0" to-layer="1228" to-port="1"/>
33929 <edge from-layer="1196" from-port="0" to-layer="1228" to-port="2"/>
33930 <edge from-layer="1197" from-port="0" to-layer="1228" to-port="3"/>
33931 <edge from-layer="1198" from-port="0" to-layer="1228" to-port="4"/>
33932 <edge from-layer="1229" from-port="0" to-layer="1230" to-port="0"/>
33933 <edge from-layer="1230" from-port="1" to-layer="1232" to-port="0"/>
33934 <edge from-layer="1231" from-port="0" to-layer="1232" to-port="1"/>
33935 <edge from-layer="1232" from-port="2" to-layer="1234" to-port="0"/>
33936 <edge from-layer="1233" from-port="0" to-layer="1234" to-port="1"/>
33937 <edge from-layer="1228" from-port="5" to-layer="1235" to-port="0"/>
33938 <edge from-layer="1234" from-port="2" to-layer="1235" to-port="1"/>
33939 <edge from-layer="1235" from-port="2" to-layer="1237" to-port="0"/>
33940 <edge from-layer="1236" from-port="0" to-layer="1237" to-port="1"/>
33941 <edge from-layer="1237" from-port="2" to-layer="1238" to-port="0"/>
33942 <edge from-layer="1191" from-port="0" to-layer="1238" to-port="1"/>
33943 <edge from-layer="1192" from-port="0" to-layer="1238" to-port="2"/>
33944 <edge from-layer="1193" from-port="0" to-layer="1238" to-port="3"/>
33945 <edge from-layer="1194" from-port="0" to-layer="1238" to-port="4"/>
33946 <edge from-layer="1190" from-port="5" to-layer="1239" to-port="0"/>
33947 <edge from-layer="1238" from-port="5" to-layer="1239" to-port="1"/>
33948 <edge from-layer="1239" from-port="2" to-layer="1241" to-port="0"/>
33949 <edge from-layer="1240" from-port="0" to-layer="1241" to-port="1"/>
33950 <edge from-layer="1241" from-port="2" to-layer="1242" to-port="0"/>
33951 <edge from-layer="68" from-port="0" to-layer="1242" to-port="1"/>
33952 <edge from-layer="69" from-port="0" to-layer="1242" to-port="2"/>
33953 <edge from-layer="70" from-port="0" to-layer="1242" to-port="3"/>
33954 <edge from-layer="71" from-port="0" to-layer="1242" to-port="4"/>
33955 <edge from-layer="1255" from-port="0" to-layer="1256" to-port="0"/>
33956 <edge from-layer="1256" from-port="1" to-layer="1258" to-port="0"/>
33957 <edge from-layer="1257" from-port="0" to-layer="1258" to-port="1"/>
33958 <edge from-layer="1258" from-port="2" to-layer="1260" to-port="0"/>
33959 <edge from-layer="1259" from-port="0" to-layer="1260" to-port="1"/>
33960 <edge from-layer="1242" from-port="5" to-layer="1261" to-port="0"/>
33961 <edge from-layer="1260" from-port="2" to-layer="1261" to-port="1"/>
33962 <edge from-layer="1261" from-port="2" to-layer="1263" to-port="0"/>
33963 <edge from-layer="1262" from-port="0" to-layer="1263" to-port="1"/>
33964 <edge from-layer="1263" from-port="2" to-layer="1265" to-port="0"/>
33965 <edge from-layer="1264" from-port="0" to-layer="1265" to-port="1"/>
33966 <edge from-layer="1265" from-port="2" to-layer="1266" to-port="0"/>
33967 <edge from-layer="1251" from-port="0" to-layer="1266" to-port="1"/>
33968 <edge from-layer="1252" from-port="0" to-layer="1266" to-port="2"/>
33969 <edge from-layer="1253" from-port="0" to-layer="1266" to-port="3"/>
33970 <edge from-layer="1254" from-port="0" to-layer="1266" to-port="4"/>
33971 <edge from-layer="1268" from-port="0" to-layer="1269" to-port="0"/>
33972 <edge from-layer="1269" from-port="1" to-layer="1271" to-port="0"/>
33973 <edge from-layer="1270" from-port="0" to-layer="1271" to-port="1"/>
33974 <edge from-layer="1271" from-port="2" to-layer="1273" to-port="0"/>
33975 <edge from-layer="1272" from-port="0" to-layer="1273" to-port="1"/>
33976 <edge from-layer="1273" from-port="2" to-layer="1274" to-port="0"/>
33977 <edge from-layer="1267" from-port="0" to-layer="1274" to-port="1"/>
33978 <edge from-layer="1266" from-port="5" to-layer="1275" to-port="0"/>
33979 <edge from-layer="1274" from-port="2" to-layer="1275" to-port="1"/>
33980 <edge from-layer="1275" from-port="2" to-layer="1277" to-port="0"/>
33981 <edge from-layer="1276" from-port="0" to-layer="1277" to-port="1"/>
33982 <edge from-layer="1277" from-port="2" to-layer="1279" to-port="0"/>
33983 <edge from-layer="1278" from-port="0" to-layer="1279" to-port="1"/>
33984 <edge from-layer="1279" from-port="2" to-layer="1280" to-port="0"/>
33985 <edge from-layer="1247" from-port="0" to-layer="1280" to-port="1"/>
33986 <edge from-layer="1248" from-port="0" to-layer="1280" to-port="2"/>
33987 <edge from-layer="1249" from-port="0" to-layer="1280" to-port="3"/>
33988 <edge from-layer="1250" from-port="0" to-layer="1280" to-port="4"/>
33989 <edge from-layer="1281" from-port="0" to-layer="1282" to-port="0"/>
33990 <edge from-layer="1282" from-port="1" to-layer="1284" to-port="0"/>
33991 <edge from-layer="1283" from-port="0" to-layer="1284" to-port="1"/>
33992 <edge from-layer="1284" from-port="2" to-layer="1286" to-port="0"/>
33993 <edge from-layer="1285" from-port="0" to-layer="1286" to-port="1"/>
33994 <edge from-layer="1280" from-port="5" to-layer="1287" to-port="0"/>
33995 <edge from-layer="1286" from-port="2" to-layer="1287" to-port="1"/>
33996 <edge from-layer="1287" from-port="2" to-layer="1289" to-port="0"/>
33997 <edge from-layer="1288" from-port="0" to-layer="1289" to-port="1"/>
33998 <edge from-layer="1289" from-port="2" to-layer="1290" to-port="0"/>
33999 <edge from-layer="1243" from-port="0" to-layer="1290" to-port="1"/>
34000 <edge from-layer="1244" from-port="0" to-layer="1290" to-port="2"/>
34001 <edge from-layer="1245" from-port="0" to-layer="1290" to-port="3"/>
34002 <edge from-layer="1246" from-port="0" to-layer="1290" to-port="4"/>
34003 <edge from-layer="1242" from-port="5" to-layer="1291" to-port="0"/>
34004 <edge from-layer="1290" from-port="5" to-layer="1291" to-port="1"/>
34005 <edge from-layer="1291" from-port="2" to-layer="1293" to-port="0"/>
34006 <edge from-layer="1292" from-port="0" to-layer="1293" to-port="1"/>
34007 <edge from-layer="1293" from-port="2" to-layer="1294" to-port="0"/>
34008 <edge from-layer="64" from-port="0" to-layer="1294" to-port="1"/>
34009 <edge from-layer="65" from-port="0" to-layer="1294" to-port="2"/>
34010 <edge from-layer="66" from-port="0" to-layer="1294" to-port="3"/>
34011 <edge from-layer="67" from-port="0" to-layer="1294" to-port="4"/>
34012 <edge from-layer="1307" from-port="0" to-layer="1308" to-port="0"/>
34013 <edge from-layer="1308" from-port="1" to-layer="1310" to-port="0"/>
34014 <edge from-layer="1309" from-port="0" to-layer="1310" to-port="1"/>
34015 <edge from-layer="1310" from-port="2" to-layer="1312" to-port="0"/>
34016 <edge from-layer="1311" from-port="0" to-layer="1312" to-port="1"/>
34017 <edge from-layer="1294" from-port="5" to-layer="1313" to-port="0"/>
34018 <edge from-layer="1312" from-port="2" to-layer="1313" to-port="1"/>
34019 <edge from-layer="1313" from-port="2" to-layer="1315" to-port="0"/>
34020 <edge from-layer="1314" from-port="0" to-layer="1315" to-port="1"/>
34021 <edge from-layer="1315" from-port="2" to-layer="1317" to-port="0"/>
34022 <edge from-layer="1316" from-port="0" to-layer="1317" to-port="1"/>
34023 <edge from-layer="1317" from-port="2" to-layer="1318" to-port="0"/>
34024 <edge from-layer="1303" from-port="0" to-layer="1318" to-port="1"/>
34025 <edge from-layer="1304" from-port="0" to-layer="1318" to-port="2"/>
34026 <edge from-layer="1305" from-port="0" to-layer="1318" to-port="3"/>
34027 <edge from-layer="1306" from-port="0" to-layer="1318" to-port="4"/>
34028 <edge from-layer="1320" from-port="0" to-layer="1321" to-port="0"/>
34029 <edge from-layer="1321" from-port="1" to-layer="1323" to-port="0"/>
34030 <edge from-layer="1322" from-port="0" to-layer="1323" to-port="1"/>
34031 <edge from-layer="1323" from-port="2" to-layer="1325" to-port="0"/>
34032 <edge from-layer="1324" from-port="0" to-layer="1325" to-port="1"/>
34033 <edge from-layer="1325" from-port="2" to-layer="1326" to-port="0"/>
34034 <edge from-layer="1319" from-port="0" to-layer="1326" to-port="1"/>
34035 <edge from-layer="1318" from-port="5" to-layer="1327" to-port="0"/>
34036 <edge from-layer="1326" from-port="2" to-layer="1327" to-port="1"/>
34037 <edge from-layer="1327" from-port="2" to-layer="1329" to-port="0"/>
34038 <edge from-layer="1328" from-port="0" to-layer="1329" to-port="1"/>
34039 <edge from-layer="1329" from-port="2" to-layer="1331" to-port="0"/>
34040 <edge from-layer="1330" from-port="0" to-layer="1331" to-port="1"/>
34041 <edge from-layer="1331" from-port="2" to-layer="1332" to-port="0"/>
34042 <edge from-layer="1299" from-port="0" to-layer="1332" to-port="1"/>
34043 <edge from-layer="1300" from-port="0" to-layer="1332" to-port="2"/>
34044 <edge from-layer="1301" from-port="0" to-layer="1332" to-port="3"/>
34045 <edge from-layer="1302" from-port="0" to-layer="1332" to-port="4"/>
34046 <edge from-layer="1333" from-port="0" to-layer="1334" to-port="0"/>
34047 <edge from-layer="1334" from-port="1" to-layer="1336" to-port="0"/>
34048 <edge from-layer="1335" from-port="0" to-layer="1336" to-port="1"/>
34049 <edge from-layer="1336" from-port="2" to-layer="1338" to-port="0"/>
34050 <edge from-layer="1337" from-port="0" to-layer="1338" to-port="1"/>
34051 <edge from-layer="1332" from-port="5" to-layer="1339" to-port="0"/>
34052 <edge from-layer="1338" from-port="2" to-layer="1339" to-port="1"/>
34053 <edge from-layer="1339" from-port="2" to-layer="1341" to-port="0"/>
34054 <edge from-layer="1340" from-port="0" to-layer="1341" to-port="1"/>
34055 <edge from-layer="1341" from-port="2" to-layer="1342" to-port="0"/>
34056 <edge from-layer="1295" from-port="0" to-layer="1342" to-port="1"/>
34057 <edge from-layer="1296" from-port="0" to-layer="1342" to-port="2"/>
34058 <edge from-layer="1297" from-port="0" to-layer="1342" to-port="3"/>
34059 <edge from-layer="1298" from-port="0" to-layer="1342" to-port="4"/>
34060 <edge from-layer="1294" from-port="5" to-layer="1343" to-port="0"/>
34061 <edge from-layer="1342" from-port="5" to-layer="1343" to-port="1"/>
34062 <edge from-layer="1343" from-port="2" to-layer="1345" to-port="0"/>
34063 <edge from-layer="1344" from-port="0" to-layer="1345" to-port="1"/>
34064 <edge from-layer="1345" from-port="2" to-layer="1346" to-port="0"/>
34065 <edge from-layer="60" from-port="0" to-layer="1346" to-port="1"/>
34066 <edge from-layer="61" from-port="0" to-layer="1346" to-port="2"/>
34067 <edge from-layer="62" from-port="0" to-layer="1346" to-port="3"/>
34068 <edge from-layer="63" from-port="0" to-layer="1346" to-port="4"/>
34069 <edge from-layer="1359" from-port="0" to-layer="1360" to-port="0"/>
34070 <edge from-layer="1360" from-port="1" to-layer="1362" to-port="0"/>
34071 <edge from-layer="1361" from-port="0" to-layer="1362" to-port="1"/>
34072 <edge from-layer="1362" from-port="2" to-layer="1364" to-port="0"/>
34073 <edge from-layer="1363" from-port="0" to-layer="1364" to-port="1"/>
34074 <edge from-layer="1346" from-port="5" to-layer="1365" to-port="0"/>
34075 <edge from-layer="1364" from-port="2" to-layer="1365" to-port="1"/>
34076 <edge from-layer="1365" from-port="2" to-layer="1367" to-port="0"/>
34077 <edge from-layer="1366" from-port="0" to-layer="1367" to-port="1"/>
34078 <edge from-layer="1367" from-port="2" to-layer="1369" to-port="0"/>
34079 <edge from-layer="1368" from-port="0" to-layer="1369" to-port="1"/>
34080 <edge from-layer="1369" from-port="2" to-layer="1370" to-port="0"/>
34081 <edge from-layer="1355" from-port="0" to-layer="1370" to-port="1"/>
34082 <edge from-layer="1356" from-port="0" to-layer="1370" to-port="2"/>
34083 <edge from-layer="1357" from-port="0" to-layer="1370" to-port="3"/>
34084 <edge from-layer="1358" from-port="0" to-layer="1370" to-port="4"/>
34085 <edge from-layer="1372" from-port="0" to-layer="1373" to-port="0"/>
34086 <edge from-layer="1373" from-port="1" to-layer="1375" to-port="0"/>
34087 <edge from-layer="1374" from-port="0" to-layer="1375" to-port="1"/>
34088 <edge from-layer="1375" from-port="2" to-layer="1377" to-port="0"/>
34089 <edge from-layer="1376" from-port="0" to-layer="1377" to-port="1"/>
34090 <edge from-layer="1377" from-port="2" to-layer="1378" to-port="0"/>
34091 <edge from-layer="1371" from-port="0" to-layer="1378" to-port="1"/>
34092 <edge from-layer="1370" from-port="5" to-layer="1379" to-port="0"/>
34093 <edge from-layer="1378" from-port="2" to-layer="1379" to-port="1"/>
34094 <edge from-layer="1379" from-port="2" to-layer="1381" to-port="0"/>
34095 <edge from-layer="1380" from-port="0" to-layer="1381" to-port="1"/>
34096 <edge from-layer="1381" from-port="2" to-layer="1383" to-port="0"/>
34097 <edge from-layer="1382" from-port="0" to-layer="1383" to-port="1"/>
34098 <edge from-layer="1383" from-port="2" to-layer="1384" to-port="0"/>
34099 <edge from-layer="1351" from-port="0" to-layer="1384" to-port="1"/>
34100 <edge from-layer="1352" from-port="0" to-layer="1384" to-port="2"/>
34101 <edge from-layer="1353" from-port="0" to-layer="1384" to-port="3"/>
34102 <edge from-layer="1354" from-port="0" to-layer="1384" to-port="4"/>
34103 <edge from-layer="1385" from-port="0" to-layer="1386" to-port="0"/>
34104 <edge from-layer="1386" from-port="1" to-layer="1388" to-port="0"/>
34105 <edge from-layer="1387" from-port="0" to-layer="1388" to-port="1"/>
34106 <edge from-layer="1388" from-port="2" to-layer="1390" to-port="0"/>
34107 <edge from-layer="1389" from-port="0" to-layer="1390" to-port="1"/>
34108 <edge from-layer="1384" from-port="5" to-layer="1391" to-port="0"/>
34109 <edge from-layer="1390" from-port="2" to-layer="1391" to-port="1"/>
34110 <edge from-layer="1391" from-port="2" to-layer="1393" to-port="0"/>
34111 <edge from-layer="1392" from-port="0" to-layer="1393" to-port="1"/>
34112 <edge from-layer="1393" from-port="2" to-layer="1394" to-port="0"/>
34113 <edge from-layer="1347" from-port="0" to-layer="1394" to-port="1"/>
34114 <edge from-layer="1348" from-port="0" to-layer="1394" to-port="2"/>
34115 <edge from-layer="1349" from-port="0" to-layer="1394" to-port="3"/>
34116 <edge from-layer="1350" from-port="0" to-layer="1394" to-port="4"/>
34117 <edge from-layer="1346" from-port="5" to-layer="1395" to-port="0"/>
34118 <edge from-layer="1394" from-port="5" to-layer="1395" to-port="1"/>
34119 <edge from-layer="1395" from-port="2" to-layer="1397" to-port="0"/>
34120 <edge from-layer="1396" from-port="0" to-layer="1397" to-port="1"/>
34121 <edge from-layer="1397" from-port="2" to-layer="1398" to-port="0"/>
34122 <edge from-layer="56" from-port="0" to-layer="1398" to-port="1"/>
34123 <edge from-layer="57" from-port="0" to-layer="1398" to-port="2"/>
34124 <edge from-layer="58" from-port="0" to-layer="1398" to-port="3"/>
34125 <edge from-layer="59" from-port="0" to-layer="1398" to-port="4"/>
34126 <edge from-layer="1411" from-port="0" to-layer="1412" to-port="0"/>
34127 <edge from-layer="1412" from-port="1" to-layer="1414" to-port="0"/>
34128 <edge from-layer="1413" from-port="0" to-layer="1414" to-port="1"/>
34129 <edge from-layer="1414" from-port="2" to-layer="1416" to-port="0"/>
34130 <edge from-layer="1415" from-port="0" to-layer="1416" to-port="1"/>
34131 <edge from-layer="1398" from-port="5" to-layer="1417" to-port="0"/>
34132 <edge from-layer="1416" from-port="2" to-layer="1417" to-port="1"/>
34133 <edge from-layer="1417" from-port="2" to-layer="1419" to-port="0"/>
34134 <edge from-layer="1418" from-port="0" to-layer="1419" to-port="1"/>
34135 <edge from-layer="1419" from-port="2" to-layer="1421" to-port="0"/>
34136 <edge from-layer="1420" from-port="0" to-layer="1421" to-port="1"/>
34137 <edge from-layer="1421" from-port="2" to-layer="1422" to-port="0"/>
34138 <edge from-layer="1407" from-port="0" to-layer="1422" to-port="1"/>
34139 <edge from-layer="1408" from-port="0" to-layer="1422" to-port="2"/>
34140 <edge from-layer="1409" from-port="0" to-layer="1422" to-port="3"/>
34141 <edge from-layer="1410" from-port="0" to-layer="1422" to-port="4"/>
34142 <edge from-layer="1424" from-port="0" to-layer="1425" to-port="0"/>
34143 <edge from-layer="1425" from-port="1" to-layer="1427" to-port="0"/>
34144 <edge from-layer="1426" from-port="0" to-layer="1427" to-port="1"/>
34145 <edge from-layer="1427" from-port="2" to-layer="1429" to-port="0"/>
34146 <edge from-layer="1428" from-port="0" to-layer="1429" to-port="1"/>
34147 <edge from-layer="1429" from-port="2" to-layer="1430" to-port="0"/>
34148 <edge from-layer="1423" from-port="0" to-layer="1430" to-port="1"/>
34149 <edge from-layer="1422" from-port="5" to-layer="1431" to-port="0"/>
34150 <edge from-layer="1430" from-port="2" to-layer="1431" to-port="1"/>
34151 <edge from-layer="1431" from-port="2" to-layer="1433" to-port="0"/>
34152 <edge from-layer="1432" from-port="0" to-layer="1433" to-port="1"/>
34153 <edge from-layer="1433" from-port="2" to-layer="1435" to-port="0"/>
34154 <edge from-layer="1434" from-port="0" to-layer="1435" to-port="1"/>
34155 <edge from-layer="1435" from-port="2" to-layer="1436" to-port="0"/>
34156 <edge from-layer="1403" from-port="0" to-layer="1436" to-port="1"/>
34157 <edge from-layer="1404" from-port="0" to-layer="1436" to-port="2"/>
34158 <edge from-layer="1405" from-port="0" to-layer="1436" to-port="3"/>
34159 <edge from-layer="1406" from-port="0" to-layer="1436" to-port="4"/>
34160 <edge from-layer="1437" from-port="0" to-layer="1438" to-port="0"/>
34161 <edge from-layer="1438" from-port="1" to-layer="1440" to-port="0"/>
34162 <edge from-layer="1439" from-port="0" to-layer="1440" to-port="1"/>
34163 <edge from-layer="1440" from-port="2" to-layer="1442" to-port="0"/>
34164 <edge from-layer="1441" from-port="0" to-layer="1442" to-port="1"/>
34165 <edge from-layer="1436" from-port="5" to-layer="1443" to-port="0"/>
34166 <edge from-layer="1442" from-port="2" to-layer="1443" to-port="1"/>
34167 <edge from-layer="1443" from-port="2" to-layer="1445" to-port="0"/>
34168 <edge from-layer="1444" from-port="0" to-layer="1445" to-port="1"/>
34169 <edge from-layer="1445" from-port="2" to-layer="1446" to-port="0"/>
34170 <edge from-layer="1399" from-port="0" to-layer="1446" to-port="1"/>
34171 <edge from-layer="1400" from-port="0" to-layer="1446" to-port="2"/>
34172 <edge from-layer="1401" from-port="0" to-layer="1446" to-port="3"/>
34173 <edge from-layer="1402" from-port="0" to-layer="1446" to-port="4"/>
34174 <edge from-layer="1398" from-port="5" to-layer="1447" to-port="0"/>
34175 <edge from-layer="1446" from-port="5" to-layer="1447" to-port="1"/>
34176 <edge from-layer="1447" from-port="2" to-layer="1449" to-port="0"/>
34177 <edge from-layer="1448" from-port="0" to-layer="1449" to-port="1"/>
34178 <edge from-layer="1449" from-port="2" to-layer="1450" to-port="0"/>
34179 <edge from-layer="52" from-port="0" to-layer="1450" to-port="1"/>
34180 <edge from-layer="53" from-port="0" to-layer="1450" to-port="2"/>
34181 <edge from-layer="54" from-port="0" to-layer="1450" to-port="3"/>
34182 <edge from-layer="55" from-port="0" to-layer="1450" to-port="4"/>
34183 <edge from-layer="1450" from-port="5" to-layer="1451" to-port="0"/>
34184 <edge from-layer="1452" from-port="0" to-layer="1453" to-port="0"/>
34185 <edge from-layer="1453" from-port="1" to-layer="1455" to-port="0"/>
34186 <edge from-layer="1454" from-port="0" to-layer="1455" to-port="1"/>
34187 <edge from-layer="1455" from-port="2" to-layer="1457" to-port="0"/>
34188 <edge from-layer="1456" from-port="0" to-layer="1457" to-port="1"/>
34189 <edge from-layer="1451" from-port="1" to-layer="1458" to-port="0"/>
34190 <edge from-layer="1457" from-port="2" to-layer="1458" to-port="1"/>
34191 <edge from-layer="1458" from-port="2" to-layer="1460" to-port="0"/>
34192 <edge from-layer="1459" from-port="0" to-layer="1460" to-port="1"/>
34193 <edge from-layer="1460" from-port="2" to-layer="1461" to-port="0"/>
34194 <edge from-layer="48" from-port="0" to-layer="1461" to-port="1"/>
34195 <edge from-layer="49" from-port="0" to-layer="1461" to-port="2"/>
34196 <edge from-layer="50" from-port="0" to-layer="1461" to-port="3"/>
34197 <edge from-layer="51" from-port="0" to-layer="1461" to-port="4"/>
34198 <edge from-layer="1474" from-port="0" to-layer="1475" to-port="0"/>
34199 <edge from-layer="1475" from-port="1" to-layer="1477" to-port="0"/>
34200 <edge from-layer="1476" from-port="0" to-layer="1477" to-port="1"/>
34201 <edge from-layer="1477" from-port="2" to-layer="1479" to-port="0"/>
34202 <edge from-layer="1478" from-port="0" to-layer="1479" to-port="1"/>
34203 <edge from-layer="1450" from-port="5" to-layer="1480" to-port="0"/>
34204 <edge from-layer="1479" from-port="2" to-layer="1480" to-port="1"/>
34205 <edge from-layer="1480" from-port="2" to-layer="1482" to-port="0"/>
34206 <edge from-layer="1481" from-port="0" to-layer="1482" to-port="1"/>
34207 <edge from-layer="1482" from-port="2" to-layer="1484" to-port="0"/>
34208 <edge from-layer="1483" from-port="0" to-layer="1484" to-port="1"/>
34209 <edge from-layer="1484" from-port="2" to-layer="1485" to-port="0"/>
34210 <edge from-layer="1470" from-port="0" to-layer="1485" to-port="1"/>
34211 <edge from-layer="1471" from-port="0" to-layer="1485" to-port="2"/>
34212 <edge from-layer="1472" from-port="0" to-layer="1485" to-port="3"/>
34213 <edge from-layer="1473" from-port="0" to-layer="1485" to-port="4"/>
34214 <edge from-layer="1487" from-port="0" to-layer="1488" to-port="0"/>
34215 <edge from-layer="1488" from-port="1" to-layer="1490" to-port="0"/>
34216 <edge from-layer="1489" from-port="0" to-layer="1490" to-port="1"/>
34217 <edge from-layer="1490" from-port="2" to-layer="1492" to-port="0"/>
34218 <edge from-layer="1491" from-port="0" to-layer="1492" to-port="1"/>
34219 <edge from-layer="1492" from-port="2" to-layer="1493" to-port="0"/>
34220 <edge from-layer="1486" from-port="0" to-layer="1493" to-port="1"/>
34221 <edge from-layer="1485" from-port="5" to-layer="1494" to-port="0"/>
34222 <edge from-layer="1493" from-port="2" to-layer="1494" to-port="1"/>
34223 <edge from-layer="1494" from-port="2" to-layer="1496" to-port="0"/>
34224 <edge from-layer="1495" from-port="0" to-layer="1496" to-port="1"/>
34225 <edge from-layer="1496" from-port="2" to-layer="1498" to-port="0"/>
34226 <edge from-layer="1497" from-port="0" to-layer="1498" to-port="1"/>
34227 <edge from-layer="1498" from-port="2" to-layer="1499" to-port="0"/>
34228 <edge from-layer="1466" from-port="0" to-layer="1499" to-port="1"/>
34229 <edge from-layer="1467" from-port="0" to-layer="1499" to-port="2"/>
34230 <edge from-layer="1468" from-port="0" to-layer="1499" to-port="3"/>
34231 <edge from-layer="1469" from-port="0" to-layer="1499" to-port="4"/>
34232 <edge from-layer="1500" from-port="0" to-layer="1501" to-port="0"/>
34233 <edge from-layer="1501" from-port="1" to-layer="1503" to-port="0"/>
34234 <edge from-layer="1502" from-port="0" to-layer="1503" to-port="1"/>
34235 <edge from-layer="1503" from-port="2" to-layer="1505" to-port="0"/>
34236 <edge from-layer="1504" from-port="0" to-layer="1505" to-port="1"/>
34237 <edge from-layer="1499" from-port="5" to-layer="1506" to-port="0"/>
34238 <edge from-layer="1505" from-port="2" to-layer="1506" to-port="1"/>
34239 <edge from-layer="1506" from-port="2" to-layer="1508" to-port="0"/>
34240 <edge from-layer="1507" from-port="0" to-layer="1508" to-port="1"/>
34241 <edge from-layer="1508" from-port="2" to-layer="1509" to-port="0"/>
34242 <edge from-layer="1462" from-port="0" to-layer="1509" to-port="1"/>
34243 <edge from-layer="1463" from-port="0" to-layer="1509" to-port="2"/>
34244 <edge from-layer="1464" from-port="0" to-layer="1509" to-port="3"/>
34245 <edge from-layer="1465" from-port="0" to-layer="1509" to-port="4"/>
34246 <edge from-layer="1461" from-port="5" to-layer="1510" to-port="0"/>
34247 <edge from-layer="1509" from-port="5" to-layer="1510" to-port="1"/>
34248 <edge from-layer="1510" from-port="2" to-layer="1512" to-port="0"/>
34249 <edge from-layer="1511" from-port="0" to-layer="1512" to-port="1"/>
34250 <edge from-layer="1512" from-port="2" to-layer="1513" to-port="0"/>
34251 <edge from-layer="44" from-port="0" to-layer="1513" to-port="1"/>
34252 <edge from-layer="45" from-port="0" to-layer="1513" to-port="2"/>
34253 <edge from-layer="46" from-port="0" to-layer="1513" to-port="3"/>
34254 <edge from-layer="47" from-port="0" to-layer="1513" to-port="4"/>
34255 <edge from-layer="1526" from-port="0" to-layer="1527" to-port="0"/>
34256 <edge from-layer="1527" from-port="1" to-layer="1529" to-port="0"/>
34257 <edge from-layer="1528" from-port="0" to-layer="1529" to-port="1"/>
34258 <edge from-layer="1529" from-port="2" to-layer="1531" to-port="0"/>
34259 <edge from-layer="1530" from-port="0" to-layer="1531" to-port="1"/>
34260 <edge from-layer="1513" from-port="5" to-layer="1532" to-port="0"/>
34261 <edge from-layer="1531" from-port="2" to-layer="1532" to-port="1"/>
34262 <edge from-layer="1532" from-port="2" to-layer="1534" to-port="0"/>
34263 <edge from-layer="1533" from-port="0" to-layer="1534" to-port="1"/>
34264 <edge from-layer="1534" from-port="2" to-layer="1536" to-port="0"/>
34265 <edge from-layer="1535" from-port="0" to-layer="1536" to-port="1"/>
34266 <edge from-layer="1536" from-port="2" to-layer="1537" to-port="0"/>
34267 <edge from-layer="1522" from-port="0" to-layer="1537" to-port="1"/>
34268 <edge from-layer="1523" from-port="0" to-layer="1537" to-port="2"/>
34269 <edge from-layer="1524" from-port="0" to-layer="1537" to-port="3"/>
34270 <edge from-layer="1525" from-port="0" to-layer="1537" to-port="4"/>
34271 <edge from-layer="1539" from-port="0" to-layer="1540" to-port="0"/>
34272 <edge from-layer="1540" from-port="1" to-layer="1542" to-port="0"/>
34273 <edge from-layer="1541" from-port="0" to-layer="1542" to-port="1"/>
34274 <edge from-layer="1542" from-port="2" to-layer="1544" to-port="0"/>
34275 <edge from-layer="1543" from-port="0" to-layer="1544" to-port="1"/>
34276 <edge from-layer="1544" from-port="2" to-layer="1545" to-port="0"/>
34277 <edge from-layer="1538" from-port="0" to-layer="1545" to-port="1"/>
34278 <edge from-layer="1537" from-port="5" to-layer="1546" to-port="0"/>
34279 <edge from-layer="1545" from-port="2" to-layer="1546" to-port="1"/>
34280 <edge from-layer="1546" from-port="2" to-layer="1548" to-port="0"/>
34281 <edge from-layer="1547" from-port="0" to-layer="1548" to-port="1"/>
34282 <edge from-layer="1548" from-port="2" to-layer="1550" to-port="0"/>
34283 <edge from-layer="1549" from-port="0" to-layer="1550" to-port="1"/>
34284 <edge from-layer="1550" from-port="2" to-layer="1551" to-port="0"/>
34285 <edge from-layer="1518" from-port="0" to-layer="1551" to-port="1"/>
34286 <edge from-layer="1519" from-port="0" to-layer="1551" to-port="2"/>
34287 <edge from-layer="1520" from-port="0" to-layer="1551" to-port="3"/>
34288 <edge from-layer="1521" from-port="0" to-layer="1551" to-port="4"/>
34289 <edge from-layer="1552" from-port="0" to-layer="1553" to-port="0"/>
34290 <edge from-layer="1553" from-port="1" to-layer="1555" to-port="0"/>
34291 <edge from-layer="1554" from-port="0" to-layer="1555" to-port="1"/>
34292 <edge from-layer="1555" from-port="2" to-layer="1557" to-port="0"/>
34293 <edge from-layer="1556" from-port="0" to-layer="1557" to-port="1"/>
34294 <edge from-layer="1551" from-port="5" to-layer="1558" to-port="0"/>
34295 <edge from-layer="1557" from-port="2" to-layer="1558" to-port="1"/>
34296 <edge from-layer="1558" from-port="2" to-layer="1560" to-port="0"/>
34297 <edge from-layer="1559" from-port="0" to-layer="1560" to-port="1"/>
34298 <edge from-layer="1560" from-port="2" to-layer="1561" to-port="0"/>
34299 <edge from-layer="1514" from-port="0" to-layer="1561" to-port="1"/>
34300 <edge from-layer="1515" from-port="0" to-layer="1561" to-port="2"/>
34301 <edge from-layer="1516" from-port="0" to-layer="1561" to-port="3"/>
34302 <edge from-layer="1517" from-port="0" to-layer="1561" to-port="4"/>
34303 <edge from-layer="1513" from-port="5" to-layer="1562" to-port="0"/>
34304 <edge from-layer="1561" from-port="5" to-layer="1562" to-port="1"/>
34305 <edge from-layer="1562" from-port="2" to-layer="1564" to-port="0"/>
34306 <edge from-layer="1563" from-port="0" to-layer="1564" to-port="1"/>
34307 <edge from-layer="1564" from-port="2" to-layer="1565" to-port="0"/>
34308 <edge from-layer="40" from-port="0" to-layer="1565" to-port="1"/>
34309 <edge from-layer="41" from-port="0" to-layer="1565" to-port="2"/>
34310 <edge from-layer="42" from-port="0" to-layer="1565" to-port="3"/>
34311 <edge from-layer="43" from-port="0" to-layer="1565" to-port="4"/>
34312 <edge from-layer="1578" from-port="0" to-layer="1579" to-port="0"/>
34313 <edge from-layer="1579" from-port="1" to-layer="1581" to-port="0"/>
34314 <edge from-layer="1580" from-port="0" to-layer="1581" to-port="1"/>
34315 <edge from-layer="1581" from-port="2" to-layer="1583" to-port="0"/>
34316 <edge from-layer="1582" from-port="0" to-layer="1583" to-port="1"/>
34317 <edge from-layer="1565" from-port="5" to-layer="1584" to-port="0"/>
34318 <edge from-layer="1583" from-port="2" to-layer="1584" to-port="1"/>
34319 <edge from-layer="1584" from-port="2" to-layer="1586" to-port="0"/>
34320 <edge from-layer="1585" from-port="0" to-layer="1586" to-port="1"/>
34321 <edge from-layer="1586" from-port="2" to-layer="1588" to-port="0"/>
34322 <edge from-layer="1587" from-port="0" to-layer="1588" to-port="1"/>
34323 <edge from-layer="1588" from-port="2" to-layer="1589" to-port="0"/>
34324 <edge from-layer="1574" from-port="0" to-layer="1589" to-port="1"/>
34325 <edge from-layer="1575" from-port="0" to-layer="1589" to-port="2"/>
34326 <edge from-layer="1576" from-port="0" to-layer="1589" to-port="3"/>
34327 <edge from-layer="1577" from-port="0" to-layer="1589" to-port="4"/>
34328 <edge from-layer="1591" from-port="0" to-layer="1592" to-port="0"/>
34329 <edge from-layer="1592" from-port="1" to-layer="1594" to-port="0"/>
34330 <edge from-layer="1593" from-port="0" to-layer="1594" to-port="1"/>
34331 <edge from-layer="1594" from-port="2" to-layer="1596" to-port="0"/>
34332 <edge from-layer="1595" from-port="0" to-layer="1596" to-port="1"/>
34333 <edge from-layer="1596" from-port="2" to-layer="1597" to-port="0"/>
34334 <edge from-layer="1590" from-port="0" to-layer="1597" to-port="1"/>
34335 <edge from-layer="1589" from-port="5" to-layer="1598" to-port="0"/>
34336 <edge from-layer="1597" from-port="2" to-layer="1598" to-port="1"/>
34337 <edge from-layer="1598" from-port="2" to-layer="1600" to-port="0"/>
34338 <edge from-layer="1599" from-port="0" to-layer="1600" to-port="1"/>
34339 <edge from-layer="1600" from-port="2" to-layer="1602" to-port="0"/>
34340 <edge from-layer="1601" from-port="0" to-layer="1602" to-port="1"/>
34341 <edge from-layer="1602" from-port="2" to-layer="1603" to-port="0"/>
34342 <edge from-layer="1570" from-port="0" to-layer="1603" to-port="1"/>
34343 <edge from-layer="1571" from-port="0" to-layer="1603" to-port="2"/>
34344 <edge from-layer="1572" from-port="0" to-layer="1603" to-port="3"/>
34345 <edge from-layer="1573" from-port="0" to-layer="1603" to-port="4"/>
34346 <edge from-layer="1604" from-port="0" to-layer="1605" to-port="0"/>
34347 <edge from-layer="1605" from-port="1" to-layer="1607" to-port="0"/>
34348 <edge from-layer="1606" from-port="0" to-layer="1607" to-port="1"/>
34349 <edge from-layer="1607" from-port="2" to-layer="1609" to-port="0"/>
34350 <edge from-layer="1608" from-port="0" to-layer="1609" to-port="1"/>
34351 <edge from-layer="1603" from-port="5" to-layer="1610" to-port="0"/>
34352 <edge from-layer="1609" from-port="2" to-layer="1610" to-port="1"/>
34353 <edge from-layer="1610" from-port="2" to-layer="1612" to-port="0"/>
34354 <edge from-layer="1611" from-port="0" to-layer="1612" to-port="1"/>
34355 <edge from-layer="1612" from-port="2" to-layer="1613" to-port="0"/>
34356 <edge from-layer="1566" from-port="0" to-layer="1613" to-port="1"/>
34357 <edge from-layer="1567" from-port="0" to-layer="1613" to-port="2"/>
34358 <edge from-layer="1568" from-port="0" to-layer="1613" to-port="3"/>
34359 <edge from-layer="1569" from-port="0" to-layer="1613" to-port="4"/>
34360 <edge from-layer="1565" from-port="5" to-layer="1614" to-port="0"/>
34361 <edge from-layer="1613" from-port="5" to-layer="1614" to-port="1"/>
34362 <edge from-layer="1614" from-port="2" to-layer="1616" to-port="0"/>
34363 <edge from-layer="1615" from-port="0" to-layer="1616" to-port="1"/>
34364 <edge from-layer="1616" from-port="2" to-layer="1617" to-port="0"/>
34365 <edge from-layer="36" from-port="0" to-layer="1617" to-port="1"/>
34366 <edge from-layer="37" from-port="0" to-layer="1617" to-port="2"/>
34367 <edge from-layer="38" from-port="0" to-layer="1617" to-port="3"/>
34368 <edge from-layer="39" from-port="0" to-layer="1617" to-port="4"/>
34369 <edge from-layer="1630" from-port="0" to-layer="1631" to-port="0"/>
34370 <edge from-layer="1631" from-port="1" to-layer="1633" to-port="0"/>
34371 <edge from-layer="1632" from-port="0" to-layer="1633" to-port="1"/>
34372 <edge from-layer="1633" from-port="2" to-layer="1635" to-port="0"/>
34373 <edge from-layer="1634" from-port="0" to-layer="1635" to-port="1"/>
34374 <edge from-layer="1617" from-port="5" to-layer="1636" to-port="0"/>
34375 <edge from-layer="1635" from-port="2" to-layer="1636" to-port="1"/>
34376 <edge from-layer="1636" from-port="2" to-layer="1638" to-port="0"/>
34377 <edge from-layer="1637" from-port="0" to-layer="1638" to-port="1"/>
34378 <edge from-layer="1638" from-port="2" to-layer="1640" to-port="0"/>
34379 <edge from-layer="1639" from-port="0" to-layer="1640" to-port="1"/>
34380 <edge from-layer="1640" from-port="2" to-layer="1641" to-port="0"/>
34381 <edge from-layer="1626" from-port="0" to-layer="1641" to-port="1"/>
34382 <edge from-layer="1627" from-port="0" to-layer="1641" to-port="2"/>
34383 <edge from-layer="1628" from-port="0" to-layer="1641" to-port="3"/>
34384 <edge from-layer="1629" from-port="0" to-layer="1641" to-port="4"/>
34385 <edge from-layer="1643" from-port="0" to-layer="1644" to-port="0"/>
34386 <edge from-layer="1644" from-port="1" to-layer="1646" to-port="0"/>
34387 <edge from-layer="1645" from-port="0" to-layer="1646" to-port="1"/>
34388 <edge from-layer="1646" from-port="2" to-layer="1648" to-port="0"/>
34389 <edge from-layer="1647" from-port="0" to-layer="1648" to-port="1"/>
34390 <edge from-layer="1648" from-port="2" to-layer="1649" to-port="0"/>
34391 <edge from-layer="1642" from-port="0" to-layer="1649" to-port="1"/>
34392 <edge from-layer="1641" from-port="5" to-layer="1650" to-port="0"/>
34393 <edge from-layer="1649" from-port="2" to-layer="1650" to-port="1"/>
34394 <edge from-layer="1650" from-port="2" to-layer="1652" to-port="0"/>
34395 <edge from-layer="1651" from-port="0" to-layer="1652" to-port="1"/>
34396 <edge from-layer="1652" from-port="2" to-layer="1654" to-port="0"/>
34397 <edge from-layer="1653" from-port="0" to-layer="1654" to-port="1"/>
34398 <edge from-layer="1654" from-port="2" to-layer="1655" to-port="0"/>
34399 <edge from-layer="1622" from-port="0" to-layer="1655" to-port="1"/>
34400 <edge from-layer="1623" from-port="0" to-layer="1655" to-port="2"/>
34401 <edge from-layer="1624" from-port="0" to-layer="1655" to-port="3"/>
34402 <edge from-layer="1625" from-port="0" to-layer="1655" to-port="4"/>
34403 <edge from-layer="1656" from-port="0" to-layer="1657" to-port="0"/>
34404 <edge from-layer="1657" from-port="1" to-layer="1659" to-port="0"/>
34405 <edge from-layer="1658" from-port="0" to-layer="1659" to-port="1"/>
34406 <edge from-layer="1659" from-port="2" to-layer="1661" to-port="0"/>
34407 <edge from-layer="1660" from-port="0" to-layer="1661" to-port="1"/>
34408 <edge from-layer="1655" from-port="5" to-layer="1662" to-port="0"/>
34409 <edge from-layer="1661" from-port="2" to-layer="1662" to-port="1"/>
34410 <edge from-layer="1662" from-port="2" to-layer="1664" to-port="0"/>
34411 <edge from-layer="1663" from-port="0" to-layer="1664" to-port="1"/>
34412 <edge from-layer="1664" from-port="2" to-layer="1665" to-port="0"/>
34413 <edge from-layer="1618" from-port="0" to-layer="1665" to-port="1"/>
34414 <edge from-layer="1619" from-port="0" to-layer="1665" to-port="2"/>
34415 <edge from-layer="1620" from-port="0" to-layer="1665" to-port="3"/>
34416 <edge from-layer="1621" from-port="0" to-layer="1665" to-port="4"/>
34417 <edge from-layer="1617" from-port="5" to-layer="1666" to-port="0"/>
34418 <edge from-layer="1665" from-port="5" to-layer="1666" to-port="1"/>
34419 <edge from-layer="1666" from-port="2" to-layer="1668" to-port="0"/>
34420 <edge from-layer="1667" from-port="0" to-layer="1668" to-port="1"/>
34421 <edge from-layer="1668" from-port="2" to-layer="1669" to-port="0"/>
34422 <edge from-layer="32" from-port="0" to-layer="1669" to-port="1"/>
34423 <edge from-layer="33" from-port="0" to-layer="1669" to-port="2"/>
34424 <edge from-layer="34" from-port="0" to-layer="1669" to-port="3"/>
34425 <edge from-layer="35" from-port="0" to-layer="1669" to-port="4"/>
34426 <edge from-layer="1682" from-port="0" to-layer="1683" to-port="0"/>
34427 <edge from-layer="1683" from-port="1" to-layer="1685" to-port="0"/>
34428 <edge from-layer="1684" from-port="0" to-layer="1685" to-port="1"/>
34429 <edge from-layer="1685" from-port="2" to-layer="1687" to-port="0"/>
34430 <edge from-layer="1686" from-port="0" to-layer="1687" to-port="1"/>
34431 <edge from-layer="1669" from-port="5" to-layer="1688" to-port="0"/>
34432 <edge from-layer="1687" from-port="2" to-layer="1688" to-port="1"/>
34433 <edge from-layer="1688" from-port="2" to-layer="1690" to-port="0"/>
34434 <edge from-layer="1689" from-port="0" to-layer="1690" to-port="1"/>
34435 <edge from-layer="1690" from-port="2" to-layer="1692" to-port="0"/>
34436 <edge from-layer="1691" from-port="0" to-layer="1692" to-port="1"/>
34437 <edge from-layer="1692" from-port="2" to-layer="1693" to-port="0"/>
34438 <edge from-layer="1678" from-port="0" to-layer="1693" to-port="1"/>
34439 <edge from-layer="1679" from-port="0" to-layer="1693" to-port="2"/>
34440 <edge from-layer="1680" from-port="0" to-layer="1693" to-port="3"/>
34441 <edge from-layer="1681" from-port="0" to-layer="1693" to-port="4"/>
34442 <edge from-layer="1695" from-port="0" to-layer="1696" to-port="0"/>
34443 <edge from-layer="1696" from-port="1" to-layer="1698" to-port="0"/>
34444 <edge from-layer="1697" from-port="0" to-layer="1698" to-port="1"/>
34445 <edge from-layer="1698" from-port="2" to-layer="1700" to-port="0"/>
34446 <edge from-layer="1699" from-port="0" to-layer="1700" to-port="1"/>
34447 <edge from-layer="1700" from-port="2" to-layer="1701" to-port="0"/>
34448 <edge from-layer="1694" from-port="0" to-layer="1701" to-port="1"/>
34449 <edge from-layer="1693" from-port="5" to-layer="1702" to-port="0"/>
34450 <edge from-layer="1701" from-port="2" to-layer="1702" to-port="1"/>
34451 <edge from-layer="1702" from-port="2" to-layer="1704" to-port="0"/>
34452 <edge from-layer="1703" from-port="0" to-layer="1704" to-port="1"/>
34453 <edge from-layer="1704" from-port="2" to-layer="1706" to-port="0"/>
34454 <edge from-layer="1705" from-port="0" to-layer="1706" to-port="1"/>
34455 <edge from-layer="1706" from-port="2" to-layer="1707" to-port="0"/>
34456 <edge from-layer="1674" from-port="0" to-layer="1707" to-port="1"/>
34457 <edge from-layer="1675" from-port="0" to-layer="1707" to-port="2"/>
34458 <edge from-layer="1676" from-port="0" to-layer="1707" to-port="3"/>
34459 <edge from-layer="1677" from-port="0" to-layer="1707" to-port="4"/>
34460 <edge from-layer="1708" from-port="0" to-layer="1709" to-port="0"/>
34461 <edge from-layer="1709" from-port="1" to-layer="1711" to-port="0"/>
34462 <edge from-layer="1710" from-port="0" to-layer="1711" to-port="1"/>
34463 <edge from-layer="1711" from-port="2" to-layer="1713" to-port="0"/>
34464 <edge from-layer="1712" from-port="0" to-layer="1713" to-port="1"/>
34465 <edge from-layer="1707" from-port="5" to-layer="1714" to-port="0"/>
34466 <edge from-layer="1713" from-port="2" to-layer="1714" to-port="1"/>
34467 <edge from-layer="1714" from-port="2" to-layer="1716" to-port="0"/>
34468 <edge from-layer="1715" from-port="0" to-layer="1716" to-port="1"/>
34469 <edge from-layer="1716" from-port="2" to-layer="1717" to-port="0"/>
34470 <edge from-layer="1670" from-port="0" to-layer="1717" to-port="1"/>
34471 <edge from-layer="1671" from-port="0" to-layer="1717" to-port="2"/>
34472 <edge from-layer="1672" from-port="0" to-layer="1717" to-port="3"/>
34473 <edge from-layer="1673" from-port="0" to-layer="1717" to-port="4"/>
34474 <edge from-layer="1669" from-port="5" to-layer="1718" to-port="0"/>
34475 <edge from-layer="1717" from-port="5" to-layer="1718" to-port="1"/>
34476 <edge from-layer="1718" from-port="2" to-layer="1720" to-port="0"/>
34477 <edge from-layer="1719" from-port="0" to-layer="1720" to-port="1"/>
34478 <edge from-layer="1720" from-port="2" to-layer="1721" to-port="0"/>
34479 <edge from-layer="28" from-port="0" to-layer="1721" to-port="1"/>
34480 <edge from-layer="29" from-port="0" to-layer="1721" to-port="2"/>
34481 <edge from-layer="30" from-port="0" to-layer="1721" to-port="3"/>
34482 <edge from-layer="31" from-port="0" to-layer="1721" to-port="4"/>
34483 <edge from-layer="1734" from-port="0" to-layer="1735" to-port="0"/>
34484 <edge from-layer="1735" from-port="1" to-layer="1737" to-port="0"/>
34485 <edge from-layer="1736" from-port="0" to-layer="1737" to-port="1"/>
34486 <edge from-layer="1737" from-port="2" to-layer="1739" to-port="0"/>
34487 <edge from-layer="1738" from-port="0" to-layer="1739" to-port="1"/>
34488 <edge from-layer="1721" from-port="5" to-layer="1740" to-port="0"/>
34489 <edge from-layer="1739" from-port="2" to-layer="1740" to-port="1"/>
34490 <edge from-layer="1740" from-port="2" to-layer="1742" to-port="0"/>
34491 <edge from-layer="1741" from-port="0" to-layer="1742" to-port="1"/>
34492 <edge from-layer="1742" from-port="2" to-layer="1744" to-port="0"/>
34493 <edge from-layer="1743" from-port="0" to-layer="1744" to-port="1"/>
34494 <edge from-layer="1744" from-port="2" to-layer="1745" to-port="0"/>
34495 <edge from-layer="1730" from-port="0" to-layer="1745" to-port="1"/>
34496 <edge from-layer="1731" from-port="0" to-layer="1745" to-port="2"/>
34497 <edge from-layer="1732" from-port="0" to-layer="1745" to-port="3"/>
34498 <edge from-layer="1733" from-port="0" to-layer="1745" to-port="4"/>
34499 <edge from-layer="1747" from-port="0" to-layer="1748" to-port="0"/>
34500 <edge from-layer="1748" from-port="1" to-layer="1750" to-port="0"/>
34501 <edge from-layer="1749" from-port="0" to-layer="1750" to-port="1"/>
34502 <edge from-layer="1750" from-port="2" to-layer="1752" to-port="0"/>
34503 <edge from-layer="1751" from-port="0" to-layer="1752" to-port="1"/>
34504 <edge from-layer="1752" from-port="2" to-layer="1753" to-port="0"/>
34505 <edge from-layer="1746" from-port="0" to-layer="1753" to-port="1"/>
34506 <edge from-layer="1745" from-port="5" to-layer="1754" to-port="0"/>
34507 <edge from-layer="1753" from-port="2" to-layer="1754" to-port="1"/>
34508 <edge from-layer="1754" from-port="2" to-layer="1756" to-port="0"/>
34509 <edge from-layer="1755" from-port="0" to-layer="1756" to-port="1"/>
34510 <edge from-layer="1756" from-port="2" to-layer="1758" to-port="0"/>
34511 <edge from-layer="1757" from-port="0" to-layer="1758" to-port="1"/>
34512 <edge from-layer="1758" from-port="2" to-layer="1759" to-port="0"/>
34513 <edge from-layer="1726" from-port="0" to-layer="1759" to-port="1"/>
34514 <edge from-layer="1727" from-port="0" to-layer="1759" to-port="2"/>
34515 <edge from-layer="1728" from-port="0" to-layer="1759" to-port="3"/>
34516 <edge from-layer="1729" from-port="0" to-layer="1759" to-port="4"/>
34517 <edge from-layer="1760" from-port="0" to-layer="1761" to-port="0"/>
34518 <edge from-layer="1761" from-port="1" to-layer="1763" to-port="0"/>
34519 <edge from-layer="1762" from-port="0" to-layer="1763" to-port="1"/>
34520 <edge from-layer="1763" from-port="2" to-layer="1765" to-port="0"/>
34521 <edge from-layer="1764" from-port="0" to-layer="1765" to-port="1"/>
34522 <edge from-layer="1759" from-port="5" to-layer="1766" to-port="0"/>
34523 <edge from-layer="1765" from-port="2" to-layer="1766" to-port="1"/>
34524 <edge from-layer="1766" from-port="2" to-layer="1768" to-port="0"/>
34525 <edge from-layer="1767" from-port="0" to-layer="1768" to-port="1"/>
34526 <edge from-layer="1768" from-port="2" to-layer="1769" to-port="0"/>
34527 <edge from-layer="1722" from-port="0" to-layer="1769" to-port="1"/>
34528 <edge from-layer="1723" from-port="0" to-layer="1769" to-port="2"/>
34529 <edge from-layer="1724" from-port="0" to-layer="1769" to-port="3"/>
34530 <edge from-layer="1725" from-port="0" to-layer="1769" to-port="4"/>
34531 <edge from-layer="1721" from-port="5" to-layer="1770" to-port="0"/>
34532 <edge from-layer="1769" from-port="5" to-layer="1770" to-port="1"/>
34533 <edge from-layer="1770" from-port="2" to-layer="1772" to-port="0"/>
34534 <edge from-layer="1771" from-port="0" to-layer="1772" to-port="1"/>
34535 <edge from-layer="1772" from-port="2" to-layer="1773" to-port="0"/>
34536 <edge from-layer="24" from-port="0" to-layer="1773" to-port="1"/>
34537 <edge from-layer="25" from-port="0" to-layer="1773" to-port="2"/>
34538 <edge from-layer="26" from-port="0" to-layer="1773" to-port="3"/>
34539 <edge from-layer="27" from-port="0" to-layer="1773" to-port="4"/>
34540 <edge from-layer="1786" from-port="0" to-layer="1787" to-port="0"/>
34541 <edge from-layer="1787" from-port="1" to-layer="1789" to-port="0"/>
34542 <edge from-layer="1788" from-port="0" to-layer="1789" to-port="1"/>
34543 <edge from-layer="1789" from-port="2" to-layer="1791" to-port="0"/>
34544 <edge from-layer="1790" from-port="0" to-layer="1791" to-port="1"/>
34545 <edge from-layer="1773" from-port="5" to-layer="1792" to-port="0"/>
34546 <edge from-layer="1791" from-port="2" to-layer="1792" to-port="1"/>
34547 <edge from-layer="1792" from-port="2" to-layer="1794" to-port="0"/>
34548 <edge from-layer="1793" from-port="0" to-layer="1794" to-port="1"/>
34549 <edge from-layer="1794" from-port="2" to-layer="1796" to-port="0"/>
34550 <edge from-layer="1795" from-port="0" to-layer="1796" to-port="1"/>
34551 <edge from-layer="1796" from-port="2" to-layer="1797" to-port="0"/>
34552 <edge from-layer="1782" from-port="0" to-layer="1797" to-port="1"/>
34553 <edge from-layer="1783" from-port="0" to-layer="1797" to-port="2"/>
34554 <edge from-layer="1784" from-port="0" to-layer="1797" to-port="3"/>
34555 <edge from-layer="1785" from-port="0" to-layer="1797" to-port="4"/>
34556 <edge from-layer="1799" from-port="0" to-layer="1800" to-port="0"/>
34557 <edge from-layer="1800" from-port="1" to-layer="1802" to-port="0"/>
34558 <edge from-layer="1801" from-port="0" to-layer="1802" to-port="1"/>
34559 <edge from-layer="1802" from-port="2" to-layer="1804" to-port="0"/>
34560 <edge from-layer="1803" from-port="0" to-layer="1804" to-port="1"/>
34561 <edge from-layer="1804" from-port="2" to-layer="1805" to-port="0"/>
34562 <edge from-layer="1798" from-port="0" to-layer="1805" to-port="1"/>
34563 <edge from-layer="1797" from-port="5" to-layer="1806" to-port="0"/>
34564 <edge from-layer="1805" from-port="2" to-layer="1806" to-port="1"/>
34565 <edge from-layer="1806" from-port="2" to-layer="1808" to-port="0"/>
34566 <edge from-layer="1807" from-port="0" to-layer="1808" to-port="1"/>
34567 <edge from-layer="1808" from-port="2" to-layer="1810" to-port="0"/>
34568 <edge from-layer="1809" from-port="0" to-layer="1810" to-port="1"/>
34569 <edge from-layer="1810" from-port="2" to-layer="1811" to-port="0"/>
34570 <edge from-layer="1778" from-port="0" to-layer="1811" to-port="1"/>
34571 <edge from-layer="1779" from-port="0" to-layer="1811" to-port="2"/>
34572 <edge from-layer="1780" from-port="0" to-layer="1811" to-port="3"/>
34573 <edge from-layer="1781" from-port="0" to-layer="1811" to-port="4"/>
34574 <edge from-layer="1812" from-port="0" to-layer="1813" to-port="0"/>
34575 <edge from-layer="1813" from-port="1" to-layer="1815" to-port="0"/>
34576 <edge from-layer="1814" from-port="0" to-layer="1815" to-port="1"/>
34577 <edge from-layer="1815" from-port="2" to-layer="1817" to-port="0"/>
34578 <edge from-layer="1816" from-port="0" to-layer="1817" to-port="1"/>
34579 <edge from-layer="1811" from-port="5" to-layer="1818" to-port="0"/>
34580 <edge from-layer="1817" from-port="2" to-layer="1818" to-port="1"/>
34581 <edge from-layer="1818" from-port="2" to-layer="1820" to-port="0"/>
34582 <edge from-layer="1819" from-port="0" to-layer="1820" to-port="1"/>
34583 <edge from-layer="1820" from-port="2" to-layer="1821" to-port="0"/>
34584 <edge from-layer="1774" from-port="0" to-layer="1821" to-port="1"/>
34585 <edge from-layer="1775" from-port="0" to-layer="1821" to-port="2"/>
34586 <edge from-layer="1776" from-port="0" to-layer="1821" to-port="3"/>
34587 <edge from-layer="1777" from-port="0" to-layer="1821" to-port="4"/>
34588 <edge from-layer="1773" from-port="5" to-layer="1822" to-port="0"/>
34589 <edge from-layer="1821" from-port="5" to-layer="1822" to-port="1"/>
34590 <edge from-layer="1822" from-port="2" to-layer="1824" to-port="0"/>
34591 <edge from-layer="1823" from-port="0" to-layer="1824" to-port="1"/>
34592 <edge from-layer="1824" from-port="2" to-layer="1825" to-port="0"/>
34593 <edge from-layer="20" from-port="0" to-layer="1825" to-port="1"/>
34594 <edge from-layer="21" from-port="0" to-layer="1825" to-port="2"/>
34595 <edge from-layer="22" from-port="0" to-layer="1825" to-port="3"/>
34596 <edge from-layer="23" from-port="0" to-layer="1825" to-port="4"/>
34597 <edge from-layer="1838" from-port="0" to-layer="1839" to-port="0"/>
34598 <edge from-layer="1839" from-port="1" to-layer="1841" to-port="0"/>
34599 <edge from-layer="1840" from-port="0" to-layer="1841" to-port="1"/>
34600 <edge from-layer="1841" from-port="2" to-layer="1843" to-port="0"/>
34601 <edge from-layer="1842" from-port="0" to-layer="1843" to-port="1"/>
34602 <edge from-layer="1825" from-port="5" to-layer="1844" to-port="0"/>
34603 <edge from-layer="1843" from-port="2" to-layer="1844" to-port="1"/>
34604 <edge from-layer="1844" from-port="2" to-layer="1846" to-port="0"/>
34605 <edge from-layer="1845" from-port="0" to-layer="1846" to-port="1"/>
34606 <edge from-layer="1846" from-port="2" to-layer="1848" to-port="0"/>
34607 <edge from-layer="1847" from-port="0" to-layer="1848" to-port="1"/>
34608 <edge from-layer="1848" from-port="2" to-layer="1849" to-port="0"/>
34609 <edge from-layer="1834" from-port="0" to-layer="1849" to-port="1"/>
34610 <edge from-layer="1835" from-port="0" to-layer="1849" to-port="2"/>
34611 <edge from-layer="1836" from-port="0" to-layer="1849" to-port="3"/>
34612 <edge from-layer="1837" from-port="0" to-layer="1849" to-port="4"/>
34613 <edge from-layer="1851" from-port="0" to-layer="1852" to-port="0"/>
34614 <edge from-layer="1852" from-port="1" to-layer="1854" to-port="0"/>
34615 <edge from-layer="1853" from-port="0" to-layer="1854" to-port="1"/>
34616 <edge from-layer="1854" from-port="2" to-layer="1856" to-port="0"/>
34617 <edge from-layer="1855" from-port="0" to-layer="1856" to-port="1"/>
34618 <edge from-layer="1856" from-port="2" to-layer="1857" to-port="0"/>
34619 <edge from-layer="1850" from-port="0" to-layer="1857" to-port="1"/>
34620 <edge from-layer="1849" from-port="5" to-layer="1858" to-port="0"/>
34621 <edge from-layer="1857" from-port="2" to-layer="1858" to-port="1"/>
34622 <edge from-layer="1858" from-port="2" to-layer="1860" to-port="0"/>
34623 <edge from-layer="1859" from-port="0" to-layer="1860" to-port="1"/>
34624 <edge from-layer="1860" from-port="2" to-layer="1862" to-port="0"/>
34625 <edge from-layer="1861" from-port="0" to-layer="1862" to-port="1"/>
34626 <edge from-layer="1862" from-port="2" to-layer="1863" to-port="0"/>
34627 <edge from-layer="1830" from-port="0" to-layer="1863" to-port="1"/>
34628 <edge from-layer="1831" from-port="0" to-layer="1863" to-port="2"/>
34629 <edge from-layer="1832" from-port="0" to-layer="1863" to-port="3"/>
34630 <edge from-layer="1833" from-port="0" to-layer="1863" to-port="4"/>
34631 <edge from-layer="1864" from-port="0" to-layer="1865" to-port="0"/>
34632 <edge from-layer="1865" from-port="1" to-layer="1867" to-port="0"/>
34633 <edge from-layer="1866" from-port="0" to-layer="1867" to-port="1"/>
34634 <edge from-layer="1867" from-port="2" to-layer="1869" to-port="0"/>
34635 <edge from-layer="1868" from-port="0" to-layer="1869" to-port="1"/>
34636 <edge from-layer="1863" from-port="5" to-layer="1870" to-port="0"/>
34637 <edge from-layer="1869" from-port="2" to-layer="1870" to-port="1"/>
34638 <edge from-layer="1870" from-port="2" to-layer="1872" to-port="0"/>
34639 <edge from-layer="1871" from-port="0" to-layer="1872" to-port="1"/>
34640 <edge from-layer="1872" from-port="2" to-layer="1873" to-port="0"/>
34641 <edge from-layer="1826" from-port="0" to-layer="1873" to-port="1"/>
34642 <edge from-layer="1827" from-port="0" to-layer="1873" to-port="2"/>
34643 <edge from-layer="1828" from-port="0" to-layer="1873" to-port="3"/>
34644 <edge from-layer="1829" from-port="0" to-layer="1873" to-port="4"/>
34645 <edge from-layer="1825" from-port="5" to-layer="1874" to-port="0"/>
34646 <edge from-layer="1873" from-port="5" to-layer="1874" to-port="1"/>
34647 <edge from-layer="1874" from-port="2" to-layer="1876" to-port="0"/>
34648 <edge from-layer="1875" from-port="0" to-layer="1876" to-port="1"/>
34649 <edge from-layer="1876" from-port="2" to-layer="1877" to-port="0"/>
34650 <edge from-layer="16" from-port="0" to-layer="1877" to-port="1"/>
34651 <edge from-layer="17" from-port="0" to-layer="1877" to-port="2"/>
34652 <edge from-layer="18" from-port="0" to-layer="1877" to-port="3"/>
34653 <edge from-layer="19" from-port="0" to-layer="1877" to-port="4"/>
34654 <edge from-layer="1890" from-port="0" to-layer="1891" to-port="0"/>
34655 <edge from-layer="1891" from-port="1" to-layer="1893" to-port="0"/>
34656 <edge from-layer="1892" from-port="0" to-layer="1893" to-port="1"/>
34657 <edge from-layer="1893" from-port="2" to-layer="1895" to-port="0"/>
34658 <edge from-layer="1894" from-port="0" to-layer="1895" to-port="1"/>
34659 <edge from-layer="1877" from-port="5" to-layer="1896" to-port="0"/>
34660 <edge from-layer="1895" from-port="2" to-layer="1896" to-port="1"/>
34661 <edge from-layer="1896" from-port="2" to-layer="1898" to-port="0"/>
34662 <edge from-layer="1897" from-port="0" to-layer="1898" to-port="1"/>
34663 <edge from-layer="1898" from-port="2" to-layer="1900" to-port="0"/>
34664 <edge from-layer="1899" from-port="0" to-layer="1900" to-port="1"/>
34665 <edge from-layer="1900" from-port="2" to-layer="1901" to-port="0"/>
34666 <edge from-layer="1886" from-port="0" to-layer="1901" to-port="1"/>
34667 <edge from-layer="1887" from-port="0" to-layer="1901" to-port="2"/>
34668 <edge from-layer="1888" from-port="0" to-layer="1901" to-port="3"/>
34669 <edge from-layer="1889" from-port="0" to-layer="1901" to-port="4"/>
34670 <edge from-layer="1903" from-port="0" to-layer="1904" to-port="0"/>
34671 <edge from-layer="1904" from-port="1" to-layer="1906" to-port="0"/>
34672 <edge from-layer="1905" from-port="0" to-layer="1906" to-port="1"/>
34673 <edge from-layer="1906" from-port="2" to-layer="1908" to-port="0"/>
34674 <edge from-layer="1907" from-port="0" to-layer="1908" to-port="1"/>
34675 <edge from-layer="1908" from-port="2" to-layer="1909" to-port="0"/>
34676 <edge from-layer="1902" from-port="0" to-layer="1909" to-port="1"/>
34677 <edge from-layer="1901" from-port="5" to-layer="1910" to-port="0"/>
34678 <edge from-layer="1909" from-port="2" to-layer="1910" to-port="1"/>
34679 <edge from-layer="1910" from-port="2" to-layer="1912" to-port="0"/>
34680 <edge from-layer="1911" from-port="0" to-layer="1912" to-port="1"/>
34681 <edge from-layer="1912" from-port="2" to-layer="1914" to-port="0"/>
34682 <edge from-layer="1913" from-port="0" to-layer="1914" to-port="1"/>
34683 <edge from-layer="1914" from-port="2" to-layer="1915" to-port="0"/>
34684 <edge from-layer="1882" from-port="0" to-layer="1915" to-port="1"/>
34685 <edge from-layer="1883" from-port="0" to-layer="1915" to-port="2"/>
34686 <edge from-layer="1884" from-port="0" to-layer="1915" to-port="3"/>
34687 <edge from-layer="1885" from-port="0" to-layer="1915" to-port="4"/>
34688 <edge from-layer="1916" from-port="0" to-layer="1917" to-port="0"/>
34689 <edge from-layer="1917" from-port="1" to-layer="1919" to-port="0"/>
34690 <edge from-layer="1918" from-port="0" to-layer="1919" to-port="1"/>
34691 <edge from-layer="1919" from-port="2" to-layer="1921" to-port="0"/>
34692 <edge from-layer="1920" from-port="0" to-layer="1921" to-port="1"/>
34693 <edge from-layer="1915" from-port="5" to-layer="1922" to-port="0"/>
34694 <edge from-layer="1921" from-port="2" to-layer="1922" to-port="1"/>
34695 <edge from-layer="1922" from-port="2" to-layer="1924" to-port="0"/>
34696 <edge from-layer="1923" from-port="0" to-layer="1924" to-port="1"/>
34697 <edge from-layer="1924" from-port="2" to-layer="1925" to-port="0"/>
34698 <edge from-layer="1878" from-port="0" to-layer="1925" to-port="1"/>
34699 <edge from-layer="1879" from-port="0" to-layer="1925" to-port="2"/>
34700 <edge from-layer="1880" from-port="0" to-layer="1925" to-port="3"/>
34701 <edge from-layer="1881" from-port="0" to-layer="1925" to-port="4"/>
34702 <edge from-layer="1877" from-port="5" to-layer="1926" to-port="0"/>
34703 <edge from-layer="1925" from-port="5" to-layer="1926" to-port="1"/>
34704 <edge from-layer="1926" from-port="2" to-layer="1928" to-port="0"/>
34705 <edge from-layer="1927" from-port="0" to-layer="1928" to-port="1"/>
34706 <edge from-layer="1928" from-port="2" to-layer="1929" to-port="0"/>
34707 <edge from-layer="12" from-port="0" to-layer="1929" to-port="1"/>
34708 <edge from-layer="13" from-port="0" to-layer="1929" to-port="2"/>
34709 <edge from-layer="14" from-port="0" to-layer="1929" to-port="3"/>
34710 <edge from-layer="15" from-port="0" to-layer="1929" to-port="4"/>
34711 <edge from-layer="1942" from-port="0" to-layer="1943" to-port="0"/>
34712 <edge from-layer="1943" from-port="1" to-layer="1945" to-port="0"/>
34713 <edge from-layer="1944" from-port="0" to-layer="1945" to-port="1"/>
34714 <edge from-layer="1945" from-port="2" to-layer="1947" to-port="0"/>
34715 <edge from-layer="1946" from-port="0" to-layer="1947" to-port="1"/>
34716 <edge from-layer="1929" from-port="5" to-layer="1948" to-port="0"/>
34717 <edge from-layer="1947" from-port="2" to-layer="1948" to-port="1"/>
34718 <edge from-layer="1948" from-port="2" to-layer="1950" to-port="0"/>
34719 <edge from-layer="1949" from-port="0" to-layer="1950" to-port="1"/>
34720 <edge from-layer="1950" from-port="2" to-layer="1952" to-port="0"/>
34721 <edge from-layer="1951" from-port="0" to-layer="1952" to-port="1"/>
34722 <edge from-layer="1952" from-port="2" to-layer="1953" to-port="0"/>
34723 <edge from-layer="1938" from-port="0" to-layer="1953" to-port="1"/>
34724 <edge from-layer="1939" from-port="0" to-layer="1953" to-port="2"/>
34725 <edge from-layer="1940" from-port="0" to-layer="1953" to-port="3"/>
34726 <edge from-layer="1941" from-port="0" to-layer="1953" to-port="4"/>
34727 <edge from-layer="1955" from-port="0" to-layer="1956" to-port="0"/>
34728 <edge from-layer="1956" from-port="1" to-layer="1958" to-port="0"/>
34729 <edge from-layer="1957" from-port="0" to-layer="1958" to-port="1"/>
34730 <edge from-layer="1958" from-port="2" to-layer="1960" to-port="0"/>
34731 <edge from-layer="1959" from-port="0" to-layer="1960" to-port="1"/>
34732 <edge from-layer="1960" from-port="2" to-layer="1961" to-port="0"/>
34733 <edge from-layer="1954" from-port="0" to-layer="1961" to-port="1"/>
34734 <edge from-layer="1953" from-port="5" to-layer="1962" to-port="0"/>
34735 <edge from-layer="1961" from-port="2" to-layer="1962" to-port="1"/>
34736 <edge from-layer="1962" from-port="2" to-layer="1964" to-port="0"/>
34737 <edge from-layer="1963" from-port="0" to-layer="1964" to-port="1"/>
34738 <edge from-layer="1964" from-port="2" to-layer="1966" to-port="0"/>
34739 <edge from-layer="1965" from-port="0" to-layer="1966" to-port="1"/>
34740 <edge from-layer="1966" from-port="2" to-layer="1967" to-port="0"/>
34741 <edge from-layer="1934" from-port="0" to-layer="1967" to-port="1"/>
34742 <edge from-layer="1935" from-port="0" to-layer="1967" to-port="2"/>
34743 <edge from-layer="1936" from-port="0" to-layer="1967" to-port="3"/>
34744 <edge from-layer="1937" from-port="0" to-layer="1967" to-port="4"/>
34745 <edge from-layer="1968" from-port="0" to-layer="1969" to-port="0"/>
34746 <edge from-layer="1969" from-port="1" to-layer="1971" to-port="0"/>
34747 <edge from-layer="1970" from-port="0" to-layer="1971" to-port="1"/>
34748 <edge from-layer="1971" from-port="2" to-layer="1973" to-port="0"/>
34749 <edge from-layer="1972" from-port="0" to-layer="1973" to-port="1"/>
34750 <edge from-layer="1967" from-port="5" to-layer="1974" to-port="0"/>
34751 <edge from-layer="1973" from-port="2" to-layer="1974" to-port="1"/>
34752 <edge from-layer="1974" from-port="2" to-layer="1976" to-port="0"/>
34753 <edge from-layer="1975" from-port="0" to-layer="1976" to-port="1"/>
34754 <edge from-layer="1976" from-port="2" to-layer="1977" to-port="0"/>
34755 <edge from-layer="1930" from-port="0" to-layer="1977" to-port="1"/>
34756 <edge from-layer="1931" from-port="0" to-layer="1977" to-port="2"/>
34757 <edge from-layer="1932" from-port="0" to-layer="1977" to-port="3"/>
34758 <edge from-layer="1933" from-port="0" to-layer="1977" to-port="4"/>
34759 <edge from-layer="1929" from-port="5" to-layer="1978" to-port="0"/>
34760 <edge from-layer="1977" from-port="5" to-layer="1978" to-port="1"/>
34761 <edge from-layer="1978" from-port="2" to-layer="1980" to-port="0"/>
34762 <edge from-layer="1979" from-port="0" to-layer="1980" to-port="1"/>
34763 <edge from-layer="1980" from-port="2" to-layer="1981" to-port="0"/>
34764 <edge from-layer="8" from-port="0" to-layer="1981" to-port="1"/>
34765 <edge from-layer="9" from-port="0" to-layer="1981" to-port="2"/>
34766 <edge from-layer="10" from-port="0" to-layer="1981" to-port="3"/>
34767 <edge from-layer="11" from-port="0" to-layer="1981" to-port="4"/>
34768 <edge from-layer="1994" from-port="0" to-layer="1995" to-port="0"/>
34769 <edge from-layer="1995" from-port="1" to-layer="1997" to-port="0"/>
34770 <edge from-layer="1996" from-port="0" to-layer="1997" to-port="1"/>
34771 <edge from-layer="1997" from-port="2" to-layer="1999" to-port="0"/>
34772 <edge from-layer="1998" from-port="0" to-layer="1999" to-port="1"/>
34773 <edge from-layer="1981" from-port="5" to-layer="2000" to-port="0"/>
34774 <edge from-layer="1999" from-port="2" to-layer="2000" to-port="1"/>
34775 <edge from-layer="2000" from-port="2" to-layer="2002" to-port="0"/>
34776 <edge from-layer="2001" from-port="0" to-layer="2002" to-port="1"/>
34777 <edge from-layer="2002" from-port="2" to-layer="2004" to-port="0"/>
34778 <edge from-layer="2003" from-port="0" to-layer="2004" to-port="1"/>
34779 <edge from-layer="2004" from-port="2" to-layer="2005" to-port="0"/>
34780 <edge from-layer="1990" from-port="0" to-layer="2005" to-port="1"/>
34781 <edge from-layer="1991" from-port="0" to-layer="2005" to-port="2"/>
34782 <edge from-layer="1992" from-port="0" to-layer="2005" to-port="3"/>
34783 <edge from-layer="1993" from-port="0" to-layer="2005" to-port="4"/>
34784 <edge from-layer="2007" from-port="0" to-layer="2008" to-port="0"/>
34785 <edge from-layer="2008" from-port="1" to-layer="2010" to-port="0"/>
34786 <edge from-layer="2009" from-port="0" to-layer="2010" to-port="1"/>
34787 <edge from-layer="2010" from-port="2" to-layer="2012" to-port="0"/>
34788 <edge from-layer="2011" from-port="0" to-layer="2012" to-port="1"/>
34789 <edge from-layer="2012" from-port="2" to-layer="2013" to-port="0"/>
34790 <edge from-layer="2006" from-port="0" to-layer="2013" to-port="1"/>
34791 <edge from-layer="2005" from-port="5" to-layer="2014" to-port="0"/>
34792 <edge from-layer="2013" from-port="2" to-layer="2014" to-port="1"/>
34793 <edge from-layer="2014" from-port="2" to-layer="2016" to-port="0"/>
34794 <edge from-layer="2015" from-port="0" to-layer="2016" to-port="1"/>
34795 <edge from-layer="2016" from-port="2" to-layer="2018" to-port="0"/>
34796 <edge from-layer="2017" from-port="0" to-layer="2018" to-port="1"/>
34797 <edge from-layer="2018" from-port="2" to-layer="2019" to-port="0"/>
34798 <edge from-layer="1986" from-port="0" to-layer="2019" to-port="1"/>
34799 <edge from-layer="1987" from-port="0" to-layer="2019" to-port="2"/>
34800 <edge from-layer="1988" from-port="0" to-layer="2019" to-port="3"/>
34801 <edge from-layer="1989" from-port="0" to-layer="2019" to-port="4"/>
34802 <edge from-layer="2020" from-port="0" to-layer="2021" to-port="0"/>
34803 <edge from-layer="2021" from-port="1" to-layer="2023" to-port="0"/>
34804 <edge from-layer="2022" from-port="0" to-layer="2023" to-port="1"/>
34805 <edge from-layer="2023" from-port="2" to-layer="2025" to-port="0"/>
34806 <edge from-layer="2024" from-port="0" to-layer="2025" to-port="1"/>
34807 <edge from-layer="2019" from-port="5" to-layer="2026" to-port="0"/>
34808 <edge from-layer="2025" from-port="2" to-layer="2026" to-port="1"/>
34809 <edge from-layer="2026" from-port="2" to-layer="2028" to-port="0"/>
34810 <edge from-layer="2027" from-port="0" to-layer="2028" to-port="1"/>
34811 <edge from-layer="2028" from-port="2" to-layer="2029" to-port="0"/>
34812 <edge from-layer="1982" from-port="0" to-layer="2029" to-port="1"/>
34813 <edge from-layer="1983" from-port="0" to-layer="2029" to-port="2"/>
34814 <edge from-layer="1984" from-port="0" to-layer="2029" to-port="3"/>
34815 <edge from-layer="1985" from-port="0" to-layer="2029" to-port="4"/>
34816 <edge from-layer="1981" from-port="5" to-layer="2030" to-port="0"/>
34817 <edge from-layer="2029" from-port="5" to-layer="2030" to-port="1"/>
34818 <edge from-layer="2030" from-port="2" to-layer="2032" to-port="0"/>
34819 <edge from-layer="2031" from-port="0" to-layer="2032" to-port="1"/>
34820 <edge from-layer="2032" from-port="2" to-layer="2033" to-port="0"/>
34821 <edge from-layer="4" from-port="0" to-layer="2033" to-port="1"/>
34822 <edge from-layer="5" from-port="0" to-layer="2033" to-port="2"/>
34823 <edge from-layer="6" from-port="0" to-layer="2033" to-port="3"/>
34824 <edge from-layer="7" from-port="0" to-layer="2033" to-port="4"/>
34825 <edge from-layer="2046" from-port="0" to-layer="2047" to-port="0"/>
34826 <edge from-layer="2047" from-port="1" to-layer="2049" to-port="0"/>
34827 <edge from-layer="2048" from-port="0" to-layer="2049" to-port="1"/>
34828 <edge from-layer="2049" from-port="2" to-layer="2051" to-port="0"/>
34829 <edge from-layer="2050" from-port="0" to-layer="2051" to-port="1"/>
34830 <edge from-layer="2033" from-port="5" to-layer="2052" to-port="0"/>
34831 <edge from-layer="2051" from-port="2" to-layer="2052" to-port="1"/>
34832 <edge from-layer="2052" from-port="2" to-layer="2054" to-port="0"/>
34833 <edge from-layer="2053" from-port="0" to-layer="2054" to-port="1"/>
34834 <edge from-layer="2054" from-port="2" to-layer="2056" to-port="0"/>
34835 <edge from-layer="2055" from-port="0" to-layer="2056" to-port="1"/>
34836 <edge from-layer="2056" from-port="2" to-layer="2057" to-port="0"/>
34837 <edge from-layer="2042" from-port="0" to-layer="2057" to-port="1"/>
34838 <edge from-layer="2043" from-port="0" to-layer="2057" to-port="2"/>
34839 <edge from-layer="2044" from-port="0" to-layer="2057" to-port="3"/>
34840 <edge from-layer="2045" from-port="0" to-layer="2057" to-port="4"/>
34841 <edge from-layer="2059" from-port="0" to-layer="2060" to-port="0"/>
34842 <edge from-layer="2060" from-port="1" to-layer="2062" to-port="0"/>
34843 <edge from-layer="2061" from-port="0" to-layer="2062" to-port="1"/>
34844 <edge from-layer="2062" from-port="2" to-layer="2064" to-port="0"/>
34845 <edge from-layer="2063" from-port="0" to-layer="2064" to-port="1"/>
34846 <edge from-layer="2064" from-port="2" to-layer="2065" to-port="0"/>
34847 <edge from-layer="2058" from-port="0" to-layer="2065" to-port="1"/>
34848 <edge from-layer="2057" from-port="5" to-layer="2066" to-port="0"/>
34849 <edge from-layer="2065" from-port="2" to-layer="2066" to-port="1"/>
34850 <edge from-layer="2066" from-port="2" to-layer="2068" to-port="0"/>
34851 <edge from-layer="2067" from-port="0" to-layer="2068" to-port="1"/>
34852 <edge from-layer="2068" from-port="2" to-layer="2070" to-port="0"/>
34853 <edge from-layer="2069" from-port="0" to-layer="2070" to-port="1"/>
34854 <edge from-layer="2070" from-port="2" to-layer="2071" to-port="0"/>
34855 <edge from-layer="2038" from-port="0" to-layer="2071" to-port="1"/>
34856 <edge from-layer="2039" from-port="0" to-layer="2071" to-port="2"/>
34857 <edge from-layer="2040" from-port="0" to-layer="2071" to-port="3"/>
34858 <edge from-layer="2041" from-port="0" to-layer="2071" to-port="4"/>
34859 <edge from-layer="2072" from-port="0" to-layer="2073" to-port="0"/>
34860 <edge from-layer="2073" from-port="1" to-layer="2075" to-port="0"/>
34861 <edge from-layer="2074" from-port="0" to-layer="2075" to-port="1"/>
34862 <edge from-layer="2075" from-port="2" to-layer="2077" to-port="0"/>
34863 <edge from-layer="2076" from-port="0" to-layer="2077" to-port="1"/>
34864 <edge from-layer="2071" from-port="5" to-layer="2078" to-port="0"/>
34865 <edge from-layer="2077" from-port="2" to-layer="2078" to-port="1"/>
34866 <edge from-layer="2078" from-port="2" to-layer="2080" to-port="0"/>
34867 <edge from-layer="2079" from-port="0" to-layer="2080" to-port="1"/>
34868 <edge from-layer="2080" from-port="2" to-layer="2081" to-port="0"/>
34869 <edge from-layer="2034" from-port="0" to-layer="2081" to-port="1"/>
34870 <edge from-layer="2035" from-port="0" to-layer="2081" to-port="2"/>
34871 <edge from-layer="2036" from-port="0" to-layer="2081" to-port="3"/>
34872 <edge from-layer="2037" from-port="0" to-layer="2081" to-port="4"/>
34873 <edge from-layer="2033" from-port="5" to-layer="2082" to-port="0"/>
34874 <edge from-layer="2081" from-port="5" to-layer="2082" to-port="1"/>
34875 <edge from-layer="2082" from-port="2" to-layer="2084" to-port="0"/>
34876 <edge from-layer="2083" from-port="0" to-layer="2084" to-port="1"/>
34877 <edge from-layer="2084" from-port="2" to-layer="2085" to-port="0"/>
34878 <edge from-layer="0" from-port="0" to-layer="2085" to-port="1"/>
34879 <edge from-layer="1" from-port="0" to-layer="2085" to-port="2"/>
34880 <edge from-layer="2" from-port="0" to-layer="2085" to-port="3"/>
34881 <edge from-layer="3" from-port="0" to-layer="2085" to-port="4"/>
34882 <edge from-layer="2086" from-port="0" to-layer="2087" to-port="0"/>
34883 <edge from-layer="2087" from-port="1" to-layer="2089" to-port="0"/>
34884 <edge from-layer="2088" from-port="0" to-layer="2089" to-port="1"/>
34885 <edge from-layer="2089" from-port="2" to-layer="2091" to-port="0"/>
34886 <edge from-layer="2090" from-port="0" to-layer="2091" to-port="1"/>
34887 <edge from-layer="2085" from-port="5" to-layer="2092" to-port="0"/>
34888 <edge from-layer="2091" from-port="2" to-layer="2092" to-port="1"/>
34889 <edge from-layer="2092" from-port="2" to-layer="2094" to-port="0"/>
34890 <edge from-layer="2093" from-port="0" to-layer="2094" to-port="1"/>
34891 <edge from-layer="2094" from-port="2" to-layer="2096" to-port="0"/>
34892 <edge from-layer="2095" from-port="0" to-layer="2096" to-port="1"/>
34893 <edge from-layer="2096" from-port="2" to-layer="2098" to-port="0"/>
34894 <edge from-layer="2097" from-port="0" to-layer="2098" to-port="1"/>
34895 <edge from-layer="2099" from-port="0" to-layer="2100" to-port="0"/>
34896 <edge from-layer="2100" from-port="1" to-layer="2102" to-port="0"/>
34897 <edge from-layer="2101" from-port="0" to-layer="2102" to-port="1"/>
34898 <edge from-layer="2102" from-port="2" to-layer="2104" to-port="0"/>
34899 <edge from-layer="2103" from-port="0" to-layer="2104" to-port="1"/>
34900 <edge from-layer="2085" from-port="5" to-layer="2105" to-port="0"/>
34901 <edge from-layer="2104" from-port="2" to-layer="2105" to-port="1"/>
34902 <edge from-layer="2105" from-port="2" to-layer="2107" to-port="0"/>
34903 <edge from-layer="2106" from-port="0" to-layer="2107" to-port="1"/>
34904 <edge from-layer="2107" from-port="2" to-layer="2109" to-port="0"/>
34905 <edge from-layer="2108" from-port="0" to-layer="2109" to-port="1"/>
34906 <edge from-layer="2109" from-port="2" to-layer="2111" to-port="0"/>
34907 <edge from-layer="2110" from-port="0" to-layer="2111" to-port="1"/>
34908 <edge from-layer="2111" from-port="2" to-layer="2113" to-port="0"/>
34909 <edge from-layer="2112" from-port="0" to-layer="2113" to-port="1"/>
34910 <edge from-layer="2113" from-port="2" to-layer="2114" to-port="0"/>
34911 <edge from-layer="2114" from-port="1" to-layer="2116" to-port="0"/>
34912 <edge from-layer="2115" from-port="0" to-layer="2116" to-port="1"/>
34913 <edge from-layer="2084" from-port="2" to-layer="2117" to-port="0"/>
34914 <edge from-layer="2117" from-port="1" to-layer="2121" to-port="0"/>
34915 <edge from-layer="2118" from-port="0" to-layer="2121" to-port="1"/>
34916 <edge from-layer="2119" from-port="0" to-layer="2121" to-port="2"/>
34917 <edge from-layer="2120" from-port="0" to-layer="2121" to-port="3"/>
34918 <edge from-layer="164" from-port="0" to-layer="2122" to-port="0"/>
34919 <edge from-layer="2122" from-port="1" to-layer="2126" to-port="0"/>
34920 <edge from-layer="2123" from-port="0" to-layer="2126" to-port="1"/>
34921 <edge from-layer="2124" from-port="0" to-layer="2126" to-port="2"/>
34922 <edge from-layer="2125" from-port="0" to-layer="2126" to-port="3"/>
34923 <edge from-layer="2121" from-port="4" to-layer="2127" to-port="0"/>
34924 <edge from-layer="2126" from-port="4" to-layer="2127" to-port="1"/>
34925 <edge from-layer="2127" from-port="2" to-layer="2129" to-port="0"/>
34926 <edge from-layer="2128" from-port="0" to-layer="2129" to-port="1"/>
34927 <edge from-layer="2098" from-port="2" to-layer="2130" to-port="0"/>
34928 <edge from-layer="2116" from-port="2" to-layer="2130" to-port="1"/>
34929 <edge from-layer="2129" from-port="2" to-layer="2130" to-port="2"/>
34930 <edge from-layer="2130" from-port="3" to-layer="2131" to-port="0"/>
34931 </edges>
    <meta_data>
        <MO_version value="custom_HEAD_149c43044cb1e8ed8cd4f3f196b23f7b3f129a36"/>
        <cli_parameters>
            <caffe_parser_path value="DIR"/>
            <data_type value="FP16"/>
            <disable_nhwc_to_nchw value="False"/>
            <disable_omitting_optional value="False"/>
            <disable_resnet_optimization value="False"/>
            <disable_weights_compression value="False"/>
            <enable_concat_optimization value="False"/>
            <enable_flattening_nested_params value="False"/>
            <enable_ssd_gluoncv value="False"/>
            <extensions value="DIR"/>
            <framework value="caffe"/>
            <freeze_placeholder_with_value value="{}"/>
            <generate_deprecated_IR_V7 value="False"/>
            <input value="data"/>
            <input_model value="DIR/rmnet_lrelu_pd_ssd.caffemodel"/>
            <input_model_is_text value="False"/>
            <input_proto value="DIR/rmnet_lrelu_pd_ssd.prototxt"/>
            <input_shape value="[1,3,320,544]"/>
            <k value="DIR/CustomLayersMapping.xml"/>
            <keep_shape_ops value="True"/>
            <legacy_ir_generation value="False"/>
            <legacy_mxnet_model value="False"/>
            <log_level value="ERROR"/>
            <mean_scale_values value="{}"/>
            <mean_values value="()"/>
            <model_name value="person-detection-retail-0013"/>
            <output value="['detection_out']"/>
            <output_dir value="DIR"/>
            <placeholder_data_types value="{}"/>
            <placeholder_shapes value="{'data': array([ 1, 3, 320, 544])}"/>
            <progress value="False"/>
            <remove_memory value="False"/>
            <remove_output_softmax value="False"/>
            <reverse_input_channels value="False"/>
            <save_params_from_nd value="False"/>
            <scale_values value="()"/>
            <silent value="False"/>
            <static_shape value="False"/>
            <stream_output value="False"/>
            <transform value=""/>
            <unset unset_cli_parameters="batch, counts, disable_fusing, disable_gfusing, finegrain_fusing, input_checkpoint, input_meta_graph, input_symbol, mean_file, mean_file_offsets, move_to_preprocess, nd_prefix_name, pretrained_model_name, saved_model_dir, saved_model_tags, scale, tensorboard_logdir, tensorflow_custom_layer_libraries, tensorflow_custom_operations_config_update, tensorflow_object_detection_api_pipeline_config, tensorflow_use_custom_operations_config, transformations_config"/>
        </cli_parameters>
    </meta_data>
    <quantization_parameters>
        <config>{
            'compression': {
                'algorithms': [
                    {
                        'name': 'DefaultQuantization',
                        'params': {
                            'num_samples_for_tuning': 2000,
                            'preset': 'performance',
                            'stat_subset_size': 300,
                            'use_layerwise_tuning': false
                        }
                    }
                ],
                'dump_intermediate_model': true,
                'target_device': 'ANY'
            },
            'engine': {
                'models': [
                    {
                        'name': 'person-detection-retail-0013',
                        'launchers': [
                            {
                                'framework': 'dlsdk',
                                'adapter': 'ssd',
                                'device': 'CPU'
                            }
                        ],
                        'datasets': [
                            {
                                'name': 'person_detection',
                                'data_source': 'PATH',
                                'annotation_conversion': {
                                    'converter': 'datatang_street_subway',
                                    'annotation_file': 'PATH'
                                },
                                'annotation': 'PATH',
                                'dataset_meta': 'PATH',
                                'preprocessing': [
                                    {
                                        'type': 'resize',
                                        'dst_width': 544,
                                        'dst_height': 320
                                    }
                                ],
                                'postprocessing': [
                                    {
                                        'type': 'resize_prediction_boxes'
                                    },
                                    {
                                        'type': 'filter',
                                        'apply_to': 'annotation',
                                        'height_range': 100,
                                        'aspect_ratio': '0.666, 5',
                                        'is_empty': true,
                                        'min_visibility': 'partially occluded'
                                    },
                                    {
                                        'type': 'filter',
                                        'apply_to': 'prediction',
                                        'height_range': 100,
                                        'is_empty': true,
                                        'aspect_ratio': '0.666, 5'
                                    }
                                ],
                                'metrics': [
                                    {
                                        'type': 'map',
                                        'ignore_difficult': true,
                                        'include_boundaries': true,
                                        'allow_multiple_matches_per_ignored': false,
                                        'distinct_conf': false
                                    }
                                ],
                                '_command_line_mapping': {
                                    'annotation_file': 'PATH'
                                }
                            }
                        ]
                    }
                ],
                'stat_requests_number': null,
                'eval_requests_number': null,
                'type': 'accuracy_checker'
            }
        }</config>
        <version value="1.0"/>
        <cli_params value="{'quantize': None, 'preset': None, 'model': None, 'weights': None, 'name': None, 'ac_config': None, 'max_drop': None, 'evaluate': False, 'output_dir': 'PATH', 'direct_dump': True, 'log_level': 'INFO', 'pbar': False, 'stream_output': False, 'keep_uncompressed_weights': False}"/>
    </quantization_parameters>
</net>