From 88fc0f9f9b7fd3eb81c958ca41ed822cf3657c47 Mon Sep 17 00:00:00 2001
From: baoshiwei <baoshiwei@shlanbao.cn>
Date: 星期二, 22 四月 2025 15:50:22 +0800
Subject: [PATCH] refactor: 重构中药识别项目 分为onnx版和openvino版

---
 openvino/config/herb_ai.yaml           |    6 
 onnx/herb_ai.py                        |    8 
 openvino/identifier.py                 |   59 
 onnx/test.py                           |   50 
 openvino/model/herb_id/best.xml        | 4486 +++++++++++
 openvino/model/load_id/best.bin        |    0 
 openvino/model/safe_det/metadata.yaml  |   98 
 onnx/model/safety_det.onnx             |    0 
 onnx/config/herb_ai.yaml               |    2 
 openvino/readme                        |    1 
 onnx/model/loading.onnx                |    0 
 openvino/safety_detect.py              |  103 
 openvino/model/feeder_id/best.xml      | 4756 ++++++++++++
 onnx/logger_config.py                  |    0 
 openvino/test.py                       |  129 
 onnx/model/safety_det_.onnx            |    0 
 onnx/herb_ai_.py                       |  257 
 onnx/model/herb_identify.onnx          |    0 
 openvino/model/load_id/best.xml        | 4756 ++++++++++++
 openvino/model/feeder_id/metadata.yaml |   15 
 openvino/herb_ai.py                    |   46 
 openvino/model/feeder_id/best.bin      |    0 
 onnx/model/hl.onnx                     |    0 
 openvino/model/safe_det/best.xml       | 7975 ++++++++++++++++++++
 onnx/cam_util.py                       |    9 
 openvino/model/safe_det/best.bin       |    0 
 onnx/safety_detect.py                  |    2 
 onnx/herb_ai.spec                      |   44 
 openvino/model/load_id/metadata.yaml   |   15 
 openvino/model/herb_id/best.bin        |    0 
 onnx/identifier.py                     |    2 
 openvino/logger_config.py              |    0 
 openvino/model/herb_id/metadata.yaml   |  669 +
 openvino/openvino_test.py              |   89 
 34 files changed, 23,367 insertions(+), 210 deletions(-)

diff --git a/cam_util.py b/onnx/cam_util.py
similarity index 78%
rename from cam_util.py
rename to onnx/cam_util.py
index c511c6d..f4fde60 100644
--- a/cam_util.py
+++ b/onnx/cam_util.py
@@ -22,14 +22,23 @@
         for device in webcams:
             name = getattr(device, 'Name', None)
             pnp_class = getattr(device, 'PNPClass', None)
+            if pnp_class == 'Camera':
+                # 灏嗚澶囧悕瀛楀拰绱㈠紩娣诲姞鍒板瓧鍏镐腑
+                print(f"{name},{pnp_class}")
             if name is not None and self.cam1 in name:
+
                 # 灏嗚澶囧悕瀛楀拰绱㈠紩娣诲姞鍒板瓧鍏镐腑
                 webcam_dict[self.cam1] = index
                 index += 1
             elif name is not None and self.cam2 in name:
+
                 # 灏嗚澶囧悕瀛楀拰绱㈠紩娣诲姞鍒板瓧鍏镐腑
                 webcam_dict[self.cam2] = index
                 index += 1
             self.webcam_list = webcam_dict
 
+        print(webcam_dict)
+        # 鎸夊悕绉伴噸鏂版帓搴�
+        webcam_dict = dict(sorted(webcam_dict.items(), key=lambda x: x[0]))
+        print(webcam_dict)
         return webcam_dict
\ No newline at end of file
diff --git a/config/herb_ai.yaml b/onnx/config/herb_ai.yaml
similarity index 87%
rename from config/herb_ai.yaml
rename to onnx/config/herb_ai.yaml
index ee33167..0c25d4a 100644
--- a/config/herb_ai.yaml
+++ b/onnx/config/herb_ai.yaml
@@ -7,7 +7,7 @@
 log:
   path: "./log/herb_ai.log"
 model:
-  safe: './model/safety_det.onnx'
+  safe: './model/safety_det_.onnx'
   cls: './model/herb_identify.onnx'
 cam:
   cam1: 0
diff --git a/herb_ai.py b/onnx/herb_ai.py
similarity index 98%
rename from herb_ai.py
rename to onnx/herb_ai.py
index 66be1e8..261f04b 100644
--- a/herb_ai.py
+++ b/onnx/herb_ai.py
@@ -304,6 +304,12 @@
         width = cv2.getWindowImageRect("AICamera")[2]
         height = cv2.getWindowImageRect("AICamera")[3]
 
+        print("width", width, "height", height)
+
+        # 濡傛灉height灏忎簬1鍒欒祴鍊�100
+        if height < 1:
+            height = 100
+
         # 璋冩暣鍥惧儚澶у皬浠ラ�傚簲绐楀彛
         resized_frame = cv2.resize(draw_img, (width, height))
 
@@ -480,7 +486,7 @@
 
     load_identifier = IDENTIFIER("model/loading.onnx")
     hoister_position = IDENTIFIER("model/hl.onnx")
-    safety_detect = SAFETY_DETECT("model/safety_det.onnx")
+    safety_detect = SAFETY_DETECT("model/safety_det_.onnx")
     config = read_config()
     PCOPYDATASTRUCT = ctypes.POINTER(COPYDATASTRUCT)
 
diff --git a/onnx/herb_ai.spec b/onnx/herb_ai.spec
new file mode 100644
index 0000000..ff41f6c
--- /dev/null
+++ b/onnx/herb_ai.spec
@@ -0,0 +1,44 @@
+# -*- mode: python ; coding: utf-8 -*-
+
+
+a = Analysis(
+    ['herb_ai.py'],
+    pathex=[],
+    binaries=[],
+    datas=[],
+    hiddenimports=[],
+    hookspath=[],
+    hooksconfig={},
+    runtime_hooks=[],
+    excludes=[],
+    noarchive=False,
+    optimize=0,
+)
+pyz = PYZ(a.pure)
+
+exe = EXE(
+    pyz,
+    a.scripts,
+    [],
+    exclude_binaries=True,
+    name='herb_ai',
+    debug=False,
+    bootloader_ignore_signals=False,
+    strip=False,
+    upx=True,
+    console=False,
+    disable_windowed_traceback=False,
+    argv_emulation=False,
+    target_arch=None,
+    codesign_identity=None,
+    entitlements_file=None,
+)
+coll = COLLECT(
+    exe,
+    a.binaries,
+    a.datas,
+    strip=False,
+    upx=True,
+    upx_exclude=[],
+    name='herb_ai',
+)
diff --git a/herb_ai.py b/onnx/herb_ai_.py
similarity index 63%
copy from herb_ai.py
copy to onnx/herb_ai_.py
index 66be1e8..01006a6 100644
--- a/herb_ai.py
+++ b/onnx/herb_ai_.py
@@ -10,6 +10,7 @@
 import win32gui
 import multiprocessing
 from safety_detect import SAFETY_DETECT
+from cam_util import CAM_UTIL
 from identifier import IDENTIFIER
 import os
 from logger_config import logger
@@ -22,34 +23,11 @@
 def variance_of_laplacian(image):
     # 璁$畻杈撳叆鍥惧儚鐨勬媺鏅媺鏂搷搴旂殑鏂瑰樊
     return cv2.Laplacian(image, cv2.CV_64F).var()
-
-
-def clean_directory(path, days_threshold, max_files):
-    """娓呯悊瓒呰繃鏃堕棿鎴栨暟閲忕殑鏂囦欢"""
-    now = time.time()
-    threshold = now - days_threshold * 24 * 3600
-    files = []
-    for f in os.listdir(path):
-        file_path = os.path.join(path, f)
-        if os.path.isfile(file_path):
-            files.append((file_path, os.path.getmtime(file_path)))
-    # 鎸変慨鏀规椂闂撮檷搴忔帓搴忥紝淇濈暀鏈�鏂版枃浠�
-    files.sort(key=lambda x: x[1], reverse=True)
-    # 鍒犻櫎杩囨湡鏂囦欢
-    for file_info in files:
-        if file_info[1] < threshold:
-            os.remove(file_info[0])
-    # 淇濈暀鏈�鏂版枃浠讹紝鍒犻櫎澶氫綑鏂囦欢
-    for file_info in files[max_files:]:
-        os.remove(file_info[0])
-
-
 # 璋冪敤鍙︿竴涓暱鐒﹂暅澶达紝鎷嶆憚娓呮櫚鐨勫眬閮ㄨ嵂鏉愬浘鐗�
 def get_image():
-    herb_identifier = IDENTIFIER("model/herb_identify.onnx")
     logger.info("璇嗗埆绾跨▼鍚姩")
     global is_loaded, class_count, class_count_max, class_sum
-    camera2_index = config['cam']['cam2']
+    camera2_index = 1
     print("绗簩涓憚鍍忓ご绱㈠紩锛�" + str(camera2_index))
     # 鎵撳紑鎽勫儚澶�
     capture = cv2.VideoCapture(camera2_index, cv2.CAP_DSHOW)
@@ -76,7 +54,7 @@
             break
         count += 1
 
-        if count == config['cam']['frames']:
+        if count == 10:
             herb_probabilities = herb_identifier(frame2)
             top_five_classes = np.argsort(herb_probabilities, axis=1)[0][-5:][::-1]
             name = ""
@@ -90,17 +68,16 @@
             # 璁$畻鎷夋櫘鎷夋柉鍝嶅簲鐨勬柟宸�
             laplacian = variance_of_laplacian(frame2)
             # 鐢熸垚淇濆瓨鏂囦欢鍚嶏紝浠ュ綋鍓嶆椂闂村懡鍚�
-            save_name2 = time.strftime("%Y%m%d%H%M%S", time.localtime()) + "_" +name +"_["+ str(round(laplacian, 2)) +"]"+  ".jpg"
+            save_name2 = name +"_["+ str(round(laplacian, 2)) +"]_"+ time.strftime("%Y%m%d%H%M%S", time.localtime()) + ".jpg"
             logger.info(f"璇嗗埆缁撴灉杞崲涓轰繚瀛樺浘鐗囧悕绉�:, {save_name2}")
             # 鍒ゆ柇鍥惧儚鐨勬竻鏅板害
             # 淇濆瓨璋冩暣灏哄鍚庣殑鍥剧墖
             if laplacian > 200:
                 c_ = save_path + "2/c/"
+                # 鍒ゆ柇鏂囦欢鏄惁瀛樺湪锛屼笉瀛樺湪鍒欏垱寤�
                 if not os.path.exists(c_):
                     os.makedirs(c_)
                 cv2.imwrite(c_ + save_name2, frame2)
-                # 鏂板娓呯悊璋冪敤
-                clean_directory(c_, config['cam']['days_threshold'], config['cam']['max_files'])
                 # 娓呮櫚鏉′欢涓嬬疮璁¤瘑鍒粨鏋滀腑鑽潗鍚嶇О鍑虹幇鐨勬鏁�
                 # 绱姣忕鑽潗涓嶈鍚嶆鍑虹幇鐨勬鏁�,绱姣忕鑽潗缃俊搴︽渶楂樼殑娆℃暟,绱姣忕鑽潗鐨勭疆淇″害鎬诲拰
                 # class_count = {}
@@ -127,11 +104,10 @@
                 is_loaded = True
             else:
                 n_ = save_path + "2/n/"
+                # 鍒ゆ柇鏂囦欢鏄惁瀛樺湪锛屼笉瀛樺湪鍒欏垱寤�
                 if not os.path.exists(n_):
                     os.makedirs(n_)
                 cv2.imwrite(n_ + save_name2, frame2)
-                # 鏂板娓呯悊璋冪敤
-                clean_directory(n_, config['cam']['days_threshold'], config['cam']['max_files'])
             # cv2.imshow("Camera", resized_frame2)
             print("淇濆瓨鍥剧墖:", save_name2)
             break
@@ -139,30 +115,19 @@
     capture.release()
 def send_result():
     global is_loaded,class_count, class_count_max, class_sum
-    # 瀵筩lass_count杩涜鎺掑簭锛屾寜鐓у�间粠澶у埌灏忔帓搴�,杩斿洖鍊兼渶澶х殑鍓嶄簲涓�
-    sorted_class_count = dict(sorted(class_count.items(), key=lambda x: x[1], reverse=True)[:5])
-    # 瀵筩lass_count_max杩涜鎺掑簭锛屾寜鐓у�间粠澶у埌灏忔帓搴�,杩斿洖鍊兼渶澶х殑鍓嶄簲涓�
-    sorted_class_count_max = dict(sorted(class_count_max.items(), key=lambda x: x[1], reverse=True)[:5])
-    # 瀵� class_sum杩涜鎺掑簭锛屾寜鐓у�间粠澶у埌灏忔帓搴�,杩斿洖鍊兼渶澶х殑鍓嶄簲涓�
-    sorted_class_sum = dict(sorted(class_sum.items(), key=lambda x: x[1], reverse=True)[:5])
     # 灏嗕笁绉嶇粺璁$粨鏋滆緭鍑哄埌鏃ュ織涓�
     logger.info("class_count:"+str(class_count))
-    logger.info("sorted_class_count:"+str(sorted_class_count))
     logger.info("class_count_max:"+str(class_count_max))
-    logger.info("sorted_class_count_max:"+str(sorted_class_count_max))
     logger.info("class_sum:"+str(class_sum))
-    logger.info("sorted_class_sum:"+str(sorted_class_sum))
     is_loaded = False
-    count_msg = "airecognize," + f"{sorted_class_count}"
-    logger.info("鍙戦�佽嵂鏉愯瘑鍒粨鏋滐細"+str(count_msg))
-    l.send_msg(count_msg)
+    l.send_msg("airecognize," + f"{class_count}")
     pass
 
 
 def load_identify():
-    global is_loaded
+    global is_loaded, frame
     # 鎽勫儚澶寸储寮曞彿锛岄�氬父涓�0琛ㄧず绗竴涓憚鍍忓ご
-    camera_index = config['cam']['cam1']
+    camera_index = 0
     print("绗竴涓憚鍍忓ご绱㈠紩锛�" + str(camera_index))
     # 鎵撳紑鎽勫儚澶�
     cap = cv2.VideoCapture(camera_index, cv2.CAP_DSHOW)
@@ -182,24 +147,21 @@
     # 璁℃椂鍣�
     frame_count = 0
     start_time = time.time()
-    stime = time.time()
+    sstime = time.time()
+
     if not os.path.exists(save_path):
         os.makedirs(save_path)
-    # 涓婃璇嗗埆缁撴灉
-    class_old = "1"
-    # 绱娆℃暟
-    count = 0
+
+
     # 涓婃枡鐘舵��
     status = "娌℃湁涓婃枡"
 
-    # 鍒涘缓绐楀彛骞惰缃负鍙皟鏁村ぇ灏�
-    cv2.namedWindow("AICamera", cv2.WINDOW_NORMAL)
 
     # 寰幆璇诲彇鎽勫儚澶寸敾闈�
     while True:
-        logger.info("寰幆璇诲彇鎽勫儚澶寸敾闈�")
+        # logger.info("寰幆璇诲彇鎽勫儚澶寸敾闈�")
         # 鐫$湢100姣
-        time.sleep(config['cam']['sleep'])
+        time.sleep(0.1)
         ret, frame = cap.read()
         if not ret:
             print("鏃犳硶璇诲彇鎽勫儚澶寸敾闈�")
@@ -207,107 +169,26 @@
             break
         # 鑾峰彇褰撳墠鏃堕棿
         current_time = time.time()
-        # 姣忛殧3绉掑彇涓�甯у浘鍍�
+        # 姣忛殧n绉掑彇涓�甯у浘鍍�
+        if current_time - sstime >= config['cam']['sleep']:
+            sstime = current_time
 
-        # 瀹夊叏妫�娴�
-        boxes, scores, class_ids = safety_detect(frame)
-        draw_img = safety_detect.draw_detections(frame, boxes, scores, class_ids)
-
-        det_res = {}
-        if class_ids is not None:
-            # 閬嶅巻class_ids 杞崲鎴愮被鍒悕绉�
-            for i in range(len(class_ids)):
-                class_id = class_ids[i]
-                class_name = safety_detect.class_names[class_id]
-                # 瀛樺叆鍒癲et_res涓�
-                if class_name in det_res:
-                    det_res[class_name] = det_res[class_name] if det_res[class_name] > scores[i] else scores[i]
-                else:
-                    det_res[class_name] = scores[i]
-        print(det_res)
-        logger.info(f"瀹夊叏妫�娴嬭瘑鍒粨鏋�, {det_res}")
-        # 濡傛灉cass_ids涓寘鍚�0锛屽垯琛ㄧず鏈夊畨鍏ㄦ娴嬪埌浜轰綋
-        if 0 in class_ids:
-            res_ = "aidetect," + f"{det_res}"
-            logger.info("鍙戦�佸畨鍏ㄦ娴嬬粨鏋滐細"+str(res_))
-            l.send_msg(res_)
-
-        # 涓婃枡璇嗗埆
-        probabilities = load_identifier(frame)
-        # 鎵惧埌鏈�澶ф鐜囩殑绫诲埆
-        predicted_class = np.argmax(probabilities, axis=1)[0]
-        max_probability = np.max(probabilities, axis=1)[0]
-        class_ = load_identifier.class_names[predicted_class]
-        # 璁$畻绫诲瀷閲嶅鐨勬鏁帮紝绫诲埆鏇存崲涔嬪悗閲嶆柊璁℃暟
-        if class_ != class_old:
-            count = 0
-        else:
-            count += 1
-        class_old = class_
-        print(f"{class_}:{count}: {max_probability}")
-        logger.info(f"{class_}:{count}: {max_probability}")
-        # 鍒ゆ柇鏄惁涓婃枡骞朵笖涓婃枡娆℃暟澶т簬10娆�
-        if class_ == "shangliao" and count > 10:
-            status = "姝e湪涓婃枡"
-            # 姣忛殧3绉掑彇涓�甯у浘鍍�
-            # 濡傛灉璺濈涓婁竴娆′繚瀛樺凡缁忚繃鍘�1绉掞紝鍒欎繚瀛樺綋鍓嶇敾闈�
-            if current_time - stime >= 10.0:
-                save_name = time.strftime("%Y%m%d%H%M%S", time.localtime()) + ".jpg"
-                # 淇濆瓨璋冩暣灏哄鍚庣殑鍥剧墖
-                path_ = save_path + "1/"
-                if not os.path.exists(path_):
-                    os.makedirs(path_)
-                cv2.imwrite(path_ + save_name, frame)
-                # 鏂板娓呯悊璋冪敤
-                clean_directory(path_, config['cam']['days_threshold'], config['cam']['max_files'])
-                # 閲嶇疆璁℃椂鍣�
-                stime = time.time()
-
-                thread = threading.Thread(target=get_image)
-                thread.start()
-
-        else:
-            status = "娌℃湁涓婃枡"
-            if class_ == "meishangliao" and count == 3 and is_loaded:
-                logger.info("鍋滄涓婃枡鍚庡彂閫佽瘑鍒粨鏋�")
-                send_result()
-            if class_ == "meishangliao" and count == 1000:
-                is_loaded = False
-                logger.info("闀挎椂闂存湭涓婃枡锛岄噸缃鍦ㄤ笂鏂欑姸鎬�")
+            thread1 = threading.Thread(target=method_name)
+            thread1.start()
+            # frame = draw_img
         # print(status)
 
-        # 涓婃枡鏈轰綅缃瘑鍒�
-        probabilities2 = hoister_position(frame);
-        predicted_class2 = np.argmax(probabilities2, axis=1)[0]
-        max_probability2 = np.max(probabilities2, axis=1)[0]
-        class_2 = hoister_position.class_names[predicted_class2]
-        print(f"-----------{class_2}:{predicted_class2}: {max_probability2}")
-        logger.info(f"-----------{class_2}:{predicted_class2}: {max_probability2}")
-
-        if predicted_class2 == 0:
-            feeder_res = {class_2: max_probability2}
-            class_feeder = "aifeeder," + f"{feeder_res}"
-            print("send_msg", class_feeder)
-            logger.info("鍙戦�佷笂鏂欐満浣嶇疆璇嗗埆缁撴灉锛�"+str(class_feeder))
-            l.send_msg(class_feeder)
         # 璁$畻甯ч�熺巼
-        frame_count += 1
-        end_time = time.time()
-        elapsed_time = end_time - start_time
-        fps = frame_count / elapsed_time
-        # print(f"FPS: {fps:.2f}")
-        # 灏咶PS缁樺埗鍦ㄥ浘鍍忎笂
-        cv2.putText(draw_img, f"FPS: {fps:.2f}", (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2,
-                    cv2.LINE_AA)
+        # frame_count += 1
+        # end_time = time.time()
+        # elapsed_time = end_time - start_time
+        # fps = frame_count / elapsed_time
+        # # print(f"FPS: {fps:.2f}")
+        # # 灏咶PS缁樺埗鍦ㄥ浘鍍忎笂
+        # cv2.putText(frame, f"FPS: {fps:.2f}", (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2,
+        #             cv2.LINE_AA)
         # 鏄剧ず鐢婚潰
-        # 鑾峰彇褰撳墠绐楀彛澶у皬
-        width = cv2.getWindowImageRect("AICamera")[2]
-        height = cv2.getWindowImageRect("AICamera")[3]
-
-        # 璋冩暣鍥惧儚澶у皬浠ラ�傚簲绐楀彛
-        resized_frame = cv2.resize(draw_img, (width, height))
-
-        cv2.imshow("AICamera", resized_frame)
+        cv2.imshow("AICamera", frame)
         # 妫�娴嬫寜閿紝濡傛灉鎸変笅q閿垯閫�鍑哄惊鐜�
         if cv2.waitKey(1) & 0xFF == ord('q'):
             break
@@ -316,6 +197,69 @@
     # 鍏抽棴鎵�鏈夌獥鍙�
     cv2.destroyAllWindows()
 
+
+def method_name(  ):
+    global is_loaded,count,class_old,stime,frame
+    # 瀹夊叏妫�娴�
+    boxes, scores, class_ids = safety_detect(frame)
+    draw_img = safety_detect.draw_detections(frame, boxes, scores, class_ids)
+    print(boxes, scores, class_ids)
+    det_res = {}
+    if class_ids is not None:
+        # 閬嶅巻class_ids 杞崲鎴愮被鍒悕绉�
+        for i in range(len(class_ids)):
+            class_id = class_ids[i]
+            class_name = safety_detect.class_names[class_id]
+            # 瀛樺叆鍒癲et_res涓�
+            if class_name in det_res:
+                det_res[class_name] = det_res[class_name] if det_res[class_name] > scores[i] else scores[i]
+            else:
+                det_res[class_name] = scores[i]
+    logger.info(f"瀹夊叏妫�娴嬭瘑鍒粨鏋�, {det_res}")
+    # 濡傛灉cass_ids涓寘鍚�0锛屽垯琛ㄧず鏈夊畨鍏ㄦ娴嬪埌浜轰綋
+    if 0 in class_ids:
+        l.send_msg("aidetect," + f"{det_res}")
+    # 涓婃枡璇嗗埆
+    probabilities = load_identifier(frame)
+    # 鎵惧埌鏈�澶ф鐜囩殑绫诲埆
+    predicted_class = np.argmax(probabilities, axis=1)[0]
+    max_probability = np.max(probabilities, axis=1)[0]
+    class_ = load_identifier.class_names[predicted_class]
+    # 璁$畻绫诲瀷閲嶅鐨勬鏁帮紝绫诲埆鏇存崲涔嬪悗閲嶆柊璁℃暟
+    if class_ != class_old:
+        count = 0
+    else:
+        count += 1
+    class_old = class_
+    print(f"{class_}:{count}: {max_probability}")
+    logger.info(f"{class_}:{count}: {max_probability}")
+    # 鍒ゆ柇鏄惁涓婃枡骞朵笖涓婃枡娆℃暟澶т簬10娆�
+    if class_ == "shangliao" and count > 10:
+        status = "姝e湪涓婃枡"
+        # 姣忛殧3绉掑彇涓�甯у浘鍍�
+        # 濡傛灉璺濈涓婁竴娆′繚瀛樺凡缁忚繃鍘�1绉掞紝鍒欎繚瀛樺綋鍓嶇敾闈�
+        current_time = time.time()
+        if current_time - stime >= 10.0:
+            save_name = time.strftime("%Y%m%d%H%M%S", time.localtime()) + ".jpg"
+            # 淇濆瓨璋冩暣灏哄鍚庣殑鍥剧墖
+            path_ = save_path + "1/"
+            if not os.path.exists(path_):
+                os.makedirs(path_)
+            cv2.imwrite(path_ + save_name, frame)
+            # 閲嶇疆璁℃椂鍣�
+            stime = time.time()
+
+            thread = threading.Thread(target=get_image)
+            thread.start()
+
+    else:
+        status = "娌℃湁涓婃枡"
+        if class_ == "meishangliao" and count == 3 and is_loaded:
+            logger.info("鍋滄涓婃枡鍚庡彂閫佽瘑鍒粨鏋�")
+            send_result()
+        if class_ == "meishangliao" and count == 1000:
+            is_loaded = False
+            logger.info("闀挎椂闂存湭涓婃枡锛岄噸缃鍦ㄤ笂鏂欑姸鎬�")
 
 
 # 璇诲彇閰嶇疆鏂囦欢
@@ -386,7 +330,7 @@
     def OnCopyData(self, hwnd, msg, wparam, lparam):
         try:
             # 璁板綍寮�濮嬫椂闂�
-            start_time = time.time()
+            startTime = time.time()
             pCDS = ctypes.cast(lparam, PCOPYDATASTRUCT)
             s = ctypes.string_at(pCDS.contents.lpData).decode()
             strArr = s.split(",")
@@ -426,9 +370,9 @@
             logger.info(f"璇嗗埆缁撴灉锛歿res}")
             self.send_msg(msg)
             # 璁板綍缁撴潫鏃堕棿
-            end_time = time.time()
+            endTime = time.time()
             # 璁$畻鎵ц鏃堕棿
-            execution_time = end_time - start_time
+            execution_time = endTime - startTime
             # 鎵撳嵃鎵ц鏃堕棿
             print(f"绋嬪簭鎵ц鏃堕棿涓�:{execution_time}绉�")
             logger.info(f"绋嬪簭鎵ц鏃堕棿涓�:{execution_time,}绉�")
@@ -468,6 +412,12 @@
     class_count_max = {}
     # 绱姣忕鑽潗鐨勭疆淇″害鎬诲拰
     class_sum = {}
+    # 涓婃璇嗗埆缁撴灉
+    class_old = "1"
+    # 绱娆℃暟
+    count = 0
+    stime = time.time()
+    frame = None
     # cam1 = "USB Camera"
     # cam2 = "USB ZOOM Camera"
     # camUtil = CAM_UTIL(cam1, cam2)
@@ -477,9 +427,8 @@
     # 鏄惁涓婅繃鏂�
     is_loaded = False
     # 鍔犺浇ONNX妯″瀷
-
+    herb_identifier = IDENTIFIER("model/herb_identify.onnx")
     load_identifier = IDENTIFIER("model/loading.onnx")
-    hoister_position = IDENTIFIER("model/hl.onnx")
     safety_detect = SAFETY_DETECT("model/safety_det.onnx")
     config = read_config()
     PCOPYDATASTRUCT = ctypes.POINTER(COPYDATASTRUCT)
diff --git a/identifier.py b/onnx/identifier.py
similarity index 97%
rename from identifier.py
rename to onnx/identifier.py
index 293bffe..b86d734 100644
--- a/identifier.py
+++ b/onnx/identifier.py
@@ -14,7 +14,7 @@
         return self.idengify(image)
 
     def initialize_model(self, path):
-        self.session = onnxruntime.InferenceSession(path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider'])
+        self.session = onnxruntime.InferenceSession(path, providers=['CPUExecutionProvider'])
         self.class_names = eval(self.session.get_modelmeta().custom_metadata_map['names'])
         # Get model info
         self.get_input_details()
diff --git a/logger_config.py b/onnx/logger_config.py
similarity index 100%
rename from logger_config.py
rename to onnx/logger_config.py
diff --git a/model/herb_identify.onnx b/onnx/model/herb_identify.onnx
similarity index 100%
rename from model/herb_identify.onnx
rename to onnx/model/herb_identify.onnx
Binary files differ
diff --git a/model/hl.onnx b/onnx/model/hl.onnx
similarity index 100%
rename from model/hl.onnx
rename to onnx/model/hl.onnx
Binary files differ
diff --git a/model/loading.onnx b/onnx/model/loading.onnx
similarity index 100%
rename from model/loading.onnx
rename to onnx/model/loading.onnx
Binary files differ
diff --git a/onnx/model/safety_det.onnx b/onnx/model/safety_det.onnx
new file mode 100644
index 0000000..e57bef2
--- /dev/null
+++ b/onnx/model/safety_det.onnx
Binary files differ
diff --git a/model/safety_det.onnx b/onnx/model/safety_det_.onnx
similarity index 100%
rename from model/safety_det.onnx
rename to onnx/model/safety_det_.onnx
Binary files differ
diff --git a/safety_detect.py b/onnx/safety_detect.py
similarity index 98%
rename from safety_detect.py
rename to onnx/safety_detect.py
index 61c5dd8..694a2de 100644
--- a/safety_detect.py
+++ b/onnx/safety_detect.py
@@ -18,7 +18,7 @@
         return self.detect_objects(image)
 
     def initialize_model(self, path):
-        self.session = onnxruntime.InferenceSession(path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider'])
+        self.session = onnxruntime.InferenceSession(path, providers=['CPUExecutionProvider'])
         self.class_names = eval(self.session.get_modelmeta().custom_metadata_map['names'])
         # Get model info
         self.get_input_details()
diff --git a/onnx/test.py b/onnx/test.py
new file mode 100644
index 0000000..bb19ed3
--- /dev/null
+++ b/onnx/test.py
@@ -0,0 +1,50 @@
+import cv2
+import win32com.client
+import re
+
+
+def get_camera_index_by_hardware_id():
+    # 鑾峰彇鎵�鏈夋憚鍍忓ご纭欢淇℃伅
+    wmi = win32com.client.GetObject("winmgmts:")
+    cameras = []
+    wmi_instances_of = wmi.InstancesOf("Win32_PnPEntity")
+
+    for item in wmi_instances_of:
+        if "Camera" ==   getattr(item, 'PNPClass', None):
+            # 鎻愬彇璁惧瀹炰緥璺緞锛堝 USB\VID_046D&PID_0825\...锛�
+            device_id = item.DeviceID
+            cameras.append({
+                "name": item.Name,
+                "device_id": device_id
+            })
+
+    # 閬嶅巻 OpenCV 绱㈠紩锛岃幏鍙栫‖浠朵俊鎭�
+    opencv_indices = []
+    for index in range(0, 10):
+        cap = cv2.VideoCapture(index, cv2.CAP_DSHOW)  # 浣跨敤 DirectShow 鎺ュ彛
+        if not cap.isOpened():
+            continue
+
+        # 鑾峰彇 OpenCV 鎹曡幏璁惧鐨勭‖浠惰矾寰勶紙闇�杞崲鏍煎紡锛�
+        cap_props = cap.getBackendName()
+        hw_info = str(cap.get(cv2.CAP_PROP_HW_DEVICE))  # 閮ㄥ垎椹卞姩鏀寔
+
+        # 鍖归厤璁惧瀹炰緥璺緞涓殑鍏抽敭鏍囪瘑绗︼紙濡� VID/PID锛�
+        for cam in cameras:
+            # 杞崲鏍煎紡锛堜緥濡傦細灏� USB\VID_XXXX&PID_XXXX 涓庣‖浠惰矾寰勫尮閰嶏級
+            if re.search(cam["device_id"].split("\\")[-1], hw_info, re.IGNORECASE):
+                opencv_indices.append({
+                    "index": index,
+                    "name": cam["name"],
+                    "device_id": cam["device_id"]
+                })
+                break
+        cap.release()
+
+    return opencv_indices
+
+
+# 娴嬭瘯浠g爜
+cameras = get_camera_index_by_hardware_id()
+for cam in cameras:
+    print(f"Index: {cam['index']} | Name: {cam['name']}")
diff --git a/config/herb_ai.yaml b/openvino/config/herb_ai.yaml
similarity index 74%
copy from config/herb_ai.yaml
copy to openvino/config/herb_ai.yaml
index ee33167..a179e4f 100644
--- a/config/herb_ai.yaml
+++ b/openvino/config/herb_ai.yaml
@@ -7,12 +7,12 @@
 log:
   path: "./log/herb_ai.log"
 model:
-  safe: './model/safety_det.onnx'
+  safe: './model/safety_det_.onnx'
   cls: './model/herb_identify.onnx'
 cam:
   cam1: 0
   cam2: 1
   sleep: 0.1
   frames: 50
-  days_threshold: 7
-  max_files: 100
+  days_threshold: 100
+  max_files: 10000
diff --git a/herb_ai.py b/openvino/herb_ai.py
similarity index 93%
copy from herb_ai.py
copy to openvino/herb_ai.py
index 66be1e8..c916d9e 100644
--- a/herb_ai.py
+++ b/openvino/herb_ai.py
@@ -11,6 +11,7 @@
 import multiprocessing
 from safety_detect import SAFETY_DETECT
 from identifier import IDENTIFIER
+
 import os
 from logger_config import logger
 import threading
@@ -46,7 +47,7 @@
 
 # 璋冪敤鍙︿竴涓暱鐒﹂暅澶达紝鎷嶆憚娓呮櫚鐨勫眬閮ㄨ嵂鏉愬浘鐗�
 def get_image():
-    herb_identifier = IDENTIFIER("model/herb_identify.onnx")
+    herb_identifier = IDENTIFIER("./model/herb_id")
     logger.info("璇嗗埆绾跨▼鍚姩")
     global is_loaded, class_count, class_count_max, class_sum
     camera2_index = config['cam']['cam2']
@@ -179,9 +180,7 @@
     print("鎽勫儚澶村垎杈ㄧ巼:", width, "x", height)
     logger.info(f"鎽勫儚澶村垎杈ㄧ巼:, {width}, x, {height}")
     # 鐩爣鍥惧儚灏哄
-    # 璁℃椂鍣�
-    frame_count = 0
-    start_time = time.time()
+
     stime = time.time()
     if not os.path.exists(save_path):
         os.makedirs(save_path)
@@ -197,7 +196,7 @@
 
     # 寰幆璇诲彇鎽勫儚澶寸敾闈�
     while True:
-        logger.info("寰幆璇诲彇鎽勫儚澶寸敾闈�")
+        start_time = time.time()
         # 鐫$湢100姣
         time.sleep(config['cam']['sleep'])
         ret, frame = cap.read()
@@ -211,7 +210,7 @@
 
         # 瀹夊叏妫�娴�
         boxes, scores, class_ids = safety_detect(frame)
-        draw_img = safety_detect.draw_detections(frame, boxes, scores, class_ids)
+        draw_img = safety_detect.draw_detections(frame, class_ids, scores,boxes )
 
         det_res = {}
         if class_ids is not None:
@@ -277,7 +276,7 @@
         # print(status)
 
         # 涓婃枡鏈轰綅缃瘑鍒�
-        probabilities2 = hoister_position(frame);
+        probabilities2 = hoister_position(frame)
         predicted_class2 = np.argmax(probabilities2, axis=1)[0]
         max_probability2 = np.max(probabilities2, axis=1)[0]
         class_2 = hoister_position.class_names[predicted_class2]
@@ -291,10 +290,8 @@
             logger.info("鍙戦�佷笂鏂欐満浣嶇疆璇嗗埆缁撴灉锛�"+str(class_feeder))
             l.send_msg(class_feeder)
         # 璁$畻甯ч�熺巼
-        frame_count += 1
         end_time = time.time()
-        elapsed_time = end_time - start_time
-        fps = frame_count / elapsed_time
+        fps = (1 / (end_time - start_time))
         # print(f"FPS: {fps:.2f}")
         # 灏咶PS缁樺埗鍦ㄥ浘鍍忎笂
         cv2.putText(draw_img, f"FPS: {fps:.2f}", (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2,
@@ -303,6 +300,11 @@
         # 鑾峰彇褰撳墠绐楀彛澶у皬
         width = cv2.getWindowImageRect("AICamera")[2]
         height = cv2.getWindowImageRect("AICamera")[3]
+        # print("width", width, "height", height)
+
+        # 濡傛灉height灏忎簬1鍒欒祴鍊�100
+        if height < 1:
+            height = 100
 
         # 璋冩暣鍥惧儚澶у皬浠ラ�傚簲绐楀彛
         resized_frame = cv2.resize(draw_img, (width, height))
@@ -335,21 +337,6 @@
         ('cbData', ctypes.wintypes.DWORD),
         ('lpData', ctypes.c_char_p)
     ]
-
-
-
-# logging.info("鍑嗗鍔犺浇瀹夊叏妫�娴嬫ā鍨�..")
-# print("鍑嗗鍔犺浇瀹夊叏妫�娴嬫ā鍨�..")
-# model_safe = SAFETY_DETECT(config['model']['safe'])
-#
-# logging.info("瀹夊叏妫�娴嬫ā鍨嬪姞杞芥垚鍔熴��")
-# print("瀹夊叏妫�娴嬫ā鍨嬪姞杞芥垚鍔熴��")
-# logging.info("鍑嗗鍔犺浇鑽潗璇嗗埆妯″瀷..")
-# print("鍑嗗鍔犺浇鑽潗璇嗗埆妯″瀷..")
-# model_cls = HERB_IDENTIFY(config['model']['cls'])
-# logging.info("鑽潗璇嗗埆妯″瀷鍔犺浇鎴愬姛銆�")
-# print("鑽潗璇嗗埆妯″瀷鍔犺浇鎴愬姛銆�")
-
 
 class Listener:
     def __init__(self):
@@ -462,6 +449,8 @@
         return 0
 
 if __name__ == '__main__':
+
+
     # 绱姣忕鑽潗涓嶈鍚嶆鍑虹幇鐨勬鏁�
     class_count = {}
     # 绱姣忕鑽潗缃俊搴︽渶楂樼殑娆℃暟
@@ -478,9 +467,10 @@
     is_loaded = False
     # 鍔犺浇ONNX妯″瀷
 
-    load_identifier = IDENTIFIER("model/loading.onnx")
-    hoister_position = IDENTIFIER("model/hl.onnx")
-    safety_detect = SAFETY_DETECT("model/safety_det.onnx")
+    print("鍔犺浇妯″瀷===============")
+    load_identifier = IDENTIFIER("./model/load_id")
+    hoister_position = IDENTIFIER("./model/feeder_id")
+    safety_detect = SAFETY_DETECT("./model/safe_det")
     config = read_config()
     PCOPYDATASTRUCT = ctypes.POINTER(COPYDATASTRUCT)
 
diff --git a/identifier.py b/openvino/identifier.py
similarity index 61%
copy from identifier.py
copy to openvino/identifier.py
index 293bffe..61c13b6 100644
--- a/identifier.py
+++ b/openvino/identifier.py
@@ -1,7 +1,8 @@
 import time
 import cv2
 import numpy as np
-import onnxruntime
+import yaml
+from openvino.runtime import Core
 
 
 class IDENTIFIER:
@@ -12,13 +13,29 @@
 
     def __call__(self, image):
         return self.idengify(image)
-
+    def read_config(self, path):
+        file_path = path+'/metadata.yaml'
+        with open(file_path, 'r', encoding="utf-8") as file:
+            config = yaml.safe_load(file)
+        return config
     def initialize_model(self, path):
-        self.session = onnxruntime.InferenceSession(path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider'])
-        self.class_names = eval(self.session.get_modelmeta().custom_metadata_map['names'])
+        model_path = path + '/best.xml'
+        # Initialize OpenVINO Runtime
+        self.core = Core()
+        # Load the model
+        self.model = self.core.read_model(model=model_path)
+        # Compile the model
+        self.compiled_model = self.core.compile_model(model=self.model, device_name="CPU")
+        # Get input and output layers
+        self.input_layer = self.compiled_model.input(0)
+        N,C,self.input_width,self.input_height = self.input_layer.shape
+
+        self.output_layer = self.compiled_model.output(0)
+        # Get class names
+        self.class_names = CLASSES = self.read_config(path)['names']
         # Get model info
-        self.get_input_details()
-        self.get_output_details()
+        # self.get_input_details()
+        # self.get_output_details()
 
     def idengify(self, image):
         input_tensor = self.prepare_input(image)
@@ -26,9 +43,9 @@
         # Perform inference on the image
         outputs = self.inference(input_tensor)
 
-        self.herb_probabilities = outputs[0]
 
-        return self.herb_probabilities
+
+        return outputs
 
     def prepare_input(self, image):
         self.img_height, self.img_width = image.shape[:2]
@@ -48,25 +65,25 @@
         return input_tensor
 
     def inference(self, input_tensor):
-        start = time.perf_counter()
-        outputs = self.session.run(self.output_names, {self.input_names[0]: input_tensor})
+        ir = self.compiled_model.create_infer_request()
+        outs = ir.infer(input_tensor)[self.output_layer]
 
         # print(f"Inference time: {(time.perf_counter() - start)*1000:.2f} ms")
-        return outputs
+        return outs
 
 
 
-    def get_input_details(self):
-        model_inputs = self.session.get_inputs()
-        self.input_names = [model_inputs[i].name for i in range(len(model_inputs))]
+    # def get_input_details(self):
+    #     model_inputs = self.session.get_inputs()
+    #     self.input_names = [model_inputs[i].name for i in range(len(model_inputs))]
+    #
+    #     self.input_shape = model_inputs[0].shape
+    #     self.input_height = self.input_shape[2]
+    #     self.input_width = self.input_shape[3]
 
-        self.input_shape = model_inputs[0].shape
-        self.input_height = self.input_shape[2]
-        self.input_width = self.input_shape[3]
-
-    def get_output_details(self):
-        model_outputs = self.session.get_outputs()
-        self.output_names = [model_outputs[i].name for i in range(len(model_outputs))]
+    # def get_output_details(self):
+    #     model_outputs = self.session.get_outputs()
+    #     self.output_names = [model_outputs[i].name for i in range(len(model_outputs))]
 
     # 绛夋瘮渚嬬缉鏀惧浘鐗�
     def ratioresize(self, im, color=114):
diff --git a/logger_config.py b/openvino/logger_config.py
similarity index 100%
copy from logger_config.py
copy to openvino/logger_config.py
diff --git a/openvino/model/feeder_id/best.bin b/openvino/model/feeder_id/best.bin
new file mode 100644
index 0000000..c55fd8d
--- /dev/null
+++ b/openvino/model/feeder_id/best.bin
Binary files differ
diff --git a/openvino/model/feeder_id/best.xml b/openvino/model/feeder_id/best.xml
new file mode 100644
index 0000000..53173db
--- /dev/null
+++ b/openvino/model/feeder_id/best.xml
@@ -0,0 +1,4756 @@
+<?xml version="1.0"?>
+<net name="Model0" version="11">
+	<layers>
+		<layer id="0" name="x" type="Parameter" version="opset1">
+			<data shape="1,3,640,640" element_type="f32" />
+			<output>
+				<port id="0" precision="FP32" names="x">
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>640</dim>
+					<dim>640</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="1" name="self.model.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="16, 3, 3, 3" offset="0" size="1728" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.0.conv.weight">
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="2" name="__module.model.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>640</dim>
+					<dim>640</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="3" name="__module.model.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 16, 1, 1" offset="1728" size="64" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="4" name="__module.model.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="51_1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="5" name="__module.model.10.conv.act/aten::silu_/Swish" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="51,input.1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="6" name="self.model.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 16, 3, 3" offset="1792" size="18432" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.1.conv.weight">
+					<dim>32</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="7" name="__module.model.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="8" name="__module.model.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="20224" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="9" name="__module.model.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="65_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="10" name="__module.model.10.conv.act/aten::silu_/Swish_1" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="65,input.5">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="11" name="self.model.2.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 1, 1" offset="20352" size="4096" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="12" name="__module.model.2.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="13" name="__module.model.2.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="24448" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="14" name="__module.model.2.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="83_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="15" name="__module.model.10.conv.act/aten::silu_/Swish_2" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="83,input.9">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="16" name="71" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="24576" size="8" />
+			<output>
+				<port id="0" precision="I64" names="71" />
+			</output>
+		</layer>
+		<layer id="17" name="Constant_187" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="24584" size="16" />
+			<output>
+				<port id="0" precision="I64" names="85">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="18" name="__module.model.2/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="87">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="4" precision="FP32" names="88,input.11">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="19" name="self.model.2.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="8, 16, 3, 3" offset="24600" size="4608" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.m.0.cv1.conv.weight">
+					<dim>8</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="20" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>8</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="21" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 8, 1, 1" offset="29208" size="32" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="22" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="98_1">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="23" name="__module.model.10.conv.act/aten::silu_/Swish_3" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="98,input.13">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="24" name="self.model.2.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="16, 8, 3, 3" offset="29240" size="4608" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.m.0.cv2.conv.weight">
+					<dim>16</dim>
+					<dim>8</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="25" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>16</dim>
+					<dim>8</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="26" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 16, 1, 1" offset="33848" size="64" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="27" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="107_1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="28" name="__module.model.10.conv.act/aten::silu_/Swish_4" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="107,input.17">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="29" name="__module.model.2.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="109">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="30" name="__module.model.2/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="111,input.19">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="31" name="self.model.2.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 48, 1, 1" offset="33912" size="12288" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="32" name="__module.model.2.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="33" name="__module.model.2.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="46200" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="34" name="__module.model.2.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="119_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="35" name="__module.model.10.conv.act/aten::silu_/Swish_5" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="119,input.21">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="36" name="self.model.3.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="46456" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.3.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="37" name="__module.model.3.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="38" name="__module.model.3.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="193912" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="39" name="__module.model.3.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="133_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="40" name="__module.model.10.conv.act/aten::silu_/Swish_6" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="133,input.25">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="41" name="self.model.4.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 1, 1" offset="194168" size="16384" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="42" name="__module.model.4.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="43" name="__module.model.4.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="210552" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="44" name="__module.model.4.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="151_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="45" name="__module.model.10.conv.act/aten::silu_/Swish_7" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="151,input.29">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="46" name="139" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="24576" size="8" />
+			<output>
+				<port id="0" precision="I64" names="139" />
+			</output>
+		</layer>
+		<layer id="47" name="Constant_445" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="210808" size="16" />
+			<output>
+				<port id="0" precision="I64" names="153">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="48" name="__module.model.4/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="155">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="4" precision="FP32" names="156,input.31">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="49" name="self.model.4.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="16, 32, 3, 3" offset="210824" size="18432" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.0.cv1.conv.weight">
+					<dim>16</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="50" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>16</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="51" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 16, 1, 1" offset="229256" size="64" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="52" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="166_1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="53" name="__module.model.10.conv.act/aten::silu_/Swish_8" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="166,input.33">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="54" name="self.model.4.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 16, 3, 3" offset="229320" size="18432" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.0.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="55" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="56" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="247752" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="57" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="175_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="58" name="__module.model.10.conv.act/aten::silu_/Swish_9" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="175,input.37">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="59" name="__module.model.4.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="177">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="60" name="__module.model.4/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="179,input.39">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="61" name="self.model.4.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 96, 1, 1" offset="247880" size="49152" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.cv2.conv.weight">
+					<dim>128</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="62" name="__module.model.4.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="63" name="__module.model.4.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="297032" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="64" name="__module.model.4.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="187_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="65" name="__module.model.10.conv.act/aten::silu_/Swish_10" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="187,input.41">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="66" name="self.model.5.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 3, 3" offset="297544" size="589824" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.5.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="67" name="__module.model.5.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="68" name="__module.model.5.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="887368" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="69" name="__module.model.5.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="201_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="70" name="__module.model.10.conv.act/aten::silu_/Swish_11" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="201,input.45">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="71" name="self.model.6.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 1, 1" offset="887880" size="65536" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.cv1.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="72" name="__module.model.6.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="73" name="__module.model.6.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="953416" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="74" name="__module.model.6.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="219_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="75" name="__module.model.10.conv.act/aten::silu_/Swish_12" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="219,input.49">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="76" name="207" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="24576" size="8" />
+			<output>
+				<port id="0" precision="I64" names="207" />
+			</output>
+		</layer>
+		<layer id="77" name="Constant_703" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="953928" size="16" />
+			<output>
+				<port id="0" precision="I64" names="221">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="78" name="__module.model.6/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="223">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="4" precision="FP32" names="224,input.51">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="79" name="self.model.6.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 64, 1, 1" offset="953944" size="8192" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="80" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="81" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="962136" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="82" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="236_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="83" name="__module.model.10.conv.act/aten::silu_/Swish_13" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="236,input.53">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="84" name="self.model.6.m.0.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="962264" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.m.0.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="85" name="__module.model.6.m.0.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="86" name="__module.model.6.m.0.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="999128" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="87" name="__module.model.6.m.0.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="249_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="88" name="__module.model.10.conv.act/aten::silu_/Swish_14" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="249,input.57">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="89" name="self.model.6.m.0.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="999256" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.m.0.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="90" name="__module.model.6.m.0.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="91" name="__module.model.6.m.0.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="1036120" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="92" name="__module.model.6.m.0.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="258_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="93" name="__module.model.10.conv.act/aten::silu_/Swish_15" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="258,input.61">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="94" name="__module.model.6.m.0.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="260,input.63">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="95" name="self.model.6.m.0.m.1.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="1036248" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.m.1.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="96" name="__module.model.6.m.0.m.1.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="97" name="__module.model.6.m.0.m.1.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="1073112" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="98" name="__module.model.6.m.0.m.1.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="270_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="99" name="__module.model.10.conv.act/aten::silu_/Swish_16" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="270,input.65">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="100" name="self.model.6.m.0.m.1.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="1073240" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.m.1.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="101" name="__module.model.6.m.0.m.1.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="102" name="__module.model.6.m.0.m.1.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="1110104" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="103" name="__module.model.6.m.0.m.1.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="279_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="104" name="__module.model.10.conv.act/aten::silu_/Swish_17" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="279,input.69">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="105" name="__module.model.6.m.0.m.1/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="281">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="106" name="self.model.6.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 64, 1, 1" offset="1110232" size="8192" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="107" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="108" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="1118424" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="109" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="289_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="110" name="__module.model.10.conv.act/aten::silu_/Swish_18" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="289,input.71">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="111" name="__module.model.6.m.0/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="292,input.73">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="112" name="self.model.6.m.0.cv3.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 1, 1" offset="1118552" size="16384" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.cv3.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="113" name="__module.model.6.m.0.cv3.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="114" name="__module.model.6.m.0.cv3.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="1134936" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="115" name="__module.model.6.m.0.cv3.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="300_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="116" name="__module.model.10.conv.act/aten::silu_/Swish_19" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="300,input.75">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="117" name="__module.model.6/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="303,input.77">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="118" name="self.model.6.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 192, 1, 1" offset="1135192" size="98304" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.cv2.conv.weight">
+					<dim>128</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="119" name="__module.model.6.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="120" name="__module.model.6.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="1233496" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="121" name="__module.model.6.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="311_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="122" name="__module.model.10.conv.act/aten::silu_/Swish_20" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="311,input.79">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="123" name="self.model.7.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 128, 3, 3" offset="1234008" size="1179648" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.7.conv.weight">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="124" name="__module.model.7.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="125" name="__module.model.7.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="2413656" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="126" name="__module.model.7.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="325_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="127" name="__module.model.10.conv.act/aten::silu_/Swish_21" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="325,input.83">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="128" name="self.model.8.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 256, 1, 1" offset="2414680" size="262144" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.cv1.conv.weight">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="129" name="__module.model.8.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="130" name="__module.model.8.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="2676824" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="131" name="__module.model.8.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="343_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="132" name="__module.model.10.conv.act/aten::silu_/Swish_22" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="343,input.87">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="133" name="331" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="24576" size="8" />
+			<output>
+				<port id="0" precision="I64" names="331" />
+			</output>
+		</layer>
+		<layer id="134" name="Constant_1204" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="2677848" size="16" />
+			<output>
+				<port id="0" precision="I64" names="345">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="135" name="__module.model.8/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="347">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="4" precision="FP32" names="348,input.89">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="136" name="self.model.8.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 128, 1, 1" offset="2677864" size="32768" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="137" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="138" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="2710632" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="139" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="360_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="140" name="__module.model.10.conv.act/aten::silu_/Swish_23" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="360,input.91">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="141" name="self.model.8.m.0.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="2710888" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.m.0.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="142" name="__module.model.8.m.0.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="143" name="__module.model.8.m.0.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="2858344" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="144" name="__module.model.8.m.0.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="373_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="145" name="__module.model.10.conv.act/aten::silu_/Swish_24" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="373,input.95">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="146" name="self.model.8.m.0.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="2858600" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.m.0.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="147" name="__module.model.8.m.0.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="148" name="__module.model.8.m.0.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="3006056" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="149" name="__module.model.8.m.0.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="382_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="150" name="__module.model.10.conv.act/aten::silu_/Swish_25" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="382,input.99">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="151" name="__module.model.8.m.0.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="384,input.101">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="152" name="self.model.8.m.0.m.1.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="3006312" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.m.1.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="153" name="__module.model.8.m.0.m.1.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="154" name="__module.model.8.m.0.m.1.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="3153768" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="155" name="__module.model.8.m.0.m.1.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="394_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="156" name="__module.model.10.conv.act/aten::silu_/Swish_26" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="394,input.103">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="157" name="self.model.8.m.0.m.1.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="3154024" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.m.1.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="158" name="__module.model.8.m.0.m.1.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="159" name="__module.model.8.m.0.m.1.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="3301480" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="160" name="__module.model.8.m.0.m.1.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="403_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="161" name="__module.model.10.conv.act/aten::silu_/Swish_27" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="403,input.107">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="162" name="__module.model.8.m.0.m.1/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="405">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="163" name="self.model.8.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 128, 1, 1" offset="3301736" size="32768" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="164" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="165" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="3334504" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="166" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="413_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="167" name="__module.model.10.conv.act/aten::silu_/Swish_28" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="413,input.109">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="168" name="__module.model.8.m.0/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="416,input.111">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="169" name="self.model.8.m.0.cv3.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 1, 1" offset="3334760" size="65536" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.cv3.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="170" name="__module.model.8.m.0.cv3.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="171" name="__module.model.8.m.0.cv3.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="3400296" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="172" name="__module.model.8.m.0.cv3.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="424_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="173" name="__module.model.10.conv.act/aten::silu_/Swish_29" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="424,input.113">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="174" name="__module.model.8/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="427,input.115">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="175" name="self.model.8.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 384, 1, 1" offset="3400808" size="393216" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.cv2.conv.weight">
+					<dim>256</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="176" name="__module.model.8.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="177" name="__module.model.8.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="3794024" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="178" name="__module.model.8.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="435_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="179" name="__module.model.10.conv.act/aten::silu_/Swish_30" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="435,input.117">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="180" name="self.model.9.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 256, 1, 1" offset="3795048" size="262144" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.cv1.conv.weight">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="181" name="__module.model.9.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="182" name="__module.model.9.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="4057192" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="183" name="__module.model.9.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="460_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="184" name="__module.model.10.conv.act/aten::silu_/Swish_31" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="460,input.121">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="185" name="449" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="24576" size="8" />
+			<output>
+				<port id="0" precision="I64" names="449" />
+			</output>
+		</layer>
+		<layer id="186" name="Constant_1662" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="2677848" size="16" />
+			<output>
+				<port id="0" precision="I64" names="462">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="187" name="__module.model.9/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="464,a">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="4" precision="FP32" names="465,x.3">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="188" name="self.model.9.m.0.attn.qkv.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 128, 1, 1" offset="4058216" size="131072" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.m.0.attn.qkv.conv.weight">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="189" name="__module.model.9.m.0.attn.qkv.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="190" name="__module.model.9.m.0.attn.qkv.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="4189288" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="191" name="__module.model.9.m.0.attn.qkv.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="487,qkv.1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="192" name="Constant_6493" type="Const" version="opset1">
+			<data element_type="i64" shape="4" offset="4190312" size="32" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>4</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="193" name="__module.model.9.m.0.attn/aten::view/Reshape" type="Reshape" version="opset1">
+			<data special_zero="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>4</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="489">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>128</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="194" name="437" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="4190344" size="8" />
+			<output>
+				<port id="0" precision="I64" names="437" />
+			</output>
+		</layer>
+		<layer id="195" name="Constant_1884" type="Const" version="opset1">
+			<data element_type="i64" shape="3" offset="4190352" size="24" />
+			<output>
+				<port id="0" precision="I64" names="490">
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="196" name="__module.model.9.m.0.attn/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>128</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="492,q">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+				<port id="4" precision="FP32" names="493,k">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+				<port id="5" precision="FP32" names="494,v">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>64</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="197" name="Constant_6395" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 1, 1, 1" offset="4190376" size="4" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="198" name="Multiply_6371" type="Multiply" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="199" name="__module.model.9.m.0.attn/aten::mul/Multiply_1" type="MatMul" version="opset1">
+			<data transpose_a="true" transpose_b="false" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="497,attn.1">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>400</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="200" name="__module.model.9.m.0.attn/aten::softmax/Softmax" type="SoftMax" version="opset8">
+			<data axis="-1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>400</dim>
+					<dim>400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="498,attn.3">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>400</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="201" name="__module.model.9.m.0.attn/aten::matmul/MatMul_1" type="MatMul" version="opset1">
+			<data transpose_a="false" transpose_b="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>64</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>400</dim>
+					<dim>400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="500">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>64</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="202" name="__module.model.9.m.0.attn/aten::size/ShapeOf" type="ShapeOf" version="opset3">
+			<data output_type="i64" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="I64">
+					<dim>4</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="203" name="__module.model.9.m.0.attn/aten::view/Reshape_1" type="Reshape" version="opset1">
+			<data special_zero="false" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>64</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>4</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="502">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="204" name="__module.model.9.m.0.attn/aten::reshape/Reshape" type="Reshape" version="opset1">
+			<data special_zero="false" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>64</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>4</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="504,input.123">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="205" name="__module.model.9.m.0.attn.pe.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 1, 1, 3, 3" offset="4190380" size="4608" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="206" name="__module.model.9.m.0.attn.pe.conv/aten::_convolution/GroupConvolution" type="GroupConvolution" version="opset1">
+			<data strides="1, 1" pads_begin="1, 1" pads_end="1, 1" dilations="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="207" name="__module.model.9.m.0.attn.pe.conv/aten::_convolution/Reshape_1" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="4194988" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="208" name="__module.model.9.m.0.attn.pe.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="512">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="209" name="__module.model.9.m.0.attn/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="513,input.125">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="210" name="self.model.9.m.0.attn.proj.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 1, 1" offset="4195500" size="65536" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.m.0.attn.proj.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="211" name="__module.model.9.m.0.attn.proj.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="212" name="__module.model.9.m.0.attn.proj.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="4261036" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="213" name="__module.model.9.m.0.attn.proj.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="521">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="214" name="__module.model.9.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="522,input.127">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="215" name="self.model.9.m.0.ffn.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 128, 1, 1" offset="4261548" size="131072" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.m.0.ffn.0.conv.weight">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="216" name="__module.model.9.m.0.ffn.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="217" name="__module.model.9.m.0.ffn.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="4392620" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="218" name="__module.model.9.m.0.ffn.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="532_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="219" name="__module.model.10.conv.act/aten::silu_/Swish_32" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="532,input.129">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="220" name="self.model.9.m.0.ffn.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 256, 1, 1" offset="4393644" size="131072" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.m.0.ffn.1.conv.weight">
+					<dim>128</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="221" name="__module.model.9.m.0.ffn.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="222" name="__module.model.9.m.0.ffn.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="4524716" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="223" name="__module.model.9.m.0.ffn.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="542">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="224" name="__module.model.9.m.0/aten::add/Add_1" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="543,b">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="225" name="__module.model.9/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="545,input.133">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="226" name="self.model.9.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 256, 1, 1" offset="4525228" size="262144" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.cv2.conv.weight">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="227" name="__module.model.9.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="228" name="__module.model.9.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="4787372" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="229" name="__module.model.9.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="553_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="230" name="__module.model.10.conv.act/aten::silu_/Swish_33" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="553,input.135">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="231" name="self.model.10.conv.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="1280, 256, 1, 1" offset="4788396" size="1310720" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.10.conv.conv.weight">
+					<dim>1280</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="232" name="__module.model.10.conv.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1280</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="233" name="__module.model.10.conv.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 1280, 1, 1" offset="6099116" size="5120" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="234" name="__module.model.10.conv.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="574_1">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="235" name="__module.model.10.conv.act/aten::silu_/Swish_34" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="574,input.139">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="236" name="Constant_6367" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="6104236" size="16" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="237" name="__module.model.10.pool/aten::adaptive_avg_pool2d/AdaptiveAvgPool" type="ReduceMean" version="opset1">
+			<data keep_dims="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="577">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="238" name="Concat_4406" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="6104252" size="16" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="239" name="__module.model.10/aten::flatten/Reshape" type="Reshape" version="opset1">
+			<data special_zero="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="578,input.143">
+					<dim>1</dim>
+					<dim>1280</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="240" name="self.model.10.linear.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="2, 1280" offset="6104268" size="10240" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.10.linear.weight">
+					<dim>2</dim>
+					<dim>1280</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="241" name="__module.model.10.linear/aten::linear/MatMul" type="MatMul" version="opset1">
+			<data transpose_a="false" transpose_b="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>2</dim>
+					<dim>1280</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="242" name="Constant_6396" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 2" offset="6114508" size="8" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="243" name="__module.model.10.linear/aten::linear/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="582,x_1">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="244" name="__module.model.10/aten::softmax/Softmax" type="SoftMax" version="opset8">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="245" name="Result_2508" type="Result" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</input>
+		</layer>
+	</layers>
+	<edges>
+		<edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
+		<edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
+		<edge from-layer="2" from-port="2" to-layer="4" to-port="0" />
+		<edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
+		<edge from-layer="4" from-port="2" to-layer="5" to-port="0" />
+		<edge from-layer="5" from-port="1" to-layer="7" to-port="0" />
+		<edge from-layer="6" from-port="0" to-layer="7" to-port="1" />
+		<edge from-layer="7" from-port="2" to-layer="9" to-port="0" />
+		<edge from-layer="8" from-port="0" to-layer="9" to-port="1" />
+		<edge from-layer="9" from-port="2" to-layer="10" to-port="0" />
+		<edge from-layer="10" from-port="1" to-layer="12" to-port="0" />
+		<edge from-layer="11" from-port="0" to-layer="12" to-port="1" />
+		<edge from-layer="12" from-port="2" to-layer="14" to-port="0" />
+		<edge from-layer="13" from-port="0" to-layer="14" to-port="1" />
+		<edge from-layer="14" from-port="2" to-layer="15" to-port="0" />
+		<edge from-layer="15" from-port="1" to-layer="18" to-port="0" />
+		<edge from-layer="16" from-port="0" to-layer="18" to-port="1" />
+		<edge from-layer="17" from-port="0" to-layer="18" to-port="2" />
+		<edge from-layer="18" from-port="4" to-layer="20" to-port="0" />
+		<edge from-layer="18" from-port="4" to-layer="29" to-port="0" />
+		<edge from-layer="18" from-port="3" to-layer="30" to-port="0" />
+		<edge from-layer="18" from-port="4" to-layer="30" to-port="1" />
+		<edge from-layer="19" from-port="0" to-layer="20" to-port="1" />
+		<edge from-layer="20" from-port="2" to-layer="22" to-port="0" />
+		<edge from-layer="21" from-port="0" to-layer="22" to-port="1" />
+		<edge from-layer="22" from-port="2" to-layer="23" to-port="0" />
+		<edge from-layer="23" from-port="1" to-layer="25" to-port="0" />
+		<edge from-layer="24" from-port="0" to-layer="25" to-port="1" />
+		<edge from-layer="25" from-port="2" to-layer="27" to-port="0" />
+		<edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
+		<edge from-layer="27" from-port="2" to-layer="28" to-port="0" />
+		<edge from-layer="28" from-port="1" to-layer="29" to-port="1" />
+		<edge from-layer="29" from-port="2" to-layer="30" to-port="2" />
+		<edge from-layer="30" from-port="3" to-layer="32" to-port="0" />
+		<edge from-layer="31" from-port="0" to-layer="32" to-port="1" />
+		<edge from-layer="32" from-port="2" to-layer="34" to-port="0" />
+		<edge from-layer="33" from-port="0" to-layer="34" to-port="1" />
+		<edge from-layer="34" from-port="2" to-layer="35" to-port="0" />
+		<edge from-layer="35" from-port="1" to-layer="37" to-port="0" />
+		<edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
+		<edge from-layer="37" from-port="2" to-layer="39" to-port="0" />
+		<edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
+		<edge from-layer="39" from-port="2" to-layer="40" to-port="0" />
+		<edge from-layer="40" from-port="1" to-layer="42" to-port="0" />
+		<edge from-layer="41" from-port="0" to-layer="42" to-port="1" />
+		<edge from-layer="42" from-port="2" to-layer="44" to-port="0" />
+		<edge from-layer="43" from-port="0" to-layer="44" to-port="1" />
+		<edge from-layer="44" from-port="2" to-layer="45" to-port="0" />
+		<edge from-layer="45" from-port="1" to-layer="48" to-port="0" />
+		<edge from-layer="46" from-port="0" to-layer="48" to-port="1" />
+		<edge from-layer="47" from-port="0" to-layer="48" to-port="2" />
+		<edge from-layer="48" from-port="4" to-layer="50" to-port="0" />
+		<edge from-layer="48" from-port="4" to-layer="59" to-port="0" />
+		<edge from-layer="48" from-port="3" to-layer="60" to-port="0" />
+		<edge from-layer="48" from-port="4" to-layer="60" to-port="1" />
+		<edge from-layer="49" from-port="0" to-layer="50" to-port="1" />
+		<edge from-layer="50" from-port="2" to-layer="52" to-port="0" />
+		<edge from-layer="51" from-port="0" to-layer="52" to-port="1" />
+		<edge from-layer="52" from-port="2" to-layer="53" to-port="0" />
+		<edge from-layer="53" from-port="1" to-layer="55" to-port="0" />
+		<edge from-layer="54" from-port="0" to-layer="55" to-port="1" />
+		<edge from-layer="55" from-port="2" to-layer="57" to-port="0" />
+		<edge from-layer="56" from-port="0" to-layer="57" to-port="1" />
+		<edge from-layer="57" from-port="2" to-layer="58" to-port="0" />
+		<edge from-layer="58" from-port="1" to-layer="59" to-port="1" />
+		<edge from-layer="59" from-port="2" to-layer="60" to-port="2" />
+		<edge from-layer="60" from-port="3" to-layer="62" to-port="0" />
+		<edge from-layer="61" from-port="0" to-layer="62" to-port="1" />
+		<edge from-layer="62" from-port="2" to-layer="64" to-port="0" />
+		<edge from-layer="63" from-port="0" to-layer="64" to-port="1" />
+		<edge from-layer="64" from-port="2" to-layer="65" to-port="0" />
+		<edge from-layer="65" from-port="1" to-layer="67" to-port="0" />
+		<edge from-layer="66" from-port="0" to-layer="67" to-port="1" />
+		<edge from-layer="67" from-port="2" to-layer="69" to-port="0" />
+		<edge from-layer="68" from-port="0" to-layer="69" to-port="1" />
+		<edge from-layer="69" from-port="2" to-layer="70" to-port="0" />
+		<edge from-layer="70" from-port="1" to-layer="72" to-port="0" />
+		<edge from-layer="71" from-port="0" to-layer="72" to-port="1" />
+		<edge from-layer="72" from-port="2" to-layer="74" to-port="0" />
+		<edge from-layer="73" from-port="0" to-layer="74" to-port="1" />
+		<edge from-layer="74" from-port="2" to-layer="75" to-port="0" />
+		<edge from-layer="75" from-port="1" to-layer="78" to-port="0" />
+		<edge from-layer="76" from-port="0" to-layer="78" to-port="1" />
+		<edge from-layer="77" from-port="0" to-layer="78" to-port="2" />
+		<edge from-layer="78" from-port="4" to-layer="80" to-port="0" />
+		<edge from-layer="78" from-port="3" to-layer="117" to-port="0" />
+		<edge from-layer="78" from-port="4" to-layer="117" to-port="1" />
+		<edge from-layer="78" from-port="4" to-layer="107" to-port="0" />
+		<edge from-layer="79" from-port="0" to-layer="80" to-port="1" />
+		<edge from-layer="80" from-port="2" to-layer="82" to-port="0" />
+		<edge from-layer="81" from-port="0" to-layer="82" to-port="1" />
+		<edge from-layer="82" from-port="2" to-layer="83" to-port="0" />
+		<edge from-layer="83" from-port="1" to-layer="85" to-port="0" />
+		<edge from-layer="83" from-port="1" to-layer="94" to-port="0" />
+		<edge from-layer="84" from-port="0" to-layer="85" to-port="1" />
+		<edge from-layer="85" from-port="2" to-layer="87" to-port="0" />
+		<edge from-layer="86" from-port="0" to-layer="87" to-port="1" />
+		<edge from-layer="87" from-port="2" to-layer="88" to-port="0" />
+		<edge from-layer="88" from-port="1" to-layer="90" to-port="0" />
+		<edge from-layer="89" from-port="0" to-layer="90" to-port="1" />
+		<edge from-layer="90" from-port="2" to-layer="92" to-port="0" />
+		<edge from-layer="91" from-port="0" to-layer="92" to-port="1" />
+		<edge from-layer="92" from-port="2" to-layer="93" to-port="0" />
+		<edge from-layer="93" from-port="1" to-layer="94" to-port="1" />
+		<edge from-layer="94" from-port="2" to-layer="105" to-port="0" />
+		<edge from-layer="94" from-port="2" to-layer="96" to-port="0" />
+		<edge from-layer="95" from-port="0" to-layer="96" to-port="1" />
+		<edge from-layer="96" from-port="2" to-layer="98" to-port="0" />
+		<edge from-layer="97" from-port="0" to-layer="98" to-port="1" />
+		<edge from-layer="98" from-port="2" to-layer="99" to-port="0" />
+		<edge from-layer="99" from-port="1" to-layer="101" to-port="0" />
+		<edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
+		<edge from-layer="101" from-port="2" to-layer="103" to-port="0" />
+		<edge from-layer="102" from-port="0" to-layer="103" to-port="1" />
+		<edge from-layer="103" from-port="2" to-layer="104" to-port="0" />
+		<edge from-layer="104" from-port="1" to-layer="105" to-port="1" />
+		<edge from-layer="105" from-port="2" to-layer="111" to-port="0" />
+		<edge from-layer="106" from-port="0" to-layer="107" to-port="1" />
+		<edge from-layer="107" from-port="2" to-layer="109" to-port="0" />
+		<edge from-layer="108" from-port="0" to-layer="109" to-port="1" />
+		<edge from-layer="109" from-port="2" to-layer="110" to-port="0" />
+		<edge from-layer="110" from-port="1" to-layer="111" to-port="1" />
+		<edge from-layer="111" from-port="2" to-layer="113" to-port="0" />
+		<edge from-layer="112" from-port="0" to-layer="113" to-port="1" />
+		<edge from-layer="113" from-port="2" to-layer="115" to-port="0" />
+		<edge from-layer="114" from-port="0" to-layer="115" to-port="1" />
+		<edge from-layer="115" from-port="2" to-layer="116" to-port="0" />
+		<edge from-layer="116" from-port="1" to-layer="117" to-port="2" />
+		<edge from-layer="117" from-port="3" to-layer="119" to-port="0" />
+		<edge from-layer="118" from-port="0" to-layer="119" to-port="1" />
+		<edge from-layer="119" from-port="2" to-layer="121" to-port="0" />
+		<edge from-layer="120" from-port="0" to-layer="121" to-port="1" />
+		<edge from-layer="121" from-port="2" to-layer="122" to-port="0" />
+		<edge from-layer="122" from-port="1" to-layer="124" to-port="0" />
+		<edge from-layer="123" from-port="0" to-layer="124" to-port="1" />
+		<edge from-layer="124" from-port="2" to-layer="126" to-port="0" />
+		<edge from-layer="125" from-port="0" to-layer="126" to-port="1" />
+		<edge from-layer="126" from-port="2" to-layer="127" to-port="0" />
+		<edge from-layer="127" from-port="1" to-layer="129" to-port="0" />
+		<edge from-layer="128" from-port="0" to-layer="129" to-port="1" />
+		<edge from-layer="129" from-port="2" to-layer="131" to-port="0" />
+		<edge from-layer="130" from-port="0" to-layer="131" to-port="1" />
+		<edge from-layer="131" from-port="2" to-layer="132" to-port="0" />
+		<edge from-layer="132" from-port="1" to-layer="135" to-port="0" />
+		<edge from-layer="133" from-port="0" to-layer="135" to-port="1" />
+		<edge from-layer="134" from-port="0" to-layer="135" to-port="2" />
+		<edge from-layer="135" from-port="4" to-layer="137" to-port="0" />
+		<edge from-layer="135" from-port="4" to-layer="174" to-port="1" />
+		<edge from-layer="135" from-port="4" to-layer="164" to-port="0" />
+		<edge from-layer="135" from-port="3" to-layer="174" to-port="0" />
+		<edge from-layer="136" from-port="0" to-layer="137" to-port="1" />
+		<edge from-layer="137" from-port="2" to-layer="139" to-port="0" />
+		<edge from-layer="138" from-port="0" to-layer="139" to-port="1" />
+		<edge from-layer="139" from-port="2" to-layer="140" to-port="0" />
+		<edge from-layer="140" from-port="1" to-layer="142" to-port="0" />
+		<edge from-layer="140" from-port="1" to-layer="151" to-port="0" />
+		<edge from-layer="141" from-port="0" to-layer="142" to-port="1" />
+		<edge from-layer="142" from-port="2" to-layer="144" to-port="0" />
+		<edge from-layer="143" from-port="0" to-layer="144" to-port="1" />
+		<edge from-layer="144" from-port="2" to-layer="145" to-port="0" />
+		<edge from-layer="145" from-port="1" to-layer="147" to-port="0" />
+		<edge from-layer="146" from-port="0" to-layer="147" to-port="1" />
+		<edge from-layer="147" from-port="2" to-layer="149" to-port="0" />
+		<edge from-layer="148" from-port="0" to-layer="149" to-port="1" />
+		<edge from-layer="149" from-port="2" to-layer="150" to-port="0" />
+		<edge from-layer="150" from-port="1" to-layer="151" to-port="1" />
+		<edge from-layer="151" from-port="2" to-layer="153" to-port="0" />
+		<edge from-layer="151" from-port="2" to-layer="162" to-port="0" />
+		<edge from-layer="152" from-port="0" to-layer="153" to-port="1" />
+		<edge from-layer="153" from-port="2" to-layer="155" to-port="0" />
+		<edge from-layer="154" from-port="0" to-layer="155" to-port="1" />
+		<edge from-layer="155" from-port="2" to-layer="156" to-port="0" />
+		<edge from-layer="156" from-port="1" to-layer="158" to-port="0" />
+		<edge from-layer="157" from-port="0" to-layer="158" to-port="1" />
+		<edge from-layer="158" from-port="2" to-layer="160" to-port="0" />
+		<edge from-layer="159" from-port="0" to-layer="160" to-port="1" />
+		<edge from-layer="160" from-port="2" to-layer="161" to-port="0" />
+		<edge from-layer="161" from-port="1" to-layer="162" to-port="1" />
+		<edge from-layer="162" from-port="2" to-layer="168" to-port="0" />
+		<edge from-layer="163" from-port="0" to-layer="164" to-port="1" />
+		<edge from-layer="164" from-port="2" to-layer="166" to-port="0" />
+		<edge from-layer="165" from-port="0" to-layer="166" to-port="1" />
+		<edge from-layer="166" from-port="2" to-layer="167" to-port="0" />
+		<edge from-layer="167" from-port="1" to-layer="168" to-port="1" />
+		<edge from-layer="168" from-port="2" to-layer="170" to-port="0" />
+		<edge from-layer="169" from-port="0" to-layer="170" to-port="1" />
+		<edge from-layer="170" from-port="2" to-layer="172" to-port="0" />
+		<edge from-layer="171" from-port="0" to-layer="172" to-port="1" />
+		<edge from-layer="172" from-port="2" to-layer="173" to-port="0" />
+		<edge from-layer="173" from-port="1" to-layer="174" to-port="2" />
+		<edge from-layer="174" from-port="3" to-layer="176" to-port="0" />
+		<edge from-layer="175" from-port="0" to-layer="176" to-port="1" />
+		<edge from-layer="176" from-port="2" to-layer="178" to-port="0" />
+		<edge from-layer="177" from-port="0" to-layer="178" to-port="1" />
+		<edge from-layer="178" from-port="2" to-layer="179" to-port="0" />
+		<edge from-layer="179" from-port="1" to-layer="181" to-port="0" />
+		<edge from-layer="180" from-port="0" to-layer="181" to-port="1" />
+		<edge from-layer="181" from-port="2" to-layer="183" to-port="0" />
+		<edge from-layer="182" from-port="0" to-layer="183" to-port="1" />
+		<edge from-layer="183" from-port="2" to-layer="184" to-port="0" />
+		<edge from-layer="184" from-port="1" to-layer="187" to-port="0" />
+		<edge from-layer="185" from-port="0" to-layer="187" to-port="1" />
+		<edge from-layer="186" from-port="0" to-layer="187" to-port="2" />
+		<edge from-layer="187" from-port="4" to-layer="189" to-port="0" />
+		<edge from-layer="187" from-port="4" to-layer="214" to-port="0" />
+		<edge from-layer="187" from-port="4" to-layer="202" to-port="0" />
+		<edge from-layer="187" from-port="3" to-layer="225" to-port="0" />
+		<edge from-layer="188" from-port="0" to-layer="189" to-port="1" />
+		<edge from-layer="189" from-port="2" to-layer="191" to-port="0" />
+		<edge from-layer="190" from-port="0" to-layer="191" to-port="1" />
+		<edge from-layer="191" from-port="2" to-layer="193" to-port="0" />
+		<edge from-layer="192" from-port="0" to-layer="193" to-port="1" />
+		<edge from-layer="193" from-port="2" to-layer="196" to-port="0" />
+		<edge from-layer="194" from-port="0" to-layer="196" to-port="1" />
+		<edge from-layer="195" from-port="0" to-layer="196" to-port="2" />
+		<edge from-layer="196" from-port="4" to-layer="198" to-port="0" />
+		<edge from-layer="196" from-port="3" to-layer="199" to-port="0" />
+		<edge from-layer="196" from-port="5" to-layer="201" to-port="0" />
+		<edge from-layer="196" from-port="5" to-layer="204" to-port="0" />
+		<edge from-layer="197" from-port="0" to-layer="198" to-port="1" />
+		<edge from-layer="198" from-port="2" to-layer="199" to-port="1" />
+		<edge from-layer="199" from-port="2" to-layer="200" to-port="0" />
+		<edge from-layer="200" from-port="1" to-layer="201" to-port="1" />
+		<edge from-layer="201" from-port="2" to-layer="203" to-port="0" />
+		<edge from-layer="202" from-port="1" to-layer="204" to-port="1" />
+		<edge from-layer="202" from-port="1" to-layer="203" to-port="1" />
+		<edge from-layer="203" from-port="2" to-layer="209" to-port="0" />
+		<edge from-layer="204" from-port="2" to-layer="206" to-port="0" />
+		<edge from-layer="205" from-port="0" to-layer="206" to-port="1" />
+		<edge from-layer="206" from-port="2" to-layer="208" to-port="0" />
+		<edge from-layer="207" from-port="0" to-layer="208" to-port="1" />
+		<edge from-layer="208" from-port="2" to-layer="209" to-port="1" />
+		<edge from-layer="209" from-port="2" to-layer="211" to-port="0" />
+		<edge from-layer="210" from-port="0" to-layer="211" to-port="1" />
+		<edge from-layer="211" from-port="2" to-layer="213" to-port="0" />
+		<edge from-layer="212" from-port="0" to-layer="213" to-port="1" />
+		<edge from-layer="213" from-port="2" to-layer="214" to-port="1" />
+		<edge from-layer="214" from-port="2" to-layer="224" to-port="0" />
+		<edge from-layer="214" from-port="2" to-layer="216" to-port="0" />
+		<edge from-layer="215" from-port="0" to-layer="216" to-port="1" />
+		<edge from-layer="216" from-port="2" to-layer="218" to-port="0" />
+		<edge from-layer="217" from-port="0" to-layer="218" to-port="1" />
+		<edge from-layer="218" from-port="2" to-layer="219" to-port="0" />
+		<edge from-layer="219" from-port="1" to-layer="221" to-port="0" />
+		<edge from-layer="220" from-port="0" to-layer="221" to-port="1" />
+		<edge from-layer="221" from-port="2" to-layer="223" to-port="0" />
+		<edge from-layer="222" from-port="0" to-layer="223" to-port="1" />
+		<edge from-layer="223" from-port="2" to-layer="224" to-port="1" />
+		<edge from-layer="224" from-port="2" to-layer="225" to-port="1" />
+		<edge from-layer="225" from-port="2" to-layer="227" to-port="0" />
+		<edge from-layer="226" from-port="0" to-layer="227" to-port="1" />
+		<edge from-layer="227" from-port="2" to-layer="229" to-port="0" />
+		<edge from-layer="228" from-port="0" to-layer="229" to-port="1" />
+		<edge from-layer="229" from-port="2" to-layer="230" to-port="0" />
+		<edge from-layer="230" from-port="1" to-layer="232" to-port="0" />
+		<edge from-layer="231" from-port="0" to-layer="232" to-port="1" />
+		<edge from-layer="232" from-port="2" to-layer="234" to-port="0" />
+		<edge from-layer="233" from-port="0" to-layer="234" to-port="1" />
+		<edge from-layer="234" from-port="2" to-layer="235" to-port="0" />
+		<edge from-layer="235" from-port="1" to-layer="237" to-port="0" />
+		<edge from-layer="236" from-port="0" to-layer="237" to-port="1" />
+		<edge from-layer="237" from-port="2" to-layer="239" to-port="0" />
+		<edge from-layer="238" from-port="0" to-layer="239" to-port="1" />
+		<edge from-layer="239" from-port="2" to-layer="241" to-port="0" />
+		<edge from-layer="240" from-port="0" to-layer="241" to-port="1" />
+		<edge from-layer="241" from-port="2" to-layer="243" to-port="0" />
+		<edge from-layer="242" from-port="0" to-layer="243" to-port="1" />
+		<edge from-layer="243" from-port="2" to-layer="244" to-port="0" />
+		<edge from-layer="244" from-port="1" to-layer="245" to-port="0" />
+	</edges>
+	<rt_info>
+		<Runtime_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
+		<conversion_parameters>
+			<framework value="pytorch" />
+			<is_python_object value="True" />
+		</conversion_parameters>
+		<model_info>
+			<iou_threshold value="0.7" />
+			<labels value="high low" />
+			<model_type value="YOLO" />
+			<pad_value value="114" />
+			<reverse_input_channels value="YES" />
+			<scale_values value="255" />
+		</model_info>
+	</rt_info>
+</net>
diff --git a/openvino/model/feeder_id/metadata.yaml b/openvino/model/feeder_id/metadata.yaml
new file mode 100644
index 0000000..b68038b
--- /dev/null
+++ b/openvino/model/feeder_id/metadata.yaml
@@ -0,0 +1,15 @@
+description: Ultralytics YOLO11n-cls model trained on D:\椤圭洰璧勬枡\鏅鸿兘骞茬嚗璁惧\涓婃枡鏈轰綅缃甛image3
+author: Ultralytics
+date: '2025-04-21T17:41:27.908573'
+version: 8.3.53
+license: AGPL-3.0 License (https://ultralytics.com/license)
+docs: https://docs.ultralytics.com
+stride: 1
+task: classify
+batch: 1
+imgsz:
+- 640
+- 640
+names:
+  0: high
+  1: low
diff --git a/openvino/model/herb_id/best.bin b/openvino/model/herb_id/best.bin
new file mode 100644
index 0000000..d4c7e79
--- /dev/null
+++ b/openvino/model/herb_id/best.bin
Binary files differ
diff --git a/openvino/model/herb_id/best.xml b/openvino/model/herb_id/best.xml
new file mode 100644
index 0000000..975fc71
--- /dev/null
+++ b/openvino/model/herb_id/best.xml
@@ -0,0 +1,4486 @@
+<?xml version="1.0"?>
+<net name="Model0" version="11">
+	<layers>
+		<layer id="0" name="x" type="Parameter" version="opset1">
+			<data shape="1,3,640,640" element_type="f32" />
+			<output>
+				<port id="0" precision="FP32" names="x">
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>640</dim>
+					<dim>640</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="1" name="self.model.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="48, 3, 3, 3" offset="0" size="5184" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.0.conv.weight">
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="2" name="__module.model.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>640</dim>
+					<dim>640</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="3" name="__module.model.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 48, 1, 1" offset="5184" size="192" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="4" name="__module.model.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="48_1">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="5" name="__module.model.9.conv.act/aten::silu_/Swish" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="48,input.1">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="6" name="self.model.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 48, 3, 3" offset="5376" size="165888" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.1.conv.weight">
+					<dim>96</dim>
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="7" name="__module.model.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="8" name="__module.model.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="171264" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="9" name="__module.model.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="62_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="10" name="__module.model.9.conv.act/aten::silu_/Swish_1" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="62,input.5">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="11" name="self.model.2.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 96, 1, 1" offset="171648" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.cv1.conv.weight">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="12" name="__module.model.2.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="13" name="__module.model.2.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="208512" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="14" name="__module.model.2.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="82_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="15" name="__module.model.9.conv.act/aten::silu_/Swish_2" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="82,input.9">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="16" name="68" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="208896" size="8" />
+			<output>
+				<port id="0" precision="I64" names="68" />
+			</output>
+		</layer>
+		<layer id="17" name="Constant_187" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="208904" size="16" />
+			<output>
+				<port id="0" precision="I64" names="84">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="18" name="__module.model.2/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="86">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="4" precision="FP32" names="87,input.11">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="19" name="self.model.2.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="48, 48, 3, 3" offset="208920" size="82944" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.m.0.cv1.conv.weight">
+					<dim>48</dim>
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="20" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>48</dim>
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="21" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 48, 1, 1" offset="291864" size="192" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="22" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="97_1">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="23" name="__module.model.9.conv.act/aten::silu_/Swish_3" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="97,input.13">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="24" name="self.model.2.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="48, 48, 3, 3" offset="292056" size="82944" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.m.0.cv2.conv.weight">
+					<dim>48</dim>
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="25" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>48</dim>
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="26" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 48, 1, 1" offset="375000" size="192" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="27" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="106_1">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="28" name="__module.model.9.conv.act/aten::silu_/Swish_4" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="106,input.17">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="29" name="__module.model.2.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="108,input.19">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="30" name="self.model.2.m.1.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="48, 48, 3, 3" offset="375192" size="82944" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.m.1.cv1.conv.weight">
+					<dim>48</dim>
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="31" name="__module.model.2.m.1.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>48</dim>
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="32" name="__module.model.2.m.1.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 48, 1, 1" offset="458136" size="192" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="33" name="__module.model.2.m.1.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="118_1">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="34" name="__module.model.9.conv.act/aten::silu_/Swish_5" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="118,input.21">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="35" name="self.model.2.m.1.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="48, 48, 3, 3" offset="458328" size="82944" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.m.1.cv2.conv.weight">
+					<dim>48</dim>
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="36" name="__module.model.2.m.1.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>48</dim>
+					<dim>48</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="37" name="__module.model.2.m.1.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 48, 1, 1" offset="541272" size="192" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="38" name="__module.model.2.m.1.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="127_1">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="39" name="__module.model.9.conv.act/aten::silu_/Swish_6" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="127,input.25">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="40" name="__module.model.2.m.1/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="129">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="41" name="__module.model.2/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="3" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="4" precision="FP32" names="131,input.27">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="42" name="self.model.2.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 192, 1, 1" offset="541464" size="73728" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.cv2.conv.weight">
+					<dim>96</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="43" name="__module.model.2.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="44" name="__module.model.2.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="615192" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="45" name="__module.model.2.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="139_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="46" name="__module.model.9.conv.act/aten::silu_/Swish_7" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="139,input.29">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="47" name="self.model.3.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 96, 3, 3" offset="615576" size="663552" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.3.conv.weight">
+					<dim>192</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="48" name="__module.model.3.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="49" name="__module.model.3.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="1279128" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="50" name="__module.model.3.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="153_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="51" name="__module.model.9.conv.act/aten::silu_/Swish_8" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="153,input.33">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="52" name="self.model.4.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 192, 1, 1" offset="1279896" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.cv1.conv.weight">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="53" name="__module.model.4.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="54" name="__module.model.4.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="1427352" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="55" name="__module.model.4.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="177_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="56" name="__module.model.9.conv.act/aten::silu_/Swish_9" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="177,input.37">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="57" name="159" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="208896" size="8" />
+			<output>
+				<port id="0" precision="I64" names="159" />
+			</output>
+		</layer>
+		<layer id="58" name="Constant_549" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="1428120" size="16" />
+			<output>
+				<port id="0" precision="I64" names="179">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="59" name="__module.model.4/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="181">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="4" precision="FP32" names="182,input.39">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="60" name="self.model.4.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 96, 3, 3" offset="1428136" size="331776" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.0.cv1.conv.weight">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="61" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="62" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="1759912" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="63" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="192_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="64" name="__module.model.9.conv.act/aten::silu_/Swish_10" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="192,input.41">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="65" name="self.model.4.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 96, 3, 3" offset="1760296" size="331776" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.0.cv2.conv.weight">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="66" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="67" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="2092072" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="68" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="201_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="69" name="__module.model.9.conv.act/aten::silu_/Swish_11" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="201,input.45">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="70" name="__module.model.4.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="203,input.47">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="71" name="self.model.4.m.1.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 96, 3, 3" offset="2092456" size="331776" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.1.cv1.conv.weight">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="72" name="__module.model.4.m.1.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="73" name="__module.model.4.m.1.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="2424232" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="74" name="__module.model.4.m.1.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="213_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="75" name="__module.model.9.conv.act/aten::silu_/Swish_12" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="213,input.49">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="76" name="self.model.4.m.1.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 96, 3, 3" offset="2424616" size="331776" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.1.cv2.conv.weight">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="77" name="__module.model.4.m.1.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="78" name="__module.model.4.m.1.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="2756392" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="79" name="__module.model.4.m.1.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="222_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="80" name="__module.model.9.conv.act/aten::silu_/Swish_13" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="222,input.53">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="81" name="__module.model.4.m.1/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="224,input.55">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="82" name="self.model.4.m.2.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 96, 3, 3" offset="2756776" size="331776" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.2.cv1.conv.weight">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="83" name="__module.model.4.m.2.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="84" name="__module.model.4.m.2.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="3088552" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="85" name="__module.model.4.m.2.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="234_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="86" name="__module.model.9.conv.act/aten::silu_/Swish_14" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="234,input.57">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="87" name="self.model.4.m.2.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 96, 3, 3" offset="3088936" size="331776" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.2.cv2.conv.weight">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="88" name="__module.model.4.m.2.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="89" name="__module.model.4.m.2.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="3420712" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="90" name="__module.model.4.m.2.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="243_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="91" name="__module.model.9.conv.act/aten::silu_/Swish_15" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="243,input.61">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="92" name="__module.model.4.m.2/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="245,input.63">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="93" name="self.model.4.m.3.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 96, 3, 3" offset="3421096" size="331776" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.3.cv1.conv.weight">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="94" name="__module.model.4.m.3.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="95" name="__module.model.4.m.3.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="3752872" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="96" name="__module.model.4.m.3.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="255_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="97" name="__module.model.9.conv.act/aten::silu_/Swish_16" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="255,input.65">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="98" name="self.model.4.m.3.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="96, 96, 3, 3" offset="3753256" size="331776" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.3.cv2.conv.weight">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="99" name="__module.model.4.m.3.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>96</dim>
+					<dim>96</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="100" name="__module.model.4.m.3.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 96, 1, 1" offset="4085032" size="384" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="101" name="__module.model.4.m.3.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="264_1">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="102" name="__module.model.9.conv.act/aten::silu_/Swish_17" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="264,input.69">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="103" name="__module.model.4.m.3/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="266">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="104" name="__module.model.4/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="3" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="4" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="5" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="6" precision="FP32" names="268,input.71">
+					<dim>1</dim>
+					<dim>576</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="105" name="self.model.4.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 576, 1, 1" offset="4085416" size="442368" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.cv2.conv.weight">
+					<dim>192</dim>
+					<dim>576</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="106" name="__module.model.4.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>576</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>576</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="107" name="__module.model.4.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="4527784" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="108" name="__module.model.4.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="276_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="109" name="__module.model.9.conv.act/aten::silu_/Swish_18" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="276,input.73">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="110" name="self.model.5.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="384, 192, 3, 3" offset="4528552" size="2654208" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.5.conv.weight">
+					<dim>384</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="111" name="__module.model.5.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>384</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="112" name="__module.model.5.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 384, 1, 1" offset="7182760" size="1536" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="113" name="__module.model.5.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="290_1">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="114" name="__module.model.9.conv.act/aten::silu_/Swish_19" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="290,input.77">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="115" name="self.model.6.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="384, 384, 1, 1" offset="7184296" size="589824" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.cv1.conv.weight">
+					<dim>384</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="116" name="__module.model.6.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>384</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="117" name="__module.model.6.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 384, 1, 1" offset="7774120" size="1536" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="118" name="__module.model.6.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="314_1">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="119" name="__module.model.9.conv.act/aten::silu_/Swish_20" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="314,input.81">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="120" name="296" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="208896" size="8" />
+			<output>
+				<port id="0" precision="I64" names="296" />
+			</output>
+		</layer>
+		<layer id="121" name="Constant_1107" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="7775656" size="16" />
+			<output>
+				<port id="0" precision="I64" names="316">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="122" name="__module.model.6/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="318">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="4" precision="FP32" names="319,input.83">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="123" name="self.model.6.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 192, 3, 3" offset="7775672" size="1327104" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.cv1.conv.weight">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="124" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="125" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="9102776" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="126" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="329_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="127" name="__module.model.9.conv.act/aten::silu_/Swish_21" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="329,input.85">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="128" name="self.model.6.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 192, 3, 3" offset="9103544" size="1327104" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.cv2.conv.weight">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="129" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="130" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="10430648" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="131" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="338_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="132" name="__module.model.9.conv.act/aten::silu_/Swish_22" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="338,input.89">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="133" name="__module.model.6.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="340,input.91">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="134" name="self.model.6.m.1.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 192, 3, 3" offset="10431416" size="1327104" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.1.cv1.conv.weight">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="135" name="__module.model.6.m.1.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="136" name="__module.model.6.m.1.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="11758520" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="137" name="__module.model.6.m.1.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="350_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="138" name="__module.model.9.conv.act/aten::silu_/Swish_23" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="350,input.93">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="139" name="self.model.6.m.1.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 192, 3, 3" offset="11759288" size="1327104" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.1.cv2.conv.weight">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="140" name="__module.model.6.m.1.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="141" name="__module.model.6.m.1.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="13086392" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="142" name="__module.model.6.m.1.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="359_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="143" name="__module.model.9.conv.act/aten::silu_/Swish_24" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="359,input.97">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="144" name="__module.model.6.m.1/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="361,input.99">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="145" name="self.model.6.m.2.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 192, 3, 3" offset="13087160" size="1327104" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.2.cv1.conv.weight">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="146" name="__module.model.6.m.2.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="147" name="__module.model.6.m.2.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="14414264" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="148" name="__module.model.6.m.2.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="371_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="149" name="__module.model.9.conv.act/aten::silu_/Swish_25" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="371,input.101">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="150" name="self.model.6.m.2.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 192, 3, 3" offset="14415032" size="1327104" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.2.cv2.conv.weight">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="151" name="__module.model.6.m.2.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="152" name="__module.model.6.m.2.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="15742136" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="153" name="__module.model.6.m.2.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="380_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="154" name="__module.model.9.conv.act/aten::silu_/Swish_26" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="380,input.105">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="155" name="__module.model.6.m.2/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="382,input.107">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="156" name="self.model.6.m.3.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 192, 3, 3" offset="15742904" size="1327104" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.3.cv1.conv.weight">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="157" name="__module.model.6.m.3.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="158" name="__module.model.6.m.3.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="17070008" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="159" name="__module.model.6.m.3.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="392_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="160" name="__module.model.9.conv.act/aten::silu_/Swish_27" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="392,input.109">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="161" name="self.model.6.m.3.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="192, 192, 3, 3" offset="17070776" size="1327104" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.3.cv2.conv.weight">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="162" name="__module.model.6.m.3.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>192</dim>
+					<dim>192</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="163" name="__module.model.6.m.3.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 192, 1, 1" offset="18397880" size="768" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="164" name="__module.model.6.m.3.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="401_1">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="165" name="__module.model.9.conv.act/aten::silu_/Swish_28" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="401,input.113">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="166" name="__module.model.6.m.3/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="403">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="167" name="__module.model.6/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="3" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="4" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="5" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="6" precision="FP32" names="405,input.115">
+					<dim>1</dim>
+					<dim>1152</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="168" name="self.model.6.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="384, 1152, 1, 1" offset="18398648" size="1769472" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.cv2.conv.weight">
+					<dim>384</dim>
+					<dim>1152</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="169" name="__module.model.6.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1152</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>384</dim>
+					<dim>1152</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="170" name="__module.model.6.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 384, 1, 1" offset="20168120" size="1536" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="171" name="__module.model.6.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="413_1">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="172" name="__module.model.9.conv.act/aten::silu_/Swish_29" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="413,input.117">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="173" name="self.model.7.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="768, 384, 3, 3" offset="20169656" size="10616832" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.7.conv.weight">
+					<dim>768</dim>
+					<dim>384</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="174" name="__module.model.7.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>768</dim>
+					<dim>384</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="175" name="__module.model.7.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 768, 1, 1" offset="30786488" size="3072" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="176" name="__module.model.7.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="427_1">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="177" name="__module.model.9.conv.act/aten::silu_/Swish_30" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="427,input.121">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="178" name="self.model.8.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="768, 768, 1, 1" offset="30789560" size="2359296" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.cv1.conv.weight">
+					<dim>768</dim>
+					<dim>768</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="179" name="__module.model.8.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>768</dim>
+					<dim>768</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="180" name="__module.model.8.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 768, 1, 1" offset="33148856" size="3072" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="181" name="__module.model.8.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="447_1">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="182" name="__module.model.9.conv.act/aten::silu_/Swish_31" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="447,input.125">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="183" name="433" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="208896" size="8" />
+			<output>
+				<port id="0" precision="I64" names="433" />
+			</output>
+		</layer>
+		<layer id="184" name="Constant_1661" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="33151928" size="16" />
+			<output>
+				<port id="0" precision="I64" names="449">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="185" name="__module.model.8/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="451">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="4" precision="FP32" names="452,input.127">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="186" name="self.model.8.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="384, 384, 3, 3" offset="33151944" size="5308416" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.cv1.conv.weight">
+					<dim>384</dim>
+					<dim>384</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="187" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>384</dim>
+					<dim>384</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="188" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 384, 1, 1" offset="38460360" size="1536" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="189" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="462_1">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="190" name="__module.model.9.conv.act/aten::silu_/Swish_32" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="462,input.129">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="191" name="self.model.8.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="384, 384, 3, 3" offset="38461896" size="5308416" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.cv2.conv.weight">
+					<dim>384</dim>
+					<dim>384</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="192" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>384</dim>
+					<dim>384</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="193" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 384, 1, 1" offset="43770312" size="1536" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="194" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="471_1">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="195" name="__module.model.9.conv.act/aten::silu_/Swish_33" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="471,input.133">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="196" name="__module.model.8.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="473,input.135">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="197" name="self.model.8.m.1.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="384, 384, 3, 3" offset="43771848" size="5308416" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.1.cv1.conv.weight">
+					<dim>384</dim>
+					<dim>384</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="198" name="__module.model.8.m.1.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>384</dim>
+					<dim>384</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="199" name="__module.model.8.m.1.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 384, 1, 1" offset="49080264" size="1536" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="200" name="__module.model.8.m.1.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="483_1">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="201" name="__module.model.9.conv.act/aten::silu_/Swish_34" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="483,input.137">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="202" name="self.model.8.m.1.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="384, 384, 3, 3" offset="49081800" size="5308416" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.1.cv2.conv.weight">
+					<dim>384</dim>
+					<dim>384</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="203" name="__module.model.8.m.1.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>384</dim>
+					<dim>384</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="204" name="__module.model.8.m.1.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 384, 1, 1" offset="54390216" size="1536" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="205" name="__module.model.8.m.1.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="492_1">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="206" name="__module.model.9.conv.act/aten::silu_/Swish_35" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="492,input.141">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="207" name="__module.model.8.m.1/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="494">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="208" name="__module.model.8/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="3" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="4" precision="FP32" names="496,input.143">
+					<dim>1</dim>
+					<dim>1536</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="209" name="self.model.8.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="768, 1536, 1, 1" offset="54391752" size="4718592" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.cv2.conv.weight">
+					<dim>768</dim>
+					<dim>1536</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="210" name="__module.model.8.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1536</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>768</dim>
+					<dim>1536</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="211" name="__module.model.8.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 768, 1, 1" offset="59110344" size="3072" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="212" name="__module.model.8.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="504_1">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="213" name="__module.model.9.conv.act/aten::silu_/Swish_36" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="504,input.145">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="214" name="self.model.9.conv.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="1280, 768, 1, 1" offset="59113416" size="3932160" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.conv.conv.weight">
+					<dim>1280</dim>
+					<dim>768</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="215" name="__module.model.9.conv.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>768</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1280</dim>
+					<dim>768</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="216" name="__module.model.9.conv.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 1280, 1, 1" offset="63045576" size="5120" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="217" name="__module.model.9.conv.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="525_1">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="218" name="__module.model.9.conv.act/aten::silu_/Swish_37" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="525,input.149">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="219" name="Constant_5408" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="63050696" size="16" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="220" name="__module.model.9.pool/aten::adaptive_avg_pool2d/AdaptiveAvgPool" type="ReduceMean" version="opset1">
+			<data keep_dims="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="528">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="221" name="Concat_3692" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="63050712" size="16" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="222" name="__module.model.9/aten::flatten/Reshape" type="Reshape" version="opset1">
+			<data special_zero="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="529,input.153">
+					<dim>1</dim>
+					<dim>1280</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="223" name="self.model.9.linear.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="656, 1280" offset="63050728" size="3358720" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.linear.weight">
+					<dim>656</dim>
+					<dim>1280</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="224" name="__module.model.9.linear/aten::linear/MatMul" type="MatMul" version="opset1">
+			<data transpose_a="false" transpose_b="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>656</dim>
+					<dim>1280</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>656</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="225" name="Constant_5426" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 656" offset="66409448" size="2624" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>656</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="226" name="__module.model.9.linear/aten::linear/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>656</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>656</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="533,x_1">
+					<dim>1</dim>
+					<dim>656</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="227" name="__module.model.9/aten::softmax/Softmax" type="SoftMax" version="opset8">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>656</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>656</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="228" name="Result_2055" type="Result" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>656</dim>
+				</port>
+			</input>
+		</layer>
+	</layers>
+	<edges>
+		<edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
+		<edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
+		<edge from-layer="2" from-port="2" to-layer="4" to-port="0" />
+		<edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
+		<edge from-layer="4" from-port="2" to-layer="5" to-port="0" />
+		<edge from-layer="5" from-port="1" to-layer="7" to-port="0" />
+		<edge from-layer="6" from-port="0" to-layer="7" to-port="1" />
+		<edge from-layer="7" from-port="2" to-layer="9" to-port="0" />
+		<edge from-layer="8" from-port="0" to-layer="9" to-port="1" />
+		<edge from-layer="9" from-port="2" to-layer="10" to-port="0" />
+		<edge from-layer="10" from-port="1" to-layer="12" to-port="0" />
+		<edge from-layer="11" from-port="0" to-layer="12" to-port="1" />
+		<edge from-layer="12" from-port="2" to-layer="14" to-port="0" />
+		<edge from-layer="13" from-port="0" to-layer="14" to-port="1" />
+		<edge from-layer="14" from-port="2" to-layer="15" to-port="0" />
+		<edge from-layer="15" from-port="1" to-layer="18" to-port="0" />
+		<edge from-layer="16" from-port="0" to-layer="18" to-port="1" />
+		<edge from-layer="17" from-port="0" to-layer="18" to-port="2" />
+		<edge from-layer="18" from-port="4" to-layer="20" to-port="0" />
+		<edge from-layer="18" from-port="4" to-layer="29" to-port="0" />
+		<edge from-layer="18" from-port="3" to-layer="41" to-port="0" />
+		<edge from-layer="18" from-port="4" to-layer="41" to-port="1" />
+		<edge from-layer="19" from-port="0" to-layer="20" to-port="1" />
+		<edge from-layer="20" from-port="2" to-layer="22" to-port="0" />
+		<edge from-layer="21" from-port="0" to-layer="22" to-port="1" />
+		<edge from-layer="22" from-port="2" to-layer="23" to-port="0" />
+		<edge from-layer="23" from-port="1" to-layer="25" to-port="0" />
+		<edge from-layer="24" from-port="0" to-layer="25" to-port="1" />
+		<edge from-layer="25" from-port="2" to-layer="27" to-port="0" />
+		<edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
+		<edge from-layer="27" from-port="2" to-layer="28" to-port="0" />
+		<edge from-layer="28" from-port="1" to-layer="29" to-port="1" />
+		<edge from-layer="29" from-port="2" to-layer="40" to-port="0" />
+		<edge from-layer="29" from-port="2" to-layer="31" to-port="0" />
+		<edge from-layer="29" from-port="2" to-layer="41" to-port="2" />
+		<edge from-layer="30" from-port="0" to-layer="31" to-port="1" />
+		<edge from-layer="31" from-port="2" to-layer="33" to-port="0" />
+		<edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
+		<edge from-layer="33" from-port="2" to-layer="34" to-port="0" />
+		<edge from-layer="34" from-port="1" to-layer="36" to-port="0" />
+		<edge from-layer="35" from-port="0" to-layer="36" to-port="1" />
+		<edge from-layer="36" from-port="2" to-layer="38" to-port="0" />
+		<edge from-layer="37" from-port="0" to-layer="38" to-port="1" />
+		<edge from-layer="38" from-port="2" to-layer="39" to-port="0" />
+		<edge from-layer="39" from-port="1" to-layer="40" to-port="1" />
+		<edge from-layer="40" from-port="2" to-layer="41" to-port="3" />
+		<edge from-layer="41" from-port="4" to-layer="43" to-port="0" />
+		<edge from-layer="42" from-port="0" to-layer="43" to-port="1" />
+		<edge from-layer="43" from-port="2" to-layer="45" to-port="0" />
+		<edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
+		<edge from-layer="45" from-port="2" to-layer="46" to-port="0" />
+		<edge from-layer="46" from-port="1" to-layer="48" to-port="0" />
+		<edge from-layer="47" from-port="0" to-layer="48" to-port="1" />
+		<edge from-layer="48" from-port="2" to-layer="50" to-port="0" />
+		<edge from-layer="49" from-port="0" to-layer="50" to-port="1" />
+		<edge from-layer="50" from-port="2" to-layer="51" to-port="0" />
+		<edge from-layer="51" from-port="1" to-layer="53" to-port="0" />
+		<edge from-layer="52" from-port="0" to-layer="53" to-port="1" />
+		<edge from-layer="53" from-port="2" to-layer="55" to-port="0" />
+		<edge from-layer="54" from-port="0" to-layer="55" to-port="1" />
+		<edge from-layer="55" from-port="2" to-layer="56" to-port="0" />
+		<edge from-layer="56" from-port="1" to-layer="59" to-port="0" />
+		<edge from-layer="57" from-port="0" to-layer="59" to-port="1" />
+		<edge from-layer="58" from-port="0" to-layer="59" to-port="2" />
+		<edge from-layer="59" from-port="4" to-layer="61" to-port="0" />
+		<edge from-layer="59" from-port="4" to-layer="70" to-port="0" />
+		<edge from-layer="59" from-port="3" to-layer="104" to-port="0" />
+		<edge from-layer="59" from-port="4" to-layer="104" to-port="1" />
+		<edge from-layer="60" from-port="0" to-layer="61" to-port="1" />
+		<edge from-layer="61" from-port="2" to-layer="63" to-port="0" />
+		<edge from-layer="62" from-port="0" to-layer="63" to-port="1" />
+		<edge from-layer="63" from-port="2" to-layer="64" to-port="0" />
+		<edge from-layer="64" from-port="1" to-layer="66" to-port="0" />
+		<edge from-layer="65" from-port="0" to-layer="66" to-port="1" />
+		<edge from-layer="66" from-port="2" to-layer="68" to-port="0" />
+		<edge from-layer="67" from-port="0" to-layer="68" to-port="1" />
+		<edge from-layer="68" from-port="2" to-layer="69" to-port="0" />
+		<edge from-layer="69" from-port="1" to-layer="70" to-port="1" />
+		<edge from-layer="70" from-port="2" to-layer="72" to-port="0" />
+		<edge from-layer="70" from-port="2" to-layer="81" to-port="0" />
+		<edge from-layer="70" from-port="2" to-layer="104" to-port="2" />
+		<edge from-layer="71" from-port="0" to-layer="72" to-port="1" />
+		<edge from-layer="72" from-port="2" to-layer="74" to-port="0" />
+		<edge from-layer="73" from-port="0" to-layer="74" to-port="1" />
+		<edge from-layer="74" from-port="2" to-layer="75" to-port="0" />
+		<edge from-layer="75" from-port="1" to-layer="77" to-port="0" />
+		<edge from-layer="76" from-port="0" to-layer="77" to-port="1" />
+		<edge from-layer="77" from-port="2" to-layer="79" to-port="0" />
+		<edge from-layer="78" from-port="0" to-layer="79" to-port="1" />
+		<edge from-layer="79" from-port="2" to-layer="80" to-port="0" />
+		<edge from-layer="80" from-port="1" to-layer="81" to-port="1" />
+		<edge from-layer="81" from-port="2" to-layer="83" to-port="0" />
+		<edge from-layer="81" from-port="2" to-layer="92" to-port="0" />
+		<edge from-layer="81" from-port="2" to-layer="104" to-port="3" />
+		<edge from-layer="82" from-port="0" to-layer="83" to-port="1" />
+		<edge from-layer="83" from-port="2" to-layer="85" to-port="0" />
+		<edge from-layer="84" from-port="0" to-layer="85" to-port="1" />
+		<edge from-layer="85" from-port="2" to-layer="86" to-port="0" />
+		<edge from-layer="86" from-port="1" to-layer="88" to-port="0" />
+		<edge from-layer="87" from-port="0" to-layer="88" to-port="1" />
+		<edge from-layer="88" from-port="2" to-layer="90" to-port="0" />
+		<edge from-layer="89" from-port="0" to-layer="90" to-port="1" />
+		<edge from-layer="90" from-port="2" to-layer="91" to-port="0" />
+		<edge from-layer="91" from-port="1" to-layer="92" to-port="1" />
+		<edge from-layer="92" from-port="2" to-layer="94" to-port="0" />
+		<edge from-layer="92" from-port="2" to-layer="103" to-port="0" />
+		<edge from-layer="92" from-port="2" to-layer="104" to-port="4" />
+		<edge from-layer="93" from-port="0" to-layer="94" to-port="1" />
+		<edge from-layer="94" from-port="2" to-layer="96" to-port="0" />
+		<edge from-layer="95" from-port="0" to-layer="96" to-port="1" />
+		<edge from-layer="96" from-port="2" to-layer="97" to-port="0" />
+		<edge from-layer="97" from-port="1" to-layer="99" to-port="0" />
+		<edge from-layer="98" from-port="0" to-layer="99" to-port="1" />
+		<edge from-layer="99" from-port="2" to-layer="101" to-port="0" />
+		<edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
+		<edge from-layer="101" from-port="2" to-layer="102" to-port="0" />
+		<edge from-layer="102" from-port="1" to-layer="103" to-port="1" />
+		<edge from-layer="103" from-port="2" to-layer="104" to-port="5" />
+		<edge from-layer="104" from-port="6" to-layer="106" to-port="0" />
+		<edge from-layer="105" from-port="0" to-layer="106" to-port="1" />
+		<edge from-layer="106" from-port="2" to-layer="108" to-port="0" />
+		<edge from-layer="107" from-port="0" to-layer="108" to-port="1" />
+		<edge from-layer="108" from-port="2" to-layer="109" to-port="0" />
+		<edge from-layer="109" from-port="1" to-layer="111" to-port="0" />
+		<edge from-layer="110" from-port="0" to-layer="111" to-port="1" />
+		<edge from-layer="111" from-port="2" to-layer="113" to-port="0" />
+		<edge from-layer="112" from-port="0" to-layer="113" to-port="1" />
+		<edge from-layer="113" from-port="2" to-layer="114" to-port="0" />
+		<edge from-layer="114" from-port="1" to-layer="116" to-port="0" />
+		<edge from-layer="115" from-port="0" to-layer="116" to-port="1" />
+		<edge from-layer="116" from-port="2" to-layer="118" to-port="0" />
+		<edge from-layer="117" from-port="0" to-layer="118" to-port="1" />
+		<edge from-layer="118" from-port="2" to-layer="119" to-port="0" />
+		<edge from-layer="119" from-port="1" to-layer="122" to-port="0" />
+		<edge from-layer="120" from-port="0" to-layer="122" to-port="1" />
+		<edge from-layer="121" from-port="0" to-layer="122" to-port="2" />
+		<edge from-layer="122" from-port="4" to-layer="124" to-port="0" />
+		<edge from-layer="122" from-port="4" to-layer="133" to-port="0" />
+		<edge from-layer="122" from-port="3" to-layer="167" to-port="0" />
+		<edge from-layer="122" from-port="4" to-layer="167" to-port="1" />
+		<edge from-layer="123" from-port="0" to-layer="124" to-port="1" />
+		<edge from-layer="124" from-port="2" to-layer="126" to-port="0" />
+		<edge from-layer="125" from-port="0" to-layer="126" to-port="1" />
+		<edge from-layer="126" from-port="2" to-layer="127" to-port="0" />
+		<edge from-layer="127" from-port="1" to-layer="129" to-port="0" />
+		<edge from-layer="128" from-port="0" to-layer="129" to-port="1" />
+		<edge from-layer="129" from-port="2" to-layer="131" to-port="0" />
+		<edge from-layer="130" from-port="0" to-layer="131" to-port="1" />
+		<edge from-layer="131" from-port="2" to-layer="132" to-port="0" />
+		<edge from-layer="132" from-port="1" to-layer="133" to-port="1" />
+		<edge from-layer="133" from-port="2" to-layer="144" to-port="0" />
+		<edge from-layer="133" from-port="2" to-layer="135" to-port="0" />
+		<edge from-layer="133" from-port="2" to-layer="167" to-port="2" />
+		<edge from-layer="134" from-port="0" to-layer="135" to-port="1" />
+		<edge from-layer="135" from-port="2" to-layer="137" to-port="0" />
+		<edge from-layer="136" from-port="0" to-layer="137" to-port="1" />
+		<edge from-layer="137" from-port="2" to-layer="138" to-port="0" />
+		<edge from-layer="138" from-port="1" to-layer="140" to-port="0" />
+		<edge from-layer="139" from-port="0" to-layer="140" to-port="1" />
+		<edge from-layer="140" from-port="2" to-layer="142" to-port="0" />
+		<edge from-layer="141" from-port="0" to-layer="142" to-port="1" />
+		<edge from-layer="142" from-port="2" to-layer="143" to-port="0" />
+		<edge from-layer="143" from-port="1" to-layer="144" to-port="1" />
+		<edge from-layer="144" from-port="2" to-layer="155" to-port="0" />
+		<edge from-layer="144" from-port="2" to-layer="146" to-port="0" />
+		<edge from-layer="144" from-port="2" to-layer="167" to-port="3" />
+		<edge from-layer="145" from-port="0" to-layer="146" to-port="1" />
+		<edge from-layer="146" from-port="2" to-layer="148" to-port="0" />
+		<edge from-layer="147" from-port="0" to-layer="148" to-port="1" />
+		<edge from-layer="148" from-port="2" to-layer="149" to-port="0" />
+		<edge from-layer="149" from-port="1" to-layer="151" to-port="0" />
+		<edge from-layer="150" from-port="0" to-layer="151" to-port="1" />
+		<edge from-layer="151" from-port="2" to-layer="153" to-port="0" />
+		<edge from-layer="152" from-port="0" to-layer="153" to-port="1" />
+		<edge from-layer="153" from-port="2" to-layer="154" to-port="0" />
+		<edge from-layer="154" from-port="1" to-layer="155" to-port="1" />
+		<edge from-layer="155" from-port="2" to-layer="157" to-port="0" />
+		<edge from-layer="155" from-port="2" to-layer="166" to-port="0" />
+		<edge from-layer="155" from-port="2" to-layer="167" to-port="4" />
+		<edge from-layer="156" from-port="0" to-layer="157" to-port="1" />
+		<edge from-layer="157" from-port="2" to-layer="159" to-port="0" />
+		<edge from-layer="158" from-port="0" to-layer="159" to-port="1" />
+		<edge from-layer="159" from-port="2" to-layer="160" to-port="0" />
+		<edge from-layer="160" from-port="1" to-layer="162" to-port="0" />
+		<edge from-layer="161" from-port="0" to-layer="162" to-port="1" />
+		<edge from-layer="162" from-port="2" to-layer="164" to-port="0" />
+		<edge from-layer="163" from-port="0" to-layer="164" to-port="1" />
+		<edge from-layer="164" from-port="2" to-layer="165" to-port="0" />
+		<edge from-layer="165" from-port="1" to-layer="166" to-port="1" />
+		<edge from-layer="166" from-port="2" to-layer="167" to-port="5" />
+		<edge from-layer="167" from-port="6" to-layer="169" to-port="0" />
+		<edge from-layer="168" from-port="0" to-layer="169" to-port="1" />
+		<edge from-layer="169" from-port="2" to-layer="171" to-port="0" />
+		<edge from-layer="170" from-port="0" to-layer="171" to-port="1" />
+		<edge from-layer="171" from-port="2" to-layer="172" to-port="0" />
+		<edge from-layer="172" from-port="1" to-layer="174" to-port="0" />
+		<edge from-layer="173" from-port="0" to-layer="174" to-port="1" />
+		<edge from-layer="174" from-port="2" to-layer="176" to-port="0" />
+		<edge from-layer="175" from-port="0" to-layer="176" to-port="1" />
+		<edge from-layer="176" from-port="2" to-layer="177" to-port="0" />
+		<edge from-layer="177" from-port="1" to-layer="179" to-port="0" />
+		<edge from-layer="178" from-port="0" to-layer="179" to-port="1" />
+		<edge from-layer="179" from-port="2" to-layer="181" to-port="0" />
+		<edge from-layer="180" from-port="0" to-layer="181" to-port="1" />
+		<edge from-layer="181" from-port="2" to-layer="182" to-port="0" />
+		<edge from-layer="182" from-port="1" to-layer="185" to-port="0" />
+		<edge from-layer="183" from-port="0" to-layer="185" to-port="1" />
+		<edge from-layer="184" from-port="0" to-layer="185" to-port="2" />
+		<edge from-layer="185" from-port="4" to-layer="187" to-port="0" />
+		<edge from-layer="185" from-port="4" to-layer="196" to-port="0" />
+		<edge from-layer="185" from-port="3" to-layer="208" to-port="0" />
+		<edge from-layer="185" from-port="4" to-layer="208" to-port="1" />
+		<edge from-layer="186" from-port="0" to-layer="187" to-port="1" />
+		<edge from-layer="187" from-port="2" to-layer="189" to-port="0" />
+		<edge from-layer="188" from-port="0" to-layer="189" to-port="1" />
+		<edge from-layer="189" from-port="2" to-layer="190" to-port="0" />
+		<edge from-layer="190" from-port="1" to-layer="192" to-port="0" />
+		<edge from-layer="191" from-port="0" to-layer="192" to-port="1" />
+		<edge from-layer="192" from-port="2" to-layer="194" to-port="0" />
+		<edge from-layer="193" from-port="0" to-layer="194" to-port="1" />
+		<edge from-layer="194" from-port="2" to-layer="195" to-port="0" />
+		<edge from-layer="195" from-port="1" to-layer="196" to-port="1" />
+		<edge from-layer="196" from-port="2" to-layer="198" to-port="0" />
+		<edge from-layer="196" from-port="2" to-layer="207" to-port="0" />
+		<edge from-layer="196" from-port="2" to-layer="208" to-port="2" />
+		<edge from-layer="197" from-port="0" to-layer="198" to-port="1" />
+		<edge from-layer="198" from-port="2" to-layer="200" to-port="0" />
+		<edge from-layer="199" from-port="0" to-layer="200" to-port="1" />
+		<edge from-layer="200" from-port="2" to-layer="201" to-port="0" />
+		<edge from-layer="201" from-port="1" to-layer="203" to-port="0" />
+		<edge from-layer="202" from-port="0" to-layer="203" to-port="1" />
+		<edge from-layer="203" from-port="2" to-layer="205" to-port="0" />
+		<edge from-layer="204" from-port="0" to-layer="205" to-port="1" />
+		<edge from-layer="205" from-port="2" to-layer="206" to-port="0" />
+		<edge from-layer="206" from-port="1" to-layer="207" to-port="1" />
+		<edge from-layer="207" from-port="2" to-layer="208" to-port="3" />
+		<edge from-layer="208" from-port="4" to-layer="210" to-port="0" />
+		<edge from-layer="209" from-port="0" to-layer="210" to-port="1" />
+		<edge from-layer="210" from-port="2" to-layer="212" to-port="0" />
+		<edge from-layer="211" from-port="0" to-layer="212" to-port="1" />
+		<edge from-layer="212" from-port="2" to-layer="213" to-port="0" />
+		<edge from-layer="213" from-port="1" to-layer="215" to-port="0" />
+		<edge from-layer="214" from-port="0" to-layer="215" to-port="1" />
+		<edge from-layer="215" from-port="2" to-layer="217" to-port="0" />
+		<edge from-layer="216" from-port="0" to-layer="217" to-port="1" />
+		<edge from-layer="217" from-port="2" to-layer="218" to-port="0" />
+		<edge from-layer="218" from-port="1" to-layer="220" to-port="0" />
+		<edge from-layer="219" from-port="0" to-layer="220" to-port="1" />
+		<edge from-layer="220" from-port="2" to-layer="222" to-port="0" />
+		<edge from-layer="221" from-port="0" to-layer="222" to-port="1" />
+		<edge from-layer="222" from-port="2" to-layer="224" to-port="0" />
+		<edge from-layer="223" from-port="0" to-layer="224" to-port="1" />
+		<edge from-layer="224" from-port="2" to-layer="226" to-port="0" />
+		<edge from-layer="225" from-port="0" to-layer="226" to-port="1" />
+		<edge from-layer="226" from-port="2" to-layer="227" to-port="0" />
+		<edge from-layer="227" from-port="1" to-layer="228" to-port="0" />
+	</edges>
+	<rt_info>
+		<Runtime_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
+		<conversion_parameters>
+			<framework value="pytorch" />
+			<is_python_object value="True" />
+		</conversion_parameters>
+		<model_info>
+			<iou_threshold value="0.7" />
+			<labels value="Choulingdancao Dayezizhu aidicha aiye anxixiang awei badou baibeiyegen baibiandou baibu baifan baifuzi baiguo baihe baihelingzhi baihuadan baihuasheshecao baiji baijiangcao bailian baimagu baimaogen baiqian baishao baishihua baishiying baishouwu baitouweng baiwei baixianpi baiying baizhi baizhu bajiaofeng bajiaohuixiang bajiaolian bajitian banbianlian banfenghe banmao banxia banzhilian baomadingxiang baqia beidougen beiliujinu beishashen bianxu biba(bo) bichengqie biejia bimazi binglang bohe bohuohui buguzhi buzhaye cangerzi cangzhu caodoukou caoguo cebaiye chaihu changchunhua changshan chanhua chansha chansu chantui chenpi chenxiang cheqiancao cheqianzi chishao chishizhi chixiaodou chonglou chongweizi chongyayao chouwutong chuanbeimu chuanlianzi chuanmutong chuanposhi chuanshanjia chuanshanlong chuanwu chuanxinlian chuanxiong chuipencao chunpi chushizi cishi ciweipi ciwujia cixiancai congzi cuiyuncao cujiangcao dabogu dadouhuangjuan dafeiyangcao dafengai dafengzi dafupi dahuang daji dandouchi danfan danggui dangshen dangyao dannanxing danshen danzhuye daodou daokoucao daoya daqingye dasuan daxueteng dayeanye dazao dengxincao dengzhanxixin diburong diercao difengpi difuzi digupi dihuang dijincao dinggongteng dingxiang direngen dongchongxiacao dongfengju dongguapi dongkuizi donglingcao doukou duanxueliu duhuo duzhong duzhongye ebushicao eshen eshu fanbaicao fangfeng fangji fangjieshi fanshiliuye fanxieye feilongzhangxue feizi fenge fengfang fenghuangyi fengweicao fengxiangzhi fengyanguo foshou fuling fulonggan fupenzi fuping fushen fuxiaomai fuzi gancao gangbangui gangmeigen gangrengen ganjiang ganqi gansong gansui gaoben gaoliangjiang gegen gejie gonglaomu gouguye gouji gouqi goushen gouteng gualou guangdonghaitongpi guangdongliujinu guangdongwangbuliuxing guanghuoxiang guangzao guanhuangbo guanyejinsitao guanzhong guazijin guihua guijia guijianyu guipi guizhencao guizhi gujingcao gusuibu haigeke hailong haima haipiaoxiao haishen haitongpi haizao 
hancai hantaoye hehuamhua hehuanpi heidou heshi heshouwu hetaoren heye hezi hongdji hongdoukou honghua hongjingtian hongqi hongqu hongshen hongtiankui houpu houpuhua huaihua huaijiao huajiao huajuhong huangjing huangjingzi huanglian huangpihe huangqi huangqin huangshukui huangteng huangyaozi huashi hubeibeimu huercao hujiao hujisheng hulucha huomaren huotanmu hutuiziye huzhang jiangcan jianghuang jiangxiang jianhua jiaogulan jicai jidanhua jiegeng jigucao jiguxiang jili jindenglong jineijin jinfeicao jingjie jingtiansanqi jingucao jinguolan jinqiaomai jinsicao jintiesuo jinyingzi jinyinhua jishiteng jiubiying jiucengta jiujiechangpu jiulixiang jiulongteng jiuxiangchong jixuecao jixueteng jiyancao juanbai juemingzi juhe juhua juqu juqu2 ketengzi kuandonghua kuanjinteng kudiding kudingcha kugua kulianpi kumaicai kumu kunbu kushen kushilian kuxingren lajiao laliao lanbuzheng langdu langyupi laoguancao leigongteng leiwan lianfang liangmianzhen liangtoujian lianqiancao lianqiao lianzi lianzixin liaogewang lilu lingxiangcao lingxiaohua lingzhi liushenqu liuyuehan lizhihe longchi longdan longgu longkui longliye longyanrou loulu luganshi lugen luobumaye luofumu luohanguo luole luoshiteng lurong luxiancao luying lvcao lvdou mabiancao mabo machixian madouling mahuang mahuanggen maidong maiya manjingzi manshanhong maozhaocao maqianzi maweilian meiguihua meihua mengchong mianbixie mianbixiepian mimenghua mohanlian molihua mubiezi mudanpi mudingxiang muer mufangji mugua muhao muhudie mujingye mujinhua mujinpi muli mumianhua muxiang muzei nanbanlangen nanguazi nanshanzha nanshashen nanwuweizi naosha naoyanghua niubangzi niudali niuduteng niuhuang niuxi nuodaogen nvzhenzi oujie pangdahai peilan pianjianghuang pijiuhua pipaye pugongying qiancao qianghuo qianhu qianjinba qianjinzi qianliguang qiannianjian qianniuzi qianrihong qianshi qingfengteng qingguo qinghao qingmuxiang qingniudan qingpi qingtiankui qingyangshe qingyedan qinjiao qinpi qiyelian qiyeyizhihua quanshen qumai 
rendongteng renshen renshenye ricaogen rongshuye roucongrong roudoukou ruiren sanbaicao sanbaicaogen sanfensan sangbaipi sangshen sangye sangzhipian sankezhen sanleng sanqi sanqipian sanyaku shaji shancigu shandayan shandougen shanglu shannai shanyao shanzha shanzhanye shanzhima shanzhuyu sharen shegan sheliugu shemei shencha shengjiangpi shengma shenjincao shetui shicancao shichangpu shidi shidiaolan shihu shihu2 shijueming shijunzi shiliupi shinanteng shinanye shishangbai shisuan shiwei shouwuteng shuangshen shudihuang shuifeiji shuizhi shuqicao sigualuo sijiqing siyeshen songmu songxiang suanzaoren sumu suoluozi suoyang taibaimi taizishen tanxiang taoren taozhi tiandiding tiandong tianguadi tianguazi tianhuafen tianhusui tiankuizi tianma tiannanxing tianxianteng tianzhuhuang tiebaojin tiexian tongcao tougucao tubeimu tubiechong tujingpi tuniuexi walengzi wangbuliuxing wasong weilingxian wenjing wubeizi wugenteng wuhuaguo wuhuaguo2 wuhuanzi wujiapi wujiuzi wulingzhi wumei wushaoshe wutongzi wuyao wuzhimaotao wuzhuyu xiakucao xiangfu xiangru xiangsizi xiangyuan xianhecao xianmao xiaobogu xiaobopi xiaohuixiang xiaoji xiaotongcao xiatianwu xiebai xiecao xiguapi xiheliu xihuangcao xinyi xionghuang xishuguo xixiancao xixin xiyangshen xuancaogen xuanshen xuchangqing xuduan xuefengteng xueshangyizhihao xueyutan xungufeng yadanzi yanduzhong yangqishi yangticao yanhusuo yanwo yazhangmupi yazhicao yejuhua yexiazhu yimucao yinchaihu yinchen yiner yingchunhua yingsuke yingtaohe yinxingye yinyanghuo yiyiren yizhi yizhihuanghua yizhijian yousongjie yuanbaocao yuanhua yuansuizi yuanzhi yuejihua yuganzi yujin yuliren yumixu yuxingcao yuyejinhua yuyuliang yuzhizi yuzhouloulu yuzhu zaojiao zaojiao1 zaojiaoci zaojiaoci1 zelan zeqi zexie zhangmu zhebeimu zhenzhumu zhigancao zhihuangqi zhijuzi zhimu zhiqiao zhishanzhuyu zhishi zhizhuxiang zhizi zhongjiefeng zhongrushi zhujieshen zhuling zhumagen zhuru zhushagen zhuyazao zhuzishen zibeichi zicao zicaorong ziheche zishaohua zisugeng 
zisuye zisuzi ziyuan zizhuye zonglv zoumajian zoumatai" />
+			<model_type value="YOLO" />
+			<pad_value value="114" />
+			<reverse_input_channels value="YES" />
+			<scale_values value="255" />
+		</model_info>
+	</rt_info>
+</net>
diff --git a/openvino/model/herb_id/metadata.yaml b/openvino/model/herb_id/metadata.yaml
new file mode 100644
index 0000000..a2578be
--- /dev/null
+++ b/openvino/model/herb_id/metadata.yaml
@@ -0,0 +1,669 @@
+description: Ultralytics YOLOv8m-cls model trained on D:\res_pic2
+author: Ultralytics
+date: '2025-04-18T13:34:07.698889'
+version: 8.3.53
+license: AGPL-3.0 License (https://ultralytics.com/license)
+docs: https://docs.ultralytics.com
+stride: 1
+task: classify
+batch: 1
+imgsz:
+- 640
+- 640
+names:
+  0: Choulingdancao
+  1: Dayezizhu
+  2: aidicha
+  3: aiye
+  4: anxixiang
+  5: awei
+  6: badou
+  7: baibeiyegen
+  8: baibiandou
+  9: baibu
+  10: baifan
+  11: baifuzi
+  12: baiguo
+  13: baihe
+  14: baihelingzhi
+  15: baihuadan
+  16: baihuasheshecao
+  17: baiji
+  18: baijiangcao
+  19: bailian
+  20: baimagu
+  21: baimaogen
+  22: baiqian
+  23: baishao
+  24: baishihua
+  25: baishiying
+  26: baishouwu
+  27: baitouweng
+  28: baiwei
+  29: baixianpi
+  30: baiying
+  31: baizhi
+  32: baizhu
+  33: bajiaofeng
+  34: bajiaohuixiang
+  35: bajiaolian
+  36: bajitian
+  37: banbianlian
+  38: banfenghe
+  39: banmao
+  40: banxia
+  41: banzhilian
+  42: baomadingxiang
+  43: baqia
+  44: beidougen
+  45: beiliujinu
+  46: beishashen
+  47: bianxu
+  48: biba(bo)
+  49: bichengqie
+  50: biejia
+  51: bimazi
+  52: binglang
+  53: bohe
+  54: bohuohui
+  55: buguzhi
+  56: buzhaye
+  57: cangerzi
+  58: cangzhu
+  59: caodoukou
+  60: caoguo
+  61: cebaiye
+  62: chaihu
+  63: changchunhua
+  64: changshan
+  65: chanhua
+  66: chansha
+  67: chansu
+  68: chantui
+  69: chenpi
+  70: chenxiang
+  71: cheqiancao
+  72: cheqianzi
+  73: chishao
+  74: chishizhi
+  75: chixiaodou
+  76: chonglou
+  77: chongweizi
+  78: chongyayao
+  79: chouwutong
+  80: chuanbeimu
+  81: chuanlianzi
+  82: chuanmutong
+  83: chuanposhi
+  84: chuanshanjia
+  85: chuanshanlong
+  86: chuanwu
+  87: chuanxinlian
+  88: chuanxiong
+  89: chuipencao
+  90: chunpi
+  91: chushizi
+  92: cishi
+  93: ciweipi
+  94: ciwujia
+  95: cixiancai
+  96: congzi
+  97: cuiyuncao
+  98: cujiangcao
+  99: dabogu
+  100: dadouhuangjuan
+  101: dafeiyangcao
+  102: dafengai
+  103: dafengzi
+  104: dafupi
+  105: dahuang
+  106: daji
+  107: dandouchi
+  108: danfan
+  109: danggui
+  110: dangshen
+  111: dangyao
+  112: dannanxing
+  113: danshen
+  114: danzhuye
+  115: daodou
+  116: daokoucao
+  117: daoya
+  118: daqingye
+  119: dasuan
+  120: daxueteng
+  121: dayeanye
+  122: dazao
+  123: dengxincao
+  124: dengzhanxixin
+  125: diburong
+  126: diercao
+  127: difengpi
+  128: difuzi
+  129: digupi
+  130: dihuang
+  131: dijincao
+  132: dinggongteng
+  133: dingxiang
+  134: direngen
+  135: dongchongxiacao
+  136: dongfengju
+  137: dongguapi
+  138: dongkuizi
+  139: donglingcao
+  140: doukou
+  141: duanxueliu
+  142: duhuo
+  143: duzhong
+  144: duzhongye
+  145: ebushicao
+  146: eshen
+  147: eshu
+  148: fanbaicao
+  149: fangfeng
+  150: fangji
+  151: fangjieshi
+  152: fanshiliuye
+  153: fanxieye
+  154: feilongzhangxue
+  155: feizi
+  156: fenge
+  157: fengfang
+  158: fenghuangyi
+  159: fengweicao
+  160: fengxiangzhi
+  161: fengyanguo
+  162: foshou
+  163: fuling
+  164: fulonggan
+  165: fupenzi
+  166: fuping
+  167: fushen
+  168: fuxiaomai
+  169: fuzi
+  170: gancao
+  171: gangbangui
+  172: gangmeigen
+  173: gangrengen
+  174: ganjiang
+  175: ganqi
+  176: gansong
+  177: gansui
+  178: gaoben
+  179: gaoliangjiang
+  180: gegen
+  181: gejie
+  182: gonglaomu
+  183: gouguye
+  184: gouji
+  185: gouqi
+  186: goushen
+  187: gouteng
+  188: gualou
+  189: guangdonghaitongpi
+  190: guangdongliujinu
+  191: guangdongwangbuliuxing
+  192: guanghuoxiang
+  193: guangzao
+  194: guanhuangbo
+  195: guanyejinsitao
+  196: guanzhong
+  197: guazijin
+  198: guihua
+  199: guijia
+  200: guijianyu
+  201: guipi
+  202: guizhencao
+  203: guizhi
+  204: gujingcao
+  205: gusuibu
+  206: haigeke
+  207: hailong
+  208: haima
+  209: haipiaoxiao
+  210: haishen
+  211: haitongpi
+  212: haizao
+  213: hancai
+  214: hantaoye
+  215: hehuamhua
+  216: hehuanpi
+  217: heidou
+  218: heshi
+  219: heshouwu
+  220: hetaoren
+  221: heye
+  222: hezi
+  223: hongdji
+  224: hongdoukou
+  225: honghua
+  226: hongjingtian
+  227: hongqi
+  228: hongqu
+  229: hongshen
+  230: hongtiankui
+  231: houpu
+  232: houpuhua
+  233: huaihua
+  234: huaijiao
+  235: huajiao
+  236: huajuhong
+  237: huangjing
+  238: huangjingzi
+  239: huanglian
+  240: huangpihe
+  241: huangqi
+  242: huangqin
+  243: huangshukui
+  244: huangteng
+  245: huangyaozi
+  246: huashi
+  247: hubeibeimu
+  248: huercao
+  249: hujiao
+  250: hujisheng
+  251: hulucha
+  252: huomaren
+  253: huotanmu
+  254: hutuiziye
+  255: huzhang
+  256: jiangcan
+  257: jianghuang
+  258: jiangxiang
+  259: jianhua
+  260: jiaogulan
+  261: jicai
+  262: jidanhua
+  263: jiegeng
+  264: jigucao
+  265: jiguxiang
+  266: jili
+  267: jindenglong
+  268: jineijin
+  269: jinfeicao
+  270: jingjie
+  271: jingtiansanqi
+  272: jingucao
+  273: jinguolan
+  274: jinqiaomai
+  275: jinsicao
+  276: jintiesuo
+  277: jinyingzi
+  278: jinyinhua
+  279: jishiteng
+  280: jiubiying
+  281: jiucengta
+  282: jiujiechangpu
+  283: jiulixiang
+  284: jiulongteng
+  285: jiuxiangchong
+  286: jixuecao
+  287: jixueteng
+  288: jiyancao
+  289: juanbai
+  290: juemingzi
+  291: juhe
+  292: juhua
+  293: juqu
+  294: juqu2
+  295: ketengzi
+  296: kuandonghua
+  297: kuanjinteng
+  298: kudiding
+  299: kudingcha
+  300: kugua
+  301: kulianpi
+  302: kumaicai
+  303: kumu
+  304: kunbu
+  305: kushen
+  306: kushilian
+  307: kuxingren
+  308: lajiao
+  309: laliao
+  310: lanbuzheng
+  311: langdu
+  312: langyupi
+  313: laoguancao
+  314: leigongteng
+  315: leiwan
+  316: lianfang
+  317: liangmianzhen
+  318: liangtoujian
+  319: lianqiancao
+  320: lianqiao
+  321: lianzi
+  322: lianzixin
+  323: liaogewang
+  324: lilu
+  325: lingxiangcao
+  326: lingxiaohua
+  327: lingzhi
+  328: liushenqu
+  329: liuyuehan
+  330: lizhihe
+  331: longchi
+  332: longdan
+  333: longgu
+  334: longkui
+  335: longliye
+  336: longyanrou
+  337: loulu
+  338: luganshi
+  339: lugen
+  340: luobumaye
+  341: luofumu
+  342: luohanguo
+  343: luole
+  344: luoshiteng
+  345: lurong
+  346: luxiancao
+  347: luying
+  348: lvcao
+  349: lvdou
+  350: mabiancao
+  351: mabo
+  352: machixian
+  353: madouling
+  354: mahuang
+  355: mahuanggen
+  356: maidong
+  357: maiya
+  358: manjingzi
+  359: manshanhong
+  360: maozhaocao
+  361: maqianzi
+  362: maweilian
+  363: meiguihua
+  364: meihua
+  365: mengchong
+  366: mianbixie
+  367: mianbixiepian
+  368: mimenghua
+  369: mohanlian
+  370: molihua
+  371: mubiezi
+  372: mudanpi
+  373: mudingxiang
+  374: muer
+  375: mufangji
+  376: mugua
+  377: muhao
+  378: muhudie
+  379: mujingye
+  380: mujinhua
+  381: mujinpi
+  382: muli
+  383: mumianhua
+  384: muxiang
+  385: muzei
+  386: nanbanlangen
+  387: nanguazi
+  388: nanshanzha
+  389: nanshashen
+  390: nanwuweizi
+  391: naosha
+  392: naoyanghua
+  393: niubangzi
+  394: niudali
+  395: niuduteng
+  396: niuhuang
+  397: niuxi
+  398: nuodaogen
+  399: nvzhenzi
+  400: oujie
+  401: pangdahai
+  402: peilan
+  403: pianjianghuang
+  404: pijiuhua
+  405: pipaye
+  406: pugongying
+  407: qiancao
+  408: qianghuo
+  409: qianhu
+  410: qianjinba
+  411: qianjinzi
+  412: qianliguang
+  413: qiannianjian
+  414: qianniuzi
+  415: qianrihong
+  416: qianshi
+  417: qingfengteng
+  418: qingguo
+  419: qinghao
+  420: qingmuxiang
+  421: qingniudan
+  422: qingpi
+  423: qingtiankui
+  424: qingyangshe
+  425: qingyedan
+  426: qinjiao
+  427: qinpi
+  428: qiyelian
+  429: qiyeyizhihua
+  430: quanshen
+  431: qumai
+  432: rendongteng
+  433: renshen
+  434: renshenye
+  435: ricaogen
+  436: rongshuye
+  437: roucongrong
+  438: roudoukou
+  439: ruiren
+  440: sanbaicao
+  441: sanbaicaogen
+  442: sanfensan
+  443: sangbaipi
+  444: sangshen
+  445: sangye
+  446: sangzhipian
+  447: sankezhen
+  448: sanleng
+  449: sanqi
+  450: sanqipian
+  451: sanyaku
+  452: shaji
+  453: shancigu
+  454: shandayan
+  455: shandougen
+  456: shanglu
+  457: shannai
+  458: shanyao
+  459: shanzha
+  460: shanzhanye
+  461: shanzhima
+  462: shanzhuyu
+  463: sharen
+  464: shegan
+  465: sheliugu
+  466: shemei
+  467: shencha
+  468: shengjiangpi
+  469: shengma
+  470: shenjincao
+  471: shetui
+  472: shicancao
+  473: shichangpu
+  474: shidi
+  475: shidiaolan
+  476: shihu
+  477: shihu2
+  478: shijueming
+  479: shijunzi
+  480: shiliupi
+  481: shinanteng
+  482: shinanye
+  483: shishangbai
+  484: shisuan
+  485: shiwei
+  486: shouwuteng
+  487: shuangshen
+  488: shudihuang
+  489: shuifeiji
+  490: shuizhi
+  491: shuqicao
+  492: sigualuo
+  493: sijiqing
+  494: siyeshen
+  495: songmu
+  496: songxiang
+  497: suanzaoren
+  498: sumu
+  499: suoluozi
+  500: suoyang
+  501: taibaimi
+  502: taizishen
+  503: tanxiang
+  504: taoren
+  505: taozhi
+  506: tiandiding
+  507: tiandong
+  508: tianguadi
+  509: tianguazi
+  510: tianhuafen
+  511: tianhusui
+  512: tiankuizi
+  513: tianma
+  514: tiannanxing
+  515: tianxianteng
+  516: tianzhuhuang
+  517: tiebaojin
+  518: tiexian
+  519: tongcao
+  520: tougucao
+  521: tubeimu
+  522: tubiechong
+  523: tujingpi
+  524: tuniuexi
+  525: walengzi
+  526: wangbuliuxing
+  527: wasong
+  528: weilingxian
+  529: wenjing
+  530: wubeizi
+  531: wugenteng
+  532: wuhuaguo
+  533: wuhuaguo2
+  534: wuhuanzi
+  535: wujiapi
+  536: wujiuzi
+  537: wulingzhi
+  538: wumei
+  539: wushaoshe
+  540: wutongzi
+  541: wuyao
+  542: wuzhimaotao
+  543: wuzhuyu
+  544: xiakucao
+  545: xiangfu
+  546: xiangru
+  547: xiangsizi
+  548: xiangyuan
+  549: xianhecao
+  550: xianmao
+  551: xiaobogu
+  552: xiaobopi
+  553: xiaohuixiang
+  554: xiaoji
+  555: xiaotongcao
+  556: xiatianwu
+  557: xiebai
+  558: xiecao
+  559: xiguapi
+  560: xiheliu
+  561: xihuangcao
+  562: xinyi
+  563: xionghuang
+  564: xishuguo
+  565: xixiancao
+  566: xixin
+  567: xiyangshen
+  568: xuancaogen
+  569: xuanshen
+  570: xuchangqing
+  571: xuduan
+  572: xuefengteng
+  573: xueshangyizhihao
+  574: xueyutan
+  575: xungufeng
+  576: yadanzi
+  577: yanduzhong
+  578: yangqishi
+  579: yangticao
+  580: yanhusuo
+  581: yanwo
+  582: yazhangmupi
+  583: yazhicao
+  584: yejuhua
+  585: yexiazhu
+  586: yimucao
+  587: yinchaihu
+  588: yinchen
+  589: yiner
+  590: yingchunhua
+  591: yingsuke
+  592: yingtaohe
+  593: yinxingye
+  594: yinyanghuo
+  595: yiyiren
+  596: yizhi
+  597: yizhihuanghua
+  598: yizhijian
+  599: yousongjie
+  600: yuanbaocao
+  601: yuanhua
+  602: yuansuizi
+  603: yuanzhi
+  604: yuejihua
+  605: yuganzi
+  606: yujin
+  607: yuliren
+  608: yumixu
+  609: yuxingcao
+  610: yuyejinhua
+  611: yuyuliang
+  612: yuzhizi
+  613: yuzhouloulu
+  614: yuzhu
+  615: zaojiao
+  616: zaojiao1
+  617: zaojiaoci
+  618: zaojiaoci1
+  619: zelan
+  620: zeqi
+  621: zexie
+  622: zhangmu
+  623: zhebeimu
+  624: zhenzhumu
+  625: zhigancao
+  626: zhihuangqi
+  627: zhijuzi
+  628: zhimu
+  629: zhiqiao
+  630: zhishanzhuyu
+  631: zhishi
+  632: zhizhuxiang
+  633: zhizi
+  634: zhongjiefeng
+  635: zhongrushi
+  636: zhujieshen
+  637: zhuling
+  638: zhumagen
+  639: zhuru
+  640: zhushagen
+  641: zhuyazao
+  642: zhuzishen
+  643: zibeichi
+  644: zicao
+  645: zicaorong
+  646: ziheche
+  647: zishaohua
+  648: zisugeng
+  649: zisuye
+  650: zisuzi
+  651: ziyuan
+  652: zizhuye
+  653: zonglv
+  654: zoumajian
+  655: zoumatai
diff --git a/openvino/model/load_id/best.bin b/openvino/model/load_id/best.bin
new file mode 100644
index 0000000..4fd2bad
--- /dev/null
+++ b/openvino/model/load_id/best.bin
Binary files differ
diff --git a/openvino/model/load_id/best.xml b/openvino/model/load_id/best.xml
new file mode 100644
index 0000000..4e1085a
--- /dev/null
+++ b/openvino/model/load_id/best.xml
@@ -0,0 +1,4756 @@
+<?xml version="1.0"?>
+<net name="Model0" version="11">
+	<layers>
+		<layer id="0" name="x" type="Parameter" version="opset1">
+			<data shape="1,3,640,640" element_type="f32" />
+			<output>
+				<port id="0" precision="FP32" names="x">
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>640</dim>
+					<dim>640</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="1" name="self.model.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="16, 3, 3, 3" offset="0" size="1728" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.0.conv.weight">
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="2" name="__module.model.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>640</dim>
+					<dim>640</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="3" name="__module.model.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 16, 1, 1" offset="1728" size="64" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="4" name="__module.model.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="51_1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="5" name="__module.model.10.conv.act/aten::silu_/Swish" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="51,input.1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="6" name="self.model.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 16, 3, 3" offset="1792" size="18432" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.1.conv.weight">
+					<dim>32</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="7" name="__module.model.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="8" name="__module.model.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="20224" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="9" name="__module.model.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="65_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="10" name="__module.model.10.conv.act/aten::silu_/Swish_1" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="65,input.5">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="11" name="self.model.2.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 1, 1" offset="20352" size="4096" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="12" name="__module.model.2.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="13" name="__module.model.2.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="24448" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="14" name="__module.model.2.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="83_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="15" name="__module.model.10.conv.act/aten::silu_/Swish_2" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="83,input.9">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="16" name="71" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="24576" size="8" />
+			<output>
+				<port id="0" precision="I64" names="71" />
+			</output>
+		</layer>
+		<layer id="17" name="Constant_187" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="24584" size="16" />
+			<output>
+				<port id="0" precision="I64" names="85">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="18" name="__module.model.2/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="87">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="4" precision="FP32" names="88,input.11">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="19" name="self.model.2.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="8, 16, 3, 3" offset="24600" size="4608" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.m.0.cv1.conv.weight">
+					<dim>8</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="20" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>8</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="21" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 8, 1, 1" offset="29208" size="32" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="22" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="98_1">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="23" name="__module.model.10.conv.act/aten::silu_/Swish_3" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="98,input.13">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="24" name="self.model.2.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="16, 8, 3, 3" offset="29240" size="4608" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.m.0.cv2.conv.weight">
+					<dim>16</dim>
+					<dim>8</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="25" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>8</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>16</dim>
+					<dim>8</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="26" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 16, 1, 1" offset="33848" size="64" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="27" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="107_1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="28" name="__module.model.10.conv.act/aten::silu_/Swish_4" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="107,input.17">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="29" name="__module.model.2.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="109">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="30" name="__module.model.2/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="111,input.19">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="31" name="self.model.2.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 48, 1, 1" offset="33912" size="12288" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="32" name="__module.model.2.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="33" name="__module.model.2.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="46200" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="34" name="__module.model.2.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="119_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="35" name="__module.model.10.conv.act/aten::silu_/Swish_5" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="119,input.21">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="36" name="self.model.3.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="46456" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.3.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="37" name="__module.model.3.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="38" name="__module.model.3.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="193912" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="39" name="__module.model.3.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="133_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="40" name="__module.model.10.conv.act/aten::silu_/Swish_6" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="133,input.25">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="41" name="self.model.4.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 1, 1" offset="194168" size="16384" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="42" name="__module.model.4.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="43" name="__module.model.4.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="210552" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="44" name="__module.model.4.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="151_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="45" name="__module.model.10.conv.act/aten::silu_/Swish_7" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="151,input.29">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="46" name="139" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="24576" size="8" />
+			<output>
+				<port id="0" precision="I64" names="139" />
+			</output>
+		</layer>
+		<layer id="47" name="Constant_445" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="210808" size="16" />
+			<output>
+				<port id="0" precision="I64" names="153">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="48" name="__module.model.4/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="155">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="4" precision="FP32" names="156,input.31">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="49" name="self.model.4.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="16, 32, 3, 3" offset="210824" size="18432" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.0.cv1.conv.weight">
+					<dim>16</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="50" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>16</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="51" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 16, 1, 1" offset="229256" size="64" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="52" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="166_1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="53" name="__module.model.10.conv.act/aten::silu_/Swish_8" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="166,input.33">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="54" name="self.model.4.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 16, 3, 3" offset="229320" size="18432" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.0.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="55" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="56" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="247752" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="57" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="175_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="58" name="__module.model.10.conv.act/aten::silu_/Swish_9" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="175,input.37">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="59" name="__module.model.4.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="177">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="60" name="__module.model.4/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="179,input.39">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="61" name="self.model.4.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 96, 1, 1" offset="247880" size="49152" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.cv2.conv.weight">
+					<dim>128</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="62" name="__module.model.4.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="63" name="__module.model.4.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="297032" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="64" name="__module.model.4.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="187_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="65" name="__module.model.10.conv.act/aten::silu_/Swish_10" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="187,input.41">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="66" name="self.model.5.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 3, 3" offset="297544" size="589824" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.5.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="67" name="__module.model.5.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="68" name="__module.model.5.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="887368" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="69" name="__module.model.5.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="201_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="70" name="__module.model.10.conv.act/aten::silu_/Swish_11" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="201,input.45">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="71" name="self.model.6.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 1, 1" offset="887880" size="65536" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.cv1.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="72" name="__module.model.6.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="73" name="__module.model.6.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="953416" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="74" name="__module.model.6.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="219_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="75" name="__module.model.10.conv.act/aten::silu_/Swish_12" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="219,input.49">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="76" name="207" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="24576" size="8" />
+			<output>
+				<port id="0" precision="I64" names="207" />
+			</output>
+		</layer>
+		<layer id="77" name="Constant_703" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="953928" size="16" />
+			<output>
+				<port id="0" precision="I64" names="221">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="78" name="__module.model.6/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="223">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="4" precision="FP32" names="224,input.51">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="79" name="self.model.6.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 64, 1, 1" offset="953944" size="8192" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="80" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="81" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="962136" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="82" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="236_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="83" name="__module.model.10.conv.act/aten::silu_/Swish_13" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="236,input.53">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="84" name="self.model.6.m.0.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="962264" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.m.0.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="85" name="__module.model.6.m.0.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="86" name="__module.model.6.m.0.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="999128" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="87" name="__module.model.6.m.0.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="249_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="88" name="__module.model.10.conv.act/aten::silu_/Swish_14" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="249,input.57">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="89" name="self.model.6.m.0.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="999256" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.m.0.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="90" name="__module.model.6.m.0.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="91" name="__module.model.6.m.0.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="1036120" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="92" name="__module.model.6.m.0.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="258_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="93" name="__module.model.10.conv.act/aten::silu_/Swish_15" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="258,input.61">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="94" name="__module.model.6.m.0.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="260,input.63">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="95" name="self.model.6.m.0.m.1.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="1036248" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.m.1.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="96" name="__module.model.6.m.0.m.1.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="97" name="__module.model.6.m.0.m.1.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="1073112" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="98" name="__module.model.6.m.0.m.1.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="270_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="99" name="__module.model.10.conv.act/aten::silu_/Swish_16" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="270,input.65">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="100" name="self.model.6.m.0.m.1.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="1073240" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.m.1.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="101" name="__module.model.6.m.0.m.1.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="102" name="__module.model.6.m.0.m.1.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="1110104" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="103" name="__module.model.6.m.0.m.1.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="279_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="104" name="__module.model.10.conv.act/aten::silu_/Swish_17" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="279,input.69">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="105" name="__module.model.6.m.0.m.1/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="281">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="106" name="self.model.6.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 64, 1, 1" offset="1110232" size="8192" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="107" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="108" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="1118424" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="109" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="289_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="110" name="__module.model.10.conv.act/aten::silu_/Swish_18" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="289,input.71">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="111" name="__module.model.6.m.0/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="292,input.73">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="112" name="self.model.6.m.0.cv3.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 1, 1" offset="1118552" size="16384" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.cv3.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="113" name="__module.model.6.m.0.cv3.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="114" name="__module.model.6.m.0.cv3.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="1134936" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="115" name="__module.model.6.m.0.cv3.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="300_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="116" name="__module.model.10.conv.act/aten::silu_/Swish_19" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="300,input.75">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="117" name="__module.model.6/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="303,input.77">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="118" name="self.model.6.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 192, 1, 1" offset="1135192" size="98304" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.cv2.conv.weight">
+					<dim>128</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="119" name="__module.model.6.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="120" name="__module.model.6.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="1233496" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="121" name="__module.model.6.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="311_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="122" name="__module.model.10.conv.act/aten::silu_/Swish_20" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="311,input.79">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="123" name="self.model.7.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 128, 3, 3" offset="1234008" size="1179648" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.7.conv.weight">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="124" name="__module.model.7.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="125" name="__module.model.7.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="2413656" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="126" name="__module.model.7.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="325_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="127" name="__module.model.10.conv.act/aten::silu_/Swish_21" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="325,input.83">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="128" name="self.model.8.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 256, 1, 1" offset="2414680" size="262144" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.cv1.conv.weight">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="129" name="__module.model.8.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="130" name="__module.model.8.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="2676824" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="131" name="__module.model.8.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="343_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="132" name="__module.model.10.conv.act/aten::silu_/Swish_22" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="343,input.87">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="133" name="331" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="24576" size="8" />
+			<output>
+				<port id="0" precision="I64" names="331" />
+			</output>
+		</layer>
+		<layer id="134" name="Constant_1204" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="2677848" size="16" />
+			<output>
+				<port id="0" precision="I64" names="345">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="135" name="__module.model.8/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="347">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="4" precision="FP32" names="348,input.89">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="136" name="self.model.8.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 128, 1, 1" offset="2677864" size="32768" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="137" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="138" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="2710632" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="139" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="360_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="140" name="__module.model.10.conv.act/aten::silu_/Swish_23" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="360,input.91">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="141" name="self.model.8.m.0.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="2710888" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.m.0.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="142" name="__module.model.8.m.0.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="143" name="__module.model.8.m.0.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="2858344" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="144" name="__module.model.8.m.0.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="373_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="145" name="__module.model.10.conv.act/aten::silu_/Swish_24" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="373,input.95">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="146" name="self.model.8.m.0.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="2858600" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.m.0.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="147" name="__module.model.8.m.0.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="148" name="__module.model.8.m.0.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="3006056" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="149" name="__module.model.8.m.0.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="382_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="150" name="__module.model.10.conv.act/aten::silu_/Swish_25" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="382,input.99">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="151" name="__module.model.8.m.0.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="384,input.101">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="152" name="self.model.8.m.0.m.1.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="3006312" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.m.1.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="153" name="__module.model.8.m.0.m.1.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="154" name="__module.model.8.m.0.m.1.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="3153768" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="155" name="__module.model.8.m.0.m.1.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="394_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="156" name="__module.model.10.conv.act/aten::silu_/Swish_26" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="394,input.103">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="157" name="self.model.8.m.0.m.1.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="3154024" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.m.1.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="158" name="__module.model.8.m.0.m.1.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="159" name="__module.model.8.m.0.m.1.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="3301480" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="160" name="__module.model.8.m.0.m.1.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="403_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="161" name="__module.model.10.conv.act/aten::silu_/Swish_27" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="403,input.107">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="162" name="__module.model.8.m.0.m.1/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="405">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="163" name="self.model.8.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 128, 1, 1" offset="3301736" size="32768" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="164" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="165" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="3334504" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="166" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="413_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="167" name="__module.model.10.conv.act/aten::silu_/Swish_28" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="413,input.109">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="168" name="__module.model.8.m.0/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="416,input.111">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="169" name="self.model.8.m.0.cv3.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 1, 1" offset="3334760" size="65536" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.cv3.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="170" name="__module.model.8.m.0.cv3.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="171" name="__module.model.8.m.0.cv3.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="3400296" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="172" name="__module.model.8.m.0.cv3.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="424_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="173" name="__module.model.10.conv.act/aten::silu_/Swish_29" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="424,input.113">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="174" name="__module.model.8/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="427,input.115">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="175" name="self.model.8.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 384, 1, 1" offset="3400808" size="393216" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.cv2.conv.weight">
+					<dim>256</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="176" name="__module.model.8.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="177" name="__module.model.8.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="3794024" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="178" name="__module.model.8.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="435_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="179" name="__module.model.10.conv.act/aten::silu_/Swish_30" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="435,input.117">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="180" name="self.model.9.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 256, 1, 1" offset="3795048" size="262144" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.cv1.conv.weight">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="181" name="__module.model.9.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="182" name="__module.model.9.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="4057192" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="183" name="__module.model.9.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="460_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="184" name="__module.model.10.conv.act/aten::silu_/Swish_31" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="460,input.121">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="185" name="449" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="24576" size="8" />
+			<output>
+				<port id="0" precision="I64" names="449" />
+			</output>
+		</layer>
+		<layer id="186" name="Constant_1662" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="2677848" size="16" />
+			<output>
+				<port id="0" precision="I64" names="462">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="187" name="__module.model.9/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="464,a">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="4" precision="FP32" names="465,x.3">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="188" name="self.model.9.m.0.attn.qkv.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 128, 1, 1" offset="4058216" size="131072" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.m.0.attn.qkv.conv.weight">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="189" name="__module.model.9.m.0.attn.qkv.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="190" name="__module.model.9.m.0.attn.qkv.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="4189288" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="191" name="__module.model.9.m.0.attn.qkv.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="487,qkv.1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="192" name="Constant_6493" type="Const" version="opset1">
+			<data element_type="i64" shape="4" offset="4190312" size="32" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>4</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="193" name="__module.model.9.m.0.attn/aten::view/Reshape" type="Reshape" version="opset1">
+			<data special_zero="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>4</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="489">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>128</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="194" name="437" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="4190344" size="8" />
+			<output>
+				<port id="0" precision="I64" names="437" />
+			</output>
+		</layer>
+		<layer id="195" name="Constant_1884" type="Const" version="opset1">
+			<data element_type="i64" shape="3" offset="4190352" size="24" />
+			<output>
+				<port id="0" precision="I64" names="490">
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="196" name="__module.model.9.m.0.attn/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>128</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="492,q">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+				<port id="4" precision="FP32" names="493,k">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+				<port id="5" precision="FP32" names="494,v">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>64</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="197" name="Constant_6395" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 1, 1, 1" offset="4190376" size="4" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="198" name="Multiply_6371" type="Multiply" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="199" name="__module.model.9.m.0.attn/aten::mul/Multiply_1" type="MatMul" version="opset1">
+			<data transpose_a="true" transpose_b="false" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>32</dim>
+					<dim>400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="497,attn.1">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>400</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="200" name="__module.model.9.m.0.attn/aten::softmax/Softmax" type="SoftMax" version="opset8">
+			<data axis="-1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>400</dim>
+					<dim>400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="498,attn.3">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>400</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="201" name="__module.model.9.m.0.attn/aten::matmul/MatMul_1" type="MatMul" version="opset1">
+			<data transpose_a="false" transpose_b="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>64</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>400</dim>
+					<dim>400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="500">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>64</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="202" name="__module.model.9.m.0.attn/aten::size/ShapeOf" type="ShapeOf" version="opset3">
+			<data output_type="i64" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="I64">
+					<dim>4</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="203" name="__module.model.9.m.0.attn/aten::view/Reshape_1" type="Reshape" version="opset1">
+			<data special_zero="false" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>64</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>4</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="502">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="204" name="__module.model.9.m.0.attn/aten::reshape/Reshape" type="Reshape" version="opset1">
+			<data special_zero="false" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>64</dim>
+					<dim>400</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>4</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="504,input.123">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="205" name="__module.model.9.m.0.attn.pe.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 1, 1, 3, 3" offset="4190380" size="4608" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="206" name="__module.model.9.m.0.attn.pe.conv/aten::_convolution/GroupConvolution" type="GroupConvolution" version="opset1">
+			<data strides="1, 1" pads_begin="1, 1" pads_end="1, 1" dilations="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="207" name="__module.model.9.m.0.attn.pe.conv/aten::_convolution/Reshape_1" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="4194988" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="208" name="__module.model.9.m.0.attn.pe.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="512">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="209" name="__module.model.9.m.0.attn/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="513,input.125">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="210" name="self.model.9.m.0.attn.proj.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 1, 1" offset="4195500" size="65536" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.m.0.attn.proj.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="211" name="__module.model.9.m.0.attn.proj.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="212" name="__module.model.9.m.0.attn.proj.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="4261036" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="213" name="__module.model.9.m.0.attn.proj.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="521">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="214" name="__module.model.9.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="522,input.127">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="215" name="self.model.9.m.0.ffn.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 128, 1, 1" offset="4261548" size="131072" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.m.0.ffn.0.conv.weight">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="216" name="__module.model.9.m.0.ffn.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="217" name="__module.model.9.m.0.ffn.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="4392620" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="218" name="__module.model.9.m.0.ffn.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="532_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="219" name="__module.model.10.conv.act/aten::silu_/Swish_32" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="532,input.129">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="220" name="self.model.9.m.0.ffn.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 256, 1, 1" offset="4393644" size="131072" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.m.0.ffn.1.conv.weight">
+					<dim>128</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="221" name="__module.model.9.m.0.ffn.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="222" name="__module.model.9.m.0.ffn.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="4524716" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="223" name="__module.model.9.m.0.ffn.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="542">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="224" name="__module.model.9.m.0/aten::add/Add_1" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="543,b">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="225" name="__module.model.9/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="545,input.133">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="226" name="self.model.9.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 256, 1, 1" offset="4525228" size="262144" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.cv2.conv.weight">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="227" name="__module.model.9.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="228" name="__module.model.9.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="4787372" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="229" name="__module.model.9.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="553_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="230" name="__module.model.10.conv.act/aten::silu_/Swish_33" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="553,input.135">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="231" name="self.model.10.conv.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="1280, 256, 1, 1" offset="4788396" size="1310720" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.10.conv.conv.weight">
+					<dim>1280</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="232" name="__module.model.10.conv.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1280</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="233" name="__module.model.10.conv.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 1280, 1, 1" offset="6099116" size="5120" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="234" name="__module.model.10.conv.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="574_1">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="235" name="__module.model.10.conv.act/aten::silu_/Swish_34" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="574,input.139">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="236" name="Constant_6367" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="6104236" size="16" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="237" name="__module.model.10.pool/aten::adaptive_avg_pool2d/AdaptiveAvgPool" type="ReduceMean" version="opset1">
+			<data keep_dims="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="577">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="238" name="Concat_4406" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="6104252" size="16" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="239" name="__module.model.10/aten::flatten/Reshape" type="Reshape" version="opset1">
+			<data special_zero="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="578,input.143">
+					<dim>1</dim>
+					<dim>1280</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="240" name="self.model.10.linear.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="2, 1280" offset="6104268" size="10240" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.10.linear.weight">
+					<dim>2</dim>
+					<dim>1280</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="241" name="__module.model.10.linear/aten::linear/MatMul" type="MatMul" version="opset1">
+			<data transpose_a="false" transpose_b="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1280</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>2</dim>
+					<dim>1280</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="242" name="Constant_6396" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 2" offset="6114508" size="8" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="243" name="__module.model.10.linear/aten::linear/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="582,x_1">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="244" name="__module.model.10/aten::softmax/Softmax" type="SoftMax" version="opset8">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="245" name="Result_2508" type="Result" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+				</port>
+			</input>
+		</layer>
+	</layers>
+	<edges>
+		<edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
+		<edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
+		<edge from-layer="2" from-port="2" to-layer="4" to-port="0" />
+		<edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
+		<edge from-layer="4" from-port="2" to-layer="5" to-port="0" />
+		<edge from-layer="5" from-port="1" to-layer="7" to-port="0" />
+		<edge from-layer="6" from-port="0" to-layer="7" to-port="1" />
+		<edge from-layer="7" from-port="2" to-layer="9" to-port="0" />
+		<edge from-layer="8" from-port="0" to-layer="9" to-port="1" />
+		<edge from-layer="9" from-port="2" to-layer="10" to-port="0" />
+		<edge from-layer="10" from-port="1" to-layer="12" to-port="0" />
+		<edge from-layer="11" from-port="0" to-layer="12" to-port="1" />
+		<edge from-layer="12" from-port="2" to-layer="14" to-port="0" />
+		<edge from-layer="13" from-port="0" to-layer="14" to-port="1" />
+		<edge from-layer="14" from-port="2" to-layer="15" to-port="0" />
+		<edge from-layer="15" from-port="1" to-layer="18" to-port="0" />
+		<edge from-layer="16" from-port="0" to-layer="18" to-port="1" />
+		<edge from-layer="17" from-port="0" to-layer="18" to-port="2" />
+		<edge from-layer="18" from-port="4" to-layer="20" to-port="0" />
+		<edge from-layer="18" from-port="4" to-layer="29" to-port="0" />
+		<edge from-layer="18" from-port="3" to-layer="30" to-port="0" />
+		<edge from-layer="18" from-port="4" to-layer="30" to-port="1" />
+		<edge from-layer="19" from-port="0" to-layer="20" to-port="1" />
+		<edge from-layer="20" from-port="2" to-layer="22" to-port="0" />
+		<edge from-layer="21" from-port="0" to-layer="22" to-port="1" />
+		<edge from-layer="22" from-port="2" to-layer="23" to-port="0" />
+		<edge from-layer="23" from-port="1" to-layer="25" to-port="0" />
+		<edge from-layer="24" from-port="0" to-layer="25" to-port="1" />
+		<edge from-layer="25" from-port="2" to-layer="27" to-port="0" />
+		<edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
+		<edge from-layer="27" from-port="2" to-layer="28" to-port="0" />
+		<edge from-layer="28" from-port="1" to-layer="29" to-port="1" />
+		<edge from-layer="29" from-port="2" to-layer="30" to-port="2" />
+		<edge from-layer="30" from-port="3" to-layer="32" to-port="0" />
+		<edge from-layer="31" from-port="0" to-layer="32" to-port="1" />
+		<edge from-layer="32" from-port="2" to-layer="34" to-port="0" />
+		<edge from-layer="33" from-port="0" to-layer="34" to-port="1" />
+		<edge from-layer="34" from-port="2" to-layer="35" to-port="0" />
+		<edge from-layer="35" from-port="1" to-layer="37" to-port="0" />
+		<edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
+		<edge from-layer="37" from-port="2" to-layer="39" to-port="0" />
+		<edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
+		<edge from-layer="39" from-port="2" to-layer="40" to-port="0" />
+		<edge from-layer="40" from-port="1" to-layer="42" to-port="0" />
+		<edge from-layer="41" from-port="0" to-layer="42" to-port="1" />
+		<edge from-layer="42" from-port="2" to-layer="44" to-port="0" />
+		<edge from-layer="43" from-port="0" to-layer="44" to-port="1" />
+		<edge from-layer="44" from-port="2" to-layer="45" to-port="0" />
+		<edge from-layer="45" from-port="1" to-layer="48" to-port="0" />
+		<edge from-layer="46" from-port="0" to-layer="48" to-port="1" />
+		<edge from-layer="47" from-port="0" to-layer="48" to-port="2" />
+		<edge from-layer="48" from-port="4" to-layer="50" to-port="0" />
+		<edge from-layer="48" from-port="4" to-layer="59" to-port="0" />
+		<edge from-layer="48" from-port="3" to-layer="60" to-port="0" />
+		<edge from-layer="48" from-port="4" to-layer="60" to-port="1" />
+		<edge from-layer="49" from-port="0" to-layer="50" to-port="1" />
+		<edge from-layer="50" from-port="2" to-layer="52" to-port="0" />
+		<edge from-layer="51" from-port="0" to-layer="52" to-port="1" />
+		<edge from-layer="52" from-port="2" to-layer="53" to-port="0" />
+		<edge from-layer="53" from-port="1" to-layer="55" to-port="0" />
+		<edge from-layer="54" from-port="0" to-layer="55" to-port="1" />
+		<edge from-layer="55" from-port="2" to-layer="57" to-port="0" />
+		<edge from-layer="56" from-port="0" to-layer="57" to-port="1" />
+		<edge from-layer="57" from-port="2" to-layer="58" to-port="0" />
+		<edge from-layer="58" from-port="1" to-layer="59" to-port="1" />
+		<edge from-layer="59" from-port="2" to-layer="60" to-port="2" />
+		<edge from-layer="60" from-port="3" to-layer="62" to-port="0" />
+		<edge from-layer="61" from-port="0" to-layer="62" to-port="1" />
+		<edge from-layer="62" from-port="2" to-layer="64" to-port="0" />
+		<edge from-layer="63" from-port="0" to-layer="64" to-port="1" />
+		<edge from-layer="64" from-port="2" to-layer="65" to-port="0" />
+		<edge from-layer="65" from-port="1" to-layer="67" to-port="0" />
+		<edge from-layer="66" from-port="0" to-layer="67" to-port="1" />
+		<edge from-layer="67" from-port="2" to-layer="69" to-port="0" />
+		<edge from-layer="68" from-port="0" to-layer="69" to-port="1" />
+		<edge from-layer="69" from-port="2" to-layer="70" to-port="0" />
+		<edge from-layer="70" from-port="1" to-layer="72" to-port="0" />
+		<edge from-layer="71" from-port="0" to-layer="72" to-port="1" />
+		<edge from-layer="72" from-port="2" to-layer="74" to-port="0" />
+		<edge from-layer="73" from-port="0" to-layer="74" to-port="1" />
+		<edge from-layer="74" from-port="2" to-layer="75" to-port="0" />
+		<edge from-layer="75" from-port="1" to-layer="78" to-port="0" />
+		<edge from-layer="76" from-port="0" to-layer="78" to-port="1" />
+		<edge from-layer="77" from-port="0" to-layer="78" to-port="2" />
+		<edge from-layer="78" from-port="4" to-layer="80" to-port="0" />
+		<edge from-layer="78" from-port="3" to-layer="117" to-port="0" />
+		<edge from-layer="78" from-port="4" to-layer="117" to-port="1" />
+		<edge from-layer="78" from-port="4" to-layer="107" to-port="0" />
+		<edge from-layer="79" from-port="0" to-layer="80" to-port="1" />
+		<edge from-layer="80" from-port="2" to-layer="82" to-port="0" />
+		<edge from-layer="81" from-port="0" to-layer="82" to-port="1" />
+		<edge from-layer="82" from-port="2" to-layer="83" to-port="0" />
+		<edge from-layer="83" from-port="1" to-layer="85" to-port="0" />
+		<edge from-layer="83" from-port="1" to-layer="94" to-port="0" />
+		<edge from-layer="84" from-port="0" to-layer="85" to-port="1" />
+		<edge from-layer="85" from-port="2" to-layer="87" to-port="0" />
+		<edge from-layer="86" from-port="0" to-layer="87" to-port="1" />
+		<edge from-layer="87" from-port="2" to-layer="88" to-port="0" />
+		<edge from-layer="88" from-port="1" to-layer="90" to-port="0" />
+		<edge from-layer="89" from-port="0" to-layer="90" to-port="1" />
+		<edge from-layer="90" from-port="2" to-layer="92" to-port="0" />
+		<edge from-layer="91" from-port="0" to-layer="92" to-port="1" />
+		<edge from-layer="92" from-port="2" to-layer="93" to-port="0" />
+		<edge from-layer="93" from-port="1" to-layer="94" to-port="1" />
+		<edge from-layer="94" from-port="2" to-layer="105" to-port="0" />
+		<edge from-layer="94" from-port="2" to-layer="96" to-port="0" />
+		<edge from-layer="95" from-port="0" to-layer="96" to-port="1" />
+		<edge from-layer="96" from-port="2" to-layer="98" to-port="0" />
+		<edge from-layer="97" from-port="0" to-layer="98" to-port="1" />
+		<edge from-layer="98" from-port="2" to-layer="99" to-port="0" />
+		<edge from-layer="99" from-port="1" to-layer="101" to-port="0" />
+		<edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
+		<edge from-layer="101" from-port="2" to-layer="103" to-port="0" />
+		<edge from-layer="102" from-port="0" to-layer="103" to-port="1" />
+		<edge from-layer="103" from-port="2" to-layer="104" to-port="0" />
+		<edge from-layer="104" from-port="1" to-layer="105" to-port="1" />
+		<edge from-layer="105" from-port="2" to-layer="111" to-port="0" />
+		<edge from-layer="106" from-port="0" to-layer="107" to-port="1" />
+		<edge from-layer="107" from-port="2" to-layer="109" to-port="0" />
+		<edge from-layer="108" from-port="0" to-layer="109" to-port="1" />
+		<edge from-layer="109" from-port="2" to-layer="110" to-port="0" />
+		<edge from-layer="110" from-port="1" to-layer="111" to-port="1" />
+		<edge from-layer="111" from-port="2" to-layer="113" to-port="0" />
+		<edge from-layer="112" from-port="0" to-layer="113" to-port="1" />
+		<edge from-layer="113" from-port="2" to-layer="115" to-port="0" />
+		<edge from-layer="114" from-port="0" to-layer="115" to-port="1" />
+		<edge from-layer="115" from-port="2" to-layer="116" to-port="0" />
+		<edge from-layer="116" from-port="1" to-layer="117" to-port="2" />
+		<edge from-layer="117" from-port="3" to-layer="119" to-port="0" />
+		<edge from-layer="118" from-port="0" to-layer="119" to-port="1" />
+		<edge from-layer="119" from-port="2" to-layer="121" to-port="0" />
+		<edge from-layer="120" from-port="0" to-layer="121" to-port="1" />
+		<edge from-layer="121" from-port="2" to-layer="122" to-port="0" />
+		<edge from-layer="122" from-port="1" to-layer="124" to-port="0" />
+		<edge from-layer="123" from-port="0" to-layer="124" to-port="1" />
+		<edge from-layer="124" from-port="2" to-layer="126" to-port="0" />
+		<edge from-layer="125" from-port="0" to-layer="126" to-port="1" />
+		<edge from-layer="126" from-port="2" to-layer="127" to-port="0" />
+		<edge from-layer="127" from-port="1" to-layer="129" to-port="0" />
+		<edge from-layer="128" from-port="0" to-layer="129" to-port="1" />
+		<edge from-layer="129" from-port="2" to-layer="131" to-port="0" />
+		<edge from-layer="130" from-port="0" to-layer="131" to-port="1" />
+		<edge from-layer="131" from-port="2" to-layer="132" to-port="0" />
+		<edge from-layer="132" from-port="1" to-layer="135" to-port="0" />
+		<edge from-layer="133" from-port="0" to-layer="135" to-port="1" />
+		<edge from-layer="134" from-port="0" to-layer="135" to-port="2" />
+		<edge from-layer="135" from-port="4" to-layer="137" to-port="0" />
+		<edge from-layer="135" from-port="4" to-layer="174" to-port="1" />
+		<edge from-layer="135" from-port="4" to-layer="164" to-port="0" />
+		<edge from-layer="135" from-port="3" to-layer="174" to-port="0" />
+		<edge from-layer="136" from-port="0" to-layer="137" to-port="1" />
+		<edge from-layer="137" from-port="2" to-layer="139" to-port="0" />
+		<edge from-layer="138" from-port="0" to-layer="139" to-port="1" />
+		<edge from-layer="139" from-port="2" to-layer="140" to-port="0" />
+		<edge from-layer="140" from-port="1" to-layer="142" to-port="0" />
+		<edge from-layer="140" from-port="1" to-layer="151" to-port="0" />
+		<edge from-layer="141" from-port="0" to-layer="142" to-port="1" />
+		<edge from-layer="142" from-port="2" to-layer="144" to-port="0" />
+		<edge from-layer="143" from-port="0" to-layer="144" to-port="1" />
+		<edge from-layer="144" from-port="2" to-layer="145" to-port="0" />
+		<edge from-layer="145" from-port="1" to-layer="147" to-port="0" />
+		<edge from-layer="146" from-port="0" to-layer="147" to-port="1" />
+		<edge from-layer="147" from-port="2" to-layer="149" to-port="0" />
+		<edge from-layer="148" from-port="0" to-layer="149" to-port="1" />
+		<edge from-layer="149" from-port="2" to-layer="150" to-port="0" />
+		<edge from-layer="150" from-port="1" to-layer="151" to-port="1" />
+		<edge from-layer="151" from-port="2" to-layer="153" to-port="0" />
+		<edge from-layer="151" from-port="2" to-layer="162" to-port="0" />
+		<edge from-layer="152" from-port="0" to-layer="153" to-port="1" />
+		<edge from-layer="153" from-port="2" to-layer="155" to-port="0" />
+		<edge from-layer="154" from-port="0" to-layer="155" to-port="1" />
+		<edge from-layer="155" from-port="2" to-layer="156" to-port="0" />
+		<edge from-layer="156" from-port="1" to-layer="158" to-port="0" />
+		<edge from-layer="157" from-port="0" to-layer="158" to-port="1" />
+		<edge from-layer="158" from-port="2" to-layer="160" to-port="0" />
+		<edge from-layer="159" from-port="0" to-layer="160" to-port="1" />
+		<edge from-layer="160" from-port="2" to-layer="161" to-port="0" />
+		<edge from-layer="161" from-port="1" to-layer="162" to-port="1" />
+		<edge from-layer="162" from-port="2" to-layer="168" to-port="0" />
+		<edge from-layer="163" from-port="0" to-layer="164" to-port="1" />
+		<edge from-layer="164" from-port="2" to-layer="166" to-port="0" />
+		<edge from-layer="165" from-port="0" to-layer="166" to-port="1" />
+		<edge from-layer="166" from-port="2" to-layer="167" to-port="0" />
+		<edge from-layer="167" from-port="1" to-layer="168" to-port="1" />
+		<edge from-layer="168" from-port="2" to-layer="170" to-port="0" />
+		<edge from-layer="169" from-port="0" to-layer="170" to-port="1" />
+		<edge from-layer="170" from-port="2" to-layer="172" to-port="0" />
+		<edge from-layer="171" from-port="0" to-layer="172" to-port="1" />
+		<edge from-layer="172" from-port="2" to-layer="173" to-port="0" />
+		<edge from-layer="173" from-port="1" to-layer="174" to-port="2" />
+		<edge from-layer="174" from-port="3" to-layer="176" to-port="0" />
+		<edge from-layer="175" from-port="0" to-layer="176" to-port="1" />
+		<edge from-layer="176" from-port="2" to-layer="178" to-port="0" />
+		<edge from-layer="177" from-port="0" to-layer="178" to-port="1" />
+		<edge from-layer="178" from-port="2" to-layer="179" to-port="0" />
+		<edge from-layer="179" from-port="1" to-layer="181" to-port="0" />
+		<edge from-layer="180" from-port="0" to-layer="181" to-port="1" />
+		<edge from-layer="181" from-port="2" to-layer="183" to-port="0" />
+		<edge from-layer="182" from-port="0" to-layer="183" to-port="1" />
+		<edge from-layer="183" from-port="2" to-layer="184" to-port="0" />
+		<edge from-layer="184" from-port="1" to-layer="187" to-port="0" />
+		<edge from-layer="185" from-port="0" to-layer="187" to-port="1" />
+		<edge from-layer="186" from-port="0" to-layer="187" to-port="2" />
+		<edge from-layer="187" from-port="4" to-layer="189" to-port="0" />
+		<edge from-layer="187" from-port="4" to-layer="214" to-port="0" />
+		<edge from-layer="187" from-port="4" to-layer="202" to-port="0" />
+		<edge from-layer="187" from-port="3" to-layer="225" to-port="0" />
+		<edge from-layer="188" from-port="0" to-layer="189" to-port="1" />
+		<edge from-layer="189" from-port="2" to-layer="191" to-port="0" />
+		<edge from-layer="190" from-port="0" to-layer="191" to-port="1" />
+		<edge from-layer="191" from-port="2" to-layer="193" to-port="0" />
+		<edge from-layer="192" from-port="0" to-layer="193" to-port="1" />
+		<edge from-layer="193" from-port="2" to-layer="196" to-port="0" />
+		<edge from-layer="194" from-port="0" to-layer="196" to-port="1" />
+		<edge from-layer="195" from-port="0" to-layer="196" to-port="2" />
+		<edge from-layer="196" from-port="4" to-layer="198" to-port="0" />
+		<edge from-layer="196" from-port="3" to-layer="199" to-port="0" />
+		<edge from-layer="196" from-port="5" to-layer="201" to-port="0" />
+		<edge from-layer="196" from-port="5" to-layer="204" to-port="0" />
+		<edge from-layer="197" from-port="0" to-layer="198" to-port="1" />
+		<edge from-layer="198" from-port="2" to-layer="199" to-port="1" />
+		<edge from-layer="199" from-port="2" to-layer="200" to-port="0" />
+		<edge from-layer="200" from-port="1" to-layer="201" to-port="1" />
+		<edge from-layer="201" from-port="2" to-layer="203" to-port="0" />
+		<edge from-layer="202" from-port="1" to-layer="204" to-port="1" />
+		<edge from-layer="202" from-port="1" to-layer="203" to-port="1" />
+		<edge from-layer="203" from-port="2" to-layer="209" to-port="0" />
+		<edge from-layer="204" from-port="2" to-layer="206" to-port="0" />
+		<edge from-layer="205" from-port="0" to-layer="206" to-port="1" />
+		<edge from-layer="206" from-port="2" to-layer="208" to-port="0" />
+		<edge from-layer="207" from-port="0" to-layer="208" to-port="1" />
+		<edge from-layer="208" from-port="2" to-layer="209" to-port="1" />
+		<edge from-layer="209" from-port="2" to-layer="211" to-port="0" />
+		<edge from-layer="210" from-port="0" to-layer="211" to-port="1" />
+		<edge from-layer="211" from-port="2" to-layer="213" to-port="0" />
+		<edge from-layer="212" from-port="0" to-layer="213" to-port="1" />
+		<edge from-layer="213" from-port="2" to-layer="214" to-port="1" />
+		<edge from-layer="214" from-port="2" to-layer="224" to-port="0" />
+		<edge from-layer="214" from-port="2" to-layer="216" to-port="0" />
+		<edge from-layer="215" from-port="0" to-layer="216" to-port="1" />
+		<edge from-layer="216" from-port="2" to-layer="218" to-port="0" />
+		<edge from-layer="217" from-port="0" to-layer="218" to-port="1" />
+		<edge from-layer="218" from-port="2" to-layer="219" to-port="0" />
+		<edge from-layer="219" from-port="1" to-layer="221" to-port="0" />
+		<edge from-layer="220" from-port="0" to-layer="221" to-port="1" />
+		<edge from-layer="221" from-port="2" to-layer="223" to-port="0" />
+		<edge from-layer="222" from-port="0" to-layer="223" to-port="1" />
+		<edge from-layer="223" from-port="2" to-layer="224" to-port="1" />
+		<edge from-layer="224" from-port="2" to-layer="225" to-port="1" />
+		<edge from-layer="225" from-port="2" to-layer="227" to-port="0" />
+		<edge from-layer="226" from-port="0" to-layer="227" to-port="1" />
+		<edge from-layer="227" from-port="2" to-layer="229" to-port="0" />
+		<edge from-layer="228" from-port="0" to-layer="229" to-port="1" />
+		<edge from-layer="229" from-port="2" to-layer="230" to-port="0" />
+		<edge from-layer="230" from-port="1" to-layer="232" to-port="0" />
+		<edge from-layer="231" from-port="0" to-layer="232" to-port="1" />
+		<edge from-layer="232" from-port="2" to-layer="234" to-port="0" />
+		<edge from-layer="233" from-port="0" to-layer="234" to-port="1" />
+		<edge from-layer="234" from-port="2" to-layer="235" to-port="0" />
+		<edge from-layer="235" from-port="1" to-layer="237" to-port="0" />
+		<edge from-layer="236" from-port="0" to-layer="237" to-port="1" />
+		<edge from-layer="237" from-port="2" to-layer="239" to-port="0" />
+		<edge from-layer="238" from-port="0" to-layer="239" to-port="1" />
+		<edge from-layer="239" from-port="2" to-layer="241" to-port="0" />
+		<edge from-layer="240" from-port="0" to-layer="241" to-port="1" />
+		<edge from-layer="241" from-port="2" to-layer="243" to-port="0" />
+		<edge from-layer="242" from-port="0" to-layer="243" to-port="1" />
+		<edge from-layer="243" from-port="2" to-layer="244" to-port="0" />
+		<edge from-layer="244" from-port="1" to-layer="245" to-port="0" />
+	</edges>
+	<rt_info>
+		<Runtime_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
+		<conversion_parameters>
+			<framework value="pytorch" />
+			<is_python_object value="True" />
+		</conversion_parameters>
+		<model_info>
+			<iou_threshold value="0.7" />
+			<labels value="meishangliao shangliao" />
+			<model_type value="YOLO" />
+			<pad_value value="114" />
+			<reverse_input_channels value="YES" />
+			<scale_values value="255" />
+		</model_info>
+	</rt_info>
+</net>
diff --git a/openvino/model/load_id/metadata.yaml b/openvino/model/load_id/metadata.yaml
new file mode 100644
index 0000000..722ce8c
--- /dev/null
+++ b/openvino/model/load_id/metadata.yaml
@@ -0,0 +1,15 @@
+description: Ultralytics YOLO11n-cls model trained on D:\椤圭洰璧勬枡\鏅鸿兘骞茬嚗璁惧\涓婃枡妫�娴嬫ā鍨媆tongjitang2
+author: Ultralytics
+date: '2025-04-21T17:26:08.188697'
+version: 8.3.53
+license: AGPL-3.0 License (https://ultralytics.com/license)
+docs: https://docs.ultralytics.com
+stride: 1
+task: classify
+batch: 1
+imgsz:
+- 640
+- 640
+names:
+  0: meishangliao
+  1: shangliao
diff --git a/openvino/model/safe_det/best.bin b/openvino/model/safe_det/best.bin
new file mode 100644
index 0000000..bc2845a
--- /dev/null
+++ b/openvino/model/safe_det/best.bin
Binary files differ
diff --git a/openvino/model/safe_det/best.xml b/openvino/model/safe_det/best.xml
new file mode 100644
index 0000000..495cd61
--- /dev/null
+++ b/openvino/model/safe_det/best.xml
@@ -0,0 +1,7975 @@
+<?xml version="1.0"?>
+<net name="Model0" version="11">
+	<layers>
+		<layer id="0" name="x" type="Parameter" version="opset1">
+			<data shape="1,3,640,640" element_type="f32" />
+			<output>
+				<port id="0" precision="FP32" names="x">
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>640</dim>
+					<dim>640</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="1" name="__module.model.22/aten::unsqueeze/Unsqueeze" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 2, 8400" offset="0" size="67200" />
+			<output>
+				<port id="0" precision="FP32" names="943,anchor_points">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="2" name="self.model.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="16, 3, 3, 3" offset="67200" size="1728" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.0.conv.weight">
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="3" name="__module.model.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>3</dim>
+					<dim>640</dim>
+					<dim>640</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="4" name="__module.model.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 16, 1, 1" offset="68928" size="64" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="5" name="__module.model.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="89_1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="6" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="89,input.1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="7" name="self.model.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 16, 3, 3" offset="68992" size="18432" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.1.conv.weight">
+					<dim>32</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="8" name="__module.model.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>320</dim>
+					<dim>320</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="9" name="__module.model.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="87424" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="10" name="__module.model.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="103_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="11" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_1" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="103,input.5">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="12" name="self.model.2.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 1, 1" offset="87552" size="4096" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="13" name="__module.model.2.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="14" name="__module.model.2.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="91648" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="15" name="__module.model.2.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="121_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="16" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_2" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="121,input.9">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="17" name="109" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="91776" size="8" />
+			<output>
+				<port id="0" precision="I64" names="109" />
+			</output>
+		</layer>
+		<layer id="18" name="Constant_213" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="91784" size="16" />
+			<output>
+				<port id="0" precision="I64" names="123">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="19" name="__module.model.2/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="125">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="4" precision="FP32" names="126,input.11">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="20" name="self.model.2.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="16, 16, 3, 3" offset="91800" size="9216" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.m.0.cv1.conv.weight">
+					<dim>16</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="21" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>16</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="22" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 16, 1, 1" offset="101016" size="64" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="23" name="__module.model.2.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="136_1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="24" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_3" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="136,input.13">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="25" name="self.model.2.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="16, 16, 3, 3" offset="101080" size="9216" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.m.0.cv2.conv.weight">
+					<dim>16</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="26" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>16</dim>
+					<dim>16</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="27" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 16, 1, 1" offset="110296" size="64" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="28" name="__module.model.2.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="145_1">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="29" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_4" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="145,input.17">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="30" name="__module.model.2.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="147">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="31" name="__module.model.2/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="149,input.19">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="32" name="self.model.2.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 48, 1, 1" offset="110360" size="6144" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.2.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="33" name="__module.model.2.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>48</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>48</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="34" name="__module.model.2.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="116504" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="35" name="__module.model.2.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="157_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="36" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_5" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="157,input.21">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="37" name="self.model.3.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 32, 3, 3" offset="116632" size="73728" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.3.conv.weight">
+					<dim>64</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="38" name="__module.model.3.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>160</dim>
+					<dim>160</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="39" name="__module.model.3.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="190360" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="40" name="__module.model.3.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="171_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="41" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_6" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="171,input.25">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="42" name="self.model.4.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 1, 1" offset="190616" size="16384" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="43" name="__module.model.4.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="44" name="__module.model.4.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="207000" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="45" name="__module.model.4.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="191_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="46" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_7" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="191,input.29">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="47" name="177" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="91776" size="8" />
+			<output>
+				<port id="0" precision="I64" names="177" />
+			</output>
+		</layer>
+		<layer id="48" name="Constant_473" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="207256" size="16" />
+			<output>
+				<port id="0" precision="I64" names="193">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="49" name="__module.model.4/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="195">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="4" precision="FP32" names="196,input.31">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="50" name="self.model.4.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="207272" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.0.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="51" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="52" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="244136" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="53" name="__module.model.4.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="206_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="54" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_8" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="206,input.33">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="55" name="self.model.4.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="244264" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.0.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="56" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="57" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="281128" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="58" name="__module.model.4.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="215_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="59" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_9" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="215,input.37">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="60" name="__module.model.4.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="217,input.39">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="61" name="self.model.4.m.1.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="281256" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.1.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="62" name="__module.model.4.m.1.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="63" name="__module.model.4.m.1.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="318120" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="64" name="__module.model.4.m.1.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="227_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="65" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_10" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="227,input.41">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="66" name="self.model.4.m.1.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="318248" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.m.1.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="67" name="__module.model.4.m.1.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="68" name="__module.model.4.m.1.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="355112" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="69" name="__module.model.4.m.1.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="236_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="70" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_11" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="236,input.45">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="71" name="__module.model.4.m.1/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="238">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="72" name="__module.model.4/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="3" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="4" precision="FP32" names="240,input.47">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="73" name="self.model.4.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 128, 1, 1" offset="355240" size="32768" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.4.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="74" name="__module.model.4.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="75" name="__module.model.4.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="388008" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="76" name="__module.model.4.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="248_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="77" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_12" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="248,input.49">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="78" name="self.model.5.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 64, 3, 3" offset="388264" size="294912" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.5.conv.weight">
+					<dim>128</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="79" name="__module.model.5.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="80" name="__module.model.5.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="683176" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="81" name="__module.model.5.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="262_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="82" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_13" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="262,input.53">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="83" name="self.model.6.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 1, 1" offset="683688" size="65536" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.cv1.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="84" name="__module.model.6.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="85" name="__module.model.6.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="749224" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="86" name="__module.model.6.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="282_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="87" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_14" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="282,input.57">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="88" name="268" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="91776" size="8" />
+			<output>
+				<port id="0" precision="I64" names="268" />
+			</output>
+		</layer>
+		<layer id="89" name="Constant_831" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="749736" size="16" />
+			<output>
+				<port id="0" precision="I64" names="284">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="90" name="__module.model.6/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="286">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="4" precision="FP32" names="287,input.59">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="91" name="self.model.6.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="749752" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="92" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="93" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="897208" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="94" name="__module.model.6.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="297_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="95" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_15" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="297,input.61">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="96" name="self.model.6.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="897464" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.0.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="97" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="98" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="1044920" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="99" name="__module.model.6.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="306_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="100" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_16" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="306,input.65">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="101" name="__module.model.6.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="308,input.67">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="102" name="self.model.6.m.1.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="1045176" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.1.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="103" name="__module.model.6.m.1.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="104" name="__module.model.6.m.1.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="1192632" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="105" name="__module.model.6.m.1.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="318_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="106" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_17" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="318,input.69">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="107" name="self.model.6.m.1.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="1192888" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.m.1.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="108" name="__module.model.6.m.1.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="109" name="__module.model.6.m.1.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="1340344" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="110" name="__module.model.6.m.1.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="327_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="111" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_18" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="327,input.73">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="112" name="__module.model.6.m.1/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="329">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="113" name="__module.model.6/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="3" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="4" precision="FP32" names="331,input.75">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="114" name="self.model.6.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 256, 1, 1" offset="1340600" size="131072" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.6.cv2.conv.weight">
+					<dim>128</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="115" name="__module.model.6.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="116" name="__module.model.6.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="1471672" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="117" name="__module.model.6.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="339_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="118" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_19" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="339,input.77">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="119" name="self.model.7.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 128, 3, 3" offset="1472184" size="1179648" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.7.conv.weight">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="120" name="__module.model.7.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="121" name="__module.model.7.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="2651832" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="122" name="__module.model.7.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="353_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="123" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_20" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="353,input.81">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="124" name="self.model.8.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 256, 1, 1" offset="2652856" size="262144" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.cv1.conv.weight">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="125" name="__module.model.8.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="126" name="__module.model.8.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="2915000" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="127" name="__module.model.8.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="371_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="128" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_21" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="371,input.85">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="129" name="359" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="91776" size="8" />
+			<output>
+				<port id="0" precision="I64" names="359" />
+			</output>
+		</layer>
+		<layer id="130" name="Constant_1187" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="2916024" size="16" />
+			<output>
+				<port id="0" precision="I64" names="373">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="131" name="__module.model.8/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="375">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="4" precision="FP32" names="376,input.87">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="132" name="self.model.8.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 3, 3" offset="2916040" size="589824" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.cv1.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="133" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="134" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="3505864" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="135" name="__module.model.8.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="386_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="136" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_22" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="386,input.89">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="137" name="self.model.8.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 3, 3" offset="3506376" size="589824" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.m.0.cv2.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="138" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="139" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="4096200" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="140" name="__module.model.8.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="395_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="141" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_23" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="395,input.93">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="142" name="__module.model.8.m.0/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="397">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="143" name="__module.model.8/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="399,input.95">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="144" name="self.model.8.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 384, 1, 1" offset="4096712" size="393216" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.8.cv2.conv.weight">
+					<dim>256</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="145" name="__module.model.8.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="146" name="__module.model.8.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="4489928" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="147" name="__module.model.8.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="407_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="148" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_24" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="407,input.97">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="149" name="self.model.9.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 256, 1, 1" offset="4490952" size="131072" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.cv1.conv.weight">
+					<dim>128</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="150" name="__module.model.9.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="151" name="__module.model.9.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="4622024" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="152" name="__module.model.9.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="425_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="153" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_25" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="425,input.101">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="154" name="__module.model.9.m/aten::max_pool2d/MaxPool" type="MaxPool" version="opset14">
+			<data strides="1, 1" dilations="1, 1" pads_begin="2, 2" pads_end="2, 2" kernel="5, 5" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="2" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="431,input.105">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="2" precision="I64">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="155" name="__module.model.9.m/aten::max_pool2d/MaxPool_1" type="MaxPool" version="opset14">
+			<data strides="1, 1" dilations="1, 1" pads_begin="2, 2" pads_end="2, 2" kernel="5, 5" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="2" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="436,input.107">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="2" precision="I64">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="156" name="__module.model.9.m/aten::max_pool2d/MaxPool_2" type="MaxPool" version="opset14">
+			<data strides="1, 1" dilations="1, 1" pads_begin="2, 2" pads_end="2, 2" kernel="5, 5" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="2" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="441">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="2" precision="I64">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="157" name="__module.model.9/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="3" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="4" precision="FP32" names="443,input.109">
+					<dim>1</dim>
+					<dim>512</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="158" name="self.model.9.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 512, 1, 1" offset="4622536" size="524288" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.9.cv2.conv.weight">
+					<dim>256</dim>
+					<dim>512</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="159" name="__module.model.9.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>512</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>512</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="160" name="__module.model.9.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="5146824" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="161" name="__module.model.9.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="451_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="162" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_26" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="451,input.111">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="163" name="__module.model.10/aten::upsample_nearest2d/Multiply" type="Const" version="opset1">
+			<data element_type="f32" shape="2" offset="5147848" size="8" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="164" name="Constant_1519" type="Const" version="opset1">
+			<data element_type="i32" shape="2" offset="5147856" size="8" />
+			<output>
+				<port id="0" precision="I32">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="165" name="__module.model.10/aten::upsample_nearest2d/Interpolate" type="Interpolate" version="opset11">
+			<data mode="nearest" shape_calculation_mode="scales" coordinate_transformation_mode="asymmetric" nearest_mode="floor" antialias="false" pads_begin="0, 0, 0, 0" pads_end="0, 0, 0, 0" cube_coeff="-0.75" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>2</dim>
+				</port>
+				<port id="2" precision="I32">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="456">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="166" name="__module.model.11/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="459,input.115">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="167" name="self.model.12.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 384, 1, 1" offset="5147864" size="196608" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.12.cv1.conv.weight">
+					<dim>128</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="168" name="__module.model.12.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="169" name="__module.model.12.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="5344472" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="170" name="__module.model.12.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="476_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="171" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_27" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="476,input.117">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="172" name="464" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="91776" size="8" />
+			<output>
+				<port id="0" precision="I64" names="464" />
+			</output>
+		</layer>
+		<layer id="173" name="Constant_1593" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="749736" size="16" />
+			<output>
+				<port id="0" precision="I64" names="478">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="174" name="__module.model.12/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="480">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="4" precision="FP32" names="481,input.119">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="175" name="self.model.12.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="5344984" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.12.m.0.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="176" name="__module.model.12.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="177" name="__module.model.12.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="5492440" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="178" name="__module.model.12.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="491_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="179" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_28" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="491,input.121">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="180" name="self.model.12.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="5492696" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.12.m.0.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="181" name="__module.model.12.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="182" name="__module.model.12.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="5640152" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="183" name="__module.model.12.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="500_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="184" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_29" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="500,input.125">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="185" name="__module.model.12/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="503,input.127">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="186" name="self.model.12.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 192, 1, 1" offset="5640408" size="98304" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.12.cv2.conv.weight">
+					<dim>128</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="187" name="__module.model.12.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="188" name="__module.model.12.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="5738712" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="189" name="__module.model.12.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="511_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="190" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_30" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="511,input.129">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="191" name="__module.model.13/aten::upsample_nearest2d/Multiply" type="Const" version="opset1">
+			<data element_type="f32" shape="2" offset="5147848" size="8" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="192" name="Constant_1744" type="Const" version="opset1">
+			<data element_type="i32" shape="2" offset="5147856" size="8" />
+			<output>
+				<port id="0" precision="I32">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="193" name="__module.model.13/aten::upsample_nearest2d/Interpolate" type="Interpolate" version="opset11">
+			<data mode="nearest" shape_calculation_mode="scales" coordinate_transformation_mode="asymmetric" nearest_mode="floor" antialias="false" pads_begin="0, 0, 0, 0" pads_end="0, 0, 0, 0" cube_coeff="-0.75" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>2</dim>
+				</port>
+				<port id="2" precision="I32">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="516">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="194" name="__module.model.14/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="519,input.133">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="195" name="self.model.15.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 192, 1, 1" offset="5739224" size="49152" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.15.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="196" name="__module.model.15.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="197" name="__module.model.15.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="5788376" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="198" name="__module.model.15.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="536_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="199" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_31" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="536,input.135">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="200" name="524" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="91776" size="8" />
+			<output>
+				<port id="0" precision="I64" names="524" />
+			</output>
+		</layer>
+		<layer id="201" name="Constant_1818" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="207256" size="16" />
+			<output>
+				<port id="0" precision="I64" names="538">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="202" name="__module.model.15/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="540">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="4" precision="FP32" names="541,input.137">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="203" name="self.model.15.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="5788632" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.15.m.0.cv1.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="204" name="__module.model.15.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="205" name="__module.model.15.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="5825496" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="206" name="__module.model.15.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="551_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="207" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_32" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="551,input.139">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="208" name="self.model.15.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="32, 32, 3, 3" offset="5825624" size="36864" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.15.m.0.cv2.conv.weight">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="209" name="__module.model.15.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>32</dim>
+					<dim>32</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="210" name="__module.model.15.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 32, 1, 1" offset="5862488" size="128" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="211" name="__module.model.15.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="560_1">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="212" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_33" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="560,input.143">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="213" name="__module.model.15/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>32</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="563,input.145">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="214" name="self.model.15.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 96, 1, 1" offset="5862616" size="24576" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.15.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="215" name="__module.model.15.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>96</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>96</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="216" name="__module.model.15.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="5887192" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="217" name="__module.model.15.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="571_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="218" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_34" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="571,input.147">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="219" name="self.model.22.cv2.0.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="5887448" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv2.0.0.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="220" name="__module.model.22.cv2.0.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="221" name="__module.model.22.cv2.0.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="6034904" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="222" name="__module.model.22.cv2.0.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="751_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="223" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_35" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="751,input.189">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="224" name="self.model.22.cv2.0.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="6035160" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv2.0.1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="225" name="__module.model.22.cv2.0.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="226" name="__module.model.22.cv2.0.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="6182616" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="227" name="__module.model.22.cv2.0.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="760_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="228" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_36" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="760,input.193">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="229" name="self.model.22.cv2.0.2.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 1, 1" offset="6182872" size="16384" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv2.0.2.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="230" name="__module.model.22.cv2.0.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="231" name="__module.model.22.cv2.0.2/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="6199256" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="232" name="__module.model.22.cv2.0.2/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="768">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="233" name="self.model.22.cv3.0.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="85, 64, 3, 3" offset="6199512" size="195840" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv3.0.0.conv.weight">
+					<dim>85</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="234" name="__module.model.22.cv3.0.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>85</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="235" name="__module.model.22.cv3.0.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 85, 1, 1" offset="6395352" size="340" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="236" name="__module.model.22.cv3.0.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="779_1">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="237" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_37" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="779,input.197">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="238" name="self.model.22.cv3.0.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="85, 85, 3, 3" offset="6395692" size="260100" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv3.0.1.conv.weight">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="239" name="__module.model.22.cv3.0.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="240" name="__module.model.22.cv3.0.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 85, 1, 1" offset="6655792" size="340" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="241" name="__module.model.22.cv3.0.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="788_1">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="242" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_38" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="788,input.201">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="243" name="self.model.22.cv3.0.2.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="85, 85, 1, 1" offset="6656132" size="28900" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv3.0.2.weight">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="244" name="__module.model.22.cv3.0.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="245" name="__module.model.22.cv3.0.2/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 85, 1, 1" offset="6685032" size="340" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="246" name="__module.model.22.cv3.0.2/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="796">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="247" name="__module.model.22/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="798,xi.1">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="248" name="__module.model.22/prim::ListConstruct" type="Const" version="opset1">
+			<data element_type="i64" shape="3" offset="6685372" size="24" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="249" name="__module.model.22/aten::view/Reshape" type="Reshape" version="opset1">
+			<data special_zero="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="917">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>6400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="250" name="self.model.16.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="6685396" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.16.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="251" name="__module.model.16.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="252" name="__module.model.16.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="6832852" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="253" name="__module.model.16.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="585_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="254" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_39" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="585,input.151">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="255" name="__module.model.17/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="589,input.153">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="256" name="self.model.18.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 192, 1, 1" offset="6833108" size="98304" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.18.cv1.conv.weight">
+					<dim>128</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="257" name="__module.model.18.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="258" name="__module.model.18.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="6931412" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="259" name="__module.model.18.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="606_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="260" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_40" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="606,input.155">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="261" name="594" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="91776" size="8" />
+			<output>
+				<port id="0" precision="I64" names="594" />
+			</output>
+		</layer>
+		<layer id="262" name="Constant_2075" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="749736" size="16" />
+			<output>
+				<port id="0" precision="I64" names="608">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="263" name="__module.model.18/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="610">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="4" precision="FP32" names="611,input.157">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="264" name="self.model.18.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="6931924" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.18.m.0.cv1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="265" name="__module.model.18.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="266" name="__module.model.18.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="7079380" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="267" name="__module.model.18.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="621_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="268" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_41" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="621,input.159">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="269" name="self.model.18.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="7079636" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.18.m.0.cv2.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="270" name="__module.model.18.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="271" name="__module.model.18.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="7227092" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="272" name="__module.model.18.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="630_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="273" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_42" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="630,input.163">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="274" name="__module.model.18/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="633,input.165">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="275" name="self.model.18.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 192, 1, 1" offset="7227348" size="98304" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.18.cv2.conv.weight">
+					<dim>128</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="276" name="__module.model.18.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>192</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>192</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="277" name="__module.model.18.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="7325652" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="278" name="__module.model.18.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="641_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="279" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_43" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="641,input.167">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="280" name="self.model.22.cv2.1.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 128, 3, 3" offset="7326164" size="294912" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv2.1.0.conv.weight">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="281" name="__module.model.22.cv2.1.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="282" name="__module.model.22.cv2.1.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="7621076" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="283" name="__module.model.22.cv2.1.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="809_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="284" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_44" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="809,input.205">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="285" name="self.model.22.cv2.1.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="7621332" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv2.1.1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="286" name="__module.model.22.cv2.1.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="287" name="__module.model.22.cv2.1.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="7768788" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="288" name="__module.model.22.cv2.1.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="818_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="289" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_45" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="818,input.209">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="290" name="self.model.22.cv2.1.2.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 1, 1" offset="7769044" size="16384" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv2.1.2.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="291" name="__module.model.22.cv2.1.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="292" name="__module.model.22.cv2.1.2/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="7785428" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="293" name="__module.model.22.cv2.1.2/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="826">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="294" name="self.model.22.cv3.1.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="85, 128, 3, 3" offset="7785684" size="391680" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv3.1.0.conv.weight">
+					<dim>85</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="295" name="__module.model.22.cv3.1.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>85</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="296" name="__module.model.22.cv3.1.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 85, 1, 1" offset="8177364" size="340" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="297" name="__module.model.22.cv3.1.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="837_1">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="298" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_46" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="837,input.213">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="299" name="self.model.22.cv3.1.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="85, 85, 3, 3" offset="8177704" size="260100" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv3.1.1.conv.weight">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="300" name="__module.model.22.cv3.1.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="301" name="__module.model.22.cv3.1.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 85, 1, 1" offset="8437804" size="340" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="302" name="__module.model.22.cv3.1.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="846_1">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="303" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_47" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="846,input.217">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="304" name="self.model.22.cv3.1.2.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="85, 85, 1, 1" offset="8438144" size="28900" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv3.1.2.weight">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="305" name="__module.model.22.cv3.1.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="306" name="__module.model.22.cv3.1.2/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 85, 1, 1" offset="8467044" size="340" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="307" name="__module.model.22.cv3.1.2/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="854">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="308" name="__module.model.22/aten::cat/Concat_1" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="856,xi.3">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="309" name="Constant_8179" type="Const" version="opset1">
+			<data element_type="i64" shape="3" offset="6685372" size="24" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="310" name="__module.model.22/aten::view/Reshape_1" type="Reshape" version="opset1">
+			<data special_zero="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="919">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>1600</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="311" name="self.model.19.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 3, 3" offset="8467384" size="589824" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.19.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="312" name="__module.model.19.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>40</dim>
+					<dim>40</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="313" name="__module.model.19.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="9057208" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="314" name="__module.model.19.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="655_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="315" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_48" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="655,input.171">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="316" name="__module.model.20/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="659,input.173">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="317" name="self.model.21.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 384, 1, 1" offset="9057720" size="393216" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.21.cv1.conv.weight">
+					<dim>256</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="318" name="__module.model.21.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="319" name="__module.model.21.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="9450936" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="320" name="__module.model.21.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="676_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="321" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_49" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="676,input.175">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="322" name="664" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="91776" size="8" />
+			<output>
+				<port id="0" precision="I64" names="664" />
+			</output>
+		</layer>
+		<layer id="323" name="Constant_2332" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="2916024" size="16" />
+			<output>
+				<port id="0" precision="I64" names="678">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="324" name="__module.model.21/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="680">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="4" precision="FP32" names="681,input.177">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="325" name="self.model.21.m.0.cv1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 3, 3" offset="9451960" size="589824" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.21.m.0.cv1.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="326" name="__module.model.21.m.0.cv1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="327" name="__module.model.21.m.0.cv1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="10041784" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="328" name="__module.model.21.m.0.cv1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="691_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="329" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_50" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="691,input.179">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="330" name="self.model.21.m.0.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="128, 128, 3, 3" offset="10042296" size="589824" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.21.m.0.cv2.conv.weight">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="331" name="__module.model.21.m.0.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>128</dim>
+					<dim>128</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="332" name="__module.model.21.m.0.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 128, 1, 1" offset="10632120" size="512" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="333" name="__module.model.21.m.0.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="700_1">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="334" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_51" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="700,input.183">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="335" name="__module.model.21/aten::cat/Concat" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>128</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="703,input.185">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="336" name="self.model.21.cv2.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="256, 384, 1, 1" offset="10632632" size="393216" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.21.cv2.conv.weight">
+					<dim>256</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="337" name="__module.model.21.cv2.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>384</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>256</dim>
+					<dim>384</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="338" name="__module.model.21.cv2.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 256, 1, 1" offset="11025848" size="1024" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="339" name="__module.model.21.cv2.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="711_1">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="340" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_52" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="711,input.187">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="341" name="self.model.22.cv2.2.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 256, 3, 3" offset="11026872" size="589824" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv2.2.0.conv.weight">
+					<dim>64</dim>
+					<dim>256</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="342" name="__module.model.22.cv2.2.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>256</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="343" name="__module.model.22.cv2.2.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="11616696" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="344" name="__module.model.22.cv2.2.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="867_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="345" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_53" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="867,input.223">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="346" name="self.model.22.cv2.2.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 3, 3" offset="11616952" size="147456" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv2.2.1.conv.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="347" name="__module.model.22.cv2.2.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="348" name="__module.model.22.cv2.2.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="11764408" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="349" name="__module.model.22.cv2.2.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="876_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="350" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_54" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="876,input.227">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="351" name="self.model.22.cv2.2.2.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="64, 64, 1, 1" offset="11764664" size="16384" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv2.2.2.weight">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="352" name="__module.model.22.cv2.2.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>64</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="353" name="__module.model.22.cv2.2.2/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 64, 1, 1" offset="11781048" size="256" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="354" name="__module.model.22.cv2.2.2/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="884">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="355" name="self.model.22.cv3.2.0.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="85, 256, 3, 3" offset="11781304" size="783360" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv3.2.0.conv.weight">
+					<dim>85</dim>
+					<dim>256</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="356" name="__module.model.22.cv3.2.0.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>256</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>85</dim>
+					<dim>256</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="357" name="__module.model.22.cv3.2.0.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 85, 1, 1" offset="12564664" size="340" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="358" name="__module.model.22.cv3.2.0.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="895_1">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="359" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_55" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="895,input.231">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="360" name="self.model.22.cv3.2.1.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="85, 85, 3, 3" offset="12565004" size="260100" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv3.2.1.conv.weight">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="361" name="__module.model.22.cv3.2.1.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>3</dim>
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="362" name="__module.model.22.cv3.2.1.conv/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 85, 1, 1" offset="12825104" size="340" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="363" name="__module.model.22.cv3.2.1.conv/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="904_1">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="364" name="__module.model.22.cv3.2.1.act/aten::silu_/Swish_56" type="Swish" version="opset4">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="904,input.235">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="365" name="self.model.22.cv3.2.2.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="85, 85, 1, 1" offset="12825444" size="28900" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.cv3.2.2.weight">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="366" name="__module.model.22.cv3.2.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>85</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="367" name="__module.model.22.cv3.2.2/aten::_convolution/Reshape" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 85, 1, 1" offset="12854344" size="340" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="368" name="__module.model.22.cv3.2.2/aten::_convolution/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="912">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="369" name="__module.model.22/aten::cat/Concat_2" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="914,xi">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="370" name="Constant_8180" type="Const" version="opset1">
+			<data element_type="i64" shape="3" offset="6685372" size="24" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="371" name="__module.model.22/aten::view/Reshape_2" type="Reshape" version="opset1">
+			<data special_zero="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>20</dim>
+					<dim>20</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="921">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="372" name="__module.model.22/aten::cat/Concat_3" type="Concat" version="opset1">
+			<data axis="2" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>6400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>1600</dim>
+				</port>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="923">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="373" name="724" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="91776" size="8" />
+			<output>
+				<port id="0" precision="I64" names="724" />
+			</output>
+		</layer>
+		<layer id="374" name="Constant_3375" type="Const" version="opset1">
+			<data element_type="i64" shape="2" offset="12854684" size="16" />
+			<output>
+				<port id="0" precision="I64" names="924">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="375" name="__module.model.22/prim::ListUnpack" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I64">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="926,x_1">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="4" precision="FP32" names="927,cls">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="376" name="Constant_8181" type="Const" version="opset1">
+			<data element_type="i64" shape="4" offset="12854700" size="32" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>4</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="377" name="__module.model.22.dfl/aten::view/Reshape" type="Reshape" version="opset1">
+			<data special_zero="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>4</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="932">
+					<dim>1</dim>
+					<dim>4</dim>
+					<dim>16</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="378" name="__module.model.22.dfl/aten::transpose/Constant" type="Const" version="opset1">
+			<data element_type="i32" shape="4" offset="12854732" size="16" />
+			<output>
+				<port id="0" precision="I32">
+					<dim>4</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="379" name="__module.model.22.dfl/aten::transpose/Transpose" type="Transpose" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>4</dim>
+					<dim>16</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="I32">
+					<dim>4</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="933">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="380" name="__module.model.22.dfl/aten::softmax/Softmax" type="SoftMax" version="opset8">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="934,input">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="381" name="self.model.22.dfl.conv.weight" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 16, 1, 1" offset="12854748" size="64" />
+			<output>
+				<port id="0" precision="FP32" names="self.model.22.dfl.conv.weight">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="382" name="__module.model.22.dfl.conv/aten::_convolution/Convolution" type="Convolution" version="opset1">
+			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>16</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="940">
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="383" name="Constant_8182" type="Const" version="opset1">
+			<data element_type="i64" shape="3" offset="12854812" size="24" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="384" name="__module.model.22.dfl/aten::view/Reshape_1" type="Reshape" version="opset1">
+			<data special_zero="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="942,distance">
+					<dim>1</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="385" name="ShapeOf_8160" type="ShapeOf" version="opset3">
+			<data output_type="i64" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>149</dim>
+					<dim>80</dim>
+					<dim>80</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="I64">
+					<dim>4</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="386" name="Constant_8161" type="Const" version="opset1">
+			<data element_type="i64" shape="1" offset="12854836" size="8" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="387" name="Constant_8162" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="12854836" size="8" />
+			<output>
+				<port id="0" precision="I64" />
+			</output>
+		</layer>
+		<layer id="388" name="Gather_8163" type="Gather" version="opset8">
+			<data batch_dims="0" />
+			<input>
+				<port id="0" precision="I64">
+					<dim>4</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>1</dim>
+				</port>
+				<port id="2" precision="I64" />
+			</input>
+			<output>
+				<port id="3" precision="I64" names="915,929">
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="389" name="Constant_3458" type="Const" version="opset1">
+			<data element_type="i64" shape="1" offset="12854844" size="8" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="390" name="ShapeOf_8168" type="ShapeOf" version="opset3">
+			<data output_type="i64" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>64</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="I64">
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="391" name="Constant_8169" type="Const" version="opset1">
+			<data element_type="i64" shape="1" offset="12854852" size="8" />
+			<output>
+				<port id="0" precision="I64">
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="392" name="Constant_8170" type="Const" version="opset1">
+			<data element_type="i64" shape="" offset="12854836" size="8" />
+			<output>
+				<port id="0" precision="I64" />
+			</output>
+		</layer>
+		<layer id="393" name="Gather_8171" type="Gather" version="opset8">
+			<data batch_dims="0" />
+			<input>
+				<port id="0" precision="I64">
+					<dim>3</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>1</dim>
+				</port>
+				<port id="2" precision="I64" />
+			</input>
+			<output>
+				<port id="3" precision="I64" names="930">
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="394" name="__module.model.22.dfl/prim::ListConstruct_1" type="Concat" version="opset1">
+			<data axis="0" />
+			<input>
+				<port id="0" precision="I64">
+					<dim>1</dim>
+				</port>
+				<port id="1" precision="I64">
+					<dim>1</dim>
+				</port>
+				<port id="2" precision="I64">
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="I64">
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="395" name="Convert_8178" type="Convert" version="opset1">
+			<data destination_type="i32" />
+			<input>
+				<port id="0" precision="I64">
+					<dim>3</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="I32">
+					<dim>3</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="396" name="Constant_4138" type="Const" version="opset1">
+			<data element_type="i32" shape="1" offset="12854860" size="4" />
+			<output>
+				<port id="0" precision="I32">
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="397" name="__module.model.22/prim::ListUnpack/Gather" type="Gather" version="opset8">
+			<data batch_dims="0" />
+			<input>
+				<port id="0" precision="I32">
+					<dim>3</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I32">
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="I32" />
+			</output>
+		</layer>
+		<layer id="398" name="__module.model.22/prim::ListUnpack/Convert" type="Const" version="opset1">
+			<data element_type="i32" shape="" offset="12854864" size="4" />
+			<output>
+				<port id="0" precision="I32" />
+			</output>
+		</layer>
+		<layer id="399" name="__module.model.22/prim::ListUnpack/Divide" type="Divide" version="opset1">
+			<data auto_broadcast="numpy" m_pythondiv="true" />
+			<input>
+				<port id="0" precision="I32" />
+				<port id="1" precision="I32" />
+			</input>
+			<output>
+				<port id="2" precision="I32" />
+			</output>
+		</layer>
+		<layer id="400" name="__module.model.22/prim::ListUnpack/Mod" type="Mod" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="I32" />
+				<port id="1" precision="I32" />
+			</input>
+			<output>
+				<port id="2" precision="I32" />
+			</output>
+		</layer>
+		<layer id="401" name="__module.model.22/prim::ListUnpack/Greater" type="Greater" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="I32" />
+				<port id="1" precision="I32">
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="BOOL">
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="402" name="__module.model.22/prim::ListUnpack/Convert_0" type="Convert" version="opset1">
+			<data destination_type="i32" />
+			<input>
+				<port id="0" precision="BOOL">
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="I32">
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="403" name="__module.model.22/prim::ListUnpack/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="I32" />
+				<port id="1" precision="I32">
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="I32">
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="404" name="Constant_4139" type="Const" version="opset1">
+			<data element_type="i32" shape="1" offset="12854868" size="4" />
+			<output>
+				<port id="0" precision="I32">
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="405" name="__module.model.22/prim::ListUnpack/Concat" type="Concat" version="opset1">
+			<data axis="0" />
+			<input>
+				<port id="0" precision="I32">
+					<dim>1</dim>
+				</port>
+				<port id="1" precision="I32">
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="I32">
+					<dim>2</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="406" name="__module.model.22/prim::ListUnpack/VariadicSplit" type="VariadicSplit" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="I64" />
+				<port id="2" precision="I32">
+					<dim>2</dim>
+				</port>
+			</input>
+			<output>
+				<port id="3" precision="FP32" names="945,lt">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="4" precision="FP32" names="946,rb">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="407" name="__module.model.22/aten::sub/Subtract" type="Subtract" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="947,x1y1">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="408" name="__module.model.22/aten::add/Add" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="948,x2y2">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="409" name="__module.model.22/aten::add/Add_1" type="Add" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="949">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="410" name="Constant_8080" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 1, 1" offset="12854872" size="4" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="411" name="__module.model.22/aten::div/Divide" type="Divide" version="opset1">
+			<data auto_broadcast="numpy" m_pythondiv="true" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>1</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="950,c_xy">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="412" name="__module.model.22/aten::sub/Subtract_1" type="Subtract" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="951,wh">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="413" name="__module.model.22/aten::cat/Concat_4" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>2</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="953">
+					<dim>1</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="414" name="Constant_8081" type="Const" version="opset1">
+			<data element_type="f32" shape="1, 1, 8400" offset="12854876" size="33600" />
+			<output>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="415" name="__module.model.22/aten::mul/Multiply" type="Multiply" version="opset1">
+			<data auto_broadcast="numpy" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>1</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32" names="954,dbox">
+					<dim>1</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="416" name="__module.model.22/aten::sigmoid/Sigmoid" type="Sigmoid" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="1" precision="FP32" names="955">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="417" name="__module.model.22/aten::cat/Concat_5" type="Concat" version="opset1">
+			<data axis="1" />
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>4</dim>
+					<dim>8400</dim>
+				</port>
+				<port id="1" precision="FP32">
+					<dim>1</dim>
+					<dim>85</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+			<output>
+				<port id="2" precision="FP32">
+					<dim>1</dim>
+					<dim>89</dim>
+					<dim>8400</dim>
+				</port>
+			</output>
+		</layer>
+		<layer id="418" name="Result_3512" type="Result" version="opset1">
+			<input>
+				<port id="0" precision="FP32">
+					<dim>1</dim>
+					<dim>89</dim>
+					<dim>8400</dim>
+				</port>
+			</input>
+		</layer>
+	</layers>
+	<edges>
+		<edge from-layer="0" from-port="0" to-layer="3" to-port="0" />
+		<edge from-layer="1" from-port="0" to-layer="408" to-port="0" />
+		<edge from-layer="1" from-port="0" to-layer="407" to-port="0" />
+		<edge from-layer="2" from-port="0" to-layer="3" to-port="1" />
+		<edge from-layer="3" from-port="2" to-layer="5" to-port="0" />
+		<edge from-layer="4" from-port="0" to-layer="5" to-port="1" />
+		<edge from-layer="5" from-port="2" to-layer="6" to-port="0" />
+		<edge from-layer="6" from-port="1" to-layer="8" to-port="0" />
+		<edge from-layer="7" from-port="0" to-layer="8" to-port="1" />
+		<edge from-layer="8" from-port="2" to-layer="10" to-port="0" />
+		<edge from-layer="9" from-port="0" to-layer="10" to-port="1" />
+		<edge from-layer="10" from-port="2" to-layer="11" to-port="0" />
+		<edge from-layer="11" from-port="1" to-layer="13" to-port="0" />
+		<edge from-layer="12" from-port="0" to-layer="13" to-port="1" />
+		<edge from-layer="13" from-port="2" to-layer="15" to-port="0" />
+		<edge from-layer="14" from-port="0" to-layer="15" to-port="1" />
+		<edge from-layer="15" from-port="2" to-layer="16" to-port="0" />
+		<edge from-layer="16" from-port="1" to-layer="19" to-port="0" />
+		<edge from-layer="17" from-port="0" to-layer="19" to-port="1" />
+		<edge from-layer="18" from-port="0" to-layer="19" to-port="2" />
+		<edge from-layer="19" from-port="4" to-layer="21" to-port="0" />
+		<edge from-layer="19" from-port="4" to-layer="31" to-port="1" />
+		<edge from-layer="19" from-port="3" to-layer="31" to-port="0" />
+		<edge from-layer="19" from-port="4" to-layer="30" to-port="0" />
+		<edge from-layer="20" from-port="0" to-layer="21" to-port="1" />
+		<edge from-layer="21" from-port="2" to-layer="23" to-port="0" />
+		<edge from-layer="22" from-port="0" to-layer="23" to-port="1" />
+		<edge from-layer="23" from-port="2" to-layer="24" to-port="0" />
+		<edge from-layer="24" from-port="1" to-layer="26" to-port="0" />
+		<edge from-layer="25" from-port="0" to-layer="26" to-port="1" />
+		<edge from-layer="26" from-port="2" to-layer="28" to-port="0" />
+		<edge from-layer="27" from-port="0" to-layer="28" to-port="1" />
+		<edge from-layer="28" from-port="2" to-layer="29" to-port="0" />
+		<edge from-layer="29" from-port="1" to-layer="30" to-port="1" />
+		<edge from-layer="30" from-port="2" to-layer="31" to-port="2" />
+		<edge from-layer="31" from-port="3" to-layer="33" to-port="0" />
+		<edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
+		<edge from-layer="33" from-port="2" to-layer="35" to-port="0" />
+		<edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
+		<edge from-layer="35" from-port="2" to-layer="36" to-port="0" />
+		<edge from-layer="36" from-port="1" to-layer="38" to-port="0" />
+		<edge from-layer="37" from-port="0" to-layer="38" to-port="1" />
+		<edge from-layer="38" from-port="2" to-layer="40" to-port="0" />
+		<edge from-layer="39" from-port="0" to-layer="40" to-port="1" />
+		<edge from-layer="40" from-port="2" to-layer="41" to-port="0" />
+		<edge from-layer="41" from-port="1" to-layer="43" to-port="0" />
+		<edge from-layer="42" from-port="0" to-layer="43" to-port="1" />
+		<edge from-layer="43" from-port="2" to-layer="45" to-port="0" />
+		<edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
+		<edge from-layer="45" from-port="2" to-layer="46" to-port="0" />
+		<edge from-layer="46" from-port="1" to-layer="49" to-port="0" />
+		<edge from-layer="47" from-port="0" to-layer="49" to-port="1" />
+		<edge from-layer="48" from-port="0" to-layer="49" to-port="2" />
+		<edge from-layer="49" from-port="4" to-layer="51" to-port="0" />
+		<edge from-layer="49" from-port="4" to-layer="60" to-port="0" />
+		<edge from-layer="49" from-port="3" to-layer="72" to-port="0" />
+		<edge from-layer="49" from-port="4" to-layer="72" to-port="1" />
+		<edge from-layer="50" from-port="0" to-layer="51" to-port="1" />
+		<edge from-layer="51" from-port="2" to-layer="53" to-port="0" />
+		<edge from-layer="52" from-port="0" to-layer="53" to-port="1" />
+		<edge from-layer="53" from-port="2" to-layer="54" to-port="0" />
+		<edge from-layer="54" from-port="1" to-layer="56" to-port="0" />
+		<edge from-layer="55" from-port="0" to-layer="56" to-port="1" />
+		<edge from-layer="56" from-port="2" to-layer="58" to-port="0" />
+		<edge from-layer="57" from-port="0" to-layer="58" to-port="1" />
+		<edge from-layer="58" from-port="2" to-layer="59" to-port="0" />
+		<edge from-layer="59" from-port="1" to-layer="60" to-port="1" />
+		<edge from-layer="60" from-port="2" to-layer="62" to-port="0" />
+		<edge from-layer="60" from-port="2" to-layer="71" to-port="0" />
+		<edge from-layer="60" from-port="2" to-layer="72" to-port="2" />
+		<edge from-layer="61" from-port="0" to-layer="62" to-port="1" />
+		<edge from-layer="62" from-port="2" to-layer="64" to-port="0" />
+		<edge from-layer="63" from-port="0" to-layer="64" to-port="1" />
+		<edge from-layer="64" from-port="2" to-layer="65" to-port="0" />
+		<edge from-layer="65" from-port="1" to-layer="67" to-port="0" />
+		<edge from-layer="66" from-port="0" to-layer="67" to-port="1" />
+		<edge from-layer="67" from-port="2" to-layer="69" to-port="0" />
+		<edge from-layer="68" from-port="0" to-layer="69" to-port="1" />
+		<edge from-layer="69" from-port="2" to-layer="70" to-port="0" />
+		<edge from-layer="70" from-port="1" to-layer="71" to-port="1" />
+		<edge from-layer="71" from-port="2" to-layer="72" to-port="3" />
+		<edge from-layer="72" from-port="4" to-layer="74" to-port="0" />
+		<edge from-layer="73" from-port="0" to-layer="74" to-port="1" />
+		<edge from-layer="74" from-port="2" to-layer="76" to-port="0" />
+		<edge from-layer="75" from-port="0" to-layer="76" to-port="1" />
+		<edge from-layer="76" from-port="2" to-layer="77" to-port="0" />
+		<edge from-layer="77" from-port="1" to-layer="79" to-port="0" />
+		<edge from-layer="77" from-port="1" to-layer="194" to-port="1" />
+		<edge from-layer="78" from-port="0" to-layer="79" to-port="1" />
+		<edge from-layer="79" from-port="2" to-layer="81" to-port="0" />
+		<edge from-layer="80" from-port="0" to-layer="81" to-port="1" />
+		<edge from-layer="81" from-port="2" to-layer="82" to-port="0" />
+		<edge from-layer="82" from-port="1" to-layer="84" to-port="0" />
+		<edge from-layer="83" from-port="0" to-layer="84" to-port="1" />
+		<edge from-layer="84" from-port="2" to-layer="86" to-port="0" />
+		<edge from-layer="85" from-port="0" to-layer="86" to-port="1" />
+		<edge from-layer="86" from-port="2" to-layer="87" to-port="0" />
+		<edge from-layer="87" from-port="1" to-layer="90" to-port="0" />
+		<edge from-layer="88" from-port="0" to-layer="90" to-port="1" />
+		<edge from-layer="89" from-port="0" to-layer="90" to-port="2" />
+		<edge from-layer="90" from-port="4" to-layer="92" to-port="0" />
+		<edge from-layer="90" from-port="3" to-layer="113" to-port="0" />
+		<edge from-layer="90" from-port="4" to-layer="101" to-port="0" />
+		<edge from-layer="90" from-port="4" to-layer="113" to-port="1" />
+		<edge from-layer="91" from-port="0" to-layer="92" to-port="1" />
+		<edge from-layer="92" from-port="2" to-layer="94" to-port="0" />
+		<edge from-layer="93" from-port="0" to-layer="94" to-port="1" />
+		<edge from-layer="94" from-port="2" to-layer="95" to-port="0" />
+		<edge from-layer="95" from-port="1" to-layer="97" to-port="0" />
+		<edge from-layer="96" from-port="0" to-layer="97" to-port="1" />
+		<edge from-layer="97" from-port="2" to-layer="99" to-port="0" />
+		<edge from-layer="98" from-port="0" to-layer="99" to-port="1" />
+		<edge from-layer="99" from-port="2" to-layer="100" to-port="0" />
+		<edge from-layer="100" from-port="1" to-layer="101" to-port="1" />
+		<edge from-layer="101" from-port="2" to-layer="103" to-port="0" />
+		<edge from-layer="101" from-port="2" to-layer="112" to-port="0" />
+		<edge from-layer="101" from-port="2" to-layer="113" to-port="2" />
+		<edge from-layer="102" from-port="0" to-layer="103" to-port="1" />
+		<edge from-layer="103" from-port="2" to-layer="105" to-port="0" />
+		<edge from-layer="104" from-port="0" to-layer="105" to-port="1" />
+		<edge from-layer="105" from-port="2" to-layer="106" to-port="0" />
+		<edge from-layer="106" from-port="1" to-layer="108" to-port="0" />
+		<edge from-layer="107" from-port="0" to-layer="108" to-port="1" />
+		<edge from-layer="108" from-port="2" to-layer="110" to-port="0" />
+		<edge from-layer="109" from-port="0" to-layer="110" to-port="1" />
+		<edge from-layer="110" from-port="2" to-layer="111" to-port="0" />
+		<edge from-layer="111" from-port="1" to-layer="112" to-port="1" />
+		<edge from-layer="112" from-port="2" to-layer="113" to-port="3" />
+		<edge from-layer="113" from-port="4" to-layer="115" to-port="0" />
+		<edge from-layer="114" from-port="0" to-layer="115" to-port="1" />
+		<edge from-layer="115" from-port="2" to-layer="117" to-port="0" />
+		<edge from-layer="116" from-port="0" to-layer="117" to-port="1" />
+		<edge from-layer="117" from-port="2" to-layer="118" to-port="0" />
+		<edge from-layer="118" from-port="1" to-layer="120" to-port="0" />
+		<edge from-layer="118" from-port="1" to-layer="166" to-port="1" />
+		<edge from-layer="119" from-port="0" to-layer="120" to-port="1" />
+		<edge from-layer="120" from-port="2" to-layer="122" to-port="0" />
+		<edge from-layer="121" from-port="0" to-layer="122" to-port="1" />
+		<edge from-layer="122" from-port="2" to-layer="123" to-port="0" />
+		<edge from-layer="123" from-port="1" to-layer="125" to-port="0" />
+		<edge from-layer="124" from-port="0" to-layer="125" to-port="1" />
+		<edge from-layer="125" from-port="2" to-layer="127" to-port="0" />
+		<edge from-layer="126" from-port="0" to-layer="127" to-port="1" />
+		<edge from-layer="127" from-port="2" to-layer="128" to-port="0" />
+		<edge from-layer="128" from-port="1" to-layer="131" to-port="0" />
+		<edge from-layer="129" from-port="0" to-layer="131" to-port="1" />
+		<edge from-layer="130" from-port="0" to-layer="131" to-port="2" />
+		<edge from-layer="131" from-port="4" to-layer="133" to-port="0" />
+		<edge from-layer="131" from-port="4" to-layer="142" to-port="0" />
+		<edge from-layer="131" from-port="3" to-layer="143" to-port="0" />
+		<edge from-layer="131" from-port="4" to-layer="143" to-port="1" />
+		<edge from-layer="132" from-port="0" to-layer="133" to-port="1" />
+		<edge from-layer="133" from-port="2" to-layer="135" to-port="0" />
+		<edge from-layer="134" from-port="0" to-layer="135" to-port="1" />
+		<edge from-layer="135" from-port="2" to-layer="136" to-port="0" />
+		<edge from-layer="136" from-port="1" to-layer="138" to-port="0" />
+		<edge from-layer="137" from-port="0" to-layer="138" to-port="1" />
+		<edge from-layer="138" from-port="2" to-layer="140" to-port="0" />
+		<edge from-layer="139" from-port="0" to-layer="140" to-port="1" />
+		<edge from-layer="140" from-port="2" to-layer="141" to-port="0" />
+		<edge from-layer="141" from-port="1" to-layer="142" to-port="1" />
+		<edge from-layer="142" from-port="2" to-layer="143" to-port="2" />
+		<edge from-layer="143" from-port="3" to-layer="145" to-port="0" />
+		<edge from-layer="144" from-port="0" to-layer="145" to-port="1" />
+		<edge from-layer="145" from-port="2" to-layer="147" to-port="0" />
+		<edge from-layer="146" from-port="0" to-layer="147" to-port="1" />
+		<edge from-layer="147" from-port="2" to-layer="148" to-port="0" />
+		<edge from-layer="148" from-port="1" to-layer="150" to-port="0" />
+		<edge from-layer="149" from-port="0" to-layer="150" to-port="1" />
+		<edge from-layer="150" from-port="2" to-layer="152" to-port="0" />
+		<edge from-layer="151" from-port="0" to-layer="152" to-port="1" />
+		<edge from-layer="152" from-port="2" to-layer="153" to-port="0" />
+		<edge from-layer="153" from-port="1" to-layer="154" to-port="0" />
+		<edge from-layer="153" from-port="1" to-layer="157" to-port="0" />
+		<edge from-layer="154" from-port="1" to-layer="155" to-port="0" />
+		<edge from-layer="154" from-port="1" to-layer="157" to-port="1" />
+		<edge from-layer="155" from-port="1" to-layer="156" to-port="0" />
+		<edge from-layer="155" from-port="1" to-layer="157" to-port="2" />
+		<edge from-layer="156" from-port="1" to-layer="157" to-port="3" />
+		<edge from-layer="157" from-port="4" to-layer="159" to-port="0" />
+		<edge from-layer="158" from-port="0" to-layer="159" to-port="1" />
+		<edge from-layer="159" from-port="2" to-layer="161" to-port="0" />
+		<edge from-layer="160" from-port="0" to-layer="161" to-port="1" />
+		<edge from-layer="161" from-port="2" to-layer="162" to-port="0" />
+		<edge from-layer="162" from-port="1" to-layer="165" to-port="0" />
+		<edge from-layer="162" from-port="1" to-layer="316" to-port="1" />
+		<edge from-layer="163" from-port="0" to-layer="165" to-port="1" />
+		<edge from-layer="164" from-port="0" to-layer="165" to-port="2" />
+		<edge from-layer="165" from-port="3" to-layer="166" to-port="0" />
+		<edge from-layer="166" from-port="2" to-layer="168" to-port="0" />
+		<edge from-layer="167" from-port="0" to-layer="168" to-port="1" />
+		<edge from-layer="168" from-port="2" to-layer="170" to-port="0" />
+		<edge from-layer="169" from-port="0" to-layer="170" to-port="1" />
+		<edge from-layer="170" from-port="2" to-layer="171" to-port="0" />
+		<edge from-layer="171" from-port="1" to-layer="174" to-port="0" />
+		<edge from-layer="172" from-port="0" to-layer="174" to-port="1" />
+		<edge from-layer="173" from-port="0" to-layer="174" to-port="2" />
+		<edge from-layer="174" from-port="4" to-layer="176" to-port="0" />
+		<edge from-layer="174" from-port="3" to-layer="185" to-port="0" />
+		<edge from-layer="174" from-port="4" to-layer="185" to-port="1" />
+		<edge from-layer="175" from-port="0" to-layer="176" to-port="1" />
+		<edge from-layer="176" from-port="2" to-layer="178" to-port="0" />
+		<edge from-layer="177" from-port="0" to-layer="178" to-port="1" />
+		<edge from-layer="178" from-port="2" to-layer="179" to-port="0" />
+		<edge from-layer="179" from-port="1" to-layer="181" to-port="0" />
+		<edge from-layer="180" from-port="0" to-layer="181" to-port="1" />
+		<edge from-layer="181" from-port="2" to-layer="183" to-port="0" />
+		<edge from-layer="182" from-port="0" to-layer="183" to-port="1" />
+		<edge from-layer="183" from-port="2" to-layer="184" to-port="0" />
+		<edge from-layer="184" from-port="1" to-layer="185" to-port="2" />
+		<edge from-layer="185" from-port="3" to-layer="187" to-port="0" />
+		<edge from-layer="186" from-port="0" to-layer="187" to-port="1" />
+		<edge from-layer="187" from-port="2" to-layer="189" to-port="0" />
+		<edge from-layer="188" from-port="0" to-layer="189" to-port="1" />
+		<edge from-layer="189" from-port="2" to-layer="190" to-port="0" />
+		<edge from-layer="190" from-port="1" to-layer="193" to-port="0" />
+		<edge from-layer="190" from-port="1" to-layer="255" to-port="1" />
+		<edge from-layer="191" from-port="0" to-layer="193" to-port="1" />
+		<edge from-layer="192" from-port="0" to-layer="193" to-port="2" />
+		<edge from-layer="193" from-port="3" to-layer="194" to-port="0" />
+		<edge from-layer="194" from-port="2" to-layer="196" to-port="0" />
+		<edge from-layer="195" from-port="0" to-layer="196" to-port="1" />
+		<edge from-layer="196" from-port="2" to-layer="198" to-port="0" />
+		<edge from-layer="197" from-port="0" to-layer="198" to-port="1" />
+		<edge from-layer="198" from-port="2" to-layer="199" to-port="0" />
+		<edge from-layer="199" from-port="1" to-layer="202" to-port="0" />
+		<edge from-layer="200" from-port="0" to-layer="202" to-port="1" />
+		<edge from-layer="201" from-port="0" to-layer="202" to-port="2" />
+		<edge from-layer="202" from-port="4" to-layer="204" to-port="0" />
+		<edge from-layer="202" from-port="3" to-layer="213" to-port="0" />
+		<edge from-layer="202" from-port="4" to-layer="213" to-port="1" />
+		<edge from-layer="203" from-port="0" to-layer="204" to-port="1" />
+		<edge from-layer="204" from-port="2" to-layer="206" to-port="0" />
+		<edge from-layer="205" from-port="0" to-layer="206" to-port="1" />
+		<edge from-layer="206" from-port="2" to-layer="207" to-port="0" />
+		<edge from-layer="207" from-port="1" to-layer="209" to-port="0" />
+		<edge from-layer="208" from-port="0" to-layer="209" to-port="1" />
+		<edge from-layer="209" from-port="2" to-layer="211" to-port="0" />
+		<edge from-layer="210" from-port="0" to-layer="211" to-port="1" />
+		<edge from-layer="211" from-port="2" to-layer="212" to-port="0" />
+		<edge from-layer="212" from-port="1" to-layer="213" to-port="2" />
+		<edge from-layer="213" from-port="3" to-layer="215" to-port="0" />
+		<edge from-layer="214" from-port="0" to-layer="215" to-port="1" />
+		<edge from-layer="215" from-port="2" to-layer="217" to-port="0" />
+		<edge from-layer="216" from-port="0" to-layer="217" to-port="1" />
+		<edge from-layer="217" from-port="2" to-layer="218" to-port="0" />
+		<edge from-layer="218" from-port="1" to-layer="220" to-port="0" />
+		<edge from-layer="218" from-port="1" to-layer="251" to-port="0" />
+		<edge from-layer="218" from-port="1" to-layer="234" to-port="0" />
+		<edge from-layer="219" from-port="0" to-layer="220" to-port="1" />
+		<edge from-layer="220" from-port="2" to-layer="222" to-port="0" />
+		<edge from-layer="221" from-port="0" to-layer="222" to-port="1" />
+		<edge from-layer="222" from-port="2" to-layer="223" to-port="0" />
+		<edge from-layer="223" from-port="1" to-layer="225" to-port="0" />
+		<edge from-layer="224" from-port="0" to-layer="225" to-port="1" />
+		<edge from-layer="225" from-port="2" to-layer="227" to-port="0" />
+		<edge from-layer="226" from-port="0" to-layer="227" to-port="1" />
+		<edge from-layer="227" from-port="2" to-layer="228" to-port="0" />
+		<edge from-layer="228" from-port="1" to-layer="230" to-port="0" />
+		<edge from-layer="229" from-port="0" to-layer="230" to-port="1" />
+		<edge from-layer="230" from-port="2" to-layer="232" to-port="0" />
+		<edge from-layer="231" from-port="0" to-layer="232" to-port="1" />
+		<edge from-layer="232" from-port="2" to-layer="247" to-port="0" />
+		<edge from-layer="233" from-port="0" to-layer="234" to-port="1" />
+		<edge from-layer="234" from-port="2" to-layer="236" to-port="0" />
+		<edge from-layer="235" from-port="0" to-layer="236" to-port="1" />
+		<edge from-layer="236" from-port="2" to-layer="237" to-port="0" />
+		<edge from-layer="237" from-port="1" to-layer="239" to-port="0" />
+		<edge from-layer="238" from-port="0" to-layer="239" to-port="1" />
+		<edge from-layer="239" from-port="2" to-layer="241" to-port="0" />
+		<edge from-layer="240" from-port="0" to-layer="241" to-port="1" />
+		<edge from-layer="241" from-port="2" to-layer="242" to-port="0" />
+		<edge from-layer="242" from-port="1" to-layer="244" to-port="0" />
+		<edge from-layer="243" from-port="0" to-layer="244" to-port="1" />
+		<edge from-layer="244" from-port="2" to-layer="246" to-port="0" />
+		<edge from-layer="245" from-port="0" to-layer="246" to-port="1" />
+		<edge from-layer="246" from-port="2" to-layer="247" to-port="1" />
+		<edge from-layer="247" from-port="2" to-layer="249" to-port="0" />
+		<edge from-layer="247" from-port="2" to-layer="385" to-port="0" />
+		<edge from-layer="248" from-port="0" to-layer="249" to-port="1" />
+		<edge from-layer="249" from-port="2" to-layer="372" to-port="0" />
+		<edge from-layer="250" from-port="0" to-layer="251" to-port="1" />
+		<edge from-layer="251" from-port="2" to-layer="253" to-port="0" />
+		<edge from-layer="252" from-port="0" to-layer="253" to-port="1" />
+		<edge from-layer="253" from-port="2" to-layer="254" to-port="0" />
+		<edge from-layer="254" from-port="1" to-layer="255" to-port="0" />
+		<edge from-layer="255" from-port="2" to-layer="257" to-port="0" />
+		<edge from-layer="256" from-port="0" to-layer="257" to-port="1" />
+		<edge from-layer="257" from-port="2" to-layer="259" to-port="0" />
+		<edge from-layer="258" from-port="0" to-layer="259" to-port="1" />
+		<edge from-layer="259" from-port="2" to-layer="260" to-port="0" />
+		<edge from-layer="260" from-port="1" to-layer="263" to-port="0" />
+		<edge from-layer="261" from-port="0" to-layer="263" to-port="1" />
+		<edge from-layer="262" from-port="0" to-layer="263" to-port="2" />
+		<edge from-layer="263" from-port="4" to-layer="265" to-port="0" />
+		<edge from-layer="263" from-port="3" to-layer="274" to-port="0" />
+		<edge from-layer="263" from-port="4" to-layer="274" to-port="1" />
+		<edge from-layer="264" from-port="0" to-layer="265" to-port="1" />
+		<edge from-layer="265" from-port="2" to-layer="267" to-port="0" />
+		<edge from-layer="266" from-port="0" to-layer="267" to-port="1" />
+		<edge from-layer="267" from-port="2" to-layer="268" to-port="0" />
+		<edge from-layer="268" from-port="1" to-layer="270" to-port="0" />
+		<edge from-layer="269" from-port="0" to-layer="270" to-port="1" />
+		<edge from-layer="270" from-port="2" to-layer="272" to-port="0" />
+		<edge from-layer="271" from-port="0" to-layer="272" to-port="1" />
+		<edge from-layer="272" from-port="2" to-layer="273" to-port="0" />
+		<edge from-layer="273" from-port="1" to-layer="274" to-port="2" />
+		<edge from-layer="274" from-port="3" to-layer="276" to-port="0" />
+		<edge from-layer="275" from-port="0" to-layer="276" to-port="1" />
+		<edge from-layer="276" from-port="2" to-layer="278" to-port="0" />
+		<edge from-layer="277" from-port="0" to-layer="278" to-port="1" />
+		<edge from-layer="278" from-port="2" to-layer="279" to-port="0" />
+		<edge from-layer="279" from-port="1" to-layer="281" to-port="0" />
+		<edge from-layer="279" from-port="1" to-layer="295" to-port="0" />
+		<edge from-layer="279" from-port="1" to-layer="312" to-port="0" />
+		<edge from-layer="280" from-port="0" to-layer="281" to-port="1" />
+		<edge from-layer="281" from-port="2" to-layer="283" to-port="0" />
+		<edge from-layer="282" from-port="0" to-layer="283" to-port="1" />
+		<edge from-layer="283" from-port="2" to-layer="284" to-port="0" />
+		<edge from-layer="284" from-port="1" to-layer="286" to-port="0" />
+		<edge from-layer="285" from-port="0" to-layer="286" to-port="1" />
+		<edge from-layer="286" from-port="2" to-layer="288" to-port="0" />
+		<edge from-layer="287" from-port="0" to-layer="288" to-port="1" />
+		<edge from-layer="288" from-port="2" to-layer="289" to-port="0" />
+		<edge from-layer="289" from-port="1" to-layer="291" to-port="0" />
+		<edge from-layer="290" from-port="0" to-layer="291" to-port="1" />
+		<edge from-layer="291" from-port="2" to-layer="293" to-port="0" />
+		<edge from-layer="292" from-port="0" to-layer="293" to-port="1" />
+		<edge from-layer="293" from-port="2" to-layer="308" to-port="0" />
+		<edge from-layer="294" from-port="0" to-layer="295" to-port="1" />
+		<edge from-layer="295" from-port="2" to-layer="297" to-port="0" />
+		<edge from-layer="296" from-port="0" to-layer="297" to-port="1" />
+		<edge from-layer="297" from-port="2" to-layer="298" to-port="0" />
+		<edge from-layer="298" from-port="1" to-layer="300" to-port="0" />
+		<edge from-layer="299" from-port="0" to-layer="300" to-port="1" />
+		<edge from-layer="300" from-port="2" to-layer="302" to-port="0" />
+		<edge from-layer="301" from-port="0" to-layer="302" to-port="1" />
+		<edge from-layer="302" from-port="2" to-layer="303" to-port="0" />
+		<edge from-layer="303" from-port="1" to-layer="305" to-port="0" />
+		<edge from-layer="304" from-port="0" to-layer="305" to-port="1" />
+		<edge from-layer="305" from-port="2" to-layer="307" to-port="0" />
+		<edge from-layer="306" from-port="0" to-layer="307" to-port="1" />
+		<edge from-layer="307" from-port="2" to-layer="308" to-port="1" />
+		<edge from-layer="308" from-port="2" to-layer="310" to-port="0" />
+		<edge from-layer="309" from-port="0" to-layer="310" to-port="1" />
+		<edge from-layer="310" from-port="2" to-layer="372" to-port="1" />
+		<edge from-layer="311" from-port="0" to-layer="312" to-port="1" />
+		<edge from-layer="312" from-port="2" to-layer="314" to-port="0" />
+		<edge from-layer="313" from-port="0" to-layer="314" to-port="1" />
+		<edge from-layer="314" from-port="2" to-layer="315" to-port="0" />
+		<edge from-layer="315" from-port="1" to-layer="316" to-port="0" />
+		<edge from-layer="316" from-port="2" to-layer="318" to-port="0" />
+		<edge from-layer="317" from-port="0" to-layer="318" to-port="1" />
+		<edge from-layer="318" from-port="2" to-layer="320" to-port="0" />
+		<edge from-layer="319" from-port="0" to-layer="320" to-port="1" />
+		<edge from-layer="320" from-port="2" to-layer="321" to-port="0" />
+		<edge from-layer="321" from-port="1" to-layer="324" to-port="0" />
+		<edge from-layer="322" from-port="0" to-layer="324" to-port="1" />
+		<edge from-layer="323" from-port="0" to-layer="324" to-port="2" />
+		<edge from-layer="324" from-port="4" to-layer="326" to-port="0" />
+		<edge from-layer="324" from-port="3" to-layer="335" to-port="0" />
+		<edge from-layer="324" from-port="4" to-layer="335" to-port="1" />
+		<edge from-layer="325" from-port="0" to-layer="326" to-port="1" />
+		<edge from-layer="326" from-port="2" to-layer="328" to-port="0" />
+		<edge from-layer="327" from-port="0" to-layer="328" to-port="1" />
+		<edge from-layer="328" from-port="2" to-layer="329" to-port="0" />
+		<edge from-layer="329" from-port="1" to-layer="331" to-port="0" />
+		<edge from-layer="330" from-port="0" to-layer="331" to-port="1" />
+		<edge from-layer="331" from-port="2" to-layer="333" to-port="0" />
+		<edge from-layer="332" from-port="0" to-layer="333" to-port="1" />
+		<edge from-layer="333" from-port="2" to-layer="334" to-port="0" />
+		<edge from-layer="334" from-port="1" to-layer="335" to-port="2" />
+		<edge from-layer="335" from-port="3" to-layer="337" to-port="0" />
+		<edge from-layer="336" from-port="0" to-layer="337" to-port="1" />
+		<edge from-layer="337" from-port="2" to-layer="339" to-port="0" />
+		<edge from-layer="338" from-port="0" to-layer="339" to-port="1" />
+		<edge from-layer="339" from-port="2" to-layer="340" to-port="0" />
+		<edge from-layer="340" from-port="1" to-layer="342" to-port="0" />
+		<edge from-layer="340" from-port="1" to-layer="356" to-port="0" />
+		<edge from-layer="341" from-port="0" to-layer="342" to-port="1" />
+		<edge from-layer="342" from-port="2" to-layer="344" to-port="0" />
+		<edge from-layer="343" from-port="0" to-layer="344" to-port="1" />
+		<edge from-layer="344" from-port="2" to-layer="345" to-port="0" />
+		<edge from-layer="345" from-port="1" to-layer="347" to-port="0" />
+		<edge from-layer="346" from-port="0" to-layer="347" to-port="1" />
+		<edge from-layer="347" from-port="2" to-layer="349" to-port="0" />
+		<edge from-layer="348" from-port="0" to-layer="349" to-port="1" />
+		<edge from-layer="349" from-port="2" to-layer="350" to-port="0" />
+		<edge from-layer="350" from-port="1" to-layer="352" to-port="0" />
+		<edge from-layer="351" from-port="0" to-layer="352" to-port="1" />
+		<edge from-layer="352" from-port="2" to-layer="354" to-port="0" />
+		<edge from-layer="353" from-port="0" to-layer="354" to-port="1" />
+		<edge from-layer="354" from-port="2" to-layer="369" to-port="0" />
+		<edge from-layer="355" from-port="0" to-layer="356" to-port="1" />
+		<edge from-layer="356" from-port="2" to-layer="358" to-port="0" />
+		<edge from-layer="357" from-port="0" to-layer="358" to-port="1" />
+		<edge from-layer="358" from-port="2" to-layer="359" to-port="0" />
+		<edge from-layer="359" from-port="1" to-layer="361" to-port="0" />
+		<edge from-layer="360" from-port="0" to-layer="361" to-port="1" />
+		<edge from-layer="361" from-port="2" to-layer="363" to-port="0" />
+		<edge from-layer="362" from-port="0" to-layer="363" to-port="1" />
+		<edge from-layer="363" from-port="2" to-layer="364" to-port="0" />
+		<edge from-layer="364" from-port="1" to-layer="366" to-port="0" />
+		<edge from-layer="365" from-port="0" to-layer="366" to-port="1" />
+		<edge from-layer="366" from-port="2" to-layer="368" to-port="0" />
+		<edge from-layer="367" from-port="0" to-layer="368" to-port="1" />
+		<edge from-layer="368" from-port="2" to-layer="369" to-port="1" />
+		<edge from-layer="369" from-port="2" to-layer="371" to-port="0" />
+		<edge from-layer="370" from-port="0" to-layer="371" to-port="1" />
+		<edge from-layer="371" from-port="2" to-layer="372" to-port="2" />
+		<edge from-layer="372" from-port="3" to-layer="375" to-port="0" />
+		<edge from-layer="373" from-port="0" to-layer="375" to-port="1" />
+		<edge from-layer="373" from-port="0" to-layer="397" to-port="1" />
+		<edge from-layer="373" from-port="0" to-layer="406" to-port="1" />
+		<edge from-layer="374" from-port="0" to-layer="375" to-port="2" />
+		<edge from-layer="375" from-port="3" to-layer="377" to-port="0" />
+		<edge from-layer="375" from-port="4" to-layer="416" to-port="0" />
+		<edge from-layer="375" from-port="3" to-layer="390" to-port="0" />
+		<edge from-layer="376" from-port="0" to-layer="377" to-port="1" />
+		<edge from-layer="377" from-port="2" to-layer="379" to-port="0" />
+		<edge from-layer="378" from-port="0" to-layer="379" to-port="1" />
+		<edge from-layer="379" from-port="2" to-layer="380" to-port="0" />
+		<edge from-layer="380" from-port="1" to-layer="382" to-port="0" />
+		<edge from-layer="381" from-port="0" to-layer="382" to-port="1" />
+		<edge from-layer="382" from-port="2" to-layer="384" to-port="0" />
+		<edge from-layer="383" from-port="0" to-layer="384" to-port="1" />
+		<edge from-layer="384" from-port="2" to-layer="406" to-port="0" />
+		<edge from-layer="385" from-port="1" to-layer="388" to-port="0" />
+		<edge from-layer="386" from-port="0" to-layer="388" to-port="1" />
+		<edge from-layer="387" from-port="0" to-layer="388" to-port="2" />
+		<edge from-layer="388" from-port="3" to-layer="394" to-port="0" />
+		<edge from-layer="389" from-port="0" to-layer="394" to-port="1" />
+		<edge from-layer="390" from-port="1" to-layer="393" to-port="0" />
+		<edge from-layer="391" from-port="0" to-layer="393" to-port="1" />
+		<edge from-layer="392" from-port="0" to-layer="393" to-port="2" />
+		<edge from-layer="393" from-port="3" to-layer="394" to-port="2" />
+		<edge from-layer="394" from-port="3" to-layer="395" to-port="0" />
+		<edge from-layer="395" from-port="1" to-layer="397" to-port="0" />
+		<edge from-layer="396" from-port="0" to-layer="397" to-port="2" />
+		<edge from-layer="396" from-port="0" to-layer="401" to-port="1" />
+		<edge from-layer="397" from-port="3" to-layer="399" to-port="0" />
+		<edge from-layer="397" from-port="3" to-layer="400" to-port="0" />
+		<edge from-layer="398" from-port="0" to-layer="399" to-port="1" />
+		<edge from-layer="398" from-port="0" to-layer="400" to-port="1" />
+		<edge from-layer="399" from-port="2" to-layer="403" to-port="0" />
+		<edge from-layer="400" from-port="2" to-layer="401" to-port="0" />
+		<edge from-layer="401" from-port="2" to-layer="402" to-port="0" />
+		<edge from-layer="402" from-port="1" to-layer="403" to-port="1" />
+		<edge from-layer="403" from-port="2" to-layer="405" to-port="0" />
+		<edge from-layer="404" from-port="0" to-layer="405" to-port="1" />
+		<edge from-layer="405" from-port="2" to-layer="406" to-port="2" />
+		<edge from-layer="406" from-port="3" to-layer="407" to-port="1" />
+		<edge from-layer="406" from-port="4" to-layer="408" to-port="1" />
+		<edge from-layer="407" from-port="2" to-layer="409" to-port="0" />
+		<edge from-layer="407" from-port="2" to-layer="412" to-port="1" />
+		<edge from-layer="408" from-port="2" to-layer="409" to-port="1" />
+		<edge from-layer="408" from-port="2" to-layer="412" to-port="0" />
+		<edge from-layer="409" from-port="2" to-layer="411" to-port="0" />
+		<edge from-layer="410" from-port="0" to-layer="411" to-port="1" />
+		<edge from-layer="411" from-port="2" to-layer="413" to-port="0" />
+		<edge from-layer="412" from-port="2" to-layer="413" to-port="1" />
+		<edge from-layer="413" from-port="2" to-layer="415" to-port="0" />
+		<edge from-layer="414" from-port="0" to-layer="415" to-port="1" />
+		<edge from-layer="415" from-port="2" to-layer="417" to-port="0" />
+		<edge from-layer="416" from-port="1" to-layer="417" to-port="1" />
+		<edge from-layer="417" from-port="2" to-layer="418" to-port="0" />
+	</edges>
+	<rt_info>
+		<Runtime_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
+		<conversion_parameters>
+			<framework value="pytorch" />
+			<is_python_object value="True" />
+		</conversion_parameters>
+		<model_info>
+			<iou_threshold value="0.7" />
+			<labels value="person bicycle car motorcycle airplane bus train truck boat traffic_light fire_hydrant stop_sign parking_meter bench bird cat dog horse sheep cow elephant bear zebra giraffe backpack umbrella handbag tie suitcase frisbee skis snowboard sports_ball kite baseball_bat baseball_glove skateboard surfboard tennis_racket bottle wine_glass cup fork knife spoon bowl banana apple sandwich orange broccoli carrot hot_dog pizza donut cake chair couch potted_plant bed dining_table toilet tv laptop mouse remote keyboard cell_phone microwave oven toaster sink refrigerator book clock vase scissors teddy_bear hair_drier toothbrush basket herb hoister netBoard roller" />
+			<model_type value="YOLO" />
+			<pad_value value="114" />
+			<resize_type value="fit_to_window_letterbox" />
+			<reverse_input_channels value="YES" />
+			<scale_values value="255" />
+		</model_info>
+	</rt_info>
+</net>
diff --git a/openvino/model/safe_det/metadata.yaml b/openvino/model/safe_det/metadata.yaml
new file mode 100644
index 0000000..7e1797e
--- /dev/null
+++ b/openvino/model/safe_det/metadata.yaml
@@ -0,0 +1,98 @@
+description: Ultralytics best model trained on D:/椤圭洰璧勬枡/鏅鸿兘骞茬嚗璁惧/瀹夊叏杩為攣妯″瀷/datasets/datasets/data.yaml
+author: Ultralytics
+date: '2025-04-17T16:29:33.343378'
+version: 8.3.53
+license: AGPL-3.0 License (https://ultralytics.com/license)
+docs: https://docs.ultralytics.com
+stride: 32
+task: detect
+batch: 1
+imgsz:
+- 640
+- 640
+names:
+  0: person
+  1: bicycle
+  2: car
+  3: motorcycle
+  4: airplane
+  5: bus
+  6: train
+  7: truck
+  8: boat
+  9: traffic light
+  10: fire hydrant
+  11: stop sign
+  12: parking meter
+  13: bench
+  14: bird
+  15: cat
+  16: dog
+  17: horse
+  18: sheep
+  19: cow
+  20: elephant
+  21: bear
+  22: zebra
+  23: giraffe
+  24: backpack
+  25: umbrella
+  26: handbag
+  27: tie
+  28: suitcase
+  29: frisbee
+  30: skis
+  31: snowboard
+  32: sports ball
+  33: kite
+  34: baseball bat
+  35: baseball glove
+  36: skateboard
+  37: surfboard
+  38: tennis racket
+  39: bottle
+  40: wine glass
+  41: cup
+  42: fork
+  43: knife
+  44: spoon
+  45: bowl
+  46: banana
+  47: apple
+  48: sandwich
+  49: orange
+  50: broccoli
+  51: carrot
+  52: hot dog
+  53: pizza
+  54: donut
+  55: cake
+  56: chair
+  57: couch
+  58: potted plant
+  59: bed
+  60: dining table
+  61: toilet
+  62: tv
+  63: laptop
+  64: mouse
+  65: remote
+  66: keyboard
+  67: cell phone
+  68: microwave
+  69: oven
+  70: toaster
+  71: sink
+  72: refrigerator
+  73: book
+  74: clock
+  75: vase
+  76: scissors
+  77: teddy bear
+  78: hair drier
+  79: toothbrush
+  80: basket
+  81: herb
+  82: hoister
+  83: netBoard
+  84: roller
diff --git a/openvino/openvino_test.py b/openvino/openvino_test.py
new file mode 100644
index 0000000..ddc2bdd
--- /dev/null
+++ b/openvino/openvino_test.py
@@ -0,0 +1,89 @@
+import numpy as np
+import cv2, time
+import yaml
+from openvino.runtime import Core
+
# Read the detection metadata (class names, etc.) from a YAML config file.
def read_config(file_path='model/safe_det/metadata.yaml'):
    """Parse the YAML metadata file at *file_path* and return its contents."""
    with open(file_path, 'r', encoding="utf-8") as handle:
        return yaml.safe_load(handle)
+
+
# NOTE(review): MODEL_NAME appears unused — the code below loads
# "model/safe_det/best.xml" directly; confirm before removing.
MODEL_NAME = "model/best.xml"

# Class-id -> name mapping from metadata.yaml, plus one random color per class.
CLASSES = read_config()['names']
colors = np.random.uniform(0, 255, size=(len(CLASSES), 3))
+
+
+
def draw_bounding_box(img, class_id, confidence, x, y, x_plus_w, y_plus_h):
    """Draw one labeled detection rectangle on *img* in place."""
    caption = f'{CLASSES[class_id]} ({confidence:.2f})'
    box_color = colors[class_id]
    cv2.rectangle(img, (x, y), (x_plus_w, y_plus_h), box_color, 2)
    cv2.putText(img, caption, (x - 10, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, box_color, 2)
+
+
# Instantiate the OpenVINO Core object.
core = Core()
# Load and compile the model:
# read_model parses the IR .xml/.bin pair, compile_model targets the CPU.
model_path = "model/safe_det/best.xml"  # replace with your model path
model = core.read_model(model=model_path)
net = core.compile_model(model=model, device_name="CPU")
# Get the model output node.
output_node = net.outputs[0]  # this YOLOv8-style model has a single output
ir = net.create_infer_request()
cap = cv2.VideoCapture(0)

while True:
    start = time.time()
    ret, frame = cap.read()
    if not ret:
        break
    # Pad the frame to a square so the 640x640 resize preserves aspect ratio.
    [height, width, _] = frame.shape
    length = max((height, width))
    image = np.zeros((length, length, 3), np.uint8)
    image[0:height, 0:width] = frame
    # Factor to map 640-space coordinates back to the padded original.
    scale = length / 640

    blob = cv2.dnn.blobFromImage(image, scalefactor=1 / 255, size=(640, 640), swapRB=True)
    outputs = ir.infer(blob)[output_node]
    # Transpose (C, N) -> (N, C): one row per candidate detection.
    outputs = np.array([cv2.transpose(outputs[0])])
    rows = outputs.shape[1]

    boxes = []
    scores = []
    class_ids = []

    for i in range(rows):
        # Columns 4+ hold the per-class scores; keep the best class if confident.
        classes_scores = outputs[0][i][4:]
        (minScore, maxScore, minClassLoc, (x, maxClassIndex)) = cv2.minMaxLoc(classes_scores)
        if maxScore >= 0.25:
            # cx,cy,w,h -> x,y,w,h (top-left corner), still in 640-space.
            box = [outputs[0][i][0] - (0.5 * outputs[0][i][2]), outputs[0][i][1] - (0.5 * outputs[0][i][3]),
                   outputs[0][i][2], outputs[0][i][3]]
            boxes.append(box)
            scores.append(maxScore)
            class_ids.append(maxClassIndex)

    # Non-maximum suppression; survivors are drawn scaled back to frame size.
    result_boxes = cv2.dnn.NMSBoxes(boxes, scores, 0.25, 0.45, 0.5)

    for i in range(len(result_boxes)):
        index = result_boxes[i]
        box = boxes[index]
        draw_bounding_box(frame, class_ids[index], scores[index], round(box[0] * scale), round(box[1] * scale),
                          round((box[0] + box[2]) * scale), round((box[1] + box[3]) * scale))
    end = time.time()
    print("start", start)
    print("end", end)
    print("time",end - start)
    # show FPS
    fps = (1 / (end - start))
    fps_label = "%.2f FPS" % fps
    cv2.putText(frame, fps_label, (10, 25), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
    cv2.imshow('YOLOv8 OpenVINO Infer Demo on AIxBoard', frame)
    # wait key for ending
    if cv2.waitKey(1) > -1:
        cap.release()
        cv2.destroyAllWindows()
        break
diff --git a/openvino/readme b/openvino/readme
new file mode 100644
index 0000000..68a1c83
--- /dev/null
+++ b/openvino/readme
@@ -0,0 +1 @@
+pyinstaller .\herb_ai.py --collect-all openvino
\ No newline at end of file
diff --git a/openvino/safety_detect.py b/openvino/safety_detect.py
new file mode 100644
index 0000000..f4d6e6d
--- /dev/null
+++ b/openvino/safety_detect.py
@@ -0,0 +1,103 @@
+import cv2
+import numpy as np
+import yaml
+from openvino.runtime import Core
+
+
class SAFETY_DETECT:
    """YOLO-style safety detector running on OpenVINO (CPU).

    Loads an IR model (``best.xml``) and its Ultralytics ``metadata.yaml``
    from *path*, and exposes detection plus drawing helpers.
    """

    def __init__(self, path, conf_thres=0.35, iou_thres=0.5):
        # Confidence / IoU thresholds used for candidate filtering and NMS.
        self.conf_threshold = conf_thres
        self.iou_threshold = iou_thres

        # Initialize model
        self.initialize_model(path)
        # One random BGR color per class id for drawing (100 covers all classes).
        self.color_palette = [(np.random.randint(0, 255), np.random.randint(0, 255), np.random.randint(0, 255)) for _ in
                              range(100)]

    def __call__(self, image):
        """Allow calling the detector instance directly on a frame."""
        return self.detect_objects(image)

    def read_config(self, path):
        """Load the metadata YAML shipped next to the model under *path*."""
        file_path = path + '/metadata.yaml'
        with open(file_path, 'r', encoding="utf-8") as file:
            config = yaml.safe_load(file)
        return config

    def initialize_model(self, path):
        """Read and compile the OpenVINO IR model found under *path*."""
        model_path = path + '/best.xml'
        # Initialize OpenVINO Runtime
        self.core = Core()
        # Load the model
        self.model = self.core.read_model(model=model_path)
        # Compile the model
        self.compiled_model = self.core.compile_model(model=self.model, device_name="CPU")
        # Get input and output layers
        self.input_layer = self.compiled_model.input(0)
        self.output_layer = self.compiled_model.output(0)
        # Get class names (fix: dropped the stray `CLASSES =` alias that only
        # created a useless local binding).
        self.class_names = self.read_config(path)['names']

    def detect_objects(self, frame):
        """Run one inference on *frame* (BGR image).

        Returns ``(boxes, scores, class_ids)`` with boxes as [x, y, w, h] in
        the letterboxed 640x640 space; multiply by ``self.scale`` (set here)
        to map back to the original image.
        """
        ir = self.compiled_model.create_infer_request()
        [height, width, _] = frame.shape
        # Pad to a square so the 640x640 resize preserves aspect ratio.
        length = max((height, width))
        image = np.zeros((length, length, 3), np.uint8)
        image[0:height, 0:width] = frame
        self.scale = length / 640

        blob = cv2.dnn.blobFromImage(image, scalefactor=1 / 255, size=(640, 640), swapRB=True)
        outputs = ir.infer(blob)[self.output_layer]
        # Transpose (C, N) -> (N, C): one row per candidate detection.
        outputs = np.array([cv2.transpose(outputs[0])])
        rows = outputs.shape[1]

        boxes = []
        scores = []
        class_ids = []

        for i in range(rows):
            # Columns 4+ hold per-class scores; keep the best-scoring class.
            classes_scores = outputs[0][i][4:]
            (minScore, maxScore, minClassLoc, (x, maxClassIndex)) = cv2.minMaxLoc(classes_scores)
            # Fix: honor the configured threshold instead of the hard-coded
            # 0.25 that silently ignored the conf_thres constructor argument.
            if maxScore >= self.conf_threshold:
                # cx,cy,w,h -> x,y,w,h (top-left corner).
                box = [outputs[0][i][0] - (0.5 * outputs[0][i][2]), outputs[0][i][1] - (0.5 * outputs[0][i][3]),
                       outputs[0][i][2], outputs[0][i][3]]
                boxes.append(box)
                scores.append(maxScore)
                class_ids.append(maxClassIndex)

        return boxes, scores, class_ids

    def draw_bounding_box(self, img, class_id, confidence, x, y, x_plus_w, y_plus_h):
        """Draw one labeled detection rectangle on *img* in place."""
        label = f'{self.class_names[class_id]} ({confidence:.2f})'
        color = self.color_palette[class_id]
        cv2.rectangle(img, (x, y), (x_plus_w, y_plus_h), color, 2)
        cv2.putText(img, label, (x - 10, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2)

    def draw_detections(self, frame, class_ids, scores, boxes):
        """Apply NMS and draw the surviving detections on *frame*; returns it."""
        # Fix: use the configured thresholds instead of hard-coded 0.25/0.45
        # so constructor arguments actually take effect.
        result_boxes = cv2.dnn.NMSBoxes(boxes, scores, self.conf_threshold, self.iou_threshold, 0.5)

        for i in range(len(result_boxes)):
            index = result_boxes[i]
            box = boxes[index]
            self.draw_bounding_box(frame, class_ids[index], scores[index], round(box[0] * self.scale), round(box[1] * self.scale),
                              round((box[0] + box[2]) * self.scale), round((box[1] + box[3]) * self.scale))
        return frame
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/openvino/test.py b/openvino/test.py
new file mode 100644
index 0000000..03f57a7
--- /dev/null
+++ b/openvino/test.py
@@ -0,0 +1,129 @@
+import time
+
+import cv2
+import numpy as np
+from openvino.runtime import Core
+
+
+
def preprocess_image(image, input_size=(640, 640)):
    """Turn a BGR frame into a normalized NCHW float32 tensor for the model.

    Args:
        image: BGR image as produced by cv2.VideoCapture.read().
        input_size: (width, height) the network expects.

    Returns:
        Tuple of (1xCxHxW float32 tensor, the resized RGB image it was built from).
    """
    rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)         # BGR -> RGB
    resized = cv2.resize(rgb, input_size)                # fit network input size
    chw = np.transpose(resized / 255.0, (2, 0, 1))       # normalize, HWC -> CHW
    batched = np.expand_dims(chw, axis=0).astype(np.float32)  # add batch dim
    return batched, resized
+
def postprocess(output, conf_threshold=0.5, iou_threshold=0.4, image_shape=None):
    """Filter raw YOLO-style detections and run non-maximum suppression.

    Args:
        output: model output of shape (1, N, 5 + num_classes); each row is
            assumed to be [cx, cy, w, h, objectness, class scores...] in
            normalized coordinates — TODO confirm against the exported model.
        conf_threshold: minimum objectness score to keep a row.
        iou_threshold: IoU threshold for cv2.dnn.NMSBoxes.
        image_shape: (height, width) used to scale boxes back to pixels.
            Defaults to the module-level `original_image`'s shape for
            backward compatibility with the original script flow.

    Returns:
        List of dicts with keys "box" ([x_min, y_min, width, height] in
        pixels), "score", and "class_id".
    """
    if image_shape is None:
        # BUG FIX: the original read the global `original_image`, which only
        # exists after the __main__ loop assigns it — NameError otherwise.
        # Accept the shape explicitly; fall back to the global for the script.
        image_shape = original_image.shape[:2]
    img_h, img_w = image_shape[0], image_shape[1]

    boxes = []
    scores = []
    class_ids = []

    for detection in output[0]:
        confidence = detection[4]
        if confidence > conf_threshold:
            class_id = np.argmax(detection[5:])
            cx, cy, w, h = detection[:4]  # center point plus width/height
            x_min = int((cx - w / 2) * img_w)  # back to original image scale
            y_min = int((cy - h / 2) * img_h)
            width = int(w * img_w)
            height = int(h * img_h)

            boxes.append([x_min, y_min, width, height])
            # float() because some cv2.dnn.NMSBoxes builds reject numpy scalars.
            scores.append(float(confidence))
            class_ids.append(class_id)

    if not boxes:
        # Nothing above threshold; some OpenCV versions error on empty NMS input.
        return []

    # Non-maximum suppression.
    indices = cv2.dnn.NMSBoxes(boxes, scores, conf_threshold, iou_threshold)

    final_boxes = []
    for idx in indices:
        final_boxes.append({
            "box": boxes[idx],
            "score": scores[idx],
            "class_id": class_ids[idx]
        })

    return final_boxes
+
+
def draw_detections(image, detections):
    """Overlay each detection's rectangle and label onto *image*; return it.

    Args:
        image: BGR image to annotate in place.
        detections: dicts with "box" ([x_min, y_min, width, height]),
            "score", and "class_id", as produced by postprocess().
    """
    for det in detections:
        # Coerce everything to int so cv2 accepts the coordinates.
        x_min, y_min, width, height = (int(v) for v in det["box"])
        class_id = det["class_id"]
        score = det["score"]

        # Bounding rectangle.
        cv2.rectangle(image, (x_min, y_min), (x_min + width, y_min + height), (0, 255, 0), 2)

        # Class id and confidence just above the box.
        label = f"Class {class_id}: {score:.2f}"
        cv2.putText(image, label, (x_min, y_min - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 255, 0), 2)

    return image
+
+
+if __name__ == "__main__":
+    # 鍒濆鍖� OpenVINO Runtime
+    core = Core()
+
+    # 鍔犺浇妯″瀷
+    model_path = "model/safe_det/best.xml"  # 鏇挎崲涓轰綘鐨勬ā鍨嬭矾寰�
+    model = core.read_model(model=model_path)
+    compiled_model = core.compile_model(model=model, device_name="CPU")  # 璁惧鍙互鏄� "CPU", "GPU", "MYRIAD" 绛�
+
+    # 鑾峰彇杈撳叆鍜岃緭鍑哄眰
+    input_layer = compiled_model.input(0)
+    output_layer = compiled_model.output(0)
+
+    # 鎵撳嵃杈撳叆杈撳嚭淇℃伅
+    print(f"Input shape: {input_layer.shape}")
+    print(f"Output shape: {output_layer.shape}")
+
+    # 棰勫鐞嗗浘鍍忥紝鏀逛负浠庢憚鍍忓ご鑾峰彇
+    cap = cv2.VideoCapture(0)
+
+    # 鍒濆鍖栧彉閲�
+    start_time = time.time()  # 璁板綍寮�濮嬫椂闂�
+    frame_count = 0  # 甯ц鏁板櫒
+
+    while True:
+        ret, frame = cap.read()
+        if not ret:
+            break
+
+        input_data, original_image = preprocess_image(frame)
+
+        # 鎺ㄧ悊
+        results = compiled_model([input_data])[output_layer]
+        # 杈撳嚭缁撴灉
+        print(results)
+        detections = postprocess(results)
+        # 鎵撳嵃妫�娴嬬粨鏋�
+        for det in detections:
+            print(det)
+        draw_img = draw_detections(frame, detections)
+
+        # 璁$畻FPS
+        frame_count += 1
+        elapsed_time = time.time() - start_time
+
+        fps = frame_count / elapsed_time
+        # start_time = time.time()  # 閲嶇疆寮�濮嬫椂闂�
+        # frame_count = 0  # 閲嶇疆甯ц鏁板櫒
+
+        # 鏄剧ずFPS
+        cv2.putText(draw_img, f"FPS: {fps:.2f}", (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)
+
+        cv2.imshow("Detections", draw_img)
+        if cv2.waitKey(1) & 0xFF == ord('q'):
+            break

--
Gitblit v1.9.3