From 6628f663b636675bcaea316f2deaddf337de480e Mon Sep 17 00:00:00 2001
From: baoshiwei <baoshiwei@shlanbao.cn>
Date: 星期五, 13 三月 2026 10:23:31 +0800
Subject: [PATCH] feat(米重分析): 新增稳态识别和预测功能页面并优化现有模型

---
 app/pages/metered_weight_deep_learning.py |  832 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 files changed, 832 insertions(+), 0 deletions(-)

diff --git a/app/pages/metered_weight_deep_learning.py b/app/pages/metered_weight_deep_learning.py
new file mode 100644
index 0000000..e9eef5b
--- /dev/null
+++ b/app/pages/metered_weight_deep_learning.py
@@ -0,0 +1,832 @@
+import streamlit as st
+import plotly.express as px
+import plotly.graph_objects as go
+import pandas as pd
+import numpy as np
+import joblib
+import os
+from datetime import datetime, timedelta
+from app.services.extruder_service import ExtruderService
+from app.services.main_process_service import MainProcessService
+from sklearn.preprocessing import StandardScaler, MinMaxScaler
+from sklearn.model_selection import train_test_split
+from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error
+
# Steady-state detection for the metered-weight signal.
class SteadyStateDetector:
    """Detect steady (low-fluctuation) segments in a metered-weight time series.

    A point is considered steady when the rolling coefficient of variation
    (rolling std / rolling mean, in percent) is below the threshold and the
    weight itself is non-trivial (>= 0.1).  Consecutive steady points form a
    segment; only segments lasting at least ``duration_threshold`` seconds
    are kept and flagged in the returned frame.
    """

    def __init__(self):
        pass

    @staticmethod
    def _finalize_segment(segment, end_time, end_idx, std_threshold, duration_threshold):
        """Close an open segment and enrich it with statistics.

        Returns the segment dict when its duration reaches
        ``duration_threshold`` seconds, otherwise ``None`` (segment rejected).
        """
        segment['end_time'] = end_time
        segment['end_idx'] = end_idx
        duration = (segment['end_time'] - segment['start_time']).total_seconds()
        if duration < duration_threshold:
            return None

        weights_array = np.array(segment['weights'])
        segment['duration'] = duration
        segment['mean_weight'] = np.mean(weights_array)
        segment['std_weight'] = np.std(weights_array)
        segment['min_weight'] = np.min(weights_array)
        segment['max_weight'] = np.max(weights_array)
        segment['fluctuation_range'] = (segment['std_weight'] / segment['mean_weight']) * 100

        # Confidence falls linearly with fluctuation relative to the threshold,
        # clamped to [50, 100] so a kept segment is never reported below 50%.
        confidence = 100 - (segment['fluctuation_range'] / std_threshold) * 50
        segment['confidence'] = max(50, min(100, confidence))
        return segment

    def detect_steady_state(self, df, weight_col='绫抽噸', window_size=20, std_threshold=0.5, duration_threshold=60):
        """Flag steady points in ``df`` and extract qualifying steady segments.

        :param df: DataFrame containing a ``time`` column and the weight column.
                   Mutated in place (rolling/statistic columns are added).
        :param weight_col: name of the metered-weight column
        :param window_size: rolling-window size (rows; samples are treated as
                            roughly one per second)
        :param std_threshold: fluctuation threshold — note: despite the name it
                              is compared against the rolling coefficient of
                              variation in *percent*, not a raw std
        :param duration_threshold: minimum steady duration in seconds
        :return: ``(df, steady_segments)`` — the frame with an ``is_steady``
                 0/1 column, and a list of segment dicts with statistics
        """
        if df is None or df.empty:
            return df, []

        # Ensure the time column is datetime so duration arithmetic works.
        df['time'] = pd.to_datetime(df['time'])

        # Rolling statistics; min_periods=5 lets the flag start early, with
        # the initial NaNs treated as zero fluctuation below.
        df['rolling_std'] = df[weight_col].rolling(window=window_size, min_periods=5).std()
        df['rolling_mean'] = df[weight_col].rolling(window=window_size, min_periods=5).mean()

        # Relative fluctuation in percent of the rolling mean.
        df['fluctuation_range'] = (df['rolling_std'] / df['rolling_mean']) * 100
        df['fluctuation_range'] = df['fluctuation_range'].fillna(0)

        # Provisional per-point steady flag (re-derived from accepted segments
        # at the end, so short transients do not leak through).
        df['is_steady'] = 0
        steady_condition = (
            (df['fluctuation_range'] < std_threshold) &
            (df[weight_col] >= 0.1)
        )
        df.loc[steady_condition, 'is_steady'] = 1

        # Walk the rows and collect runs of consecutive steady points.
        # NOTE(review): the i-1 label arithmetic assumes a default RangeIndex —
        # confirm callers pass frames with a reset index.
        steady_segments = []
        current_segment = {}

        for i, row in df.iterrows():
            if row['is_steady'] == 1:
                if not current_segment:
                    current_segment = {
                        'start_time': row['time'],
                        'start_idx': i,
                        'weights': [row[weight_col]]
                    }
                else:
                    current_segment['weights'].append(row[weight_col])
            else:
                if current_segment:
                    end_time = df.loc[i - 1, 'time'] if i > 0 else df.loc[i, 'time']
                    finished = self._finalize_segment(
                        current_segment, end_time, i - 1,
                        std_threshold, duration_threshold
                    )
                    if finished is not None:
                        steady_segments.append(finished)
                    current_segment = {}

        # Close a segment still open at the end of the data.
        if current_segment:
            finished = self._finalize_segment(
                current_segment, df['time'].iloc[-1], len(df) - 1,
                std_threshold, duration_threshold
            )
            if finished is not None:
                steady_segments.append(finished)

        # Bug fix: rebuild the per-point flag from the *accepted* segments only,
        # so steady runs shorter than duration_threshold are not flagged.
        df['is_steady'] = 0
        for segment in steady_segments:
            df.loc[segment['start_idx']:segment['end_idx'], 'is_steady'] = 1

        return df, steady_segments
+
# Optional deep-learning support: probe for PyTorch and, when present,
# register the recurrent model classes. When the import fails the page
# degrades gracefully and only the flag below is consulted downstream.
use_deep_learning = False
try:
    import torch
    import torch.nn as nn
    import torch.optim as optim
    use_deep_learning = True
    # Prefer the GPU when one is visible to this process.
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    print(f"浣跨敤璁惧: {device}")

    class LSTMModel(nn.Module):
        """LSTM regressor: last hidden step -> fc1 -> ReLU -> dropout -> fc2."""

        def __init__(self, input_dim, hidden_dim=64, num_layers=2):
            super(LSTMModel, self).__init__()
            self.lstm = nn.LSTM(input_dim, hidden_dim, num_layers, batch_first=True)
            self.fc1 = nn.Linear(hidden_dim, 32)
            self.dropout = nn.Dropout(0.2)
            self.fc2 = nn.Linear(32, 1)

        def forward(self, x):
            seq_out, _ = self.lstm(x)
            last_step = seq_out[:, -1, :]
            hidden = self.dropout(torch.relu(self.fc1(last_step)))
            return self.fc2(hidden)

    class GRUModel(nn.Module):
        """GRU regressor with the same head as LSTMModel."""

        def __init__(self, input_dim, hidden_dim=64, num_layers=2):
            super(GRUModel, self).__init__()
            self.gru = nn.GRU(input_dim, hidden_dim, num_layers, batch_first=True)
            self.fc1 = nn.Linear(hidden_dim, 32)
            self.dropout = nn.Dropout(0.2)
            self.fc2 = nn.Linear(32, 1)

        def forward(self, x):
            seq_out, _ = self.gru(x)
            last_step = seq_out[:, -1, :]
            hidden = self.dropout(torch.relu(self.fc1(last_step)))
            return self.fc2(hidden)

    class BiLSTMModel(nn.Module):
        """Bidirectional LSTM regressor; fc1 takes both directions (2*hidden)."""

        def __init__(self, input_dim, hidden_dim=64, num_layers=2):
            super(BiLSTMModel, self).__init__()
            self.bilstm = nn.LSTM(input_dim, hidden_dim, num_layers, batch_first=True, bidirectional=True)
            self.fc1 = nn.Linear(hidden_dim * 2, 32)
            self.dropout = nn.Dropout(0.2)
            self.fc2 = nn.Linear(32, 1)

        def forward(self, x):
            seq_out, _ = self.bilstm(x)
            last_step = seq_out[:, -1, :]
            hidden = self.dropout(torch.relu(self.fc1(last_step)))
            return self.fc2(hidden)

    st.success(f"浣跨敤璁惧: {device}")
except ImportError:
    st.warning("鏈娴嬪埌PyTorch锛屾繁搴﹀涔犳ā鍨嬪皢涓嶅彲鐢ㄣ�傝瀹夎pytorch浠ヤ娇鐢↙STM/GRU妯″瀷銆�")
+
+def show_metered_weight_deep_learning():
+    # 鍒濆鍖栨湇鍔�
+    extruder_service = ExtruderService()
+    main_process_service = MainProcessService()
+
+    # 椤甸潰鏍囬
+    st.title("绫抽噸娣卞害瀛︿範棰勬祴")
+
+    # 鍒濆鍖栦細璇濈姸鎬�
+    if 'mdl_start_date' not in st.session_state:
+        st.session_state['mdl_start_date'] = datetime.now().date() - timedelta(days=7)
+    if 'mdl_end_date' not in st.session_state:
+        st.session_state['mdl_end_date'] = datetime.now().date()
+    if 'mdl_quick_select' not in st.session_state:
+        st.session_state['mdl_quick_select'] = "鏈�杩�7澶�"
+    if 'mdl_model_type' not in st.session_state:
+        st.session_state['mdl_model_type'] = 'LSTM'
+    if 'mdl_sequence_length' not in st.session_state:
+        st.session_state['mdl_sequence_length'] = 10
+    if 'mdl_time_offset' not in st.session_state:
+        st.session_state['mdl_time_offset'] = 0
+    if 'mdl_product_variety' not in st.session_state:
+        st.session_state['mdl_product_variety'] = 'all'
+    if 'mdl_filter_transient' not in st.session_state:
+        st.session_state['mdl_filter_transient'] = True
+    
+    # 榛樿鐗瑰緛鍒楄〃
+    default_features = ['铻烘潌杞��', '鏈哄ご鍘嬪姏', '娴佺▼涓婚��', '铻烘潌娓╁害', 
+                       '鍚庢満绛掓俯搴�', '鍓嶆満绛掓俯搴�', '鏈哄ご娓╁害']
+
+    # 瀹氫箟鍥炶皟鍑芥暟
+    def update_dates(qs):
+        st.session_state['mdl_quick_select'] = qs
+        today = datetime.now().date()
+        if qs == "浠婂ぉ":
+            st.session_state['mdl_start_date'] = today
+            st.session_state['mdl_end_date'] = today
+        elif qs == "鏈�杩�3澶�":
+            st.session_state['mdl_start_date'] = today - timedelta(days=3)
+            st.session_state['mdl_end_date'] = today
+        elif qs == "鏈�杩�7澶�":
+            st.session_state['mdl_start_date'] = today - timedelta(days=7)
+            st.session_state['mdl_end_date'] = today
+        elif qs == "鏈�杩�30澶�":
+            st.session_state['mdl_start_date'] = today - timedelta(days=30)
+            st.session_state['mdl_end_date'] = today
+
+    def on_date_change():
+        st.session_state['mdl_quick_select'] = "鑷畾涔�"
+
+    # 鏌ヨ鏉′欢鍖哄煙
+    with st.expander("馃攳 鏌ヨ閰嶇疆", expanded=True):
+        # 娣诲姞鑷畾涔� CSS 瀹炵幇鍝嶅簲寮忔崲琛�
+        st.markdown("""
+            <style>
+            /* 寮哄埗鍒楀鍣ㄦ崲琛� */
+            [data-testid="stExpander"] [data-testid="column"] {
+                flex: 1 1 120px !important;
+                min-width: 120px !important;
+            }
+            /* 閽堝鏃ユ湡杈撳叆妗嗗垪绋嶅井鍔犲涓�鐐� */
+            @media (min-width: 768px) {
+                [data-testid="stExpander"] [data-testid="column"]:nth-child(6),
+                [data-testid="stExpander"] [data-testid="column"]:nth-child(7) {
+                    flex: 2 1 180px !important;
+                    min-width: 180px !important;
+                }
+            }
+            </style>
+            """, unsafe_allow_html=True)
+
+        # 鍒涘缓甯冨眬
+        cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1])
+
+        options = ["浠婂ぉ", "鏈�杩�3澶�", "鏈�杩�7澶�", "鏈�杩�30澶�", "鑷畾涔�"]
+        for i, option in enumerate(options):
+            with cols[i]:
+                # 鏍规嵁褰撳墠閫夋嫨鐘舵�佸喅瀹氭寜閽被鍨�
+                button_type = "primary" if st.session_state['mdl_quick_select'] == option else "secondary"
+                if st.button(option, key=f"btn_mdl_{option}", width='stretch', type=button_type):
+                    update_dates(option)
+                    st.rerun()
+
+        with cols[5]:
+            start_date = st.date_input(
+                "寮�濮嬫棩鏈�",
+                label_visibility="collapsed",
+                key="mdl_start_date",
+                on_change=on_date_change
+            )
+
+        with cols[6]:
+            end_date = st.date_input(
+                "缁撴潫鏃ユ湡",
+                label_visibility="collapsed",
+                key="mdl_end_date",
+                on_change=on_date_change
+            )
+
+        with cols[7]:
+            query_button = st.button("馃殌 寮�濮嬪垎鏋�", key="mdl_query", width='stretch')
+
+        # 楂樼骇閰嶇疆
+        st.markdown("---")
+        advanced_cols = st.columns(2)
+        
+        with advanced_cols[0]:
+            st.write("馃 **妯″瀷閰嶇疆**")
+            # 妯″瀷绫诲瀷閫夋嫨
+            if use_deep_learning:
+                model_options = ['LSTM', 'GRU', 'BiLSTM']
+                model_type = st.selectbox(
+                    "妯″瀷绫诲瀷",
+                    options=model_options,
+                    key="mdl_model_type",
+                    help="閫夋嫨鐢ㄤ簬棰勬祴鐨勬繁搴﹀涔犳ā鍨嬬被鍨�"
+                )
+                
+                # 搴忓垪闀垮害
+                sequence_length = st.slider(
+                    "搴忓垪闀垮害",
+                    min_value=5,
+                    max_value=30,
+                    value=st.session_state['mdl_sequence_length'],
+                    step=1,
+                    help="鐢ㄤ簬娣卞害瀛︿範妯″瀷鐨勬椂闂村簭鍒楅暱搴�",
+                    key="mdl_sequence_length"
+                )
+            else:
+                st.warning("鏈娴嬪埌PyTorch锛屾棤娉曚娇鐢ㄦ繁搴﹀涔犳ā鍨�")
+        
+        with advanced_cols[1]:
+            st.write("鈴憋笍 **鏃堕棿寤惰繜閰嶇疆**")
+            # 鍔ㄦ�佹椂闂村亸绉伙紙鍩轰簬娴佺▼涓婚�燂級
+            time_offset = st.slider(
+                "鎸ゅ嚭鏁版嵁鍚戝悗鍋忕Щ (鍒嗛挓)",
+                min_value=0,
+                max_value=60,
+                value=st.session_state['mdl_time_offset'],
+                step=1,
+                help="鐢变簬鑳庨潰浠庢尋鍑哄埌绉伴噸闇�瑕佹椂闂达紝灏嗘尋鍑烘満鏁版嵁鍚戝悗绉诲姩锛屼娇鍏朵笌绫抽噸鏁版嵁鍦ㄦ椂闂磋酱涓婂榻愩�傚亸绉婚噺浼氬奖鍝嶉娴嬪噯纭�с��",
+                key="mdl_time_offset"
+            )
+        
+        # 绋虫�佽瘑鍒厤缃�
+        st.markdown("---")
+        steady_cols = st.columns(3)
+        with steady_cols[0]:
+            st.write("鈿栵笍 **绋虫�佽瘑鍒厤缃�**")
+            use_steady_data = st.checkbox(
+                "浠呬娇鐢ㄧǔ鎬佹暟鎹繘琛岃缁�",
+                value=True,
+                key="mdl_use_steady_data",
+                help="鍚敤鍚庯紝鍙娇鐢ㄧ背閲嶇ǔ鎬佹椂娈电殑鏁版嵁杩涜妯″瀷璁粌鍜岄娴�"
+            )
+        
+        with steady_cols[1]:
+            st.write("馃搹 **绋虫�佸弬鏁�**")
+            steady_window = st.slider(
+                "婊戝姩绐楀彛澶у皬 (绉�)",
+                min_value=5,
+                max_value=60,
+                value=20,
+                step=5,
+                key="mdl_steady_window",
+                help="鐢ㄤ簬绋虫�佽瘑鍒殑婊戝姩绐楀彛澶у皬"
+            )
+        
+        with steady_cols[2]:
+            st.write("馃搳 **绋虫�侀槇鍊�**")
+            steady_threshold = st.slider(
+                "娉㈠姩闃堝�� (%)",
+                min_value=0.1,
+                max_value=2.0,
+                value=0.5,
+                step=0.1,
+                key="mdl_steady_threshold",
+                help="绋虫�佽瘑鍒殑娉㈠姩鑼冨洿闃堝��"
+            )
+        
+        
+
+    # 杞崲涓篸atetime瀵硅薄
+    start_dt = datetime.combine(start_date, datetime.min.time())
+    end_dt = datetime.combine(end_date, datetime.max.time())
+
+    # 鏌ヨ澶勭悊
+    if query_button:
+        with st.spinner("姝e湪鑾峰彇鏁版嵁..."):
+            # 1. 鑾峰彇瀹屾暣鐨勬尋鍑烘満鏁版嵁
+            df_extruder_full = extruder_service.get_extruder_data(start_dt, end_dt)
+
+            # 2. 鑾峰彇涓绘祦绋嬫帶鍒舵暟鎹�
+            df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt)
+
+            df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt)
+
+            # 妫�鏌ユ槸鍚︽湁鏁版嵁
+            has_data = any([
+                df_extruder_full is not None and not df_extruder_full.empty,
+                df_main_speed is not None and not df_main_speed.empty,
+                df_temp is not None and not df_temp.empty
+            ])
+
+            if not has_data:
+                st.warning("鎵�閫夋椂闂存鍐呮湭鎵惧埌浠讳綍鏁版嵁锛岃灏濊瘯璋冩暣鏌ヨ鏉′欢銆�")
+                return
+
+            # 缂撳瓨鏁版嵁鍒颁細璇濈姸鎬�
+            st.session_state['cached_extruder_full'] = df_extruder_full
+            st.session_state['cached_main_speed'] = df_main_speed
+            st.session_state['cached_temp'] = df_temp
+            st.session_state['last_query_start'] = start_dt
+            st.session_state['last_query_end'] = end_dt
+
+    # 鏁版嵁澶勭悊鍜屽垎鏋�
+    if all(key in st.session_state for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp']):
+        with st.spinner("姝e湪鍒嗘瀽鏁版嵁..."):
+            # 鑾峰彇缂撳瓨鏁版嵁
+            df_extruder_full = st.session_state['cached_extruder_full']
+            df_main_speed = st.session_state['cached_main_speed']
+            df_temp = st.session_state['cached_temp']
+
+            # 妫�鏌ユ槸鍚︽湁鏁版嵁
+            has_data = any([
+                df_extruder_full is not None and not df_extruder_full.empty,
+                df_main_speed is not None and not df_main_speed.empty,
+                df_temp is not None and not df_temp.empty
+            ])
+
+            if not has_data:
+                st.warning("鎵�閫夋椂闂存鍐呮湭鎵惧埌浠讳綍鏁版嵁锛岃灏濊瘯璋冩暣鏌ヨ鏉′欢銆�")
+                return
+
+            # 鏁版嵁鏁村悎涓庨澶勭悊
+            def integrate_data(df_extruder_full, df_main_speed, df_temp, time_offset):
+                # 纭繚鎸ゅ嚭鏈烘暟鎹瓨鍦�
+                if df_extruder_full is None or df_extruder_full.empty:
+                    return None
+
+                # 搴旂敤鏃堕棿鍋忕Щ
+                offset_delta = timedelta(minutes=time_offset)
+                df_extruder_shifted = df_extruder_full.copy()
+                df_extruder_shifted['time'] = df_extruder_shifted['time'] + offset_delta
+                
+                # 鍒涘缓鍙寘鍚背閲嶅拰鏃堕棿鐨勪富鏁版嵁闆�
+                df_merged = df_extruder_shifted[['time', 'metered_weight', 'screw_speed_actual', 'head_pressure']].copy()
+
+                # 鏁村悎涓绘祦绋嬫暟鎹�
+                if df_main_speed is not None and not df_main_speed.empty:
+                    df_main_speed_shifted = df_main_speed.copy()
+                    df_main_speed_shifted['time'] = df_main_speed_shifted['time'] + offset_delta
+                    
+                    df_main_speed_shifted = df_main_speed_shifted[['time', 'process_main_speed']]
+                    df_merged = pd.merge_asof(
+                        df_merged.sort_values('time'),
+                        df_main_speed_shifted.sort_values('time'),
+                        on='time',
+                        direction='nearest',
+                        tolerance=pd.Timedelta('1min')
+                    )
+
+                # 鏁村悎娓╁害鏁版嵁
+                if df_temp is not None and not df_temp.empty:
+                    df_temp_shifted = df_temp.copy()
+                    df_temp_shifted['time'] = df_temp_shifted['time'] + offset_delta
+                    
+                    temp_cols = ['time', 'nakata_extruder_screw_display_temp',
+                               'nakata_extruder_rear_barrel_display_temp',
+                               'nakata_extruder_front_barrel_display_temp',
+                               'nakata_extruder_head_display_temp']
+                    df_temp_subset = df_temp_shifted[temp_cols].copy()
+                    df_merged = pd.merge_asof(
+                        df_merged.sort_values('time'),
+                        df_temp_subset.sort_values('time'),
+                        on='time',
+                        direction='nearest',
+                        tolerance=pd.Timedelta('1min')
+                    )
+
+                # 閲嶅懡鍚嶅垪浠ユ彁楂樺彲璇绘��
+                df_merged.rename(columns={
+                    'screw_speed_actual': '铻烘潌杞��',
+                    'head_pressure': '鏈哄ご鍘嬪姏',
+                    'process_main_speed': '娴佺▼涓婚��',
+                    'nakata_extruder_screw_display_temp': '铻烘潌娓╁害',
+                    'nakata_extruder_rear_barrel_display_temp': '鍚庢満绛掓俯搴�',
+                    'nakata_extruder_front_barrel_display_temp': '鍓嶆満绛掓俯搴�',
+                    'nakata_extruder_head_display_temp': '鏈哄ご娓╁害'
+                }, inplace=True)
+
+                # 娓呯悊鏁版嵁
+                df_merged.dropna(subset=['metered_weight'], inplace=True)
+
+                return df_merged
+
+            # 鎵ц鏁版嵁鏁村悎
+            df_analysis = integrate_data(df_extruder_full, df_main_speed, df_temp, st.session_state['mdl_time_offset'])
+
+            if df_analysis is None or df_analysis.empty:
+                st.warning("鏁版嵁鏁村悎澶辫触锛岃妫�鏌ユ暟鎹川閲忔垨璋冩暣鏃堕棿鑼冨洿銆�")
+                return
+
+            # 閲嶅懡鍚嶇背閲嶅垪
+            df_analysis.rename(columns={'metered_weight': '绫抽噸'}, inplace=True)
+            
+            # 绋虫�佽瘑鍒�
+            steady_detector = SteadyStateDetector()
+            
+            # 鑾峰彇绋虫�佽瘑鍒弬鏁�
+            use_steady_data = st.session_state.get('mdl_use_steady_data', True)
+            steady_window = st.session_state.get('mdl_steady_window', 20)
+            steady_threshold = st.session_state.get('mdl_steady_threshold', 0.5)
+            
+            # 鎵ц绋虫�佽瘑鍒�
+            df_analysis_with_steady, steady_segments = steady_detector.detect_steady_state(
+                df_analysis, 
+                weight_col='绫抽噸',
+                window_size=steady_window,
+                std_threshold=steady_threshold
+            )
+            
+            # 鏇存柊df_analysis涓哄寘鍚ǔ鎬佹爣璁扮殑鏁版嵁
+            df_analysis = df_analysis_with_steady
+            
+          
+                
+            # 楂樼骇棰勬祴鍒嗘瀽
+            st.subheader("馃搳 娣卞害瀛︿範棰勬祴鍒嗘瀽")
+
+            if use_deep_learning:
+                # 妫�鏌ユ墍鏈夐粯璁ょ壒寰佹槸鍚﹀湪鏁版嵁涓�
+                missing_features = [f for f in default_features if f not in df_analysis.columns]
+                if missing_features:
+                    st.warning(f"鏁版嵁涓己灏戜互涓嬬壒寰�: {', '.join(missing_features)}")
+                else:
+                    # 鍑嗗鏁版嵁
+                    required_cols = default_features + ['绫抽噸', 'is_steady']
+                    combined = df_analysis[required_cols].copy()
+                    
+                    # 濡傛灉鍚敤浜嗙ǔ鎬佹暟鎹紝杩囨护鎺夐潪绋虫�佹暟鎹�
+                    use_steady_data = st.session_state.get('mdl_use_steady_data', True)
+                    if use_steady_data:
+                        combined = combined[combined['is_steady'] == 1]
+                        st.info(f"宸茶繃婊ら潪绋虫�佹暟鎹紝浣跨敤 {len(combined)} 鏉$ǔ鎬佹暟鎹繘琛岃缁�")
+                    
+                    # 娓呯悊鏁版嵁涓殑NaN鍊�
+                    combined_clean = combined.dropna()
+                    
+                    # 妫�鏌ユ竻鐞嗗悗鐨勬暟鎹噺
+                    if len(combined_clean) < 30:
+                        st.warning("鏁版嵁閲忎笉瓒筹紝鏃犳硶杩涜鏈夋晥鐨勯娴嬪垎鏋�")
+                        if use_steady_data:
+                            st.info("寤鸿锛氬皾璇曡皟鏁寸ǔ鎬佽瘑鍒弬鏁版垨绂佺敤'浠呬娇鐢ㄧǔ鎬佹暟鎹�'閫夐」")
+                    else:
+                        # 鏄剧ず绋虫�佺粺璁�
+                        total_data = len(df_analysis)
+                        steady_data = len(combined_clean)
+                        steady_ratio = (steady_data / total_data * 100) if total_data > 0 else 0
+                        
+                        metrics_cols = st.columns(3)
+                        with metrics_cols[0]:
+                            st.metric("鎬绘暟鎹噺", total_data)
+                        with metrics_cols[1]:
+                            st.metric("绋虫�佹暟鎹噺", steady_data)
+                        with metrics_cols[2]:
+                            st.metric("绋虫�佹暟鎹瘮渚�", f"{steady_ratio:.1f}%")
+                        
+                        # 绋虫�佹暟鎹彲瑙嗗寲
+                        st.markdown("---")
+                        st.subheader("馃搱 绋虫�佹暟鎹垎甯�")
+                        
+                        # 鍒涘缓绋虫�佹暟鎹彲瑙嗗寲鍥捐〃
+                        fig_steady = go.Figure()
+                        
+                        # 娣诲姞鍘熷绫抽噸鏇茬嚎
+                        fig_steady.add_trace(go.Scatter(
+                            x=df_analysis['time'],
+                            y=df_analysis['绫抽噸'],
+                            name='鍘熷绫抽噸',
+                            mode='lines',
+                            line=dict(color='lightgray', width=1)
+                        ))
+                        
+                        # 娣诲姞绋虫�佹暟鎹偣
+                        steady_data_points = df_analysis[df_analysis['is_steady'] == 1]
+                        fig_steady.add_trace(go.Scatter(
+                            x=steady_data_points['time'],
+                            y=steady_data_points['绫抽噸'],
+                            name='绋虫�佺背閲�',
+                            mode='markers',
+                            marker=dict(color='green', size=3, opacity=0.6)
+                        ))
+                        
+                        # 娣诲姞闈炵ǔ鎬佹暟鎹偣
+                        non_steady_data_points = df_analysis[df_analysis['is_steady'] == 0]
+                        fig_steady.add_trace(go.Scatter(
+                            x=non_steady_data_points['time'],
+                            y=non_steady_data_points['绫抽噸'],
+                            name='闈炵ǔ鎬佺背閲�',
+                            mode='markers',
+                            marker=dict(color='red', size=3, opacity=0.6)
+                        ))
+                        
+                        # 閰嶇疆鍥捐〃甯冨眬
+                        fig_steady.update_layout(
+                            title="绫抽噸鏁版嵁绋虫�佸垎甯�",
+                            xaxis=dict(title="鏃堕棿"),
+                            yaxis=dict(title="绫抽噸 (Kg/m)"),
+                            legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
+                            height=500
+                        )
+                        
+                        # 鏄剧ず鍥捐〃
+                        st.plotly_chart(fig_steady, use_container_width=True)
+                        
+                        # 鍒嗙X鍜寉
+                        X_clean = combined_clean[default_features]
+                        y_clean = combined_clean['绫抽噸']
+                        
+                        # 涓烘椂闂村簭鍒楁ā鍨嬪噯澶囨暟鎹�
+                        def create_sequences(X, y, sequence_length):
+                            X_seq = []
+                            y_seq = []
+                            for i in range(len(X) - sequence_length):
+                                X_seq.append(X[i:i+sequence_length])
+                                y_seq.append(y[i+sequence_length])
+                            return np.array(X_seq), np.array(y_seq)
+                        
+                        # 鏁版嵁鏍囧噯鍖�
+                        scaler_X = StandardScaler()
+                        scaler_y = MinMaxScaler()
+                        
+                        X_scaled = scaler_X.fit_transform(X_clean)
+                        y_scaled = scaler_y.fit_transform(y_clean.values.reshape(-1, 1)).ravel()
+                        
+                        # 鍒涘缓搴忓垪鏁版嵁
+                        sequence_length = st.session_state['mdl_sequence_length']
+                        X_seq, y_seq = create_sequences(X_scaled, y_scaled, sequence_length)
+                        
+                        # 妫�鏌ュ簭鍒楁暟鎹噺
+                        if len(X_seq) < 20:
+                            st.warning("搴忓垪鏁版嵁閲忎笉瓒筹紝鏃犳硶杩涜鏈夋晥鐨勬繁搴﹀涔犺缁�")
+                        else:
+                            # 鍒嗗壊璁粌闆嗗拰娴嬭瘯闆�
+                            train_size = int(len(X_seq) * 0.8)
+                            X_train_seq, X_test_seq = X_seq[:train_size], X_seq[train_size:]
+                            y_train_seq, y_test_seq = y_seq[:train_size], y_seq[train_size:]
+                            
+                            # 杞崲涓篜yTorch寮犻噺
+                            X_train_tensor = torch.tensor(X_train_seq, dtype=torch.float32).to(device)
+                            y_train_tensor = torch.tensor(y_train_seq, dtype=torch.float32).unsqueeze(1).to(device)
+                            X_test_tensor = torch.tensor(X_test_seq, dtype=torch.float32).to(device)
+                            y_test_tensor = torch.tensor(y_test_seq, dtype=torch.float32).unsqueeze(1).to(device)
+                            
+                            # 鏋勫缓妯″瀷
+                            input_dim = X_scaled.shape[1]
+                            
+                            if st.session_state['mdl_model_type'] == 'LSTM':
+                                model = LSTMModel(input_dim).to(device)
+                            elif st.session_state['mdl_model_type'] == 'GRU':
+                                model = GRUModel(input_dim).to(device)
+                            elif st.session_state['mdl_model_type'] == 'BiLSTM':
+                                model = BiLSTMModel(input_dim).to(device)
+                            
+                            # 瀹氫箟鎹熷け鍑芥暟鍜屼紭鍖栧櫒
+                            criterion = nn.MSELoss()
+                            optimizer = optim.Adam(model.parameters(), lr=0.001)
+                            
+                            # 璁粌妯″瀷
+                            num_epochs = 50
+                            batch_size = 32
+                            
+                            # 鏄剧ず璁粌杩涘害
+                            progress_bar = st.progress(0)
+                            status_text = st.empty()
+                            
+                            for epoch in range(num_epochs):
+                                model.train()
+                                optimizer.zero_grad()
+                                
+                                # 鍓嶅悜浼犳挱
+                                outputs = model(X_train_tensor)
+                                loss = criterion(outputs, y_train_tensor)
+                                
+                                # 鍙嶅悜浼犳挱鍜屼紭鍖�
+                                loss.backward()
+                                optimizer.step()
+                                
+                                # Advance the Streamlit progress bar and status line once per epoch
+                                progress_bar.progress((epoch + 1) / num_epochs)
+                                status_text.text(f"璁粌涓�: 绗� {epoch + 1}/{num_epochs} 杞�, 鎹熷け: {loss.item():.6f}")
+                            
+                            # Inference on the held-out test sequences (no grad tracking)
+                            model.eval()
+                            with torch.no_grad():
+                                y_pred_scaled_tensor = model(X_test_tensor)
+                                y_pred_scaled = y_pred_scaled_tensor.cpu().numpy().ravel()
+                                
+                                # Undo target scaling so metrics and plots are in original units (Kg/m)
+                                y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel()
+                                y_test_actual = scaler_y.inverse_transform(y_test_seq.reshape(-1, 1)).ravel()
+                            
+                            # Regression metrics on the de-scaled predictions
+                            r2 = r2_score(y_test_actual, y_pred)
+                            mse = mean_squared_error(y_test_actual, y_pred)
+                            mae = mean_absolute_error(y_test_actual, y_pred)
+                            rmse = np.sqrt(mse)
+
+                            # Show the four scores side by side in two metric columns
+                            metrics_cols = st.columns(2)
+                            with metrics_cols[0]:
+                                st.metric("R虏 寰楀垎", f"{r2:.4f}")
+                                st.metric("鍧囨柟璇樊 (MSE)", f"{mse:.6f}")
+                            with metrics_cols[1]:
+                                st.metric("骞冲潎缁濆璇樊 (MAE)", f"{mae:.6f}")
+                                st.metric("鍧囨柟鏍硅宸� (RMSE)", f"{rmse:.6f}")
+                            
+                            # Caveat shown when the model was trained on steady-state samples only
+                            use_steady_data = st.session_state.get('mdl_use_steady_data', True)
+                            if use_steady_data:
+                                st.info("鈿狅笍 妯″瀷浠呬娇鐢ㄧǔ鎬佹暟鎹繘琛岃缁冿紝鍦ㄩ潪绋虫�佸伐鍐典笅棰勬祴缁撴灉鍙兘涓嶅噯纭�")
+
+                            # --- Actual vs. predicted comparison ---
+
+                            # NOTE(review): the comment above was a duplicated section header in the original; dedupe when the patch can be regenerated
+                            st.subheader("馃攧 瀹為檯鍊间笌棰勬祴鍊煎姣�")
+
+                            # Assemble an index-aligned frame of ground truth vs. predictions
+                            compare_df = pd.DataFrame({
+                                '瀹為檯鍊�': y_test_actual,
+                                '棰勬祴鍊�': y_pred
+                            })
+                            compare_df = compare_df.sort_index()
+
+                            # Overlay both series on one chart (prediction dashed in red)
+                            fig_compare = go.Figure()
+                            fig_compare.add_trace(go.Scatter(
+                                x=compare_df.index,
+                                y=compare_df['瀹為檯鍊�'],
+                                name='瀹為檯鍊�',
+                                mode='lines+markers',
+                                line=dict(color='blue', width=2)
+                            ))
+                            fig_compare.add_trace(go.Scatter(
+                                x=compare_df.index,
+                                y=compare_df['棰勬祴鍊�'],
+                                name='棰勬祴鍊�',
+                                mode='lines+markers',
+                                line=dict(color='red', width=2, dash='dash')
+                            ))
+                            fig_compare.update_layout(
+                                title=f'娴嬭瘯闆�: 瀹為檯绫抽噸 vs 棰勬祴绫抽噸 ({st.session_state["mdl_model_type"]})',
+                                xaxis=dict(title='鏍锋湰绱㈠紩'),
+                                yaxis=dict(title='绫抽噸 (Kg/m)'),
+                                legend=dict(orientation='h', yanchor='bottom', y=1.02, xanchor='right', x=1),
+                                height=400
+                            )
+                            st.plotly_chart(fig_compare, width='stretch')
+
+                            # --- Residual analysis ---
+                            st.subheader("馃搲 娈嬪樊鍒嗘瀽")
+
+                            # Residual = actual - predicted
+                            residuals = y_test_actual - y_pred
+
+                            # Residuals-vs-predicted scatter; patternless spread around 0 indicates no systematic bias
+                            fig_residual = go.Figure()
+                            fig_residual.add_trace(go.Scatter(
+                                x=y_pred,
+                                y=residuals,
+                                mode='markers',
+                                marker=dict(color='green', size=8, opacity=0.6)
+                            ))
+                            fig_residual.add_shape(
+                                type="line",
+                                x0=y_pred.min(),
+                                y0=0,
+                                x1=y_pred.max(),
+                                y1=0,
+                                line=dict(color="red", width=2, dash="dash")
+                            )
+                            fig_residual.update_layout(
+                                title='娈嬪樊鍥�',
+                                xaxis=dict(title='棰勬祴鍊�'),
+                                yaxis=dict(title='娈嬪樊'),
+                                height=400
+                            )
+                            st.plotly_chart(fig_residual, width='stretch')
+
+                            # --- Model persistence ---
+                            st.subheader("馃捑 妯″瀷淇濆瓨")
+                            
+                            # Ensure the output directory exists (idempotent)
+                            model_dir = "saved_models"
+                            os.makedirs(model_dir, exist_ok=True)
+                            
+                            # Bundle the model with its scalers and metadata needed to reproduce inference
+                            model_info = {
+                                'model': model,
+                                'features': default_features,
+                                'scaler_X': scaler_X,
+                                'scaler_y': scaler_y,
+                                'model_type': st.session_state['mdl_model_type'],
+                                'sequence_length': sequence_length,
+                                'created_at': datetime.now(),
+                                'r2_score': r2,
+                                'mse': mse,
+                                'mae': mae,
+                                'rmse': rmse,
+                                'use_steady_data': use_steady_data
+                            }
+                            
+                            # Timestamped filename, e.g. deep_lstm_20260313_102331.joblib
+                            model_filename = f"deep_{st.session_state['mdl_model_type'].lower()}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.joblib"
+                            model_path = os.path.join(model_dir, model_filename)
+                            
+                            # NOTE(review): joblib-pickling a live torch nn.Module is fragile across torch versions; consider torch.save(model.state_dict()) — confirm before changing
+                            joblib.dump(model_info, model_path)
+                            
+                            st.success(f"妯″瀷宸叉垚鍔熶繚瀛�: {model_filename}")
+                            st.info(f"淇濆瓨璺緞: {model_path}")
+            else:
+                st.warning("鏈娴嬪埌PyTorch锛屾棤娉曚娇鐢ㄦ繁搴﹀涔犻娴嬪姛鑳姐�傝纭繚宸叉纭畨瑁匬yTorch搴撱��")
+
+            # --- Data preview (first 20 rows of the merged analysis frame) ---
+            st.subheader("馃攳 鏁版嵁棰勮")
+            st.dataframe(df_analysis.head(20), width='stretch')
+            
+            # --- Data export ---
+            st.subheader("馃捑 瀵煎嚭鏁版嵁")
+            # Serialize the merged analysis frame to CSV (no index column)
+            csv = df_analysis.to_csv(index=False)
+            # Download button with a timestamped filename
+            st.download_button(
+                label="瀵煎嚭鏁村悎鍚庣殑鏁版嵁 (CSV)",
+                data=csv,
+                file_name=f"metered_weight_deep_learning_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv",
+                mime="text/csv",
+                help="鐐瑰嚮鎸夐挳瀵煎嚭鏁村悎鍚庣殑绫抽噸鍒嗘瀽鏁版嵁"
+            )
+
+    else:
+        # No analysis yet: prompt the user to pick a time range and click the start button
+        st.info("璇烽�夋嫨鏃堕棿鑼冨洿骞剁偣鍑�'寮�濮嬪垎鏋�'鎸夐挳鑾峰彇鏁版嵁銆�")

--
Gitblit v1.9.3