From 6628f663b636675bcaea316f2deaddf337de480e Mon Sep 17 00:00:00 2001
From: baoshiwei <baoshiwei@shlanbao.cn>
Date: 星期五, 13 三月 2026 10:23:31 +0800
Subject: [PATCH] feat(米重分析): 新增稳态识别和预测功能页面并优化现有模型
---
app/pages/metered_weight_prediction.py | 208 +++
app/pages/extruder_parameter_adjustment.py | 675 +++++++++
app/pages/metered_weight_advanced.py | 314 +++
app/pages/metered_weight_deep_learning.py | 832 ++++++++++++
app/pages/metered_weight_steady_state.py | 463 ++++++
app/pages/metered_weight_regression.py | 300 +++
dashboard.py | 42
app/pages/metered_weight_forecast.py | 716 ++++++++++
app/services/parameter_adjustment_service.py | 495 +++++++
9 files changed, 3,963 insertions(+), 82 deletions(-)
diff --git a/app/pages/extruder_parameter_adjustment.py b/app/pages/extruder_parameter_adjustment.py
new file mode 100644
index 0000000..113aa28
--- /dev/null
+++ b/app/pages/extruder_parameter_adjustment.py
@@ -0,0 +1,675 @@
+import streamlit as st
+import plotly.express as px
+import plotly.graph_objects as go
+import pandas as pd
+import numpy as np
+import joblib
+import os
+from datetime import datetime
+from app.services.parameter_adjustment_service import ParameterAdjustmentAdvisor
+
+# 椤甸潰鍑芥暟瀹氫箟
+def show_extruder_parameter_adjustment():
+ # 椤甸潰鏍囬
+ st.title("鎸ゅ嚭鏈哄弬鏁拌皟鑺傚缓璁�")
+
+ # 娣诲姞鎿嶄綔鎸囧紩
+ with st.expander("馃摉 鎿嶄綔鎸囧紩", expanded=True):
+ st.markdown("""
+ 娆㈣繋浣跨敤鎸ゅ嚭鏈哄弬鏁拌皟鑺傚缓璁姛鑳斤紒鏈姛鑳藉彲浠ユ牴鎹偍杈撳叆鐨勭背閲嶆暟鎹拰褰撳墠鍙傛暟锛屼负鎮ㄦ彁渚涚瀛﹀悎鐞嗙殑鍙傛暟璋冩暣寤鸿銆�
+
+ **鎿嶄綔姝ラ锛�**
+ 1. 閫夋嫨涓�涓凡璁粌濂界殑妯″瀷
+ 2. 杈撳叆绫抽噸鏍囧噯鍊笺�佷笂涓嬮檺鍜屽綋鍓嶆尋鍑烘満鍙傛暟
+ 3. 杈撳叆褰撳墠瀹為檯绫抽噸娴嬮噺鍊�
+ 4. 鐐瑰嚮"璁$畻璋冭妭寤鸿"鎸夐挳
+ 5. 鏌ョ湅绯荤粺鐢熸垚鐨勫弬鏁拌皟鏁村缓璁�
+
+ **娉ㄦ剰浜嬮」锛�**
+ - 璇风‘淇濊緭鍏ョ殑鍙傛暟鍊煎湪璁惧鍏佽鐨勮寖鍥村唴
+ - 寤鸿鏍规嵁瀹為檯鐢熶骇鎯呭喌璋冩暣妯″瀷鍙傛暟
+ - 鍘嗗彶璋冭妭璁板綍鍙湪椤甸潰搴曢儴鏌ョ湅
+ """)
+
+ # 鍒濆鍖栦細璇濈姸鎬�
+ if 'adjustment_history' not in st.session_state:
+ st.session_state['adjustment_history'] = []
+
+ # 1. 妯″瀷閫夋嫨鍖哄煙
+ with st.expander("馃攳 妯″瀷閫夋嫨", expanded=True):
+ # 鍒涘缓妯″瀷鐩綍锛堝鏋滀笉瀛樺湪锛�
+ model_dir = "saved_models"
+ os.makedirs(model_dir, exist_ok=True)
+
+ # 鑾峰彇鎵�鏈夊凡淇濆瓨鐨勬ā鍨嬫枃浠�
+ model_files = [f for f in os.listdir(model_dir) if f.endswith('.joblib')]
+ model_files.sort(reverse=True) # 鏈�鏂扮殑妯″瀷鎺掑湪鍓嶉潰
+
+ if not model_files:
+ st.warning("灏氭湭淇濆瓨浠讳綍妯″瀷锛岃鍏堣缁冩ā鍨嬪苟淇濆瓨銆�")
+ return
+
+ # 妯″瀷閫夋嫨涓嬫媺妗�
+ selected_model_file = st.selectbox(
+ "閫夋嫨宸蹭繚瀛樼殑妯″瀷",
+ options=model_files,
+ help="閫夋嫨瑕佺敤浜庨娴嬬殑妯″瀷鏂囦欢"
+ )
+
+ # 鍔犺浇骞舵樉绀烘ā鍨嬩俊鎭�
+ if selected_model_file:
+ model_path = os.path.join(model_dir, selected_model_file)
+ model_info = joblib.load(model_path)
+
+ # 鏄剧ず妯″瀷鍩烘湰淇℃伅
+ st.subheader("馃搳 妯″瀷淇℃伅")
+ info_cols = st.columns(2)
+
+ with info_cols[0]:
+ st.metric("妯″瀷绫诲瀷", model_info['model_type'])
+ st.metric("鍒涘缓鏃堕棿", model_info['created_at'].strftime('%Y-%m-%d %H:%M:%S'))
+ st.metric("浣跨敤绋虫�佹暟鎹�", "鏄�" if model_info.get('use_steady_data', False) else "鍚�")
+
+ with info_cols[1]:
+ st.metric("R虏 寰楀垎", f"{model_info['r2_score']:.4f}")
+ st.metric("鍧囨柟璇樊 (MSE)", f"{model_info['mse']:.6f}")
+ st.metric("鍧囨柟鏍硅宸� (RMSE)", f"{model_info['rmse']:.6f}")
+
+ # 鏄剧ず妯″瀷鐗瑰緛
+ st.write("馃攽 妯″瀷浣跨敤鐨勭壒寰�:")
+ st.code(", ".join(model_info['features']))
+
+ # 濡傛灉鏄繁搴﹀涔犳ā鍨嬶紝鏄剧ず搴忓垪闀垮害
+ if 'sequence_length' in model_info:
+ st.metric("搴忓垪闀垮害", model_info['sequence_length'])
+
+ # 淇濆瓨妯″瀷淇℃伅鍒颁細璇濈姸鎬�
+ st.session_state['selected_model'] = model_info
+ st.session_state['selected_model_file'] = selected_model_file
+
+ # 2. 鍙傛暟杈撳叆鍖哄煙
+ st.subheader("馃摑 鍙傛暟杈撳叆")
+
+ # 2.1 绫抽噸鏍囧噯鍊笺�佷笂涓嬮檺杈撳叆
+ with st.expander("鈿栵笍 绫抽噸鏍囧噯涓庝笂涓嬮檺", expanded=True):
+ weight_cols = st.columns(3)
+
+ with weight_cols[0]:
+ standard_weight = st.number_input(
+ "鏍囧噯绫抽噸",
+ key="standard_weight",
+ value=5.20,
+ min_value=0.01,
+ max_value=10.0,
+ step=0.0001,
+ format="%.4f",
+ help="杈撳叆鐩爣绫抽噸鏍囧噯鍊�"
+ )
+ st.caption("鍗曚綅: Kg/m")
+
+ with weight_cols[1]:
+ upper_limit = st.number_input(
+ "绫抽噸涓婇檺",
+ key="upper_limit",
+ value=5.46,
+ min_value=standard_weight,
+ max_value=10.0,
+ step=0.0001,
+ format="%.4f",
+ help="杈撳叆绫抽噸鍏佽鐨勪笂闄愬��"
+ )
+ st.caption("鍗曚綅: Kg/m")
+
+ with weight_cols[2]:
+ lower_limit = st.number_input(
+ "绫抽噸涓嬮檺",
+ key="lower_limit",
+ value=5.02,
+ min_value=0.01,
+ max_value=standard_weight,
+ step=0.0001,
+ format="%.4f",
+ help="杈撳叆绫抽噸鍏佽鐨勪笅闄愬��"
+ )
+ st.caption("鍗曚綅: Kg/m")
+
+ # 2.2 鎸ゅ嚭鏈哄綋鍓嶅弬鏁拌緭鍏�
+ with st.expander("馃敡 鎸ゅ嚭鏈哄綋鍓嶅弬鏁�", expanded=True):
+ param_cols = st.columns(3)
+
+ with param_cols[0]:
+ current_screw_speed = st.number_input(
+ "铻烘潌杞��",
+ key="current_screw_speed",
+ value=230.0,
+ min_value=0.0,
+ max_value=500.0,
+ step=0.1,
+ help="杈撳叆褰撳墠铻烘潌杞��"
+ )
+ st.caption("鍗曚綅: rpm")
+
+ current_head_pressure = st.number_input(
+ "鏈哄ご鍘嬪姏",
+ key="current_head_pressure",
+ value=0.26,
+ min_value=0.0,
+ max_value=500.0,
+ step=1.0,
+ help="杈撳叆褰撳墠鏈哄ご鍘嬪姏"
+ )
+ st.caption("鍗曚綅: bar")
+
+ current_process_speed = st.number_input(
+ "娴佺▼涓婚��",
+ key="current_process_speed",
+ value=6.6,
+ min_value=0.0,
+ max_value=300.0,
+ step=0.1,
+ help="杈撳叆褰撳墠娴佺▼涓婚��"
+ )
+ st.caption("鍗曚綅: m/min")
+
+ with param_cols[1]:
+ current_screw_temperature = st.number_input(
+ "铻烘潌娓╁害",
+ key="current_screw_temperature",
+ value=79.9,
+ min_value=0.0,
+ max_value=300.0,
+ step=1.0,
+ help="杈撳叆褰撳墠铻烘潌娓╁害"
+ )
+ st.caption("鍗曚綅: 掳C")
+
+ current_rear_barrel_temperature = st.number_input(
+ "鍚庢満绛掓俯搴�",
+ key="current_rear_barrel_temperature",
+ value=79.9,
+ min_value=0.0,
+ max_value=300.0,
+ step=1.0,
+ help="杈撳叆褰撳墠鍚庢満绛掓俯搴�"
+ )
+ st.caption("鍗曚綅: 掳C")
+
+ with param_cols[2]:
+ current_front_barrel_temperature = st.number_input(
+ "鍓嶆満绛掓俯搴�",
+ key="current_front_barrel_temperature",
+ value=80.1,
+ min_value=0.0,
+ max_value=300.0,
+ step=1.0,
+ help="杈撳叆褰撳墠鍓嶆満绛掓俯搴�"
+ )
+ st.caption("鍗曚綅: 掳C")
+
+ current_head_temperature = st.number_input(
+ "鏈哄ご娓╁害",
+ key="current_head_temperature",
+ value=95.1,
+ min_value=0.0,
+ max_value=300.0,
+ step=1.0,
+ help="杈撳叆褰撳墠鏈哄ご娓╁害"
+ )
+ st.caption("鍗曚綅: 掳C")
+
+ # 2.3 褰撳墠瀹為檯绫抽噸娴嬮噺鍊艰緭鍏�
+ with st.expander("馃搹 褰撳墠瀹為檯绫抽噸", expanded=True):
+ actual_weight = st.number_input(
+ "褰撳墠瀹為檯绫抽噸",
+ key="actual_weight",
+ value=5.115,
+ min_value=0.01,
+ max_value=10.0,
+ step=0.0001,
+ format="%.4f",
+ help="杈撳叆褰撳墠瀹為檯娴嬮噺鐨勭背閲嶅��"
+ )
+ st.caption("鍗曚綅: Kg/m")
+
+ # 3. 璁$畻璋冭妭寤鸿
+ st.subheader("馃殌 璁$畻璋冭妭寤鸿")
+
+ # 娣诲姞杩唬璋冩暣閫夐」
+ use_iterative_adjustment = st.checkbox("馃攧 浣跨敤杩唬璋冩暣", value=False,
+ help="鍚敤杩唬璋冩暣锛岃嚜鍔ㄤ紭鍖栧弬鏁扮洿鍒伴娴嬬背閲嶆弧瓒冲亸宸姹�")
+
+ # 杩唬璋冩暣鍙傛暟璁剧疆
+ max_iterations = 5
+ tolerance = 0.5
+
+ if use_iterative_adjustment:
+ st.write("### 杩唬璋冩暣鍙傛暟璁剧疆")
+ iter_cols = st.columns(2)
+ max_iterations = iter_cols[0].number_input("鏈�澶ц凯浠f鏁�", min_value=1, max_value=20, value=5, step=1)
+ tolerance = iter_cols[1].number_input("鍏佽鍋忓樊鐧惧垎姣�(%)", min_value=0.1, max_value=5.0, value=0.5, step=0.1)
+
+ if st.button("馃搳 璁$畻璋冭妭寤鸿", key="calculate_adjustment"):
+ # 鍙傛暟楠岃瘉
+ validation_errors = []
+
+ if standard_weight <= 0:
+ validation_errors.append("鏍囧噯绫抽噸蹇呴』澶т簬0")
+
+ if upper_limit <= standard_weight:
+ validation_errors.append("绫抽噸涓婇檺蹇呴』澶т簬鏍囧噯绫抽噸")
+
+ if lower_limit >= standard_weight:
+ validation_errors.append("绫抽噸涓嬮檺蹇呴』灏忎簬鏍囧噯绫抽噸")
+
+ if current_screw_speed <= 0:
+ validation_errors.append("铻烘潌杞�熷繀椤诲ぇ浜�0")
+
+ if current_process_speed <= 0:
+ validation_errors.append("娴佺▼涓婚�熷繀椤诲ぇ浜�0")
+
+ if actual_weight <= 0:
+ validation_errors.append("瀹為檯绫抽噸蹇呴』澶т簬0")
+
+ if validation_errors:
+ st.error("鍙傛暟杈撳叆閿欒锛�")
+ for error in validation_errors:
+ st.error(f"- {error}")
+ else:
+ with st.spinner("姝e湪璁$畻璋冭妭寤鸿..."):
+ # 鍒濆鍖栧弬鏁拌皟鑺傚缓璁櫒
+ adjustment_advisor = ParameterAdjustmentAdvisor()
+
+ # 鍑嗗鍒濆鍙傛暟
+ initial_params = {
+ 'real_time_weight': actual_weight,
+ 'standard_weight': standard_weight,
+ 'upper_limit': upper_limit,
+ 'lower_limit': lower_limit,
+ 'current_screw_speed': current_screw_speed,
+ 'current_process_speed': current_process_speed,
+ 'current_screw_temperature': current_screw_temperature,
+ 'current_rear_barrel_temperature': current_rear_barrel_temperature,
+ 'current_front_barrel_temperature': current_front_barrel_temperature,
+ 'current_head_temperature': current_head_temperature,
+ 'current_head_pressure': current_head_pressure
+ }
+
+ # 鏍规嵁鏄惁鍚敤杩唬璋冩暣鎵ц涓嶅悓閫昏緫
+ if use_iterative_adjustment and 'selected_model' in st.session_state:
+ # 浣跨敤杩唬璋冩暣
+ iterative_result = adjustment_advisor.iterative_adjustment(
+ initial_params=initial_params,
+ model_info=st.session_state['selected_model'],
+ max_iterations=max_iterations,
+ tolerance=tolerance
+ )
+
+ # 浣跨敤杩唬璋冩暣鐨勬渶缁堢粨鏋�
+ adjustment_result = iterative_result['final_result']
+ iteration_history = iterative_result['iteration_history']
+ converged = iterative_result['converged']
+ total_iterations = iterative_result['total_iterations']
+ else:
+ # 姝e父璁$畻璋冩暣寤鸿
+ adjustment_result = adjustment_advisor.calculate_adjustment(
+ real_time_weight=actual_weight,
+ standard_weight=standard_weight,
+ upper_limit=upper_limit,
+ lower_limit=lower_limit,
+ current_screw_speed=current_screw_speed,
+ current_process_speed=current_process_speed,
+ current_screw_temperature=current_screw_temperature,
+ current_rear_barrel_temperature=current_rear_barrel_temperature,
+ current_front_barrel_temperature=current_front_barrel_temperature,
+ current_head_temperature=current_head_temperature
+ )
+
+ # 浣跨敤閫変腑鐨勬ā鍨嬮娴嬭皟鏁村悗鐨勭背閲�
+ predicted_weight = None
+ if 'selected_model' in st.session_state:
+ selected_model_info = st.session_state['selected_model']
+ predicted_weight = adjustment_advisor.predict_weight(
+ model_info=selected_model_info,
+ screw_speed=adjustment_result['new_screw_speed'],
+ head_pressure=current_head_pressure,
+ process_speed=adjustment_result['new_process_speed'],
+ screw_temperature=current_screw_temperature,
+ rear_barrel_temperature=current_rear_barrel_temperature,
+ front_barrel_temperature=current_front_barrel_temperature,
+ head_temperature=current_head_temperature
+ )
+
+ # 灏嗛娴嬬粨鏋滄坊鍔犲埌璋冩暣缁撴灉涓�
+ adjustment_result['predicted_weight'] = predicted_weight
+
+ # 鍒濆鍖栬凯浠e巻鍙诧紙濡傛灉鏈娇鐢ㄨ凯浠h皟鏁达級
+ iteration_history = None
+ converged = None
+ total_iterations = None
+
+ # 淇濆瓨鍒板巻鍙茶褰�
+ history_record = {
+ 'timestamp': datetime.now(),
+ 'model_file': st.session_state.get('selected_model_file', '鏈煡妯″瀷'),
+ 'standard_weight': standard_weight,
+ 'upper_limit': upper_limit,
+ 'lower_limit': lower_limit,
+ 'actual_weight': actual_weight,
+ 'current_screw_speed': current_screw_speed,
+ 'current_process_speed': current_process_speed,
+ 'current_screw_temperature': current_screw_temperature,
+ 'current_rear_barrel_temperature': current_rear_barrel_temperature,
+ 'current_front_barrel_temperature': current_front_barrel_temperature,
+ 'current_head_temperature': current_head_temperature,
+ 'adjustment_result': adjustment_result,
+ 'use_iterative_adjustment': use_iterative_adjustment,
+ 'iteration_history': iteration_history
+ }
+
+ # 娣诲姞鍒颁細璇濈姸鎬佺殑鍘嗗彶璁板綍
+ if 'adjustment_history' not in st.session_state:
+ st.session_state['adjustment_history'] = []
+
+ st.session_state['adjustment_history'].append(history_record)
+
+ # 闄愬埗鍘嗗彶璁板綍鏁伴噺
+ if len(st.session_state['adjustment_history']) > 100:
+ st.session_state['adjustment_history'] = st.session_state['adjustment_history'][-100:]
+
+ # 4. 缁撴灉灞曠ず
+ st.success("璋冭妭寤鸿璁$畻瀹屾垚锛�")
+
+ st.subheader("馃搵 璋冭妭寤鸿缁撴灉")
+
+ # 4.1 绫抽噸鐘舵��
+ if adjustment_result['status'] == "姝e父":
+ st.success(f"绫抽噸鐘舵��: {adjustment_result['status']}")
+ else:
+ st.warning(f"绫抽噸鐘舵��: {adjustment_result['status']}")
+
+ # 4.2 鍋忓樊淇℃伅
+ info_cols = st.columns(3)
+ info_cols[0].metric("瀹炴椂绫抽噸", f"{adjustment_result['real_time_weight']:.4f} Kg/m")
+ info_cols[1].metric("鏍囧噯绫抽噸", f"{adjustment_result['standard_weight']:.4f} Kg/m")
+ info_cols[2].metric("鍋忓樊鐧惧垎姣�", f"{adjustment_result['deviation_percentage']:.2f}%")
+
+ # 4.2.1 妯″瀷棰勬祴缁撴灉
+ if adjustment_result['predicted_weight'] is not None:
+ st.markdown("### 馃搱 妯″瀷棰勬祴缁撴灉")
+ pred_cols = st.columns(3)
+ pred_cols[0].metric("璋冩暣鍚庨娴嬬背閲�", f"{adjustment_result['predicted_weight']:.4f} Kg/m")
+
+ # 璁$畻棰勬祴鍋忓樊
+ predicted_deviation = adjustment_result['predicted_weight'] - adjustment_result['standard_weight']
+ predicted_deviation_percent = (predicted_deviation / adjustment_result['standard_weight']) * 100
+ pred_cols[1].metric("棰勬祴鍋忓樊", f"{predicted_deviation:.4f} Kg/m")
+ pred_cols[2].metric("棰勬祴鍋忓樊鐧惧垎姣�", f"{predicted_deviation_percent:.2f}%")
+
+ # 鏄剧ず棰勬祴鏁堟灉
+ if abs(predicted_deviation_percent) < 0.5:
+ st.success("璋冩暣鍚庣背閲嶉娴嬪�兼帴杩戞爣鍑嗗�硷紝璋冩暣鏁堟灉鑹ソ锛�")
+ elif abs(predicted_deviation_percent) < 1.0:
+ st.info("璋冩暣鍚庣背閲嶉娴嬪�煎湪鍙帴鍙楄寖鍥村唴銆�")
+ else:
+ st.warning("璋冩暣鍚庣背閲嶉娴嬪�间粛鏈夎緝澶у亸宸紝寤鸿杩涗竴姝ュ井璋冦��")
+ else:
+ st.warning("妯″瀷棰勬祴澶辫触锛岃妫�鏌ユā鍨嬫枃浠舵垨鍙傛暟銆�")
+
+ # 4.3 鍏抽敭璋冩暣寤鸿
+ st.markdown("### 馃攽 鍏抽敭璋冩暣寤鸿")
+ st.info(adjustment_result['recommendation'])
+
+ # 4.4 鍙傛暟璋冩暣瀵规瘮
+ st.markdown("### 馃搳 鍙傛暟璋冩暣瀵规瘮")
+
+ param_compare_df = pd.DataFrame({
+ '鍙傛暟鍚嶇О': ['铻烘潌杞��', '娴佺▼涓婚��'],
+ '褰撳墠鍊�': [adjustment_result['current_screw_speed'], adjustment_result['current_process_speed']],
+ '寤鸿鍊�': [adjustment_result['new_screw_speed'], adjustment_result['new_process_speed']],
+ '璋冩暣骞呭害': [f"{adjustment_result['screw_speed_adjust_percent']:.2f}%",
+ f"{adjustment_result['process_speed_adjust_percent']:.2f}%"]
+ })
+
+ # 楂樹寒鏄剧ず璋冩暣骞呭害
+ def highlight_adjustment(val):
+ if isinstance(val, str) and '%' in val:
+ try:
+ percent = float(val.strip('%'))
+ if percent > 0:
+ return 'background-color: #90EE90' # 缁胯壊琛ㄧず澧炲姞
+ elif percent < 0:
+ return 'background-color: #FFB6C1' # 绾㈣壊琛ㄧず鍑忓皯
+ except:
+ pass
+ return ''
+
+ styled_df = param_compare_df.style.applymap(highlight_adjustment, subset=['璋冩暣骞呭害'])
+ st.dataframe(styled_df, use_container_width=True, hide_index=True)
+
+ # 4.5 鍙鍖栧姣�
+ fig = go.Figure()
+ fig.add_trace(go.Bar(
+ x=param_compare_df['鍙傛暟鍚嶇О'],
+ y=param_compare_df['褰撳墠鍊�'],
+ name='褰撳墠鍊�',
+ marker_color='blue'
+ ))
+ fig.add_trace(go.Bar(
+ x=param_compare_df['鍙傛暟鍚嶇О'],
+ y=param_compare_df['寤鸿鍊�'],
+ name='寤鸿鍊�',
+ marker_color='green'
+ ))
+
+ fig.update_layout(
+ barmode='group',
+ title='鍙傛暟璋冩暣瀵规瘮',
+ yaxis_title='鏁板��',
+ height=400
+ )
+
+ st.plotly_chart(fig, use_container_width=True)
+
+ # 4.6 杩唬璋冩暣缁撴灉灞曠ず
+ if use_iterative_adjustment and iteration_history:
+ st.markdown("### 馃攧 杩唬璋冩暣鍘嗗彶")
+
+ # 鏄剧ず杩唬璋冩暣鐘舵��
+ if converged:
+ st.success(f"鉁� 杩唬璋冩暣鎴愬姛鏀舵暃锛佺粡杩� {total_iterations} 娆¤凯浠o紝棰勬祴绫抽噸鍋忓樊杈惧埌 {tolerance}% 浠ュ唴銆�")
+ else:
+ st.warning(f"鈿狅笍 杩唬璋冩暣鏈敹鏁涳紒缁忚繃 {total_iterations} 娆¤凯浠o紝棰勬祴绫抽噸鍋忓樊浠嶆湭杈惧埌 {tolerance}% 浠ュ唴銆�")
+
+ # 鏄剧ず杩唬鍘嗗彶琛ㄦ牸
+ iter_history_df = pd.DataFrame(iteration_history)
+ iter_history_df = iter_history_df[[
+ 'iteration', 'current_screw_speed', 'current_process_speed',
+ 'adjusted_screw_speed', 'adjusted_process_speed',
+ 'predicted_weight', 'predicted_deviation_percent'
+ ]]
+
+ # 鏍煎紡鍖栬〃鏍�
+ iter_history_df = iter_history_df.rename(columns={
+ 'iteration': '杩唬娆℃暟',
+ 'current_screw_speed': '璋冩暣鍓嶈灪鏉嗚浆閫�',
+ 'current_process_speed': '璋冩暣鍓嶆祦绋嬩富閫�',
+ 'adjusted_screw_speed': '璋冩暣鍚庤灪鏉嗚浆閫�',
+ 'adjusted_process_speed': '璋冩暣鍚庢祦绋嬩富閫�',
+ 'predicted_weight': '棰勬祴绫抽噸',
+ 'predicted_deviation_percent': '棰勬祴鍋忓樊鐧惧垎姣�(%)'
+ })
+
+ st.dataframe(iter_history_df, use_container_width=True)
+
+ # 杩唬璋冩暣鍙鍖�
+ st.markdown("### 馃搲 杩唬璋冩暣鏁堟灉")
+
+ # 鍋忓樊鍙樺寲瓒嬪娍鍥�
+ fig_deviation = go.Figure()
+ fig_deviation.add_trace(go.Scatter(
+ x=iter_history_df['杩唬娆℃暟'],
+ y=iter_history_df['棰勬祴鍋忓樊鐧惧垎姣�(%)'],
+ mode='lines+markers',
+ name='棰勬祴鍋忓樊鐧惧垎姣�',
+ line=dict(color='blue', width=2),
+ marker=dict(size=8)
+ ))
+
+ # 娣诲姞鍋忓樊闃堝�肩嚎
+ fig_deviation.add_trace(go.Scatter(
+ x=iter_history_df['杩唬娆℃暟'],
+ y=[tolerance] * len(iter_history_df),
+ mode='lines',
+ name='鍏佽鍋忓樊涓婇檺',
+ line=dict(color='red', dash='dash', width=1)
+ ))
+
+ fig_deviation.add_trace(go.Scatter(
+ x=iter_history_df['杩唬娆℃暟'],
+ y=[-tolerance] * len(iter_history_df),
+ mode='lines',
+ name='鍏佽鍋忓樊涓嬮檺',
+ line=dict(color='red', dash='dash', width=1)
+ ))
+
+ fig_deviation.update_layout(
+ title='杩唬璋冩暣鍋忓樊鍙樺寲瓒嬪娍',
+ xaxis_title='杩唬娆℃暟',
+ yaxis_title='棰勬祴鍋忓樊鐧惧垎姣�(%)',
+ height=400,
+ legend=dict(yanchor="top", y=0.99, xanchor="left", x=0.01)
+ )
+
+ st.plotly_chart(fig_deviation, use_container_width=True)
+
+ # 铻烘潌杞�熷拰娴佺▼涓婚�熷彉鍖栬秼鍔�
+ fig_params = go.Figure()
+ fig_params.add_trace(go.Scatter(
+ x=iter_history_df['杩唬娆℃暟'],
+ y=iter_history_df['璋冩暣鍓嶈灪鏉嗚浆閫�'],
+ mode='lines+markers',
+ name='璋冩暣鍓嶈灪鏉嗚浆閫�',
+ line=dict(color='blue', width=2),
+ marker=dict(size=8)
+ ))
+
+ fig_params.add_trace(go.Scatter(
+ x=iter_history_df['杩唬娆℃暟'],
+ y=iter_history_df['璋冩暣鍚庤灪鏉嗚浆閫�'],
+ mode='lines+markers',
+ name='璋冩暣鍚庤灪鏉嗚浆閫�',
+ line=dict(color='green', width=2),
+ marker=dict(size=8)
+ ))
+
+ fig_params.add_trace(go.Scatter(
+ x=iter_history_df['杩唬娆℃暟'],
+ y=iter_history_df['璋冩暣鍓嶆祦绋嬩富閫�'],
+ mode='lines+markers',
+ name='璋冩暣鍓嶆祦绋嬩富閫�',
+ line=dict(color='orange', width=2),
+ marker=dict(size=8)
+ ))
+
+ fig_params.add_trace(go.Scatter(
+ x=iter_history_df['杩唬娆℃暟'],
+ y=iter_history_df['璋冩暣鍚庢祦绋嬩富閫�'],
+ mode='lines+markers',
+ name='璋冩暣鍚庢祦绋嬩富閫�',
+ line=dict(color='purple', width=2),
+ marker=dict(size=8)
+ ))
+
+ fig_params.update_layout(
+ title='鍙傛暟璋冩暣鍙樺寲瓒嬪娍',
+ xaxis_title='杩唬娆℃暟',
+ yaxis_title='鏁板��',
+ height=400,
+ legend=dict(yanchor="top", y=0.99, xanchor="right", x=0.99)
+ )
+
+ st.plotly_chart(fig_params, use_container_width=True)
+
+ # 5. 鍘嗗彶璁板綍灞曠ず
+ st.subheader("馃摎 鍘嗗彶璋冭妭璁板綍")
+
+ if 'adjustment_history' in st.session_state and st.session_state['adjustment_history']:
+ # 鏄剧ず鍘嗗彶璁板綍鏁伴噺
+ st.write(f"鍏� {len(st.session_state['adjustment_history'])} 鏉″巻鍙茶褰�")
+
+ # 鍒嗛〉鏄剧ず
+ page_size = 10
+ total_pages = (len(st.session_state['adjustment_history']) + page_size - 1) // page_size
+
+ page = st.selectbox(
+ "閫夋嫨椤电爜",
+ options=range(1, total_pages + 1),
+ key="history_page"
+ )
+
+ start_idx = (page - 1) * page_size
+ end_idx = start_idx + page_size
+ paginated_history = st.session_state['adjustment_history'][start_idx:end_idx]
+
+ # 鍙嶅悜鏄剧ず锛屾渶鏂拌褰曞湪鍓嶉潰
+ for record in reversed(paginated_history):
+ with st.expander(f"璁板綍鏃堕棿: {record['timestamp'].strftime('%Y-%m-%d %H:%M:%S')} | 妯″瀷: {record['model_file']}"):
+ history_cols = st.columns(3)
+
+ with history_cols[0]:
+ st.write("**绫抽噸鍙傛暟**")
+ st.write(f"- 鏍囧噯绫抽噸: {record['standard_weight']:.4f} Kg/m")
+ st.write(f"- 绫抽噸涓婇檺: {record['upper_limit']:.4f} Kg/m")
+ st.write(f"- 绫抽噸涓嬮檺: {record['lower_limit']:.4f} Kg/m")
+ st.write(f"- 瀹為檯绫抽噸: {record['actual_weight']:.4f} Kg/m")
+
+ with history_cols[1]:
+ st.write("**閫熷害鍙傛暟**")
+ st.write(f"- 铻烘潌杞��: {record['current_screw_speed']:.1f} rpm")
+ st.write(f"- 娴佺▼涓婚��: {record['current_process_speed']:.1f} m/min")
+
+ with history_cols[2]:
+ st.write("**娓╁害鍙傛暟**")
+ st.write(f"- 铻烘潌娓╁害: {record['current_screw_temperature']:.1f} 掳C")
+ st.write(f"- 鍚庢満绛掓俯搴�: {record['current_rear_barrel_temperature']:.1f} 掳C")
+ st.write(f"- 鍓嶆満绛掓俯搴�: {record['current_front_barrel_temperature']:.1f} 掳C")
+ st.write(f"- 鏈哄ご娓╁害: {record['current_head_temperature']:.1f} 掳C")
+
+ st.write("**璋冩暣寤鸿**")
+ st.write(record['adjustment_result']['recommendation'])
+ else:
+ st.info("鏆傛棤鍘嗗彶璋冭妭璁板綍")
+
+ # 6. 甯姪璇存槑
+ with st.expander("鉂� 甯姪璇存槑"):
+ st.markdown("""
+ ### 鍔熻兘璇存槑
+ 鏈姛鑳芥ā鍧楃敤浜庢牴鎹綋鍓嶇背閲嶆祴閲忓�煎拰鎸ゅ嚭鏈哄弬鏁帮紝涓虹敤鎴锋彁渚涚瀛﹀悎鐞嗙殑鍙傛暟璋冩暣寤鸿锛屼互甯姪鐢ㄦ埛灏嗙背閲嶆帶鍒跺湪鏍囧噯鑼冨洿鍐呫��
+
+ ### 妯″瀷閫夋嫨
+ - 绯荤粺浼氳嚜鍔ㄨ鍙栭」鐩洰褰曚腑璁粌濂界殑妯″瀷鏂囦欢
+ - 妯″瀷鏂囦欢闇�绗﹀悎绯荤粺瑕佹眰鐨勬牸寮忥紝鍖呭惈妯″瀷鍙傛暟鍜岃缁冧俊鎭�
+ - 寤鸿閫夋嫨R虏寰楀垎杈冮珮銆佽宸緝灏忕殑妯″瀷
+
+ ### 鍙傛暟杈撳叆
+ - 绫抽噸鏍囧噯鍊硷細鎮ㄦ湡鏈涚殑鐩爣绫抽噸鍊�
+ - 绫抽噸涓婁笅闄愶細鍏佽鐨勭背閲嶆尝鍔ㄨ寖鍥�
+ - 鎸ゅ嚭鏈哄綋鍓嶅弬鏁帮細鍖呮嫭铻烘潌杞�熴�佹祦绋嬩富閫熴�佹満澶村帇鍔涘拰鎸ゅ嚭鏈虹數娴�
+ - 褰撳墠瀹為檯绫抽噸锛氬疄闄呮祴閲忓緱鍒扮殑绫抽噸鍊�
+
+ ### 缁撴灉瑙h
+ - 绫抽噸鐘舵�侊細鏄剧ず褰撳墠绫抽噸鏄惁鍦ㄥ厑璁歌寖鍥村唴
+ - 鍋忓樊鐧惧垎姣旓細褰撳墠绫抽噸涓庢爣鍑嗙背閲嶇殑鍋忓樊鐧惧垎姣�
+ - 鍏抽敭璋冩暣寤鸿锛氱郴缁熺粰鍑虹殑涓昏璋冩暣寤鸿
+ - 鍙傛暟璋冩暣瀵规瘮锛氳缁嗗睍绀烘瘡涓弬鏁扮殑褰撳墠鍊笺�佸缓璁�煎拰璋冩暣骞呭害
+
+ ### 娉ㄦ剰浜嬮」
+ 1. 璇风‘淇濊緭鍏ョ殑鍙傛暟鍊煎噯纭弽鏄犺澶囧綋鍓嶇姸鎬�
+ 2. 璋冩暣寤鸿浠呬緵鍙傝�冿紝瀹為檯鎿嶄綔鏃惰缁撳悎鐜板満缁忛獙
+ 3. 寤鸿鍦ㄨ皟鏁村弬鏁板悗瀵嗗垏瑙傚療绫抽噸鍙樺寲
+ 4. 瀹氭湡鏇存柊妯″瀷浠ユ彁楂樺缓璁殑鍑嗙‘鎬�
+ """)
+
+# 椤甸潰鍏ュ彛
+if __name__ == "__main__":
+ show_extruder_parameter_adjustment()
diff --git a/app/pages/metered_weight_advanced.py b/app/pages/metered_weight_advanced.py
index d3a4e40..5a9dea8 100644
--- a/app/pages/metered_weight_advanced.py
+++ b/app/pages/metered_weight_advanced.py
@@ -3,6 +3,8 @@
import plotly.graph_objects as go
import pandas as pd
import numpy as np
+import joblib
+import os
from datetime import datetime, timedelta
from app.services.extruder_service import ExtruderService
from app.services.main_process_service import MainProcessService
@@ -13,8 +15,107 @@
from sklearn.svm import SVR
from sklearn.neural_network import MLPRegressor
-
-
+# 瀵煎叆绋虫�佽瘑鍒姛鑳�
+class SteadyStateDetector:
+ def __init__(self):
+ pass
+
+ def detect_steady_state(self, df, weight_col='绫抽噸', window_size=20, std_threshold=0.5, duration_threshold=60):
+ """
+ 绋虫�佽瘑鍒�昏緫锛氭爣璁扮背閲嶆暟鎹腑鐨勭ǔ鎬佹
+ :param df: 鍖呭惈绫抽噸鏁版嵁鐨勬暟鎹
+ :param weight_col: 绫抽噸鍒楀悕
+ :param window_size: 婊戝姩绐楀彛澶у皬锛堢锛�
+ :param std_threshold: 鏍囧噯宸槇鍊�
+ :param duration_threshold: 绋虫�佹寔缁椂闂撮槇鍊硷紙绉掞級
+ :return: 鍖呭惈绋虫�佹爣璁扮殑鏁版嵁妗嗗拰绋虫�佷俊鎭�
+ """
+ if df is None or df.empty:
+ return df, []
+
+ # 纭繚鏃堕棿鍒楁槸datetime绫诲瀷
+ df['time'] = pd.to_datetime(df['time'])
+
+ # 璁$畻婊氬姩缁熻閲�
+ df['rolling_std'] = df[weight_col].rolling(window=window_size, min_periods=5).std()
+ df['rolling_mean'] = df[weight_col].rolling(window=window_size, min_periods=5).mean()
+
+ # 璁$畻娉㈠姩鑼冨洿
+ df['fluctuation_range'] = (df['rolling_std'] / df['rolling_mean']) * 100
+ df['fluctuation_range'] = df['fluctuation_range'].fillna(0)
+
+ # 鏍囪绋虫�佺偣
+ df['is_steady'] = 0
+ steady_condition = (
+ (df['fluctuation_range'] < std_threshold) &
+ (df[weight_col] >= 0.1)
+ )
+ df.loc[steady_condition, 'is_steady'] = 1
+
+ # 璇嗗埆杩炵画绋虫�佹
+ steady_segments = []
+ current_segment = {}
+
+ for i, row in df.iterrows():
+ if row['is_steady'] == 1:
+ if not current_segment:
+ current_segment = {
+ 'start_time': row['time'],
+ 'start_idx': i,
+ 'weights': [row[weight_col]]
+ }
+ else:
+ current_segment['weights'].append(row[weight_col])
+ else:
+ if current_segment:
+ current_segment['end_time'] = df.loc[i-1, 'time'] if i > 0 else df.loc[i, 'time']
+ current_segment['end_idx'] = i-1
+ duration = (current_segment['end_time'] - current_segment['start_time']).total_seconds()
+
+ if duration >= duration_threshold:
+ weights_array = np.array(current_segment['weights'])
+ current_segment['duration'] = duration
+ current_segment['mean_weight'] = np.mean(weights_array)
+ current_segment['std_weight'] = np.std(weights_array)
+ current_segment['min_weight'] = np.min(weights_array)
+ current_segment['max_weight'] = np.max(weights_array)
+ current_segment['fluctuation_range'] = (current_segment['std_weight'] / current_segment['mean_weight']) * 100
+
+ # 璁$畻缃俊搴�
+ confidence = 100 - (current_segment['fluctuation_range'] / std_threshold) * 50
+ confidence = max(50, min(100, confidence))
+ current_segment['confidence'] = confidence
+
+ steady_segments.append(current_segment)
+
+ current_segment = {}
+
+ # 澶勭悊鏈�鍚庝竴涓ǔ鎬佹
+ if current_segment:
+ current_segment['end_time'] = df['time'].iloc[-1]
+ current_segment['end_idx'] = len(df) - 1
+ duration = (current_segment['end_time'] - current_segment['start_time']).total_seconds()
+
+ if duration >= duration_threshold:
+ weights_array = np.array(current_segment['weights'])
+ current_segment['duration'] = duration
+ current_segment['mean_weight'] = np.mean(weights_array)
+ current_segment['std_weight'] = np.std(weights_array)
+ current_segment['min_weight'] = np.min(weights_array)
+ current_segment['max_weight'] = np.max(weights_array)
+ current_segment['fluctuation_range'] = (current_segment['std_weight'] / current_segment['mean_weight']) * 100
+
+ confidence = 100 - (current_segment['fluctuation_range'] / std_threshold) * 50
+ confidence = max(50, min(100, confidence))
+ current_segment['confidence'] = confidence
+
+ steady_segments.append(current_segment)
+
+ # 鍦ㄦ暟鎹涓爣璁板畬鏁寸殑绋虫�佹
+ for segment in steady_segments:
+ df.loc[segment['start_idx']:segment['end_idx'], 'is_steady'] = 1
+
+ return df, steady_segments
def show_metered_weight_advanced():
# 鍒濆鍖栨湇鍔�
@@ -35,7 +136,13 @@
st.session_state['ma_model_type'] = 'RandomForest'
if 'ma_sequence_length' not in st.session_state:
st.session_state['ma_sequence_length'] = 10
-
+ if 'ma_use_steady_data' not in st.session_state:
+ st.session_state['ma_use_steady_data'] = True
+ if 'ma_steady_window' not in st.session_state:
+ st.session_state['ma_steady_window'] = 20
+ if 'ma_steady_threshold' not in st.session_state:
+ st.session_state['ma_steady_threshold'] = 0.5
+
# 榛樿鐗瑰緛鍒楄〃锛堜笉鍐嶅厑璁哥敤鎴烽�夋嫨锛�
default_features = ['铻烘潌杞��', '鏈哄ご鍘嬪姏', '娴佺▼涓婚��', '铻烘潌娓╁害',
'鍚庢満绛掓俯搴�', '鍓嶆満绛掓俯搴�', '鏈哄ご娓╁害']
@@ -126,6 +233,42 @@
options=model_options,
key="ma_model_type",
help="閫夋嫨鐢ㄤ簬棰勬祴鐨勬ā鍨嬬被鍨�"
+ )
+
+ # 绋虫�佽瘑鍒厤缃�
+ st.markdown("---")
+ steady_cols = st.columns(3)
+ with steady_cols[0]:
+ st.write("鈿栵笍 **绋虫�佽瘑鍒厤缃�**")
+ st.checkbox(
+ "浠呬娇鐢ㄧǔ鎬佹暟鎹繘琛岃缁�",
+ value=st.session_state['ma_use_steady_data'],
+ key="ma_use_steady_data",
+ help="鍚敤鍚庯紝鍙娇鐢ㄧ背閲嶇ǔ鎬佹椂娈电殑鏁版嵁杩涜妯″瀷璁粌"
+ )
+
+ with steady_cols[1]:
+ st.write("馃搹 **绋虫�佸弬鏁�**")
+ st.slider(
+ "婊戝姩绐楀彛澶у皬 (绉�)",
+ min_value=5,
+ max_value=60,
+ value=st.session_state['ma_steady_window'],
+ step=5,
+ key="ma_steady_window",
+ help="鐢ㄤ簬绋虫�佽瘑鍒殑婊戝姩绐楀彛澶у皬"
+ )
+
+ with steady_cols[2]:
+ st.write("馃搳 **绋虫�侀槇鍊�**")
+ st.slider(
+ "娉㈠姩闃堝�� (%)",
+ min_value=0.1,
+ max_value=2.0,
+ value=st.session_state['ma_steady_threshold'],
+ step=0.1,
+ key="ma_steady_threshold",
+ help="绋虫�佽瘑鍒殑娉㈠姩鑼冨洿闃堝��"
)
@@ -249,6 +392,82 @@
# 閲嶅懡鍚嶇背閲嶅垪
df_analysis.rename(columns={'metered_weight': '绫抽噸'}, inplace=True)
+
+ # 绋虫�佽瘑鍒�
+ steady_detector = SteadyStateDetector()
+
+ # 鑾峰彇绋虫�佽瘑鍒弬鏁�
+ use_steady_data = st.session_state.get('ma_use_steady_data', True)
+ steady_window = st.session_state.get('ma_steady_window', 20)
+ steady_threshold = st.session_state.get('ma_steady_threshold', 0.5)
+
+ # 鎵ц绋虫�佽瘑鍒�
+ df_analysis_with_steady, steady_segments = steady_detector.detect_steady_state(
+ df_analysis,
+ weight_col='绫抽噸',
+ window_size=steady_window,
+ std_threshold=steady_threshold
+ )
+
+ # 鏇存柊df_analysis涓哄寘鍚ǔ鎬佹爣璁扮殑鏁版嵁
+ df_analysis = df_analysis_with_steady
+
+ # 绋虫�佹暟鎹彲瑙嗗寲
+ st.subheader("馃搱 绋虫�佹暟鎹垎甯�")
+
+ # 鍒涘缓绋虫�佹暟鎹彲瑙嗗寲鍥捐〃
+ fig_steady = go.Figure()
+
+ # 娣诲姞鍘熷绫抽噸鏇茬嚎
+ fig_steady.add_trace(go.Scatter(
+ x=df_analysis['time'],
+ y=df_analysis['绫抽噸'],
+ name='鍘熷绫抽噸',
+ mode='lines',
+ line=dict(color='lightgray', width=1)
+ ))
+
+ # 娣诲姞绋虫�佹暟鎹偣
+ steady_data_points = df_analysis[df_analysis['is_steady'] == 1]
+ fig_steady.add_trace(go.Scatter(
+ x=steady_data_points['time'],
+ y=steady_data_points['绫抽噸'],
+ name='绋虫�佺背閲�',
+ mode='markers',
+ marker=dict(color='green', size=3, opacity=0.6)
+ ))
+
+ # 娣诲姞闈炵ǔ鎬佹暟鎹偣
+ non_steady_data_points = df_analysis[df_analysis['is_steady'] == 0]
+ fig_steady.add_trace(go.Scatter(
+ x=non_steady_data_points['time'],
+ y=non_steady_data_points['绫抽噸'],
+ name='闈炵ǔ鎬佺背閲�',
+ mode='markers',
+ marker=dict(color='red', size=3, opacity=0.6)
+ ))
+
+ # 閰嶇疆鍥捐〃甯冨眬
+ fig_steady.update_layout(
+ title="绫抽噸鏁版嵁绋虫�佸垎甯�",
+ xaxis=dict(title="鏃堕棿"),
+ yaxis=dict(title="绫抽噸 (Kg/m)"),
+ legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
+ height=500
+ )
+
+ # 鏄剧ず鍥捐〃
+ st.plotly_chart(fig_steady, use_container_width=True)
+
+ # 鏄剧ず绋虫�佺粺璁�
+ total_data = len(df_analysis)
+ steady_data = len(df_analysis[df_analysis['is_steady'] == 1])
+ steady_ratio = (steady_data / total_data * 100) if total_data > 0 else 0
+
+ stats_cols = st.columns(3)
+ stats_cols[0].metric("鎬绘暟鎹噺", total_data)
+ stats_cols[1].metric("绋虫�佹暟鎹噺", steady_data)
+ stats_cols[2].metric("绋虫�佹暟鎹瘮渚�", f"{steady_ratio:.1f}%")
# --- 鍘熷鏁版嵁瓒嬪娍鍥� ---
st.subheader("馃搱 鍘熷鏁版嵁瓒嬪娍鍥�")
@@ -381,8 +600,16 @@
else:
try:
# 鍑嗗鏁版嵁
+ # 鏍规嵁閰嶇疆鍐冲畾鏄惁鍙娇鐢ㄧǔ鎬佹暟鎹�
+ use_steady_data = st.session_state.get('ma_use_steady_data', True)
+ if use_steady_data:
+ df_filtered = df_analysis[df_analysis['is_steady'] == 1]
+ st.info(f"宸茶繃婊ら潪绋虫�佹暟鎹紝浣跨敤 {len(df_filtered)} 鏉$ǔ鎬佹暟鎹繘琛岃缁�")
+ else:
+ df_filtered = df_analysis.copy()
+
# 棣栧厛纭繚df_analysis涓病鏈塏aN鍊�
- df_analysis_clean = df_analysis.dropna(subset=default_features + ['绫抽噸'])
+ df_analysis_clean = df_filtered.dropna(subset=default_features + ['绫抽噸'])
# 妫�鏌ユ竻鐞嗗悗鐨勬暟鎹噺
if len(df_analysis_clean) < 30:
@@ -391,8 +618,8 @@
# 鍒涘缓涓�涓柊鐨凞ataFrame鏉ュ瓨鍌ㄦ墍鏈夌壒寰佸拰鐩爣鍙橀噺
all_features = df_analysis_clean[default_features + ['绫抽噸']].copy()
-
-
+
+
# 娓呯悊鎵�鏈塏aN鍊�
all_features_clean = all_features.dropna()
@@ -568,49 +795,38 @@
)
st.plotly_chart(fig_importance, width='stretch')
- # --- 棰勬祴鍔熻兘 ---
- st.subheader("馃敭 绫抽噸棰勬祴")
-
- # 鍒涘缓棰勬祴琛ㄥ崟
- st.write("杈撳叆鐗瑰緛鍊艰繘琛岀背閲嶉娴�:")
- predict_cols = st.columns(2)
- input_features = {}
-
- for i, feature in enumerate(default_features):
- with predict_cols[i % 2]:
- # 鑾峰彇鐗瑰緛鐨勭粺璁′俊鎭�
- min_val = df_analysis_clean[feature].min()
- max_val = df_analysis_clean[feature].max()
- mean_val = df_analysis_clean[feature].mean()
-
- input_features[feature] = st.number_input(
- f"{feature}",
- key=f"ma_pred_{feature}",
- value=float(mean_val),
- min_value=float(min_val),
- max_value=float(max_val),
- step=0.1
- )
-
- if st.button("棰勬祴绫抽噸"):
- # 鍑嗗棰勬祴鏁版嵁
- input_df = pd.DataFrame([input_features])
-
- # 鍚堝苟鐗瑰緛
- input_combined = pd.concat([input_df], axis=1)
-
- # 棰勬祴
- if model_type in ['SVR', 'MLP']:
- input_scaled = scaler_X.transform(input_combined)
- prediction_scaled = model.predict(input_scaled)
- predicted_weight = scaler_y.inverse_transform(prediction_scaled.reshape(-1, 1)).ravel()[0]
-
- else:
- predicted_weight = model.predict(input_combined)[0]
-
- # 鏄剧ず棰勬祴缁撴灉
- st.success(f"棰勬祴绫抽噸: {predicted_weight:.4f} Kg/m")
-
+ # --- 妯″瀷淇濆瓨 ---
+ st.subheader("锟� 妯″瀷淇濆瓨")
+
+ # 鍒涘缓妯″瀷鐩綍锛堝鏋滀笉瀛樺湪锛�
+ model_dir = "saved_models"
+ os.makedirs(model_dir, exist_ok=True)
+
+ # 鍑嗗妯″瀷淇℃伅
+ model_info = {
+ 'model': model,
+ 'features': feature_columns,
+ 'scaler_X': scaler_X if model_type in ['SVR', 'MLP'] else None,
+ 'scaler_y': scaler_y if model_type in ['SVR', 'MLP'] else None,
+ 'model_type': model_type,
+ 'created_at': datetime.now(),
+ 'r2_score': r2,
+ 'mse': mse,
+ 'mae': mae,
+ 'rmse': rmse,
+ 'use_steady_data': use_steady_data
+ }
+
+ # 鐢熸垚妯″瀷鏂囦欢鍚�
+ model_filename = f"advanced_{model_type.lower()}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.joblib"
+ model_path = os.path.join(model_dir, model_filename)
+
+ # 淇濆瓨妯″瀷
+ joblib.dump(model_info, model_path)
+
+ st.success(f"妯″瀷宸叉垚鍔熶繚瀛�: {model_filename}")
+ st.info(f"淇濆瓨璺緞: {model_path}")
+
# --- 鏁版嵁棰勮 ---
st.subheader("馃攳 鏁版嵁棰勮")
st.dataframe(df_analysis.head(20), width='stretch')
diff --git a/app/pages/metered_weight_deep_learning.py b/app/pages/metered_weight_deep_learning.py
new file mode 100644
index 0000000..e9eef5b
--- /dev/null
+++ b/app/pages/metered_weight_deep_learning.py
@@ -0,0 +1,832 @@
+import streamlit as st
+import plotly.express as px
+import plotly.graph_objects as go
+import pandas as pd
+import numpy as np
+import joblib
+import os
+from datetime import datetime, timedelta
+from app.services.extruder_service import ExtruderService
+from app.services.main_process_service import MainProcessService
+from sklearn.preprocessing import StandardScaler, MinMaxScaler
+from sklearn.model_selection import train_test_split
+from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error
+
+# 瀵煎叆绋虫�佽瘑鍒姛鑳�
# Steady-state detection helper (a duplicate lives in metered_weight_forecast.py).
class SteadyStateDetector:
    """Detect steady-state segments in a metered-weight time series.

    A row is considered steady when the rolling coefficient of variation
    (std / mean, expressed in percent) of the weight column stays below
    ``std_threshold`` and the weight itself is non-trivial (>= 0.1).
    Consecutive steady rows lasting at least ``duration_threshold`` seconds
    are collected into segments with summary statistics.
    """

    def __init__(self):
        pass

    def detect_steady_state(self, df, weight_col='绫抽噸', window_size=20, std_threshold=0.5, duration_threshold=60):
        """
        Mark steady-state rows and extract steady segments.

        :param df: DataFrame with a 'time' column and the weight column
        :param weight_col: name of the weight column
        :param window_size: rolling-window size in rows (assumed ~1 row/second)
        :param std_threshold: fluctuation threshold in percent of the rolling mean
        :param duration_threshold: minimum steady-segment duration in seconds
        :return: (DataFrame with an ``is_steady`` 0/1 column, list of segment dicts)
        """
        if df is None or df.empty:
            return df, []

        # Work on a positionally indexed copy: callers may pass a frame with a
        # gapped index (e.g. after dropna), which would break the label-based
        # ``i - 1`` lookups and the start/end slice below.  This also avoids
        # mutating the caller's frame in place.
        df = df.reset_index(drop=True)

        # Ensure the time column is datetime.
        df['time'] = pd.to_datetime(df['time'])

        # Rolling statistics of the weight signal.
        df['rolling_std'] = df[weight_col].rolling(window=window_size, min_periods=5).std()
        df['rolling_mean'] = df[weight_col].rolling(window=window_size, min_periods=5).mean()

        # Fluctuation range = rolling coefficient of variation, in percent.
        # A zero rolling mean yields +/-inf, which fillna would miss; treat it
        # as "no usable signal" (0) like the NaN warm-up rows.
        df['fluctuation_range'] = (df['rolling_std'] / df['rolling_mean']) * 100
        df['fluctuation_range'] = df['fluctuation_range'].replace([np.inf, -np.inf], np.nan).fillna(0)

        # Flag individual steady rows.
        df['is_steady'] = 0
        steady_condition = (
            (df['fluctuation_range'] < std_threshold) &
            (df[weight_col] >= 0.1)
        )
        df.loc[steady_condition, 'is_steady'] = 1

        # Group consecutive steady rows into segments.
        steady_segments = []
        current_segment = {}

        for i, row in df.iterrows():
            if row['is_steady'] == 1:
                if not current_segment:
                    current_segment = {
                        'start_time': row['time'],
                        'start_idx': i,
                        'weights': [row[weight_col]]
                    }
                else:
                    current_segment['weights'].append(row[weight_col])
            else:
                if current_segment:
                    current_segment['end_time'] = df.loc[i - 1, 'time'] if i > 0 else df.loc[i, 'time']
                    current_segment['end_idx'] = i - 1
                    self._finalize_segment(current_segment, std_threshold,
                                           duration_threshold, steady_segments)
                    current_segment = {}

        # Close the trailing segment when the series ends while steady.
        if current_segment:
            current_segment['end_time'] = df['time'].iloc[-1]
            current_segment['end_idx'] = len(df) - 1
            self._finalize_segment(current_segment, std_threshold,
                                   duration_threshold, steady_segments)

        # Re-mark the full extent of each accepted segment.
        for segment in steady_segments:
            df.loc[segment['start_idx']:segment['end_idx'], 'is_steady'] = 1

        return df, steady_segments

    def _finalize_segment(self, segment, std_threshold, duration_threshold, steady_segments):
        """Attach summary statistics to *segment* and append it to
        *steady_segments* when it lasts at least *duration_threshold* seconds."""
        duration = (segment['end_time'] - segment['start_time']).total_seconds()
        if duration < duration_threshold:
            return

        weights_array = np.array(segment['weights'])
        segment['duration'] = duration
        segment['mean_weight'] = np.mean(weights_array)
        segment['std_weight'] = np.std(weights_array)
        segment['min_weight'] = np.min(weights_array)
        segment['max_weight'] = np.max(weights_array)
        segment['fluctuation_range'] = (segment['std_weight'] / segment['mean_weight']) * 100

        # Confidence: 100 at zero fluctuation, linearly decreasing, clamped to [50, 100].
        confidence = 100 - (segment['fluctuation_range'] / std_threshold) * 50
        confidence = max(50, min(100, confidence))
        segment['confidence'] = confidence

        steady_segments.append(segment)
+
+# 灏濊瘯瀵煎叆娣卞害瀛︿範搴�
# Optional deep-learning support: torch may be absent in some deployments,
# in which case the page falls back to a warning instead of crashing.
use_deep_learning = False
try:
    import torch
    import torch.nn as nn
    import torch.optim as optim
    use_deep_learning = True
    # Prefer GPU when available, otherwise fall back to CPU.
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    print(f"浣跨敤璁惧: {device}")

    # PyTorch sequence models: each consumes (batch, seq, features) tensors
    # and regresses a single value from the hidden state of the last step.
    class LSTMModel(nn.Module):
        def __init__(self, input_dim, hidden_dim=64, num_layers=2):
            super(LSTMModel, self).__init__()
            self.lstm = nn.LSTM(input_dim, hidden_dim, num_layers, batch_first=True)
            self.fc1 = nn.Linear(hidden_dim, 32)
            self.dropout = nn.Dropout(0.2)
            self.fc2 = nn.Linear(32, 1)

        def forward(self, x):
            # Keep only the final time step's output for regression.
            out, _ = self.lstm(x)
            out = out[:, -1, :]
            out = torch.relu(self.fc1(out))
            out = self.dropout(out)
            out = self.fc2(out)
            return out

    class GRUModel(nn.Module):
        def __init__(self, input_dim, hidden_dim=64, num_layers=2):
            super(GRUModel, self).__init__()
            self.gru = nn.GRU(input_dim, hidden_dim, num_layers, batch_first=True)
            self.fc1 = nn.Linear(hidden_dim, 32)
            self.dropout = nn.Dropout(0.2)
            self.fc2 = nn.Linear(32, 1)

        def forward(self, x):
            # Same head as LSTMModel, fed from the GRU's last step.
            out, _ = self.gru(x)
            out = out[:, -1, :]
            out = torch.relu(self.fc1(out))
            out = self.dropout(out)
            out = self.fc2(out)
            return out

    class BiLSTMModel(nn.Module):
        def __init__(self, input_dim, hidden_dim=64, num_layers=2):
            super(BiLSTMModel, self).__init__()
            self.bilstm = nn.LSTM(input_dim, hidden_dim, num_layers, batch_first=True, bidirectional=True)
            # Bidirectional output is 2 * hidden_dim wide.
            self.fc1 = nn.Linear(hidden_dim * 2, 32)
            self.dropout = nn.Dropout(0.2)
            self.fc2 = nn.Linear(32, 1)

        def forward(self, x):
            out, _ = self.bilstm(x)
            out = out[:, -1, :]
            out = torch.relu(self.fc1(out))
            out = self.dropout(out)
            out = self.fc2(out)
            return out

    # NOTE(review): these Streamlit calls run at module import time, outside
    # any page context — consider moving the notification into the page body.
    st.success(f"浣跨敤璁惧: {device}")
except ImportError:
    st.warning("鏈娴嬪埌PyTorch锛屾繁搴﹀涔犳ā鍨嬪皢涓嶅彲鐢ㄣ�傝瀹夎pytorch浠ヤ娇鐢↙STM/GRU妯″瀷銆�")
+
def show_metered_weight_deep_learning():
    """Streamlit page: train a deep-learning model (LSTM/GRU/BiLSTM) on
    integrated extruder + main-process data to predict metered weight,
    with optional steady-state filtering, evaluation charts, model
    persistence and CSV export."""
    # Initialize the data-access services.
    extruder_service = ExtruderService()
    main_process_service = MainProcessService()

    # Page title.
    st.title("绫抽噸娣卞害瀛︿範棰勬祴")

    # Seed session state with defaults on first visit.
    if 'mdl_start_date' not in st.session_state:
        st.session_state['mdl_start_date'] = datetime.now().date() - timedelta(days=7)
    if 'mdl_end_date' not in st.session_state:
        st.session_state['mdl_end_date'] = datetime.now().date()
    if 'mdl_quick_select' not in st.session_state:
        st.session_state['mdl_quick_select'] = "鏈�杩�7澶�"
    if 'mdl_model_type' not in st.session_state:
        st.session_state['mdl_model_type'] = 'LSTM'
    if 'mdl_sequence_length' not in st.session_state:
        st.session_state['mdl_sequence_length'] = 10
    if 'mdl_time_offset' not in st.session_state:
        st.session_state['mdl_time_offset'] = 0
    if 'mdl_product_variety' not in st.session_state:
        st.session_state['mdl_product_variety'] = 'all'
    if 'mdl_filter_transient' not in st.session_state:
        st.session_state['mdl_filter_transient'] = True

    # Default feature columns (display names produced by integrate_data below).
    default_features = ['铻烘潌杞��', '鏈哄ご鍘嬪姏', '娴佺▼涓婚��', '铻烘潌娓╁害',
                        '鍚庢満绛掓俯搴�', '鍓嶆満绛掓俯搴�', '鏈哄ご娓╁害']

    # Callback: a quick-select button updates the stored date range.
    def update_dates(qs):
        st.session_state['mdl_quick_select'] = qs
        today = datetime.now().date()
        if qs == "浠婂ぉ":
            st.session_state['mdl_start_date'] = today
            st.session_state['mdl_end_date'] = today
        elif qs == "鏈�杩�3澶�":
            st.session_state['mdl_start_date'] = today - timedelta(days=3)
            st.session_state['mdl_end_date'] = today
        elif qs == "鏈�杩�7澶�":
            st.session_state['mdl_start_date'] = today - timedelta(days=7)
            st.session_state['mdl_end_date'] = today
        elif qs == "鏈�杩�30澶�":
            st.session_state['mdl_start_date'] = today - timedelta(days=30)
            st.session_state['mdl_end_date'] = today

    # Callback: manually editing a date switches quick-select to "custom".
    def on_date_change():
        st.session_state['mdl_quick_select'] = "鑷畾涔�"

    # Query configuration area.
    with st.expander("馃攳 鏌ヨ閰嶇疆", expanded=True):
        # Custom CSS so the button/date columns wrap responsively.
        st.markdown("""
        <style>
        /* 寮哄埗鍒楀鍣ㄦ崲琛� */
        [data-testid="stExpander"] [data-testid="column"] {
            flex: 1 1 120px !important;
            min-width: 120px !important;
        }
        /* 閽堝鏃ユ湡杈撳叆妗嗗垪绋嶅井鍔犲涓�鐐� */
        @media (min-width: 768px) {
            [data-testid="stExpander"] [data-testid="column"]:nth-child(6),
            [data-testid="stExpander"] [data-testid="column"]:nth-child(7) {
                flex: 2 1 180px !important;
                min-width: 180px !important;
            }
        }
        </style>
        """, unsafe_allow_html=True)

        # Layout: five quick-select buttons, two date inputs, one query button.
        cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1])

        options = ["浠婂ぉ", "鏈�杩�3澶�", "鏈�杩�7澶�", "鏈�杩�30澶�", "鑷畾涔�"]
        for i, option in enumerate(options):
            with cols[i]:
                # Highlight the currently active quick-select button.
                button_type = "primary" if st.session_state['mdl_quick_select'] == option else "secondary"
                if st.button(option, key=f"btn_mdl_{option}", width='stretch', type=button_type):
                    update_dates(option)
                    st.rerun()

        with cols[5]:
            start_date = st.date_input(
                "寮�濮嬫棩鏈�",
                label_visibility="collapsed",
                key="mdl_start_date",
                on_change=on_date_change
            )

        with cols[6]:
            end_date = st.date_input(
                "缁撴潫鏃ユ湡",
                label_visibility="collapsed",
                key="mdl_end_date",
                on_change=on_date_change
            )

        with cols[7]:
            query_button = st.button("馃殌 寮�濮嬪垎鏋�", key="mdl_query", width='stretch')

        # Advanced configuration.
        st.markdown("---")
        advanced_cols = st.columns(2)

        with advanced_cols[0]:
            st.write("馃 **妯″瀷閰嶇疆**")
            # Model-type selection (only when PyTorch is installed).
            if use_deep_learning:
                model_options = ['LSTM', 'GRU', 'BiLSTM']
                model_type = st.selectbox(
                    "妯″瀷绫诲瀷",
                    options=model_options,
                    key="mdl_model_type",
                    help="閫夋嫨鐢ㄤ簬棰勬祴鐨勬繁搴﹀涔犳ā鍨嬬被鍨�"
                )

                # Length of the sliding window fed to the sequence model.
                # NOTE(review): passing value= from session_state together with
                # the same key= can trigger Streamlit's duplicate-default
                # warning on newer versions — confirm against the pinned version.
                sequence_length = st.slider(
                    "搴忓垪闀垮害",
                    min_value=5,
                    max_value=30,
                    value=st.session_state['mdl_sequence_length'],
                    step=1,
                    help="鐢ㄤ簬娣卞害瀛︿範妯″瀷鐨勬椂闂村簭鍒楅暱搴�",
                    key="mdl_sequence_length"
                )
            else:
                st.warning("鏈娴嬪埌PyTorch锛屾棤娉曚娇鐢ㄦ繁搴﹀涔犳ā鍨�")

        with advanced_cols[1]:
            st.write("鈴憋笍 **鏃堕棿寤惰繜閰嶇疆**")
            # Forward time shift (minutes) applied to extruder data so it
            # lines up with the downstream weighing measurements.
            time_offset = st.slider(
                "鎸ゅ嚭鏁版嵁鍚戝悗鍋忕Щ (鍒嗛挓)",
                min_value=0,
                max_value=60,
                value=st.session_state['mdl_time_offset'],
                step=1,
                help="鐢变簬鑳庨潰浠庢尋鍑哄埌绉伴噸闇�瑕佹椂闂达紝灏嗘尋鍑烘満鏁版嵁鍚戝悗绉诲姩锛屼娇鍏朵笌绫抽噸鏁版嵁鍦ㄦ椂闂磋酱涓婂榻愩�傚亸绉婚噺浼氬奖鍝嶉娴嬪噯纭�с��",
                key="mdl_time_offset"
            )

        # Steady-state detection configuration.
        st.markdown("---")
        steady_cols = st.columns(3)
        with steady_cols[0]:
            st.write("鈿栵笍 **绋虫�佽瘑鍒厤缃�**")
            use_steady_data = st.checkbox(
                "浠呬娇鐢ㄧǔ鎬佹暟鎹繘琛岃缁�",
                value=True,
                key="mdl_use_steady_data",
                help="鍚敤鍚庯紝鍙娇鐢ㄧ背閲嶇ǔ鎬佹椂娈电殑鏁版嵁杩涜妯″瀷璁粌鍜岄娴�"
            )

        with steady_cols[1]:
            st.write("馃搹 **绋虫�佸弬鏁�**")
            steady_window = st.slider(
                "婊戝姩绐楀彛澶у皬 (绉�)",
                min_value=5,
                max_value=60,
                value=20,
                step=5,
                key="mdl_steady_window",
                help="鐢ㄤ簬绋虫�佽瘑鍒殑婊戝姩绐楀彛澶у皬"
            )

        with steady_cols[2]:
            st.write("馃搳 **绋虫�侀槇鍊�**")
            steady_threshold = st.slider(
                "娉㈠姩闃堝�� (%)",
                min_value=0.1,
                max_value=2.0,
                value=0.5,
                step=0.1,
                key="mdl_steady_threshold",
                help="绋虫�佽瘑鍒殑娉㈠姩鑼冨洿闃堝��"
            )



    # Convert the selected dates to full-day datetime bounds.
    start_dt = datetime.combine(start_date, datetime.min.time())
    end_dt = datetime.combine(end_date, datetime.max.time())

    # Fetch and cache raw data when the query button is pressed.
    if query_button:
        with st.spinner("姝e湪鑾峰彇鏁版嵁..."):
            # 1. Full extruder dataset.
            df_extruder_full = extruder_service.get_extruder_data(start_dt, end_dt)

            # 2. Main-process control data.
            df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt)

            df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt)

            # Bail out early when every source came back empty.
            has_data = any([
                df_extruder_full is not None and not df_extruder_full.empty,
                df_main_speed is not None and not df_main_speed.empty,
                df_temp is not None and not df_temp.empty
            ])

            if not has_data:
                st.warning("鎵�閫夋椂闂存鍐呮湭鎵惧埌浠讳綍鏁版嵁锛岃灏濊瘯璋冩暣鏌ヨ鏉′欢銆�")
                return

            # Cache the raw frames so analysis survives Streamlit reruns.
            st.session_state['cached_extruder_full'] = df_extruder_full
            st.session_state['cached_main_speed'] = df_main_speed
            st.session_state['cached_temp'] = df_temp
            st.session_state['last_query_start'] = start_dt
            st.session_state['last_query_end'] = end_dt

    # Analysis runs off the cached frames (also on reruns without a new query).
    if all(key in st.session_state for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp']):
        with st.spinner("姝e湪鍒嗘瀽鏁版嵁..."):
            # Pull cached data.
            df_extruder_full = st.session_state['cached_extruder_full']
            df_main_speed = st.session_state['cached_main_speed']
            df_temp = st.session_state['cached_temp']

            # Re-check that at least one source has rows.
            has_data = any([
                df_extruder_full is not None and not df_extruder_full.empty,
                df_main_speed is not None and not df_main_speed.empty,
                df_temp is not None and not df_temp.empty
            ])

            if not has_data:
                st.warning("鎵�閫夋椂闂存鍐呮湭鎵惧埌浠讳綍鏁版嵁锛岃灏濊瘯璋冩暣鏌ヨ鏉′欢銆�")
                return

            # Merge the three sources onto the extruder timeline.
            def integrate_data(df_extruder_full, df_main_speed, df_temp, time_offset):
                # Extruder data is mandatory; everything else is optional.
                if df_extruder_full is None or df_extruder_full.empty:
                    return None

                # Shift extruder timestamps forward to compensate transport delay.
                offset_delta = timedelta(minutes=time_offset)
                df_extruder_shifted = df_extruder_full.copy()
                df_extruder_shifted['time'] = df_extruder_shifted['time'] + offset_delta

                # Base frame: time, metered weight and core extruder channels.
                df_merged = df_extruder_shifted[['time', 'metered_weight', 'screw_speed_actual', 'head_pressure']].copy()

                # Merge main-process speed on nearest timestamp (1 min tolerance).
                if df_main_speed is not None and not df_main_speed.empty:
                    df_main_speed_shifted = df_main_speed.copy()
                    df_main_speed_shifted['time'] = df_main_speed_shifted['time'] + offset_delta

                    df_main_speed_shifted = df_main_speed_shifted[['time', 'process_main_speed']]
                    df_merged = pd.merge_asof(
                        df_merged.sort_values('time'),
                        df_main_speed_shifted.sort_values('time'),
                        on='time',
                        direction='nearest',
                        tolerance=pd.Timedelta('1min')
                    )

                # Merge temperature channels the same way.
                if df_temp is not None and not df_temp.empty:
                    df_temp_shifted = df_temp.copy()
                    df_temp_shifted['time'] = df_temp_shifted['time'] + offset_delta

                    temp_cols = ['time', 'nakata_extruder_screw_display_temp',
                                 'nakata_extruder_rear_barrel_display_temp',
                                 'nakata_extruder_front_barrel_display_temp',
                                 'nakata_extruder_head_display_temp']
                    df_temp_subset = df_temp_shifted[temp_cols].copy()
                    df_merged = pd.merge_asof(
                        df_merged.sort_values('time'),
                        df_temp_subset.sort_values('time'),
                        on='time',
                        direction='nearest',
                        tolerance=pd.Timedelta('1min')
                    )

                # Rename columns to the display names used as model features.
                df_merged.rename(columns={
                    'screw_speed_actual': '铻烘潌杞��',
                    'head_pressure': '鏈哄ご鍘嬪姏',
                    'process_main_speed': '娴佺▼涓婚��',
                    'nakata_extruder_screw_display_temp': '铻烘潌娓╁害',
                    'nakata_extruder_rear_barrel_display_temp': '鍚庢満绛掓俯搴�',
                    'nakata_extruder_front_barrel_display_temp': '鍓嶆満绛掓俯搴�',
                    'nakata_extruder_head_display_temp': '鏈哄ご娓╁害'
                }, inplace=True)

                # Drop rows without a weight measurement.
                # NOTE(review): this leaves gaps in the index; downstream code
                # that assumes a contiguous RangeIndex must handle that.
                df_merged.dropna(subset=['metered_weight'], inplace=True)

                return df_merged

            # Run the integration with the configured time offset.
            df_analysis = integrate_data(df_extruder_full, df_main_speed, df_temp, st.session_state['mdl_time_offset'])

            if df_analysis is None or df_analysis.empty:
                st.warning("鏁版嵁鏁村悎澶辫触锛岃妫�鏌ユ暟鎹川閲忔垨璋冩暣鏃堕棿鑼冨洿銆�")
                return

            # Rename the weight column to its display name.
            df_analysis.rename(columns={'metered_weight': '绫抽噸'}, inplace=True)

            # Steady-state detection on the weight series.
            steady_detector = SteadyStateDetector()

            # Read the steady-state parameters back from session state.
            use_steady_data = st.session_state.get('mdl_use_steady_data', True)
            steady_window = st.session_state.get('mdl_steady_window', 20)
            steady_threshold = st.session_state.get('mdl_steady_threshold', 0.5)

            # Run detection (annotates an is_steady column and returns segments).
            df_analysis_with_steady, steady_segments = steady_detector.detect_steady_state(
                df_analysis,
                weight_col='绫抽噸',
                window_size=steady_window,
                std_threshold=steady_threshold
            )

            # Continue with the steady-annotated frame.
            df_analysis = df_analysis_with_steady



            # Deep-learning prediction analysis.
            st.subheader("馃搳 娣卞害瀛︿範棰勬祴鍒嗘瀽")

            if use_deep_learning:
                # Verify every default feature column is present.
                missing_features = [f for f in default_features if f not in df_analysis.columns]
                if missing_features:
                    st.warning(f"鏁版嵁涓己灏戜互涓嬬壒寰�: {', '.join(missing_features)}")
                else:
                    # Assemble the training frame (features + target + flag).
                    required_cols = default_features + ['绫抽噸', 'is_steady']
                    combined = df_analysis[required_cols].copy()

                    # Optionally keep only steady-state rows.
                    use_steady_data = st.session_state.get('mdl_use_steady_data', True)
                    if use_steady_data:
                        combined = combined[combined['is_steady'] == 1]
                        st.info(f"宸茶繃婊ら潪绋虫�佹暟鎹紝浣跨敤 {len(combined)} 鏉$ǔ鎬佹暟鎹繘琛岃缁�")

                    # Drop rows with NaNs in any feature.
                    combined_clean = combined.dropna()

                    # Need a minimum amount of data to train meaningfully.
                    if len(combined_clean) < 30:
                        st.warning("鏁版嵁閲忎笉瓒筹紝鏃犳硶杩涜鏈夋晥鐨勯娴嬪垎鏋�")
                        if use_steady_data:
                            st.info("寤鸿锛氬皾璇曡皟鏁寸ǔ鎬佽瘑鍒弬鏁版垨绂佺敤'浠呬娇鐢ㄧǔ鎬佹暟鎹�'閫夐」")
                    else:
                        # Steady-state coverage statistics.
                        total_data = len(df_analysis)
                        steady_data = len(combined_clean)
                        steady_ratio = (steady_data / total_data * 100) if total_data > 0 else 0

                        metrics_cols = st.columns(3)
                        with metrics_cols[0]:
                            st.metric("鎬绘暟鎹噺", total_data)
                        with metrics_cols[1]:
                            st.metric("绋虫�佹暟鎹噺", steady_data)
                        with metrics_cols[2]:
                            st.metric("绋虫�佹暟鎹瘮渚�", f"{steady_ratio:.1f}%")

                        # Steady-state distribution plot.
                        st.markdown("---")
                        st.subheader("馃搱 绋虫�佹暟鎹垎甯�")

                        fig_steady = go.Figure()

                        # Raw weight series as a light backdrop.
                        fig_steady.add_trace(go.Scatter(
                            x=df_analysis['time'],
                            y=df_analysis['绫抽噸'],
                            name='鍘熷绫抽噸',
                            mode='lines',
                            line=dict(color='lightgray', width=1)
                        ))

                        # Steady rows in green.
                        steady_data_points = df_analysis[df_analysis['is_steady'] == 1]
                        fig_steady.add_trace(go.Scatter(
                            x=steady_data_points['time'],
                            y=steady_data_points['绫抽噸'],
                            name='绋虫�佺背閲�',
                            mode='markers',
                            marker=dict(color='green', size=3, opacity=0.6)
                        ))

                        # Non-steady rows in red.
                        non_steady_data_points = df_analysis[df_analysis['is_steady'] == 0]
                        fig_steady.add_trace(go.Scatter(
                            x=non_steady_data_points['time'],
                            y=non_steady_data_points['绫抽噸'],
                            name='闈炵ǔ鎬佺背閲�',
                            mode='markers',
                            marker=dict(color='red', size=3, opacity=0.6)
                        ))

                        # Chart layout.
                        fig_steady.update_layout(
                            title="绫抽噸鏁版嵁绋虫�佸垎甯�",
                            xaxis=dict(title="鏃堕棿"),
                            yaxis=dict(title="绫抽噸 (Kg/m)"),
                            legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
                            height=500
                        )

                        # NOTE(review): the other charts on this page pass
                        # width='stretch'; use_container_width is the older
                        # equivalent — consider unifying.
                        st.plotly_chart(fig_steady, use_container_width=True)

                        # Split features / target.
                        X_clean = combined_clean[default_features]
                        y_clean = combined_clean['绫抽噸']

                        # Build (samples, sequence_length, features) windows; the
                        # target is the value right after each window.
                        def create_sequences(X, y, sequence_length):
                            X_seq = []
                            y_seq = []
                            for i in range(len(X) - sequence_length):
                                X_seq.append(X[i:i+sequence_length])
                                y_seq.append(y[i+sequence_length])
                            return np.array(X_seq), np.array(y_seq)

                        # Standardize features, min-max scale the target.
                        scaler_X = StandardScaler()
                        scaler_y = MinMaxScaler()

                        X_scaled = scaler_X.fit_transform(X_clean)
                        y_scaled = scaler_y.fit_transform(y_clean.values.reshape(-1, 1)).ravel()

                        # Create the sequence dataset.
                        sequence_length = st.session_state['mdl_sequence_length']
                        X_seq, y_seq = create_sequences(X_scaled, y_scaled, sequence_length)

                        # Need enough sequences for a train/test split.
                        if len(X_seq) < 20:
                            st.warning("搴忓垪鏁版嵁閲忎笉瓒筹紝鏃犳硶杩涜鏈夋晥鐨勬繁搴﹀涔犺缁�")
                        else:
                            # Chronological 80/20 split (no shuffling).
                            train_size = int(len(X_seq) * 0.8)
                            X_train_seq, X_test_seq = X_seq[:train_size], X_seq[train_size:]
                            y_train_seq, y_test_seq = y_seq[:train_size], y_seq[train_size:]

                            # To PyTorch tensors on the selected device.
                            X_train_tensor = torch.tensor(X_train_seq, dtype=torch.float32).to(device)
                            y_train_tensor = torch.tensor(y_train_seq, dtype=torch.float32).unsqueeze(1).to(device)
                            X_test_tensor = torch.tensor(X_test_seq, dtype=torch.float32).to(device)
                            y_test_tensor = torch.tensor(y_test_seq, dtype=torch.float32).unsqueeze(1).to(device)

                            # Build the selected model.
                            input_dim = X_scaled.shape[1]

                            if st.session_state['mdl_model_type'] == 'LSTM':
                                model = LSTMModel(input_dim).to(device)
                            elif st.session_state['mdl_model_type'] == 'GRU':
                                model = GRUModel(input_dim).to(device)
                            elif st.session_state['mdl_model_type'] == 'BiLSTM':
                                model = BiLSTMModel(input_dim).to(device)

                            # Loss and optimizer.
                            criterion = nn.MSELoss()
                            optimizer = optim.Adam(model.parameters(), lr=0.001)

                            # Training configuration.
                            # NOTE(review): batch_size is declared but unused —
                            # every epoch below trains full-batch on the whole
                            # training tensor.
                            num_epochs = 50
                            batch_size = 32

                            # Progress UI.
                            progress_bar = st.progress(0)
                            status_text = st.empty()

                            for epoch in range(num_epochs):
                                model.train()
                                optimizer.zero_grad()

                                # Forward pass.
                                outputs = model(X_train_tensor)
                                loss = criterion(outputs, y_train_tensor)

                                # Backward pass and parameter update.
                                loss.backward()
                                optimizer.step()

                                # Progress feedback.
                                progress_bar.progress((epoch + 1) / num_epochs)
                                status_text.text(f"璁粌涓�: 绗� {epoch + 1}/{num_epochs} 杞�, 鎹熷け: {loss.item():.6f}")

                            # Evaluate on the held-out tail.
                            model.eval()
                            with torch.no_grad():
                                y_pred_scaled_tensor = model(X_test_tensor)
                                y_pred_scaled = y_pred_scaled_tensor.cpu().numpy().ravel()

                            # Undo target scaling for both prediction and truth.
                            y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel()
                            y_test_actual = scaler_y.inverse_transform(y_test_seq.reshape(-1, 1)).ravel()

                            # Regression metrics.
                            r2 = r2_score(y_test_actual, y_pred)
                            mse = mean_squared_error(y_test_actual, y_pred)
                            mae = mean_absolute_error(y_test_actual, y_pred)
                            rmse = np.sqrt(mse)

                            # Display model performance.
                            metrics_cols = st.columns(2)
                            with metrics_cols[0]:
                                st.metric("R虏 寰楀垎", f"{r2:.4f}")
                                st.metric("鍧囨柟璇樊 (MSE)", f"{mse:.6f}")
                            with metrics_cols[1]:
                                st.metric("骞冲潎缁濆璇樊 (MAE)", f"{mae:.6f}")
                                st.metric("鍧囨柟鏍硅宸� (RMSE)", f"{rmse:.6f}")

                            # Caveat when trained on steady data only.
                            use_steady_data = st.session_state.get('mdl_use_steady_data', True)
                            if use_steady_data:
                                st.info("鈿狅笍 妯″瀷浠呬娇鐢ㄧǔ鎬佹暟鎹繘琛岃缁冿紝鍦ㄩ潪绋虫�佸伐鍐典笅棰勬祴缁撴灉鍙兘涓嶅噯纭�")

                            # --- Actual vs predicted comparison ---
                            st.subheader("馃攧 瀹為檯鍊间笌棰勬祴鍊煎姣�")

                            # Comparison frame.
                            compare_df = pd.DataFrame({
                                '瀹為檯鍊�': y_test_actual,
                                '棰勬祴鍊�': y_pred
                            })
                            compare_df = compare_df.sort_index()

                            # Comparison chart.
                            fig_compare = go.Figure()
                            fig_compare.add_trace(go.Scatter(
                                x=compare_df.index,
                                y=compare_df['瀹為檯鍊�'],
                                name='瀹為檯鍊�',
                                mode='lines+markers',
                                line=dict(color='blue', width=2)
                            ))
                            fig_compare.add_trace(go.Scatter(
                                x=compare_df.index,
                                y=compare_df['棰勬祴鍊�'],
                                name='棰勬祴鍊�',
                                mode='lines+markers',
                                line=dict(color='red', width=2, dash='dash')
                            ))
                            fig_compare.update_layout(
                                title=f'娴嬭瘯闆�: 瀹為檯绫抽噸 vs 棰勬祴绫抽噸 ({st.session_state["mdl_model_type"]})',
                                xaxis=dict(title='鏍锋湰绱㈠紩'),
                                yaxis=dict(title='绫抽噸 (Kg/m)'),
                                legend=dict(orientation='h', yanchor='bottom', y=1.02, xanchor='right', x=1),
                                height=400
                            )
                            st.plotly_chart(fig_compare, width='stretch')

                            # --- Residual analysis ---
                            st.subheader("馃搲 娈嬪樊鍒嗘瀽")

                            # Residuals = actual - predicted.
                            residuals = y_test_actual - y_pred

                            # Residual scatter with a zero reference line.
                            fig_residual = go.Figure()
                            fig_residual.add_trace(go.Scatter(
                                x=y_pred,
                                y=residuals,
                                mode='markers',
                                marker=dict(color='green', size=8, opacity=0.6)
                            ))
                            fig_residual.add_shape(
                                type="line",
                                x0=y_pred.min(),
                                y0=0,
                                x1=y_pred.max(),
                                y1=0,
                                line=dict(color="red", width=2, dash="dash")
                            )
                            fig_residual.update_layout(
                                title='娈嬪樊鍥�',
                                xaxis=dict(title='棰勬祴鍊�'),
                                yaxis=dict(title='娈嬪樊'),
                                height=400
                            )
                            st.plotly_chart(fig_residual, width='stretch')

                            # --- Model persistence ---
                            st.subheader("馃捑 妯″瀷淇濆瓨")

                            # Ensure the model directory exists.
                            model_dir = "saved_models"
                            os.makedirs(model_dir, exist_ok=True)

                            # Bundle model, scalers and metadata.
                            # NOTE(review): joblib-pickling a torch nn.Module
                            # ties the artifact to this module's import path;
                            # torch.save of the state_dict is more portable.
                            model_info = {
                                'model': model,
                                'features': default_features,
                                'scaler_X': scaler_X,
                                'scaler_y': scaler_y,
                                'model_type': st.session_state['mdl_model_type'],
                                'sequence_length': sequence_length,
                                'created_at': datetime.now(),
                                'r2_score': r2,
                                'mse': mse,
                                'mae': mae,
                                'rmse': rmse,
                                'use_steady_data': use_steady_data
                            }

                            # Timestamped file name.
                            model_filename = f"deep_{st.session_state['mdl_model_type'].lower()}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.joblib"
                            model_path = os.path.join(model_dir, model_filename)

                            # Persist to disk.
                            joblib.dump(model_info, model_path)

                            st.success(f"妯″瀷宸叉垚鍔熶繚瀛�: {model_filename}")
                            st.info(f"淇濆瓨璺緞: {model_path}")
            else:
                st.warning("鏈娴嬪埌PyTorch锛屾棤娉曚娇鐢ㄦ繁搴﹀涔犻娴嬪姛鑳姐�傝纭繚宸叉纭畨瑁匬yTorch搴撱��")

            # --- Data preview ---
            st.subheader("馃攳 鏁版嵁棰勮")
            st.dataframe(df_analysis.head(20), width='stretch')

            # --- Data export ---
            st.subheader("馃捑 瀵煎嚭鏁版嵁")
            # Serialize the integrated frame as CSV.
            csv = df_analysis.to_csv(index=False)
            # Download button for the integrated dataset.
            st.download_button(
                label="瀵煎嚭鏁村悎鍚庣殑鏁版嵁 (CSV)",
                data=csv,
                file_name=f"metered_weight_deep_learning_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv",
                mime="text/csv",
                help="鐐瑰嚮鎸夐挳瀵煎嚭鏁村悎鍚庣殑绫抽噸鍒嗘瀽鏁版嵁"
            )

    else:
        # No cached data yet: prompt the user to run a query first.
        st.info("璇烽�夋嫨鏃堕棿鑼冨洿骞剁偣鍑�'寮�濮嬪垎鏋�'鎸夐挳鑾峰彇鏁版嵁銆�")
diff --git a/app/pages/metered_weight_forecast.py b/app/pages/metered_weight_forecast.py
new file mode 100644
index 0000000..2ecdfbe
--- /dev/null
+++ b/app/pages/metered_weight_forecast.py
@@ -0,0 +1,716 @@
+import streamlit as st
+import plotly.express as px
+import plotly.graph_objects as go
+import pandas as pd
+import numpy as np
+import joblib
+import os
+from datetime import datetime, timedelta
+from app.services.extruder_service import ExtruderService
+from app.services.main_process_service import MainProcessService
+
# Optional torch import: deep-learning model support is disabled when absent.
try:
    import torch
    TORCH_AVAILABLE = True
except ImportError:
    TORCH_AVAILABLE = False
+
+
+# 绋虫�佽瘑鍒被
# Steady-state detection helper (duplicated from metered_weight_deep_learning.py).
class SteadyStateDetector:
    """Detect steady-state segments in a metered-weight time series.

    A row is considered steady when the rolling coefficient of variation
    (std / mean, expressed in percent) of the weight column stays below
    ``std_threshold`` and the weight itself is non-trivial (>= 0.1).
    Consecutive steady rows lasting at least ``duration_threshold`` seconds
    are collected into segments with summary statistics.
    """

    def __init__(self):
        pass

    def detect_steady_state(self, df, weight_col='绫抽噸', window_size=20, std_threshold=0.5, duration_threshold=60):
        """
        Mark steady-state rows and extract steady segments.

        :param df: DataFrame with a 'time' column and the weight column
        :param weight_col: name of the weight column
        :param window_size: rolling-window size in rows (assumed ~1 row/second)
        :param std_threshold: fluctuation threshold in percent of the rolling mean
        :param duration_threshold: minimum steady-segment duration in seconds
        :return: (DataFrame with an ``is_steady`` 0/1 column, list of segment dicts)
        """
        if df is None or df.empty:
            return df, []

        # Work on a positionally indexed copy: callers may pass a frame with a
        # gapped index (e.g. after dropna), which would break the label-based
        # ``i - 1`` lookups and the start/end slice below.  This also avoids
        # mutating the caller's frame in place.
        df = df.reset_index(drop=True)

        # Ensure the time column is datetime.
        df['time'] = pd.to_datetime(df['time'])

        # Rolling statistics of the weight signal.
        df['rolling_std'] = df[weight_col].rolling(window=window_size, min_periods=5).std()
        df['rolling_mean'] = df[weight_col].rolling(window=window_size, min_periods=5).mean()

        # Fluctuation range = rolling coefficient of variation, in percent.
        # A zero rolling mean yields +/-inf, which fillna would miss; treat it
        # as "no usable signal" (0) like the NaN warm-up rows.
        df['fluctuation_range'] = (df['rolling_std'] / df['rolling_mean']) * 100
        df['fluctuation_range'] = df['fluctuation_range'].replace([np.inf, -np.inf], np.nan).fillna(0)

        # Flag individual steady rows.
        df['is_steady'] = 0
        steady_condition = (
            (df['fluctuation_range'] < std_threshold) &
            (df[weight_col] >= 0.1)
        )
        df.loc[steady_condition, 'is_steady'] = 1

        # Group consecutive steady rows into segments.
        steady_segments = []
        current_segment = {}

        for i, row in df.iterrows():
            if row['is_steady'] == 1:
                if not current_segment:
                    current_segment = {
                        'start_time': row['time'],
                        'start_idx': i,
                        'weights': [row[weight_col]]
                    }
                else:
                    current_segment['weights'].append(row[weight_col])
            else:
                if current_segment:
                    current_segment['end_time'] = df.loc[i - 1, 'time'] if i > 0 else df.loc[i, 'time']
                    current_segment['end_idx'] = i - 1
                    self._finalize_segment(current_segment, std_threshold,
                                           duration_threshold, steady_segments)
                    current_segment = {}

        # Close the trailing segment when the series ends while steady.
        if current_segment:
            current_segment['end_time'] = df['time'].iloc[-1]
            current_segment['end_idx'] = len(df) - 1
            self._finalize_segment(current_segment, std_threshold,
                                   duration_threshold, steady_segments)

        # Re-mark the full extent of each accepted segment.
        for segment in steady_segments:
            df.loc[segment['start_idx']:segment['end_idx'], 'is_steady'] = 1

        return df, steady_segments

    def _finalize_segment(self, segment, std_threshold, duration_threshold, steady_segments):
        """Attach summary statistics to *segment* and append it to
        *steady_segments* when it lasts at least *duration_threshold* seconds."""
        duration = (segment['end_time'] - segment['start_time']).total_seconds()
        if duration < duration_threshold:
            return

        weights_array = np.array(segment['weights'])
        segment['duration'] = duration
        segment['mean_weight'] = np.mean(weights_array)
        segment['std_weight'] = np.std(weights_array)
        segment['min_weight'] = np.min(weights_array)
        segment['max_weight'] = np.max(weights_array)
        segment['fluctuation_range'] = (segment['std_weight'] / segment['mean_weight']) * 100

        # Confidence: 100 at zero fluctuation, linearly decreasing, clamped to [50, 100].
        confidence = 100 - (segment['fluctuation_range'] / std_threshold) * 50
        confidence = max(50, min(100, confidence))
        segment['confidence'] = confidence

        steady_segments.append(segment)
+
+
+def show_metered_weight_forecast():
+ # 鍒濆鍖栨湇鍔�
+ extruder_service = ExtruderService()
+ main_process_service = MainProcessService()
+
+ # 椤甸潰鏍囬
+ st.title("绫抽噸棰勬祴鍒嗘瀽")
+
+ # 鍒濆鍖栦細璇濈姸鎬�
+ if 'forecast_start_date' not in st.session_state:
+ st.session_state['forecast_start_date'] = datetime.now().date() - timedelta(days=7)
+ if 'forecast_end_date' not in st.session_state:
+ st.session_state['forecast_end_date'] = datetime.now().date()
+ if 'forecast_quick_select' not in st.session_state:
+ st.session_state['forecast_quick_select'] = "鏈�杩�7澶�"
+ if 'selected_model' not in st.session_state:
+ st.session_state['selected_model'] = None
+ if 'selected_model_file' not in st.session_state:
+ st.session_state['selected_model_file'] = None
+ if 'forecast_use_steady_only' not in st.session_state:
+ st.session_state['forecast_use_steady_only'] = True
+ if 'forecast_steady_window' not in st.session_state:
+ st.session_state['forecast_steady_window'] = 20
+ if 'forecast_steady_threshold' not in st.session_state:
+ st.session_state['forecast_steady_threshold'] = 1.5
+
+ # 瀹氫箟鍥炶皟鍑芥暟
+ def update_dates(qs):
+ st.session_state['forecast_quick_select'] = qs
+ today = datetime.now().date()
+ if qs == "浠婂ぉ":
+ st.session_state['forecast_start_date'] = today
+ st.session_state['forecast_end_date'] = today
+ elif qs == "鏈�杩�3澶�":
+ st.session_state['forecast_start_date'] = today - timedelta(days=3)
+ st.session_state['forecast_end_date'] = today
+ elif qs == "鏈�杩�7澶�":
+ st.session_state['forecast_start_date'] = today - timedelta(days=7)
+ st.session_state['forecast_end_date'] = today
+ elif qs == "鏈�杩�30澶�":
+ st.session_state['forecast_start_date'] = today - timedelta(days=30)
+ st.session_state['forecast_end_date'] = today
+
+ def on_date_change():
+ st.session_state['forecast_quick_select'] = "鑷畾涔�"
+
+ # 鏌ヨ鏉′欢鍖哄煙
+ with st.expander("馃攳 鏁版嵁閫夋嫨", expanded=True):
+ # 娣诲姞鑷畾涔� CSS 瀹炵幇鍝嶅簲寮忔崲琛�
+ st.markdown("""
+ <style>
+ /* 寮哄埗鍒楀鍣ㄦ崲琛� */
+ [data-testid="stExpander"] [data-testid="column"] {
+ flex: 1 1 120px !important;
+ min-width: 120px !important;
+ }
+ /* 閽堝鏃ユ湡杈撳叆妗嗗垪绋嶅井鍔犲涓�鐐� */
+ @media (min-width: 768px) {
+ [data-testid="stExpander"] [data-testid="column"]:nth-child(6),
+ [data-testid="stExpander"] [data-testid="column"]:nth-child(7) {
+ flex: 2 1 180px !important;
+ min-width: 180px !important;
+ }
+ }
+ </style>
+ """, unsafe_allow_html=True)
+
+ # 鍒涘缓甯冨眬
+ cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1])
+
+ options = ["浠婂ぉ", "鏈�杩�3澶�", "鏈�杩�7澶�", "鏈�杩�30澶�", "鑷畾涔�"]
+ for i, option in enumerate(options):
+ with cols[i]:
+ # 鏍规嵁褰撳墠閫夋嫨鐘舵�佸喅瀹氭寜閽被鍨�
+ button_type = "primary" if st.session_state['forecast_quick_select'] == option else "secondary"
+ if st.button(option, key=f"btn_forecast_{option}", width='stretch', type=button_type):
+ update_dates(option)
+ st.rerun()
+
+ with cols[5]:
+ start_date = st.date_input(
+ "寮�濮嬫棩鏈�",
+ label_visibility="collapsed",
+ key="forecast_start_date",
+ on_change=on_date_change
+ )
+
+ with cols[6]:
+ end_date = st.date_input(
+ "缁撴潫鏃ユ湡",
+ label_visibility="collapsed",
+ key="forecast_end_date",
+ on_change=on_date_change
+ )
+
+ with cols[7]:
+ query_button = st.button("馃殌 鏌ヨ鏁版嵁", key="forecast_query", width='stretch')
+
+ # 杞崲涓篸atetime瀵硅薄
+ start_dt = datetime.combine(start_date, datetime.min.time())
+ end_dt = datetime.combine(end_date, datetime.max.time())
+
+ # 妯″瀷閫夋嫨鍖哄煙
+ with st.expander("馃搧 妯″瀷閫夋嫨", expanded=True):
+ # 鍒涘缓妯″瀷鐩綍锛堝鏋滀笉瀛樺湪锛�
+ model_dir = "saved_models"
+ os.makedirs(model_dir, exist_ok=True)
+
+ # 鑾峰彇鎵�鏈夊凡淇濆瓨鐨勬ā鍨嬫枃浠�
+ model_files = [f for f in os.listdir(model_dir) if f.endswith('.joblib')]
+ model_files.sort(reverse=True) # 鏈�鏂扮殑妯″瀷鎺掑湪鍓嶉潰
+
+ if not model_files:
+ st.warning("灏氭湭淇濆瓨浠讳綍妯″瀷锛岃鍏堣缁冩ā鍨嬪苟淇濆瓨銆�")
+ else:
+ # 妯″瀷閫夋嫨涓嬫媺妗�
+ selected_model_file = st.selectbox(
+ "閫夋嫨宸蹭繚瀛樼殑妯″瀷",
+ options=model_files,
+ help="閫夋嫨瑕佺敤浜庨娴嬬殑妯″瀷鏂囦欢",
+ key="forecast_selected_model"
+ )
+
+ # 鍔犺浇骞舵樉绀烘ā鍨嬩俊鎭�
+ if selected_model_file:
+ model_path = os.path.join(model_dir, selected_model_file)
+ model_info = joblib.load(model_path)
+
+ # 鏄剧ず妯″瀷鍩烘湰淇℃伅
+ st.subheader("馃搳 妯″瀷淇℃伅")
+ info_cols = st.columns(2)
+
+ with info_cols[0]:
+ st.metric("妯″瀷绫诲瀷", model_info['model_type'])
+ st.metric("鍒涘缓鏃堕棿", model_info['created_at'].strftime('%Y-%m-%d %H:%M:%S'))
+ st.metric("浣跨敤绋虫�佹暟鎹�", "鏄�" if model_info.get('use_steady_data', False) else "鍚�")
+
+ with info_cols[1]:
+ st.metric("R虏 寰楀垎", f"{model_info['r2_score']:.4f}")
+ st.metric("鍧囨柟璇樊 (MSE)", f"{model_info['mse']:.6f}")
+ st.metric("鍧囨柟鏍硅宸� (RMSE)", f"{model_info['rmse']:.6f}")
+
+ # 鏄剧ず妯″瀷鐗瑰緛
+ st.write("馃攽 妯″瀷浣跨敤鐨勭壒寰�:")
+ st.code(", ".join(model_info['features']))
+
+ # 濡傛灉鏄繁搴﹀涔犳ā鍨嬶紝鏄剧ず搴忓垪闀垮害
+ if 'sequence_length' in model_info:
+ st.metric("搴忓垪闀垮害", model_info['sequence_length'])
+
+ # 淇濆瓨妯″瀷淇℃伅鍒颁細璇濈姸鎬�
+ st.session_state['selected_model'] = model_info
+ st.session_state['selected_model_file'] = selected_model_file
+
+ # 绋虫�佽瘑鍒厤缃�
+ st.markdown("---")
+ st.write("鈿栵笍 **绋虫�佽瘑鍒厤缃�**")
+
+ steady_cols = st.columns(3)
+ with steady_cols[0]:
+ st.checkbox(
+ "浠呴娴嬬ǔ鎬佹暟鎹�",
+ value=st.session_state['forecast_use_steady_only'],
+ key="forecast_use_steady_only",
+ help="鍚敤鍚庯紝鍙澶勪簬绋虫�佹椂娈电殑鏁版嵁杩涜绫抽噸棰勬祴"
+ )
+
+ with steady_cols[1]:
+ st.slider(
+ "婊戝姩绐楀彛澶у皬 (绉�)",
+ min_value=5,
+ max_value=60,
+ value=st.session_state['forecast_steady_window'],
+ step=5,
+ key="forecast_steady_window",
+ help="鐢ㄤ簬绋虫�佽瘑鍒殑婊戝姩绐楀彛澶у皬"
+ )
+
+ with steady_cols[2]:
+ st.slider(
+ "娉㈠姩闃堝�� (%)",
+ min_value=0.1,
+ max_value=2.0,
+ value=st.session_state['forecast_steady_threshold'],
+ step=0.1,
+ key="forecast_steady_threshold",
+ help="绋虫�佽瘑鍒殑娉㈠姩鑼冨洿闃堝��"
+ )
+
+ # 棰勬祴鍔熻兘鍖哄煙
+ st.subheader("馃敭 绫抽噸棰勬祴")
+
+ if query_button and st.session_state['selected_model']:
+ with st.spinner("姝e湪鑾峰彇鏁版嵁骞惰繘琛岄娴�..."):
+ # 1. 鑾峰彇瀹屾暣鐨勬尋鍑烘満鏁版嵁
+ df_extruder_full = extruder_service.get_extruder_data(start_dt, end_dt)
+
+ # 2. 鑾峰彇涓绘祦绋嬫帶鍒舵暟鎹�
+ df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt)
+ df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt)
+
+ # 妫�鏌ユ槸鍚︽湁鏁版嵁
+ has_data = any([
+ df_extruder_full is not None and not df_extruder_full.empty,
+ df_main_speed is not None and not df_main_speed.empty,
+ df_temp is not None and not df_temp.empty
+ ])
+
+ if not has_data:
+ st.warning("鎵�閫夋椂闂存鍐呮湭鎵惧埌浠讳綍鏁版嵁锛岃灏濊瘯璋冩暣鏌ヨ鏉′欢銆�")
+ else:
+ # 鏁版嵁鏁村悎涓庨澶勭悊
+ def integrate_data(df_extruder_full, df_main_speed, df_temp):
+ # 纭繚鎸ゅ嚭鏈烘暟鎹瓨鍦�
+ if df_extruder_full is None or df_extruder_full.empty:
+ return None
+
+ # 鍒涘缓鍙寘鍚背閲嶅拰鏃堕棿鐨勪富鏁版嵁闆�
+ df_merged = df_extruder_full[['time', 'metered_weight', 'screw_speed_actual', 'head_pressure']].copy()
+
+
+ # 鏁村悎涓绘祦绋嬫暟鎹�
+ if df_main_speed is not None and not df_main_speed.empty:
+ df_main_speed = df_main_speed[['time', 'process_main_speed']]
+ df_merged = pd.merge_asof(
+ df_merged.sort_values('time'),
+ df_main_speed.sort_values('time'),
+ on='time',
+ direction='nearest',
+ tolerance=pd.Timedelta('1min')
+ )
+
+ # 鏁村悎娓╁害鏁版嵁
+ if df_temp is not None and not df_temp.empty:
+ temp_cols = ['time', 'nakata_extruder_screw_display_temp',
+ 'nakata_extruder_rear_barrel_display_temp',
+ 'nakata_extruder_front_barrel_display_temp',
+ 'nakata_extruder_head_display_temp']
+ df_temp_subset = df_temp[temp_cols].copy()
+ df_merged = pd.merge_asof(
+ df_merged.sort_values('time'),
+ df_temp_subset.sort_values('time'),
+ on='time',
+ direction='nearest',
+ tolerance=pd.Timedelta('1min')
+ )
+
+ # 閲嶅懡鍚嶅垪浠ユ彁楂樺彲璇绘��
+ df_merged.rename(columns={
+ 'screw_speed_actual': '铻烘潌杞��',
+ 'head_pressure': '鏈哄ご鍘嬪姏',
+ 'process_main_speed': '娴佺▼涓婚��',
+ 'nakata_extruder_screw_display_temp': '铻烘潌娓╁害',
+ 'nakata_extruder_rear_barrel_display_temp': '鍚庢満绛掓俯搴�',
+ 'nakata_extruder_front_barrel_display_temp': '鍓嶆満绛掓俯搴�',
+ 'nakata_extruder_head_display_temp': '鏈哄ご娓╁害'
+ }, inplace=True)
+
+ # 娓呯悊鏁版嵁
+ df_merged.dropna(subset=['metered_weight'], inplace=True)
+
+ return df_merged
+
+ # 鎵ц鏁版嵁鏁村悎
+ df_analysis = integrate_data(df_extruder_full, df_main_speed, df_temp)
+
+ if df_analysis is None or df_analysis.empty:
+ st.warning("鏁版嵁鏁村悎澶辫触锛岃妫�鏌ユ暟鎹川閲忔垨璋冩暣鏃堕棿鑼冨洿銆�")
+ else:
+ # 閲嶅懡鍚嶇背閲嶅垪
+ df_analysis.rename(columns={'metered_weight': '绫抽噸'}, inplace=True)
+
+ # 绋虫�佽瘑鍒�
+ steady_detector = SteadyStateDetector()
+
+ # 鑾峰彇绋虫�佽瘑鍒弬鏁�
+ use_steady_only = st.session_state.get('forecast_use_steady_only', True)
+ steady_window = st.session_state.get('forecast_steady_window', 20)
+ steady_threshold = st.session_state.get('forecast_steady_threshold', 0.5)
+
+ # 鎵ц绋虫�佽瘑鍒�
+ df_analysis_with_steady, steady_segments = steady_detector.detect_steady_state(
+ df_analysis,
+ weight_col='绫抽噸',
+ window_size=steady_window,
+ std_threshold=steady_threshold
+ )
+
+ # 鏇存柊df_analysis涓哄寘鍚ǔ鎬佹爣璁扮殑鏁版嵁
+ df_analysis = df_analysis_with_steady
+
+ # 鏄剧ず绋虫�佺粺璁′俊鎭�
+ total_data = len(df_analysis)
+ steady_data = len(df_analysis[df_analysis['is_steady'] == 1])
+ steady_ratio = (steady_data / total_data * 100) if total_data > 0 else 0
+
+ st.subheader("馃搳 绋虫�佹暟鎹粺璁�")
+ stats_cols = st.columns(4)
+ stats_cols[0].metric("鎬绘暟鎹噺", total_data)
+ stats_cols[1].metric("绋虫�佹暟鎹噺", steady_data)
+ stats_cols[2].metric("绋虫�佹暟鎹瘮渚�", f"{steady_ratio:.1f}%")
+ stats_cols[3].metric("绋虫�佹鏁伴噺", len(steady_segments))
+
+ # 鑾峰彇妯″瀷淇℃伅
+ model_info = st.session_state['selected_model']
+ required_features = model_info['features']
+
+ # 妫�鏌ユ墍鏈夊繀闇�鐨勭壒寰佹槸鍚﹀湪鏁版嵁涓�
+ missing_features = [f for f in required_features if f not in df_analysis.columns]
+ if missing_features:
+ st.warning(f"鏁版嵁涓己灏戜互涓嬬壒寰�: {', '.join(missing_features)}")
+ else:
+ # 鍑嗗鎵�鏈夋暟鎹敤浜庢樉绀�
+ df_all = df_analysis.dropna(subset=required_features + ['绫抽噸']).copy()
+
+ if len(df_all) == 0:
+ st.warning("娌℃湁瓒冲鐨勬湁鏁堟暟鎹繘琛岄娴嬶紝璇疯皟鏁存椂闂磋寖鍥存垨妫�鏌ユ暟鎹川閲忋��")
+ else:
+ # 鏍规嵁閰嶇疆鍐冲畾鏄惁鍙娇鐢ㄧǔ鎬佹暟鎹繘琛岄娴�
+ if use_steady_only:
+ df_pred_steady = df_all[df_all['is_steady'] == 1].copy()
+ if len(df_pred_steady) > 0:
+ df_pred = df_pred_steady
+ st.info(f"宸插惎鐢ㄧǔ鎬佽繃婊わ紝浣跨敤 {len(df_pred)} 鏉$ǔ鎬佹暟鎹繘琛岄娴�")
+ else:
+ df_pred = df_all.copy()
+ st.warning("鏈壘鍒扮ǔ鎬佹暟鎹紝灏嗕娇鐢ㄦ墍鏈夋暟鎹繘琛岄娴�")
+ else:
+ df_pred = df_all.copy()
+
+ # 鎵ц棰勬祴 - 鍙閫夊畾鐨勬暟鎹紙绋虫�佹垨鍏ㄩ儴锛夎繘琛岄娴�
+ X_pred = df_pred[required_features]
+ predicted_weights = []
+
+ # 鑾峰彇妯″瀷
+ model = model_info['model']
+
+ # 妫�鏌ユā鍨嬬被鍨嬪苟鎵ц棰勬祴
+ if model_info['model_type'] in ['LSTM', 'GRU', 'BiLSTM']:
+ # 娣卞害瀛︿範妯″瀷棰勬祴
+ if not TORCH_AVAILABLE:
+ st.error("PyTorch 鏈畨瑁咃紝鏃犳硶浣跨敤娣卞害瀛︿範妯″瀷杩涜棰勬祴銆�")
+ st.stop()
+
+ # 鏁版嵁鏍囧噯鍖�
+ scaler_X = model_info['scaler_X']
+ scaler_y = model_info['scaler_y']
+ X_scaled = scaler_X.transform(X_pred)
+
+ # 鑾峰彇搴忓垪闀垮害
+ sequence_length = model_info['sequence_length']
+
+ # 涓烘繁搴﹀涔犳ā鍨嬪垱寤哄簭鍒�
+ def create_sequences(data, seq_length):
+ sequences = []
+ for i in range(len(data) - seq_length + 1):
+ seq = data[i:i+seq_length]
+ sequences.append(seq)
+ return np.array(sequences)
+
+ X_sequences = create_sequences(X_scaled, sequence_length)
+
+ # 杞崲涓篜yTorch寮犻噺
+ import torch
+ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
+ X_tensor = torch.tensor(X_sequences, dtype=torch.float32).to(device)
+
+ # 棰勬祴
+ model.eval()
+ with torch.no_grad():
+ y_pred_scaled_tensor = model(X_tensor)
+ y_pred_scaled = y_pred_scaled_tensor.cpu().numpy().ravel()
+
+ # 鍙嶅綊涓�鍖�
+ predicted = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel()
+
+ # 鐢变簬搴忓垪棰勬祴锛屾垜浠渶瑕佸~鍏呭墠闈㈢殑缂哄け鍊�
+ predicted_weights = [np.nan] * (sequence_length - 1) + list(predicted)
+
+ elif model_info['model_type'] in ['SVR', 'MLP']:
+ # 鏀寔鍚戦噺鏈烘垨澶氬眰鎰熺煡鍣ㄩ娴�
+ # 鏁版嵁鏍囧噯鍖�
+ scaler_X = model_info['scaler_X']
+ scaler_y = model_info['scaler_y']
+ X_scaled = scaler_X.transform(X_pred)
+
+ # 棰勬祴
+ y_pred_scaled = model.predict(X_scaled)
+
+ # 鍙嶅綊涓�鍖�
+ predicted_weights = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel()
+
+ else:
+ # 鍏朵粬妯″瀷锛堝闅忔満妫灄銆佹搴︽彁鍗囥�佺嚎鎬у洖褰掔瓑锛�
+ predicted_weights = model.predict(X_pred)
+
+ # 灏嗛娴嬬粨鏋滄坊鍔犲埌鏁版嵁妗嗕腑
+ df_pred['棰勬祴绫抽噸'] = predicted_weights
+
+ # 纭繚鏃堕棿鍒楁槸datetime绫诲瀷
+ df_pred['time'] = pd.to_datetime(df_pred['time'])
+
+ # 鏁版嵁瀵规瘮鍔熻兘
+ st.subheader("馃搳 棰勬祴缁撴灉瀵规瘮鍒嗘瀽")
+
+ # 璁$畻棰勬祴璇樊
+ df_pred['璇樊'] = df_pred['棰勬祴绫抽噸'] - df_pred['绫抽噸']
+ df_pred['缁濆璇樊'] = abs(df_pred['璇樊'])
+ df_pred['鐩稿璇樊'] = (df_pred['缁濆璇樊'] / df_pred['绫抽噸']) * 100
+
+ # 鏄剧ず璇樊缁熻淇℃伅
+ error_stats = df_pred.dropna(subset=['棰勬祴绫抽噸']).describe()
+
+ stats_cols = st.columns(3)
+ with stats_cols[0]:
+ st.metric("骞冲潎瀹為檯绫抽噸", f"{error_stats['绫抽噸']['mean']:.4f} Kg/m")
+ st.metric("骞冲潎棰勬祴绫抽噸", f"{error_stats['棰勬祴绫抽噸']['mean']:.4f} Kg/m")
+ with stats_cols[1]:
+ st.metric("骞冲潎缁濆璇樊", f"{error_stats['缁濆璇樊']['mean']:.4f} Kg/m")
+ st.metric("鏈�澶х粷瀵硅宸�", f"{error_stats['缁濆璇樊']['max']:.4f} Kg/m")
+ with stats_cols[2]:
+ st.metric("骞冲潎鐩稿璇樊", f"{error_stats['鐩稿璇樊']['mean']:.2f}%")
+ st.metric("鏈�澶х浉瀵硅宸�", f"{error_stats['鐩稿璇樊']['max']:.2f}%")
+
+ # 鍙鍖栧睍绀�
+ st.subheader("馃搱 绫抽噸瓒嬪娍瀵规瘮")
+
+ # 鍒涘缓瓒嬪娍鍥� - 浣跨敤鎵�鏈夋暟鎹甦f_all杩涜鏄剧ず
+ fig = go.Figure()
+
+ # 纭繚鏃堕棿鍒楁槸datetime绫诲瀷
+ df_all['time'] = pd.to_datetime(df_all['time'])
+
+ # # 娣诲姞瀹炴椂绫抽噸鏁版嵁鐐癸紙绋虫�佹暟鎹敤钃濊壊锛岄潪绋虫�佹暟鎹敤鐏拌壊锛�
+ # if 'is_steady' in df_all.columns:
+ # # 绋虫�佹暟鎹� - 浣跨敤鐐规樉绀�
+ # steady_data = df_all[df_all['is_steady'] == 1]
+ # non_steady_data = df_all[df_all['is_steady'] == 0]
+
+ # if len(steady_data) > 0:
+ # fig.add_trace(go.Scatter(
+ # x=steady_data['time'],
+ # y=steady_data['绫抽噸'],
+ # name='瀹炴椂绫抽噸锛堢ǔ鎬侊級',
+ # mode='markers',
+ # marker=dict(color='blue', size=3),
+ # hovertemplate='鏃堕棿: %{x}<br>瀹炴椂绫抽噸锛堢ǔ鎬侊級: %{y:.4f} Kg/m<extra></extra>'
+ # ))
+
+ # # 闈炵ǔ鎬佹暟鎹篃鏄剧ず锛屼絾涓嶈繘琛岄娴�
+ # if len(non_steady_data) > 0:
+ # fig.add_trace(go.Scatter(
+ # x=non_steady_data['time'],
+ # y=non_steady_data['绫抽噸'],
+ # name='瀹炴椂绫抽噸锛堥潪绋虫�侊級',
+ # mode='markers',
+ # marker=dict(color='lightgray', size=3),
+ # hovertemplate='鏃堕棿: %{x}<br>瀹炴椂绫抽噸锛堥潪绋虫�侊級: %{y:.4f} Kg/m<extra></extra>'
+ # ))
+ # else:
+ # 濡傛灉娌℃湁绋虫�佹爣璁帮紝鏄剧ず鎵�鏈夋暟鎹偣
+ fig.add_trace(go.Scatter(
+ x=df_all['time'],
+ y=df_all['绫抽噸'],
+ name='瀹炴椂绫抽噸',
+ mode='lines',
+ line=dict(color='blue', width=1.5),
+ # hovertemplate='鏃堕棿: %{x}<br>瀹炴椂绫抽噸: %{y:.4f} Kg/m<extra></extra>'
+ ))
+
+ # 娣诲姞棰勬祴绫抽噸鏇茬嚎 - 鍙棰勬祴鐨勬暟鎹紙绋虫�佹垨鍏ㄩ儴锛夋樉绀�
+ fig.add_trace(go.Scatter(
+ x=df_pred['time'],
+ y=df_pred['棰勬祴绫抽噸'],
+ name='棰勬祴绫抽噸',
+ mode='lines',
+ line=dict(color='red', width=2, dash='dash'),
+ marker=dict(size=3),
+ # hovertemplate='鏃堕棿: %{x}<br>棰勬祴绫抽噸: %{y:.4f} Kg/m<extra></extra>'
+ ))
+
+ # 娣诲姞鎵�鏈夋尋鍑烘満鍙傛暟鏇茬嚎 - 浣跨敤鎵�鏈夋暟鎹�
+ colors = ['green', 'orange', 'purple', 'brown', 'pink', 'gray', 'olive', 'cyan', 'magenta', 'yellow', 'lime', 'teal']
+ for i, feature in enumerate(required_features):
+ # 涓烘瘡涓壒寰佸垎閰嶄笉鍚岀殑棰滆壊
+ color = colors[i % len(colors)]
+
+ # 纭繚鐗瑰緛瀛樺湪浜庢墍鏈夋暟鎹腑
+ if feature in df_all.columns:
+ fig.add_trace(go.Scatter(
+ x=df_all['time'],
+ y=df_all[feature],
+ name=feature,
+ mode='lines',
+ line=dict(color=color, width=1.5),
+ yaxis=f'y{i+2}',
+ # hovertemplate=f'鏃堕棿: %{{x}}<br>{feature}: %{{y}}<extra></extra>'
+ ))
+
+ # 閰嶇疆鍥捐〃甯冨眬
+ layout = {
+ 'title': '绫抽噸棰勬祴涓庡疄鏃舵暟鎹姣�',
+ 'xaxis': {
+ 'title': '鏃堕棿',
+ 'rangeslider': {'visible': True},
+ 'type': 'date',
+ 'tickformat': '%Y-%m-%d %H:%M'
+ },
+ 'yaxis': {
+ 'title': '绫抽噸 (Kg/m)',
+ 'title_font': {'color': 'blue'},
+ 'tickfont': {'color': 'blue'},
+ 'side': 'left',
+ 'fixedrange': False # 鍏佽y杞寸缉鏀�
+ },
+ 'legend': {
+ 'orientation': 'h',
+ 'yanchor': 'bottom',
+ 'y': 1.02,
+ 'xanchor': 'right',
+ 'x': 1
+ },
+ 'height': 600,
+ 'margin': {'l': 100, 'r': 200, 't': 100, 'b': 100},
+ 'hovermode': 'x unified'
+ }
+
+ # 娣诲姞棰濆鐨剏杞撮厤缃� - 涓烘墍鏈夌壒寰佸垱寤簓杞�
+ for i, feature in enumerate(required_features):
+ layout[f'yaxis{i+2}'] = {
+ 'title': feature,
+ 'title_font': {'color': colors[i % len(colors)]},
+ 'tickfont': {'color': colors[i % len(colors)]},
+ 'overlaying': 'y',
+ 'side': 'right',
+ 'anchor': 'free',
+ 'position': 1 - (i+1)*0.08,
+ 'fixedrange': False # 鍏佽y杞寸缉鏀�
+ }
+
+ fig.update_layout(layout)
+
+ # 鏄剧ず瓒嬪娍鍥� - 鍚敤瀹屾暣鐨勪氦浜掑姛鑳�
+ st.plotly_chart(fig, use_container_width=True, config={
+ 'scrollZoom': True,
+ 'displayModeBar': True,
+ 'modeBarButtonsToAdd': ['pan2d', 'select2d', 'lasso2d', 'resetScale2d'],
+ 'displaylogo': False
+ })
+
+ # 璇樊鍒嗘瀽鍥�
+ st.subheader("馃搲 棰勬祴璇樊鍒嗘瀽")
+
+ # 鍒涘缓璇樊鍒嗗竷鐩存柟鍥�
+ fig_error = px.histogram(df_pred.dropna(subset=['鐩稿璇樊']), x='鐩稿璇樊', nbins=50,
+ title='棰勬祴鐩稿璇樊鍒嗗竷',
+ labels={'鐩稿璇樊': '鐩稿璇樊 (%)'})
+ fig_error.update_layout(
+ xaxis_title='鐩稿璇樊 (%)',
+ yaxis_title='棰戞',
+ height=400
+ )
+ st.plotly_chart(fig_error, use_container_width=True)
+
+ # 鏁版嵁棰勮
+ st.subheader("馃攳 鏁版嵁棰勮")
+ preview_columns = ['time', '绫抽噸', '棰勬祴绫抽噸', '璇樊', '缁濆璇樊', '鐩稿璇樊']
+ if 'is_steady' in df_pred.columns:
+ preview_columns.append('is_steady')
+ preview_columns.extend(required_features)
+ st.dataframe(df_pred[preview_columns].head(20),
+ use_container_width=True)
+
+ # 瀵煎嚭鏁版嵁
+ st.subheader("馃捑 瀵煎嚭鏁版嵁")
+ # 灏嗘暟鎹浆鎹负CSV鏍煎紡
+ csv = df_pred.to_csv(index=False)
+ # 鍒涘缓涓嬭浇鎸夐挳
+ st.download_button(
+ label="瀵煎嚭棰勬祴缁撴灉鏁版嵁 (CSV)",
+ data=csv,
+ file_name=f"metered_weight_forecast_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv",
+ mime="text/csv",
+ help="鐐瑰嚮鎸夐挳瀵煎嚭棰勬祴缁撴灉鏁版嵁"
+ )
+ elif query_button:
+ st.warning("璇峰厛閫夋嫨涓�涓ā鍨嬨��")
+ else:
+ st.info("璇烽�夋嫨鏃堕棿鑼冨洿鍜屾ā鍨嬶紝鐒跺悗鐐瑰嚮'鏌ヨ鏁版嵁'鎸夐挳寮�濮嬮娴嬪垎鏋愩��")
+
+
+# 椤甸潰鍏ュ彛
+if __name__ == "__main__":
+ show_metered_weight_forecast()
\ No newline at end of file
diff --git a/app/pages/metered_weight_prediction.py b/app/pages/metered_weight_prediction.py
new file mode 100644
index 0000000..a413ea8
--- /dev/null
+++ b/app/pages/metered_weight_prediction.py
@@ -0,0 +1,208 @@
+import streamlit as st
+import plotly.express as px
+import plotly.graph_objects as go
+import pandas as pd
+import numpy as np
+import joblib
+import os
+from datetime import datetime
+
+# Probe for torch at import time; deep-learning model support is disabled when it is absent.
+try:
+    import torch
+    TORCH_AVAILABLE = True
+except ImportError:
+    TORCH_AVAILABLE = False
+
+# Page entry-point definition
+def show_metered_weight_prediction():
+    """Render the unified metered-weight prediction page.
+
+    Lets the user pick a previously saved model from ``saved_models``,
+    displays the metrics stored with it, collects one feature vector
+    through a form, runs a single-sample prediction, and offers basic
+    model-file management (inspect / delete).
+    """
+    # Page title
+    st.title("绫抽噸缁熶竴棰勬祴")
+
+    # Initialize session state
+    if 'selected_model' not in st.session_state:
+        st.session_state['selected_model'] = None
+
+    # Create the model directory if it does not exist yet
+    model_dir = "saved_models"
+    os.makedirs(model_dir, exist_ok=True)
+
+    # Collect all saved model files
+    model_files = [f for f in os.listdir(model_dir) if f.endswith('.joblib')]
+    model_files.sort(reverse=True)  # newest models first
+
+    # Model selection area
+    with st.expander("馃搧 閫夋嫨妯″瀷", expanded=True):
+        if not model_files:
+            st.warning("灏氭湭淇濆瓨浠讳綍妯″瀷锛岃鍏堣缁冩ā鍨嬪苟淇濆瓨銆�")
+        else:
+            # Model selection dropdown
+            selected_model_file = st.selectbox(
+                "閫夋嫨宸蹭繚瀛樼殑妯″瀷",
+                options=model_files,
+                help="閫夋嫨瑕佺敤浜庨娴嬬殑妯″瀷鏂囦欢"
+            )
+
+            # Load the selected model bundle and display its metadata
+            if selected_model_file:
+                model_path = os.path.join(model_dir, selected_model_file)
+                model_info = joblib.load(model_path)
+
+                # Basic model information
+                st.subheader("馃搳 妯″瀷淇℃伅")
+                info_cols = st.columns(2)
+
+                with info_cols[0]:
+                    st.metric("妯″瀷绫诲瀷", model_info['model_type'])
+                    st.metric("鍒涘缓鏃堕棿", model_info['created_at'].strftime('%Y-%m-%d %H:%M:%S'))
+                    st.metric("浣跨敤绋虫�佹暟鎹�", "鏄�" if model_info.get('use_steady_data', False) else "鍚�")
+
+                with info_cols[1]:
+                    st.metric("R虏 寰楀垎", f"{model_info['r2_score']:.4f}")
+                    st.metric("鍧囨柟璇樊 (MSE)", f"{model_info['mse']:.6f}")
+                    st.metric("鍧囨柟鏍硅宸� (RMSE)", f"{model_info['rmse']:.6f}")
+
+                # Features the model was trained on
+                st.write("馃攽 妯″瀷浣跨敤鐨勭壒寰�:")
+                st.code(", ".join(model_info['features']))
+
+                # For deep-learning models, also show the training sequence length
+                if 'sequence_length' in model_info:
+                    st.metric("搴忓垪闀垮害", model_info['sequence_length'])
+
+                # Persist the selection in session state
+                st.session_state['selected_model'] = model_info
+                st.session_state['selected_model_file'] = selected_model_file
+
+    # Prediction area
+    st.subheader("馃敭 绫抽噸棰勬祴")
+
+    if st.session_state['selected_model']:
+        model_info = st.session_state['selected_model']
+
+        # Features required by the model
+        required_features = model_info['features']
+
+        # Build the prediction input form
+        st.write("杈撳叆鐗瑰緛鍊艰繘琛岀背閲嶉娴�:")
+        predict_cols = st.columns(2)
+        input_features = {}
+
+        # One numeric input per feature, laid out in two columns
+        for i, feature in enumerate(required_features):
+            with predict_cols[i % 2]:
+                input_features[feature] = st.number_input(
+                    f"{feature}",
+                    key=f"pred_{feature}",
+                    value=0.0,
+                    step=0.0001,
+                    format="%.4f"
+                )
+
+        # Predict button
+        if st.button("馃殌 寮�濮嬮娴�"):
+            try:
+                # Assemble the single-row prediction frame
+                input_df = pd.DataFrame([input_features])
+
+                # Filled by one of the model-type branches below
+                predicted_weight = None
+
+                # The fitted model object
+                model = model_info['model']
+
+                # Dispatch on model type and run the matching prediction path
+                if model_info['model_type'] in ['LSTM', 'GRU', 'BiLSTM']:
+                    # Deep-learning model prediction
+                    if not TORCH_AVAILABLE:
+                        st.error("PyTorch 鏈畨瑁咃紝鏃犳硶浣跨敤娣卞害瀛︿範妯″瀷杩涜棰勬祴銆�")
+                        return
+
+                    # Standardize the inputs with the scalers saved alongside the model
+                    scaler_X = model_info['scaler_X']
+                    scaler_y = model_info['scaler_y']
+                    input_scaled = scaler_X.transform(input_df)
+
+                    # Sequence length used at training time
+                    sequence_length = model_info['sequence_length']
+
+                    # Tile the single sample into a constant sequence for the recurrent model
+                    input_seq = np.tile(input_scaled, (sequence_length, 1)).reshape(1, sequence_length, -1)
+
+                    # Convert to a PyTorch tensor on the available device
+                    import torch
+                    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
+                    input_tensor = torch.tensor(input_seq, dtype=torch.float32).to(device)
+
+                    # Predict
+                    model.eval()
+                    with torch.no_grad():
+                        y_pred_scaled_tensor = model(input_tensor)
+                        y_pred_scaled = y_pred_scaled_tensor.cpu().numpy().ravel()[0]
+
+                    # Invert the target scaling
+                    predicted_weight = scaler_y.inverse_transform(np.array([[y_pred_scaled]]))[0][0]
+
+                elif model_info['model_type'] in ['SVR', 'MLP']:
+                    # Support-vector or multi-layer-perceptron prediction
+
+                    # Standardize the inputs
+                    scaler_X = model_info['scaler_X']
+                    scaler_y = model_info['scaler_y']
+                    input_scaled = scaler_X.transform(input_df)
+
+                    # Predict
+                    y_pred_scaled = model.predict(input_scaled)[0]
+
+                    # Invert the target scaling
+                    predicted_weight = scaler_y.inverse_transform(np.array([[y_pred_scaled]]))[0][0]
+
+                else:
+                    # Other models (random forest, gradient boosting, linear regression, ...)
+                    predicted_weight = model.predict(input_df)[0]
+
+                # Show the prediction result
+                st.success(f"棰勬祴绫抽噸: {predicted_weight:.4f} Kg/m")
+
+
+            except Exception as e:
+                st.error(f"棰勬祴澶辫触: {str(e)}")
+    else:
+        st.warning("璇峰厛閫夋嫨涓�涓ā鍨嬨��")
+
+    # Model management area
+    if model_files:
+        with st.expander("馃棏锔� 妯″瀷绠$悊", expanded=False):
+            st.write("绠$悊宸蹭繚瀛樼殑妯″瀷鏂囦欢:")
+
+            # One row per saved model file: name, view button, delete button
+            for model_file in model_files:
+                cols = st.columns([3, 1, 1])
+                cols[0].write(model_file)
+
+                # View button: show the metadata stored with the model
+                if cols[1].button("鏌ョ湅", key=f"view_{model_file}", help="鏌ョ湅妯″瀷淇℃伅"):
+                    model_path = os.path.join(model_dir, model_file)
+                    model_info = joblib.load(model_path)
+                    st.write("妯″瀷璇︾粏淇℃伅:")
+                    st.json({
+                        'model_type': model_info['model_type'],
+                        'created_at': model_info['created_at'].strftime('%Y-%m-%d %H:%M:%S'),
+                        'r2_score': f"{model_info['r2_score']:.4f}",
+                        'mse': f"{model_info['mse']:.6f}",
+                        'mae': f"{model_info['mae']:.6f}",
+                        'rmse': f"{model_info['rmse']:.6f}",
+                        'features': model_info['features'],
+                        'use_steady_data': model_info.get('use_steady_data', False)
+                    })
+
+                # Delete button: remove the model file and refresh the page
+                if cols[2].button("鍒犻櫎", key=f"delete_{model_file}", help="鍒犻櫎妯″瀷鏂囦欢", type="primary"):
+                    model_path = os.path.join(model_dir, model_file)
+                    os.remove(model_path)
+                    st.success(f"宸插垹闄ゆā鍨�: {model_file}")
+                    st.rerun()
+
+# Script entry point
+if __name__ == "__main__":
+    show_metered_weight_prediction()
diff --git a/app/pages/metered_weight_regression.py b/app/pages/metered_weight_regression.py
index d07c22b..d06dc68 100644
--- a/app/pages/metered_weight_regression.py
+++ b/app/pages/metered_weight_regression.py
@@ -3,12 +3,117 @@
import plotly.graph_objects as go
import pandas as pd
import numpy as np
+import joblib
+import os
from datetime import datetime, timedelta
from app.services.extruder_service import ExtruderService
from app.services.main_process_service import MainProcessService
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error
+
+
+# Steady-state detection helper (inlined copy for this page)
+class SteadyStateDetector:
+    """Flags steady-state segments in a metered-weight time series."""
+
+    def __init__(self):
+        pass
+
+    def detect_steady_state(self, df, weight_col='绫抽噸', window_size=20, std_threshold=0.5, duration_threshold=60):
+        """
+        Steady-state detection: mark steady segments of the metered-weight series.
+        :param df: dataframe containing the weight series and a ``time`` column
+        :param weight_col: name of the weight column
+        :param window_size: rolling-window size (in samples)
+        :param std_threshold: fluctuation threshold (rolling std as a percentage of the rolling mean)
+        :param duration_threshold: minimum duration (seconds) for a segment to be kept
+        :return: the dataframe with an ``is_steady`` flag column, plus a list of steady-segment dicts
+        """
+        if df is None or df.empty:
+            return df, []
+
+        # Ensure the time column is datetime typed.
+        # NOTE(review): this method mutates the caller's dataframe in place
+        # (adds columns, converts 'time') — confirm that is intended.
+        df['time'] = pd.to_datetime(df['time'])
+
+        # Rolling statistics over the weight column
+        df['rolling_std'] = df[weight_col].rolling(window=window_size, min_periods=5).std()
+        df['rolling_mean'] = df[weight_col].rolling(window=window_size, min_periods=5).mean()
+
+        # Fluctuation range as a percentage of the rolling mean
+        df['fluctuation_range'] = (df['rolling_std'] / df['rolling_mean']) * 100
+        df['fluctuation_range'] = df['fluctuation_range'].fillna(0)
+
+        # Point-wise steady flag: low fluctuation and a non-trivial weight value
+        df['is_steady'] = 0
+        steady_condition = (
+            (df['fluctuation_range'] < std_threshold) &
+            (df[weight_col] >= 0.1)
+        )
+        df.loc[steady_condition, 'is_steady'] = 1
+
+        # Group consecutive steady points into segments
+        steady_segments = []
+        current_segment = {}
+
+        for i, row in df.iterrows():
+            if row['is_steady'] == 1:
+                if not current_segment:
+                    # Open a new segment at this row
+                    current_segment = {
+                        'start_time': row['time'],
+                        'start_idx': i,
+                        'weights': [row[weight_col]]
+                    }
+                else:
+                    current_segment['weights'].append(row[weight_col])
+            else:
+                if current_segment:
+                    # Close the running segment at the previous row.
+                    # NOTE(review): the i-1 arithmetic assumes a contiguous integer
+                    # (Range) index on df — confirm upstream never reindexes.
+                    current_segment['end_time'] = df.loc[i-1, 'time'] if i > 0 else df.loc[i, 'time']
+                    current_segment['end_idx'] = i-1
+                    duration = (current_segment['end_time'] - current_segment['start_time']).total_seconds()
+
+                    # Keep only segments that lasted long enough
+                    if duration >= duration_threshold:
+                        weights_array = np.array(current_segment['weights'])
+                        current_segment['duration'] = duration
+                        current_segment['mean_weight'] = np.mean(weights_array)
+                        current_segment['std_weight'] = np.std(weights_array)
+                        current_segment['min_weight'] = np.min(weights_array)
+                        current_segment['max_weight'] = np.max(weights_array)
+                        current_segment['fluctuation_range'] = (current_segment['std_weight'] / current_segment['mean_weight']) * 100
+
+                        # Confidence: 100 at zero fluctuation, clamped to [50, 100]
+                        confidence = 100 - (current_segment['fluctuation_range'] / std_threshold) * 50
+                        confidence = max(50, min(100, confidence))
+                        current_segment['confidence'] = confidence
+
+                        steady_segments.append(current_segment)
+
+                    current_segment = {}
+
+        # Handle a segment that is still open at the end of the data
+        if current_segment:
+            current_segment['end_time'] = df['time'].iloc[-1]
+            current_segment['end_idx'] = len(df) - 1
+            duration = (current_segment['end_time'] - current_segment['start_time']).total_seconds()
+
+            if duration >= duration_threshold:
+                weights_array = np.array(current_segment['weights'])
+                current_segment['duration'] = duration
+                current_segment['mean_weight'] = np.mean(weights_array)
+                current_segment['std_weight'] = np.std(weights_array)
+                current_segment['min_weight'] = np.min(weights_array)
+                current_segment['max_weight'] = np.max(weights_array)
+                current_segment['fluctuation_range'] = (current_segment['std_weight'] / current_segment['mean_weight']) * 100
+
+                confidence = 100 - (current_segment['fluctuation_range'] / std_threshold) * 50
+                confidence = max(50, min(100, confidence))
+                current_segment['confidence'] = confidence
+
+                steady_segments.append(current_segment)
+
+        # Re-mark the full span of each accepted segment.
+        # NOTE(review): point-wise flags set earlier for segments that were too
+        # short are not cleared here — confirm that is the desired behavior.
+        for segment in steady_segments:
+            df.loc[segment['start_idx']:segment['end_idx'], 'is_steady'] = 1
+
+        return df, steady_segments
def show_metered_weight_regression():
@@ -33,6 +138,12 @@
'铻烘潌杞��', '鏈哄ご鍘嬪姏', '娴佺▼涓婚��', '铻烘潌娓╁害',
'鍚庢満绛掓俯搴�', '鍓嶆満绛掓俯搴�', '鏈哄ご娓╁害'
]
+ if 'mr_use_steady_data' not in st.session_state:
+ st.session_state['mr_use_steady_data'] = True
+ if 'mr_steady_window' not in st.session_state:
+ st.session_state['mr_steady_window'] = 20
+ if 'mr_steady_threshold' not in st.session_state:
+ st.session_state['mr_steady_threshold'] = 0.5
# 瀹氫箟鍥炶皟鍑芥暟
def update_dates(qs):
@@ -123,6 +234,42 @@
st.session_state['mr_time_offset'] = time_offset
with offset_cols[2]:
st.write(f"褰撳墠鍋忕Щ: {time_offset} 鍒嗛挓")
+
+ # 绋虫�佽瘑鍒厤缃�
+ st.markdown("---")
+ steady_cols = st.columns(3)
+ with steady_cols[0]:
+ st.write("鈿栵笍 **绋虫�佽瘑鍒厤缃�**")
+ st.checkbox(
+ "浠呬娇鐢ㄧǔ鎬佹暟鎹繘琛岃缁�",
+ value=st.session_state['mr_use_steady_data'],
+ key="mr_use_steady_data",
+ help="鍚敤鍚庯紝鍙娇鐢ㄧ背閲嶇ǔ鎬佹椂娈电殑鏁版嵁杩涜妯″瀷璁粌"
+ )
+
+ with steady_cols[1]:
+ st.write("馃搹 **绋虫�佸弬鏁�**")
+ st.slider(
+ "婊戝姩绐楀彛澶у皬 (绉�)",
+ min_value=5,
+ max_value=60,
+ value=st.session_state['mr_steady_window'],
+ step=5,
+ key="mr_steady_window",
+ help="鐢ㄤ簬绋虫�佽瘑鍒殑婊戝姩绐楀彛澶у皬"
+ )
+
+ with steady_cols[2]:
+ st.write("馃搳 **绋虫�侀槇鍊�**")
+ st.slider(
+ "娉㈠姩闃堝�� (%)",
+ min_value=0.1,
+ max_value=2.0,
+ value=st.session_state['mr_steady_threshold'],
+ step=0.1,
+ key="mr_steady_threshold",
+ help="绋虫�佽瘑鍒殑娉㈠姩鑼冨洿闃堝��"
+ )
# 鐗瑰緛閫夋嫨
st.markdown("---")
@@ -305,6 +452,82 @@
# 閲嶅懡鍚嶇背閲嶅垪
df_analysis.rename(columns={'metered_weight': '绫抽噸'}, inplace=True)
+
+ # 绋虫�佽瘑鍒�
+ steady_detector = SteadyStateDetector()
+
+ # 鑾峰彇绋虫�佽瘑鍒弬鏁�
+ use_steady_data = st.session_state.get('mr_use_steady_data', True)
+ steady_window = st.session_state.get('mr_steady_window', 20)
+ steady_threshold = st.session_state.get('mr_steady_threshold', 0.5)
+
+ # 鎵ц绋虫�佽瘑鍒�
+ df_analysis_with_steady, steady_segments = steady_detector.detect_steady_state(
+ df_analysis,
+ weight_col='绫抽噸',
+ window_size=steady_window,
+ std_threshold=steady_threshold
+ )
+
+ # 鏇存柊df_analysis涓哄寘鍚ǔ鎬佹爣璁扮殑鏁版嵁
+ df_analysis = df_analysis_with_steady
+
+ # 绋虫�佹暟鎹彲瑙嗗寲
+ st.subheader("馃搱 绋虫�佹暟鎹垎甯�")
+
+ # 鍒涘缓绋虫�佹暟鎹彲瑙嗗寲鍥捐〃
+ fig_steady = go.Figure()
+
+ # 娣诲姞鍘熷绫抽噸鏇茬嚎
+ fig_steady.add_trace(go.Scatter(
+ x=df_analysis['time'],
+ y=df_analysis['绫抽噸'],
+ name='鍘熷绫抽噸',
+ mode='lines',
+ line=dict(color='lightgray', width=1)
+ ))
+
+ # 娣诲姞绋虫�佹暟鎹偣
+ steady_data_points = df_analysis[df_analysis['is_steady'] == 1]
+ fig_steady.add_trace(go.Scatter(
+ x=steady_data_points['time'],
+ y=steady_data_points['绫抽噸'],
+ name='绋虫�佺背閲�',
+ mode='markers',
+ marker=dict(color='green', size=3, opacity=0.6)
+ ))
+
+ # 娣诲姞闈炵ǔ鎬佹暟鎹偣
+ non_steady_data_points = df_analysis[df_analysis['is_steady'] == 0]
+ fig_steady.add_trace(go.Scatter(
+ x=non_steady_data_points['time'],
+ y=non_steady_data_points['绫抽噸'],
+ name='闈炵ǔ鎬佺背閲�',
+ mode='markers',
+ marker=dict(color='red', size=3, opacity=0.6)
+ ))
+
+ # 閰嶇疆鍥捐〃甯冨眬
+ fig_steady.update_layout(
+ title="绫抽噸鏁版嵁绋虫�佸垎甯�",
+ xaxis=dict(title="鏃堕棿"),
+ yaxis=dict(title="绫抽噸 (Kg/m)"),
+ legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
+ height=500
+ )
+
+ # 鏄剧ず鍥捐〃
+ st.plotly_chart(fig_steady, use_container_width=True)
+
+ # 鏄剧ず绋虫�佺粺璁�
+ total_data = len(df_analysis)
+ steady_data = len(df_analysis[df_analysis['is_steady'] == 1])
+ steady_ratio = (steady_data / total_data * 100) if total_data > 0 else 0
+
+ stats_cols = st.columns(3)
+ stats_cols[0].metric("鎬绘暟鎹噺", total_data)
+ stats_cols[1].metric("绋虫�佹暟鎹噺", steady_data)
+ stats_cols[2].metric("绋虫�佹暟鎹瘮渚�", f"{steady_ratio:.1f}%")
# --- 鍘熷鏁版嵁瓒嬪娍鍥� ---
st.subheader("馃搱 鍘熷鏁版嵁瓒嬪娍鍥�")
@@ -440,8 +663,16 @@
st.warning(f"鏁版嵁涓己灏戜互涓嬬壒寰�: {', '.join(missing_features)}")
else:
# 鍑嗗鏁版嵁
- X = df_analysis[st.session_state['mr_selected_features']]
- y = df_analysis['绫抽噸']
+ # 鏍规嵁閰嶇疆鍐冲畾鏄惁鍙娇鐢ㄧǔ鎬佹暟鎹�
+ use_steady_data = st.session_state.get('mr_use_steady_data', True)
+ if use_steady_data:
+ df_filtered = df_analysis[df_analysis['is_steady'] == 1]
+ st.info(f"宸茶繃婊ら潪绋虫�佹暟鎹紝浣跨敤 {len(df_filtered)} 鏉$ǔ鎬佹暟鎹繘琛岃缁�")
+ else:
+ df_filtered = df_analysis.copy()
+
+ X = df_filtered[st.session_state['mr_selected_features']]
+ y = df_filtered['绫抽噸']
# 娓呯悊鏁版嵁涓殑NaN鍊�
combined = pd.concat([X, y], axis=1)
@@ -454,7 +685,7 @@
# 閲嶆柊鍒嗙X鍜寉
X_clean = combined_clean[st.session_state['mr_selected_features']]
y_clean = combined_clean['绫抽噸']
-
+
# 鍒嗗壊璁粌闆嗗拰娴嬭瘯闆�
X_train, X_test, y_train, y_test = train_test_split(X_clean, y_clean, test_size=0.2, random_state=42)
@@ -580,37 +811,42 @@
})
st.dataframe(coef_df, use_container_width=True)
- # --- 棰勬祴鍔熻兘 ---
- st.subheader("馃敭 绫抽噸棰勬祴")
+ # --- 妯″瀷淇濆瓨鍔熻兘 ---
+ st.subheader("馃捑 妯″瀷淇濆瓨")
- # 鍒涘缓棰勬祴琛ㄥ崟
- st.write("杈撳叆鐗瑰緛鍊艰繘琛岀背閲嶉娴�:")
- predict_cols = st.columns(2)
- input_features = {}
+ # 鍒涘缓妯″瀷淇濆瓨琛ㄥ崟
+ st.write("淇濆瓨璁粌濂界殑妯″瀷鏉冮噸:")
+ model_name = st.text_input(
+ "妯″瀷鍚嶇О",
+ value=f"linear_regression_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
+ help="璇疯緭鍏ユā鍨嬪悕绉帮紝妯″瀷灏嗕繚瀛樹负璇ュ悕绉扮殑.joblib鏂囦欢"
+ )
- for i, feature in enumerate(st.session_state['mr_selected_features']):
- with predict_cols[i % 2]:
- # 鑾峰彇鐗瑰緛鐨勭粺璁′俊鎭�
- min_val = df_analysis[feature].min()
- max_val = df_analysis[feature].max()
- mean_val = df_analysis[feature].mean()
-
- input_features[feature] = st.number_input(
- f"{feature}",
- key=f"pred_{feature}",
- value=float(mean_val),
- min_value=float(min_val),
- max_value=float(max_val),
- step=0.1
- )
-
- if st.button("棰勬祴绫抽噸"):
- # 鍑嗗棰勬祴鏁版嵁
- input_data = [[input_features[feature] for feature in st.session_state['mr_selected_features']]]
- # 棰勬祴
- predicted_weight = model.predict(input_data)[0]
- # 鏄剧ず棰勬祴缁撴灉
- st.success(f"棰勬祴绫抽噸: {predicted_weight:.4f} Kg/m")
+ if st.button("淇濆瓨妯″瀷"):
+ # 纭繚妯″瀷鐩綍瀛樺湪
+ model_dir = "saved_models"
+ os.makedirs(model_dir, exist_ok=True)
+
+ # 淇濆瓨妯″瀷
+ model_path = os.path.join(model_dir, f"{model_name}.joblib")
+ try:
+ # 淇濆瓨妯″瀷鏉冮噸鍜岀浉鍏充俊鎭�
+ model_info = {
+ 'model': model,
+ 'features': st.session_state['mr_selected_features'],
+ 'scaler': None, # 绾挎�у洖褰掍笉闇�瑕佹爣鍖栧櫒
+ 'model_type': 'linear_regression',
+ 'created_at': datetime.now(),
+ 'r2_score': r2,
+ 'mse': mse,
+ 'mae': mae,
+ 'rmse': rmse,
+ 'use_steady_data': use_steady_data
+ }
+ joblib.dump(model_info, model_path)
+ st.success(f"妯″瀷宸叉垚鍔熶繚瀛樺埌: {model_path}")
+ except Exception as e:
+ st.error(f"妯″瀷淇濆瓨澶辫触: {e}")
# --- 鏁版嵁棰勮 ---
st.subheader("馃攳 鏁版嵁棰勮")
diff --git a/app/pages/metered_weight_steady_state.py b/app/pages/metered_weight_steady_state.py
new file mode 100644
index 0000000..55d6535
--- /dev/null
+++ b/app/pages/metered_weight_steady_state.py
@@ -0,0 +1,463 @@
+import streamlit as st
+import plotly.express as px
+import plotly.graph_objects as go
+import pandas as pd
+import numpy as np
+from datetime import datetime, timedelta
+from app.services.extruder_service import ExtruderService
+from app.services.data_processing_service import DataProcessingService
+
class SteadyStateDetector:
    """Detect steady-state segments in a metered-weight time series.

    A run of samples is considered "steady" when the relative fluctuation
    (rolling std / rolling mean, in percent) stays below a threshold for at
    least a minimum duration.  The detector returns both a per-row steady
    flag and a list of segment summaries (duration, mean, std, confidence).
    """

    def __init__(self):
        # Kept for parity with the other analysis pages; the detection
        # itself only uses pandas/numpy operations.
        self.data_processor = DataProcessingService()

    def preprocess_data(self, df, weight_col='metered_weight', window_size=20):
        """Fill missing weights and attach rolling statistics.

        :param df: raw data frame; must contain ``weight_col``
        :param weight_col: name of the metered-weight column
        :param window_size: rolling-window size for the statistics
        :return: a copy of ``df`` with ``smoothed_weight``, ``rolling_std``
                 and ``rolling_mean`` columns added (input is not mutated)
        """
        if df is None or df.empty:
            return df

        # Work on a copy so the caller's frame is never modified.
        df_processed = df.copy()

        # Fill gaps forward, then backward for any leading NaNs.
        df_processed[weight_col] = df_processed[weight_col].ffill().bfill()

        # No outlier replacement or smoothing is applied: the raw values
        # are used directly as the "smoothed" series.
        df_processed['smoothed_weight'] = df_processed[weight_col]

        # Rolling statistics of the raw weight series.
        df_processed['rolling_std'] = df_processed[weight_col].rolling(window=window_size, min_periods=1).std()
        df_processed['rolling_mean'] = df_processed[weight_col].rolling(window=window_size, min_periods=1).mean()

        return df_processed

    def detect_steady_state(self, df, weight_col='smoothed_weight', window_size=20, std_threshold=0.5, duration_threshold=60):
        """Identify steady-state segments.

        :param df: preprocessed frame (needs ``time`` and ``smoothed_weight``)
        :param weight_col: smoothed weight column name (kept for API
                           compatibility; the implementation reads
                           ``smoothed_weight`` directly)
        :param window_size: rolling-window size (samples)
        :param std_threshold: relative-fluctuation threshold in percent;
                              below this a row is a steady candidate
        :param duration_threshold: minimum steady duration in seconds
        :return: (frame with ``is_steady`` and ``fluctuation_range`` columns,
                  list of steady-segment dicts)
        """
        if df is None or df.empty:
            return df, []

        # Operate on a copy with a clean positional index: the i-1 and
        # start_idx:end_idx arithmetic below requires consecutive integer
        # labels, and the caller's frame must not be mutated.
        df = df.copy().reset_index(drop=True)

        # Normalize the time column and compute per-row gaps (seconds).
        df['time'] = pd.to_datetime(df['time'])
        df['time_diff'] = df['time'].diff().dt.total_seconds().fillna(0)

        # Per-row steady flag, filled in below.
        df['is_steady'] = 0

        # Windowed statistics used for the fluctuation measure.
        df['window_std'] = df['smoothed_weight'].rolling(window=window_size, min_periods=5).std()
        df['window_mean'] = df['smoothed_weight'].rolling(window=window_size, min_periods=5).mean()

        # Relative fluctuation as a percentage of the windowed mean.
        df['fluctuation_range'] = (df['window_std'] / df['window_mean']) * 100
        df['fluctuation_range'] = df['fluctuation_range'].fillna(0)

        # Initial steady candidates; rows with weight below 0.1 kg/m are
        # excluded (machine effectively idle).
        df.loc[(df['fluctuation_range'] < std_threshold) & (df['smoothed_weight'] >= 0.1), 'is_steady'] = 1

        # Collect maximal runs of steady candidates into segments.
        steady_segments = []
        current_segment = {}

        for i, row in df.iterrows():
            if row['is_steady'] == 1:
                if not current_segment:
                    # A new steady run starts here.
                    current_segment = {
                        'start_time': row['time'],
                        'start_idx': i,
                        'weights': [row['smoothed_weight']]
                    }
                else:
                    # Continue the current run.
                    current_segment['weights'].append(row['smoothed_weight'])
            else:
                if current_segment:
                    # Run ended; keep it only if it lasted long enough.
                    current_segment['end_time'] = df.loc[i - 1, 'time'] if i > 0 else df.loc[i, 'time']
                    current_segment['end_idx'] = i - 1
                    duration = (current_segment['end_time'] - current_segment['start_time']).total_seconds()

                    if duration >= duration_threshold:
                        self._finalize_segment(current_segment, duration, std_threshold)
                        steady_segments.append(current_segment)

                    current_segment = {}

        # Close a run that extends to the end of the data.
        if current_segment:
            current_segment['end_time'] = df['time'].iloc[-1]
            current_segment['end_idx'] = len(df) - 1
            duration = (current_segment['end_time'] - current_segment['start_time']).total_seconds()

            if duration >= duration_threshold:
                self._finalize_segment(current_segment, duration, std_threshold)
                steady_segments.append(current_segment)

        # Re-assert the flag over the accepted segments (rows inside
        # rejected short runs keep their candidate flag, matching the
        # original behavior).
        for segment in steady_segments:
            df.loc[segment['start_idx']:segment['end_idx'], 'is_steady'] = 1

        return df, steady_segments

    def _finalize_segment(self, segment, duration, std_threshold):
        """Attach summary statistics and a confidence score to *segment*.

        Confidence is 100 at zero fluctuation and decreases linearly as the
        fluctuation approaches the threshold, clamped to [50, 100].
        """
        weights_array = np.array(segment['weights'])
        segment['duration'] = duration
        segment['mean_weight'] = np.mean(weights_array)
        segment['std_weight'] = np.std(weights_array)
        segment['min_weight'] = np.min(weights_array)
        segment['max_weight'] = np.max(weights_array)
        segment['fluctuation_range'] = (segment['std_weight'] / segment['mean_weight']) * 100

        confidence = 100 - (segment['fluctuation_range'] / std_threshold) * 50
        segment['confidence'] = max(50, min(100, confidence))

    def get_steady_state_metrics(self, steady_segments):
        """Aggregate quantitative metrics over the detected segments.

        :param steady_segments: list of segment dicts from
                                :meth:`detect_steady_state`
        :return: metrics dict, or ``{}`` when no segments were found
        """
        if not steady_segments:
            return {}

        durations = [seg['duration'] for seg in steady_segments]

        return {
            'total_steady_segments': len(steady_segments),
            'average_steady_duration': np.mean(durations),
            'average_fluctuation_range': np.mean([seg['fluctuation_range'] for seg in steady_segments]),
            'average_confidence': np.mean([seg['confidence'] for seg in steady_segments]),
            'total_steady_duration': sum(durations)
        }
+
def show_metered_weight_steady_state():
    """Render the metered-weight steady-state analysis page.

    Lets the user pick a time range and detection parameters, runs
    SteadyStateDetector over the extruder data, and visualizes detected
    steady segments with summary metrics, a detail table and CSV export.
    """
    # Services and detector used by this page.
    extruder_service = ExtruderService()
    steady_state_detector = SteadyStateDetector()

    # Page title.
    st.title("绫抽噸绋虫�佽瘑鍒垎鏋�")

    # Initialize session-state defaults.  This must happen before the
    # keyed widgets below are instantiated so they pick these values up.
    if 'ss_start_date' not in st.session_state:
        st.session_state['ss_start_date'] = datetime.now().date() - timedelta(days=1)
    if 'ss_end_date' not in st.session_state:
        st.session_state['ss_end_date'] = datetime.now().date()
    if 'ss_quick_select' not in st.session_state:
        st.session_state['ss_quick_select'] = "鏈�杩�24灏忔椂"
    if 'ss_window_size' not in st.session_state:
        st.session_state['ss_window_size'] = 20
    if 'ss_std_threshold' not in st.session_state:
        st.session_state['ss_std_threshold'] = 1.5
    if 'ss_duration_threshold' not in st.session_state:
        st.session_state['ss_duration_threshold'] = 60

    # Quick-select handler: sync the date pickers with the chosen preset.
    # Safe to write the widget-keyed dates here because the date_input
    # widgets have not been instantiated yet on this run (st.rerun()
    # follows immediately).
    def update_dates(qs):
        st.session_state['ss_quick_select'] = qs
        today = datetime.now().date()
        if qs == "浠婂ぉ":
            st.session_state['ss_start_date'] = today
            st.session_state['ss_end_date'] = today
        elif qs == "鏈�杩�24灏忔椂":
            st.session_state['ss_start_date'] = today - timedelta(days=1)
            st.session_state['ss_end_date'] = today
        elif qs == "鏈�杩�7澶�":
            st.session_state['ss_start_date'] = today - timedelta(days=7)
            st.session_state['ss_end_date'] = today
        elif qs == "鏈�杩�30澶�":
            st.session_state['ss_start_date'] = today - timedelta(days=30)
            st.session_state['ss_end_date'] = today

    # Manual date edits switch the quick-select back to "custom".
    def on_date_change():
        st.session_state['ss_quick_select'] = "鑷畾涔�"

    # Query configuration area.
    with st.expander("馃攳 鏌ヨ閰嶇疆", expanded=True):
        # Layout: preset buttons, date pickers, run button.
        cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1])

        options = ["浠婂ぉ", "鏈�杩�24灏忔椂", "鏈�杩�7澶�", "鏈�杩�30澶�", "鑷畾涔�"]
        for i, option in enumerate(options):
            with cols[i]:
                button_type = "primary" if st.session_state['ss_quick_select'] == option else "secondary"
                if st.button(option, key=f"btn_ss_{option}", width='stretch', type=button_type):
                    update_dates(option)
                    st.rerun()

        with cols[5]:
            start_date = st.date_input(
                "寮�濮嬫棩鏈�",
                label_visibility="collapsed",
                key="ss_start_date",
                on_change=on_date_change
            )

        with cols[6]:
            end_date = st.date_input(
                "缁撴潫鏃ユ湡",
                label_visibility="collapsed",
                key="ss_end_date",
                on_change=on_date_change
            )

        with cols[7]:
            query_button = st.button("馃殌 寮�濮嬪垎鏋�", key="ss_query", width='stretch')

        # Detection parameter configuration.  The sliders are keyed into
        # session state, so no explicit value= is passed: supplying both
        # triggers Streamlit's "default value but also had its value set
        # via the Session State API" warning and the value is ignored.
        st.markdown("---")
        param_cols = st.columns(3)

        with param_cols[0]:
            st.write("鈿欙笍 **绋虫�佸弬鏁伴厤缃�**")
            st.slider(
                "婊戝姩绐楀彛澶у皬 (绉�)",
                min_value=5,
                max_value=60,
                step=5,
                key="ss_window_size",
                help="鐢ㄤ簬骞虫粦鏁版嵁鍜岃绠楃粺璁$壒寰佺殑婊戝姩绐楀彛澶у皬"
            )

        with param_cols[1]:
            st.write("馃搹 **娉㈠姩闃堝�奸厤缃�**")
            st.slider(
                "鏍囧噯宸槇鍊�",
                min_value=0.1,
                max_value=2.0,
                step=0.1,
                key="ss_std_threshold",
                help="绫抽噸娉㈠姩鐨勬爣鍑嗗樊闃堝�硷紝浣庝簬姝ゅ�艰涓虹ǔ鎬�"
            )

        with param_cols[2]:
            st.write("鈴憋笍 **鎸佺画鏃堕棿閰嶇疆**")
            st.slider(
                "绋虫�佹寔缁椂闂� (绉�)",
                min_value=30,
                max_value=300,
                step=10,
                key="ss_duration_threshold",
                help="绋虫�佹寔缁殑鏈�灏忔椂闂达紝浣庝簬姝ゅ�间笉瑙嗕负绋虫�佹"
            )

    # Expand the selected dates to full-day datetime bounds.
    start_dt = datetime.combine(start_date, datetime.min.time())
    end_dt = datetime.combine(end_date, datetime.max.time())

    # Fetch data when the run button is pressed.
    if query_button:
        with st.spinner("姝e湪鑾峰彇鏁版嵁..."):
            df_extruder = extruder_service.get_extruder_data(start_dt, end_dt)

            if df_extruder is None or df_extruder.empty:
                st.warning("鎵�閫夋椂闂存鍐呮湭鎵惧埌浠讳綍鏁版嵁锛岃灏濊瘯璋冩暣鏌ヨ鏉′欢銆�")
                return

            # Cache the raw data so parameter tweaks re-analyze without
            # re-querying the source.
            st.session_state['cached_extruder_ss'] = df_extruder
            st.session_state['last_query_start_ss'] = start_dt
            st.session_state['last_query_end_ss'] = end_dt

    # Analyze and render whenever cached data is available.
    if 'cached_extruder_ss' in st.session_state:
        with st.spinner("姝e湪鍒嗘瀽鏁版嵁..."):
            df_extruder = st.session_state['cached_extruder_ss']

            # Preprocess, then run steady-state detection with the current
            # slider values from session state.
            df_processed = steady_state_detector.preprocess_data(df_extruder, window_size=st.session_state['ss_window_size'])

            df_with_steady, steady_segments = steady_state_detector.detect_steady_state(
                df_processed,
                window_size=st.session_state['ss_window_size'],
                std_threshold=st.session_state['ss_std_threshold'],
                duration_threshold=st.session_state['ss_duration_threshold']
            )

            # Aggregate segment metrics for the KPI row below.
            steady_metrics = steady_state_detector.get_steady_state_metrics(steady_segments)

            # Coerce types and drop rows that failed conversion.
            df_with_steady['time'] = pd.to_datetime(df_with_steady['time'])
            df_with_steady['metered_weight'] = pd.to_numeric(df_with_steady['metered_weight'], errors='coerce')
            df_with_steady['smoothed_weight'] = pd.to_numeric(df_with_steady['smoothed_weight'], errors='coerce')
            df_with_steady = df_with_steady.dropna(subset=['time', 'metered_weight', 'smoothed_weight'])

            # Visualization area.
            st.subheader("馃搳 绫抽噸绋虫�佽瘑鍒粨鏋�")

            fig = go.Figure()

            # Raw weight trace (faint, for context).
            fig.add_trace(go.Scatter(
                x=df_with_steady['time'],
                y=df_with_steady['metered_weight'],
                name='鍘熷绫抽噸',
                mode='lines',
                opacity=0.6,
                line=dict(color='lightgray', width=1)
            ))

            # Smoothed weight trace.
            fig.add_trace(go.Scatter(
                x=df_with_steady['time'],
                y=df_with_steady['smoothed_weight'],
                name='骞虫粦绫抽噸',
                mode='lines',
                line=dict(color='blue', width=2)
            ))

            # Shade each detected steady segment.
            for segment in steady_segments:
                fig.add_shape(
                    type="rect",
                    x0=segment['start_time'],
                    y0=segment['min_weight'] * 0.95,
                    x1=segment['end_time'],
                    y1=segment['max_weight'] * 1.05,
                    fillcolor="rgba(0, 255, 0, 0.2)",
                    line=dict(color="rgba(0, 200, 0, 0.5)", width=1),
                    name="绋虫�佸尯鍩�"
                )

            fig.update_layout(
                title="绫抽噸绋虫�佽瘑鍒粨鏋�",
                xaxis=dict(title="鏃堕棿"),
                yaxis=dict(title="绫抽噸 (Kg/m)"),
                legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
                height=600
            )

            st.plotly_chart(fig, use_container_width=True)

            # KPI row with the aggregated steady metrics.
            st.subheader("馃搱 绋虫�佺粺璁℃寚鏍�")
            metrics_cols = st.columns(5)

            with metrics_cols[0]:
                st.metric(
                    "绋虫�佹鎬绘暟",
                    steady_metrics.get('total_steady_segments', 0),
                    help="璇嗗埆鍒扮殑绋虫�佹鏁伴噺"
                )

            with metrics_cols[1]:
                st.metric(
                    "骞冲潎绋虫�佹椂闀�",
                    f"{steady_metrics.get('average_steady_duration', 0):.2f} 绉�",
                    help="鎵�鏈夌ǔ鎬佹鐨勫钩鍧囨寔缁椂闂�"
                )

            with metrics_cols[2]:
                st.metric(
                    "骞冲潎娉㈠姩鑼冨洿",
                    f"{steady_metrics.get('average_fluctuation_range', 0):.2f}%",
                    help="绋虫�佹鍐呯背閲嶇殑骞冲潎娉㈠姩鑼冨洿锛堢浉瀵逛簬鍧囧�肩殑鐧惧垎姣旓級"
                )

            with metrics_cols[3]:
                st.metric(
                    "骞冲潎缃俊搴�",
                    f"{steady_metrics.get('average_confidence', 0):.1f}%",
                    help="绋虫�佽瘑鍒粨鏋滅殑骞冲潎缃俊搴�"
                )

            with metrics_cols[4]:
                st.metric(
                    "鎬荤ǔ鎬佹椂闀�",
                    f"{steady_metrics.get('total_steady_duration', 0)/60:.2f} 鍒嗛挓",
                    help="鎵�鏈夌ǔ鎬佹鐨勬�绘寔缁椂闂�"
                )

            # Per-segment detail table and CSV export.
            st.subheader("馃搵 绋虫�佹璇︽儏")
            if steady_segments:
                steady_df = pd.DataFrame(steady_segments)

                # Columns shown to the user, formatted for readability.
                display_cols = ['start_time', 'end_time', 'duration', 'mean_weight', 'std_weight', 'fluctuation_range', 'confidence']
                steady_df_display = steady_df[display_cols].copy()

                steady_df_display['duration'] = steady_df_display['duration'].apply(lambda x: f"{x:.1f} 绉�")
                steady_df_display['mean_weight'] = steady_df_display['mean_weight'].apply(lambda x: f"{x:.4f} Kg/m")
                steady_df_display['std_weight'] = steady_df_display['std_weight'].apply(lambda x: f"{x:.4f} Kg/m")
                steady_df_display['fluctuation_range'] = steady_df_display['fluctuation_range'].apply(lambda x: f"{x:.2f}%")
                steady_df_display['confidence'] = steady_df_display['confidence'].apply(lambda x: f"{x:.1f}%")

                st.dataframe(steady_df_display, use_container_width=True)

                st.subheader("馃捑 瀵煎嚭鏁版嵁")

                export_df = df_with_steady[['time', 'metered_weight', 'smoothed_weight', 'is_steady']].copy()
                export_csv = export_df.to_csv(index=False)

                st.download_button(
                    label="瀵煎嚭绋虫�佽瘑鍒粨鏋� (CSV)",
                    data=export_csv,
                    file_name=f"metered_weight_steady_state_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv",
                    mime="text/csv",
                    help="鐐瑰嚮鎸夐挳瀵煎嚭绫抽噸绋虫�佽瘑鍒粨鏋滄暟鎹�"
                )
            else:
                st.info("鏈瘑鍒埌浠讳綍绋虫�佹锛岃灏濊瘯璋冩暣绋虫�佸弬鏁伴厤缃��")

            # Raw data preview.
            st.subheader("馃攳 鏁版嵁棰勮")
            st.dataframe(df_with_steady[['time', 'metered_weight', 'smoothed_weight', 'is_steady', 'fluctuation_range']].head(20), use_container_width=True)
    else:
        # No cached data yet: prompt the user to run a query.
        st.info("璇烽�夋嫨鏃堕棿鑼冨洿骞剁偣鍑�'寮�濮嬪垎鏋�'鎸夐挳鑾峰彇鏁版嵁銆�")
diff --git a/app/services/parameter_adjustment_service.py b/app/services/parameter_adjustment_service.py
new file mode 100644
index 0000000..a791835
--- /dev/null
+++ b/app/services/parameter_adjustment_service.py
@@ -0,0 +1,495 @@
+import pandas as pd
+import numpy as np
+
+# 灏濊瘯瀵煎叆torch锛屽鏋滃け璐ュ垯鏍囪涓轰笉鍙敤
+try:
+ import torch
+except ImportError:
+ torch = None
+
class ParameterAdjustmentAdvisor:
    """Extruder parameter adjustment advisor.

    Given the deviation between the real-time metered weight and the
    standard weight, recommends adjustments to the screw speed and the
    process main speed, optionally refined by iterating against a
    predictive model.
    """

    def __init__(self):
        # Relation coefficients: percent of parameter change recommended
        # per 1% of weight deviation.  Tune from production data.
        self.default_coefficients = {
            'screw_speed': 0.1,          # 1% weight deviation -> 0.1% screw-speed change
            'process_main_speed': -0.1   # 1% weight deviation -> -0.1% main-speed change
        }

        # Default parameter limits (adjust to the actual equipment).
        self.default_limits = {
            'screw_speed': {'min': 30, 'max': 500},
            'process_main_speed': {'min': 0, 'max': 200}
        }

        # Maximum single-shot adjustment magnitude (percent).
        self.max_adjustment_percentage = {
            'screw_speed': 15.0,         # at most 15% screw-speed change per step
            'process_main_speed': 10.0   # at most 10% main-speed change per step
        }

    def calculate_adjustment(self, real_time_weight, standard_weight, upper_limit, lower_limit,
                             current_screw_speed, current_process_speed,
                             current_screw_temperature=None, current_rear_barrel_temperature=None,
                             current_front_barrel_temperature=None, current_head_temperature=None,
                             coefficients=None, limits=None):
        """Compute a single-step parameter adjustment recommendation.

        :param real_time_weight: measured weight (Kg/m)
        :param standard_weight: target weight (Kg/m)
        :param upper_limit: weight upper limit (Kg/m)
        :param lower_limit: weight lower limit (Kg/m)
        :param current_screw_speed: current screw speed (rpm)
        :param current_process_speed: current process main speed (m/min)
        :param current_screw_temperature: optional screw temperature (C)
        :param current_rear_barrel_temperature: optional rear barrel temp (C)
        :param current_front_barrel_temperature: optional front barrel temp (C)
        :param current_head_temperature: optional head temperature (C)
        :param coefficients: optional override for the relation coefficients
        :param limits: optional override for the parameter limits
        :return: dict with status, deviations, old/new parameter values,
                 adjustment amounts/percentages and a recommendation text
        """
        # Fall back to defaults when no overrides are supplied.
        coeffs = coefficients if coefficients else self.default_coefficients
        param_limits = limits if limits else self.default_limits

        # Weight deviation, absolute and relative (guard against /0).
        weight_deviation = real_time_weight - standard_weight
        deviation_percentage = (weight_deviation / standard_weight) * 100 if standard_weight != 0 else 0

        # Classify against the configured limits.
        if real_time_weight > upper_limit:
            status = "瓒呬笂闄�"
        elif real_time_weight < lower_limit:
            status = "瓒呬笅闄�"
        else:
            status = "姝e父鑼冨洿"

        # Temperature influence factor: hotter melt flows better, so the
        # same screw speed extrudes more material.  Factor stays 1.0 when
        # temperatures are not supplied.
        temperature_factor = 1.0

        if current_screw_temperature is not None and current_head_temperature is not None:
            # Compare the screw/head average against a reference point.
            reference_temperature = 80.0  # reference temperature; tune to the actual process
            avg_temperature = (current_screw_temperature + current_head_temperature) / 2
            temperature_deviation = avg_temperature - reference_temperature

            # Every 10 C of deviation shifts the factor by 5%, clamped
            # to [0.8, 1.2].
            temperature_factor = 1.0 + (temperature_deviation / 10.0) * 0.05
            temperature_factor = max(0.8, min(1.2, temperature_factor))

        # Raw adjustment percentages: deviation * coefficient * temp factor,
        # negated so a high weight lowers the screw speed.
        screw_speed_adjustment_percent = -deviation_percentage * coeffs['screw_speed'] * temperature_factor
        process_speed_adjustment_percent = -deviation_percentage * coeffs['process_main_speed'] * temperature_factor

        # Clamp to the per-step maximum adjustment magnitude.
        screw_speed_adjustment_percent = max(
            -self.max_adjustment_percentage['screw_speed'],
            min(self.max_adjustment_percentage['screw_speed'],
                screw_speed_adjustment_percent)
        )

        process_speed_adjustment_percent = max(
            -self.max_adjustment_percentage['process_main_speed'],
            min(self.max_adjustment_percentage['process_main_speed'],
                process_speed_adjustment_percent)
        )

        # Convert percentages into absolute adjustments and new values.
        screw_speed_adjustment = (screw_speed_adjustment_percent / 100) * current_screw_speed
        process_speed_adjustment = (process_speed_adjustment_percent / 100) * current_process_speed

        new_screw_speed = current_screw_speed + screw_speed_adjustment
        new_process_speed = current_process_speed + process_speed_adjustment

        # Keep the new values inside the equipment limits.
        new_screw_speed = max(param_limits['screw_speed']['min'],
                              min(param_limits['screw_speed']['max'], new_screw_speed))

        new_process_speed = max(param_limits['process_main_speed']['min'],
                                min(param_limits['process_main_speed']['max'], new_process_speed))

        # Recompute the effective percentages after clamping (guard /0).
        screw_speed_adjust_percent = ((new_screw_speed - current_screw_speed) / current_screw_speed) * 100 if current_screw_speed != 0 else 0
        process_speed_adjust_percent = ((new_process_speed - current_process_speed) / current_process_speed) * 100 if current_process_speed != 0 else 0

        # Always produce a recommendation, even inside the limits: the goal
        # is to track the standard weight as closely as possible.
        recommendation = self._generate_recommendation(
            deviation_percentage,
            screw_speed_adjust_percent,
            new_screw_speed,
            process_speed_adjust_percent,
            new_process_speed
        )

        return {
            'status': status,
            'real_time_weight': real_time_weight,
            'standard_weight': standard_weight,
            'upper_limit': upper_limit,
            'lower_limit': lower_limit,
            'deviation': weight_deviation,
            'deviation_percentage': deviation_percentage,
            'current_screw_speed': current_screw_speed,
            'current_process_speed': current_process_speed,
            'new_screw_speed': new_screw_speed,
            'new_process_speed': new_process_speed,
            'screw_speed_adjustment': screw_speed_adjustment,
            'process_speed_adjustment': process_speed_adjustment,
            'screw_speed_adjust_percent': screw_speed_adjust_percent,
            'process_speed_adjust_percent': process_speed_adjust_percent,
            'recommendation': recommendation
        }

    def _generate_recommendation(self, deviation_percentage, screw_speed_adjust_percent,
                                 new_screw_speed, process_speed_adjust_percent, new_process_speed):
        """Build the human-readable recommendation text.

        :param deviation_percentage: weight deviation in percent
        :param screw_speed_adjust_percent: screw-speed change in percent
        :param new_screw_speed: recommended screw speed (rpm)
        :param process_speed_adjust_percent: main-speed change in percent
        :param new_process_speed: recommended main speed (m/min)
        :return: multi-line recommendation string
        """
        abs_deviation = abs(deviation_percentage)

        # Adjustment detail shared by the over-/under-weight branches.
        detail = f"1. 灏嗚灪鏉嗚浆閫熶粠褰撳墠鍊艰皟鏁磋嚦 {new_screw_speed:.1f} rpm " \
                 f"( {'闄嶄綆' if screw_speed_adjust_percent < 0 else '鎻愰珮'} {abs(screw_speed_adjust_percent):.2f}% )\n" \
                 f"2. 灏嗘祦绋嬩富閫熶粠褰撳墠鍊艰皟鏁磋嚦 {new_process_speed:.1f} m/min " \
                 f"( {'闄嶄綆' if process_speed_adjust_percent < 0 else '鎻愰珮'} {abs(process_speed_adjust_percent):.2f}% )"

        if abs_deviation < 0.5:
            # Deviation is tiny: suggest a fine-tune towards the standard.
            return f"绫抽噸鍋忓樊寰堝皬 ({abs_deviation:.2f}%)锛屾帴杩戞爣鍑嗗�笺�俓n" \
                   f"寤鸿寰皟浠ヨ繘涓�姝ユ帴杩戞爣鍑嗗�硷細\n" \
                   f"1. 灏嗚灪鏉嗚浆閫熻皟鏁磋嚦 {new_screw_speed:.1f} rpm " \
                   f"( {'闄嶄綆' if screw_speed_adjust_percent < 0 else '鎻愰珮'} {abs(screw_speed_adjust_percent):.2f}% )\n" \
                   f"2. 灏嗘祦绋嬩富閫熻皟鏁磋嚦 {new_process_speed:.1f} m/min " \
                   f"( {'闄嶄綆' if process_speed_adjust_percent < 0 else '鎻愰珮'} {abs(process_speed_adjust_percent):.2f}% )"
        if deviation_percentage > 0:
            # Weight too high: recommend lowering the output.
            return f"绫抽噸鍋忓樊 {abs_deviation:.2f}%锛堝亸楂橈級锛屽缓璁皟鏁达細\n" + detail
        # Weight too low: recommend raising the output.
        return f"绫抽噸鍋忓樊 {abs_deviation:.2f}%锛堝亸浣庯級锛屽缓璁皟鏁达細\n" + detail

    def predict_weight(self, model_info, screw_speed, head_pressure, process_speed,
                       screw_temperature, rear_barrel_temperature,
                       front_barrel_temperature, head_temperature):
        """Predict the metered weight with a previously trained model.

        :param model_info: dict holding the model, its feature list, type
                           and (when applicable) scalers / sequence length
        :param screw_speed: screw speed (rpm)
        :param head_pressure: head pressure (bar)
        :param process_speed: process main speed (m/min)
        :param screw_temperature: screw temperature (C)
        :param rear_barrel_temperature: rear barrel temperature (C)
        :param front_barrel_temperature: front barrel temperature (C)
        :param head_temperature: head temperature (C)
        :return: predicted weight (Kg/m) or None on failure
        """
        try:
            # Feature names the model was trained with (order matters).
            required_features = model_info['features']

            # Map the inputs onto the (Chinese) feature names used at
            # training time.
            input_features = {
                '铻烘潌杞��': screw_speed,
                '鏈哄ご鍘嬪姏': head_pressure,
                '娴佺▼涓婚��': process_speed,
                '铻烘潌娓╁害': screw_temperature,
                '鍚庢満绛掓俯搴�': rear_barrel_temperature,
                '鍓嶆満绛掓俯搴�': front_barrel_temperature,
                '鏈哄ご娓╁害': head_temperature
            }

            # Single-row frame in the model's feature order.
            input_df = pd.DataFrame([input_features])[required_features]

            predicted_weight = None
            model = model_info['model']

            # Dispatch on model type: deep-learning, scaled sklearn, plain.
            if model_info['model_type'] in ['LSTM', 'GRU', 'BiLSTM']:
                # Deep-learning models require torch.
                if torch is None:
                    print("PyTorch not available, cannot predict with deep learning models")
                    return None

                # Scale inputs with the training-time scalers.
                scaler_X = model_info['scaler_X']
                scaler_y = model_info['scaler_y']
                input_scaled = scaler_X.transform(input_df)

                # Repeat the single sample into a constant sequence of the
                # length the recurrent model expects.
                sequence_length = model_info['sequence_length']
                input_seq = np.tile(input_scaled, (sequence_length, 1)).reshape(1, sequence_length, -1)

                device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
                input_tensor = torch.tensor(input_seq, dtype=torch.float32).to(device)

                model.eval()
                with torch.no_grad():
                    y_pred_scaled_tensor = model(input_tensor)
                    y_pred_scaled = y_pred_scaled_tensor.cpu().numpy().ravel()[0]

                # Undo the target scaling.
                predicted_weight = scaler_y.inverse_transform(np.array([[y_pred_scaled]]))[0][0]

            elif model_info['model_type'] in ['SVR', 'MLP', 'GradientBoosting']:
                # Sklearn models trained on scaled features/targets.
                scaler_X = model_info['scaler_X']
                scaler_y = model_info['scaler_y']
                input_scaled = scaler_X.transform(input_df)

                y_pred_scaled = model.predict(input_scaled)[0]
                predicted_weight = scaler_y.inverse_transform(np.array([[y_pred_scaled]]))[0][0]

            else:
                # Unscaled models (random forest, linear regression, ...).
                predicted_weight = model.predict(input_df)[0]

            return predicted_weight
        except Exception as e:
            print(f"妯″瀷棰勬祴澶辫触: {e}")
            import traceback
            traceback.print_exc()
            return None

    def iterative_adjustment(self, initial_params, model_info, max_iterations=5, tolerance=0.5):
        """Iterate adjust -> predict until the predicted weight converges.

        :param initial_params: dict with real_time_weight, standard_weight,
               upper_limit, lower_limit, current_screw_speed,
               current_process_speed, the four current_*_temperature values
               and current_head_pressure
        :param model_info: model info dict for :meth:`predict_weight`
        :param max_iterations: maximum number of adjust/predict cycles
        :param tolerance: acceptable predicted deviation in percent
        :return: dict with final_result, iteration_history, converged,
                 total_iterations and initial_params
        """
        iteration_history = []
        converged = False

        # Keep the untouched inputs for the final summary.
        original_params = initial_params.copy()
        current_params = initial_params.copy()

        for i in range(max_iterations):
            # One adjustment step from the current (possibly simulated) state.
            adjustment_result = self.calculate_adjustment(
                real_time_weight=current_params['real_time_weight'],
                standard_weight=current_params['standard_weight'],
                upper_limit=current_params['upper_limit'],
                lower_limit=current_params['lower_limit'],
                current_screw_speed=current_params['current_screw_speed'],
                current_process_speed=current_params['current_process_speed'],
                current_screw_temperature=current_params['current_screw_temperature'],
                current_rear_barrel_temperature=current_params['current_rear_barrel_temperature'],
                current_front_barrel_temperature=current_params['current_front_barrel_temperature'],
                current_head_temperature=current_params['current_head_temperature']
            )

            # Predict the weight the adjusted parameters would produce.
            predicted_weight = self.predict_weight(
                model_info=model_info,
                screw_speed=adjustment_result['new_screw_speed'],
                head_pressure=current_params['current_head_pressure'],
                process_speed=adjustment_result['new_process_speed'],
                screw_temperature=current_params['current_screw_temperature'],
                rear_barrel_temperature=current_params['current_rear_barrel_temperature'],
                front_barrel_temperature=current_params['current_front_barrel_temperature'],
                head_temperature=current_params['current_head_temperature']
            )

            # Abort the loop when the model cannot predict.
            if predicted_weight is None:
                print(f"妯″瀷棰勬祴澶辫触锛岀粓姝㈣凯浠h皟鏁�")
                iteration_history.append({
                    'iteration': i + 1,
                    'current_screw_speed': current_params['current_screw_speed'],
                    'current_process_speed': current_params['current_process_speed'],
                    'adjusted_screw_speed': adjustment_result['new_screw_speed'],
                    'adjusted_process_speed': adjustment_result['new_process_speed'],
                    'predicted_weight': None,
                    'predicted_deviation': None,
                    'predicted_deviation_percent': None,
                    'screw_speed_adjustment': adjustment_result['screw_speed_adjustment'],
                    'process_speed_adjustment': adjustment_result['process_speed_adjustment']
                })
                break

            # Predicted deviation from the standard weight.
            predicted_deviation = predicted_weight - current_params['standard_weight']
            predicted_deviation_percent = (predicted_deviation / current_params['standard_weight']) * 100

            iteration_history.append({
                'iteration': i + 1,
                'current_screw_speed': current_params['current_screw_speed'],
                'current_process_speed': current_params['current_process_speed'],
                'adjusted_screw_speed': adjustment_result['new_screw_speed'],
                'adjusted_process_speed': adjustment_result['new_process_speed'],
                'predicted_weight': predicted_weight,
                'predicted_deviation': predicted_deviation,
                'predicted_deviation_percent': predicted_deviation_percent,
                'screw_speed_adjustment': adjustment_result['screw_speed_adjustment'],
                'process_speed_adjustment': adjustment_result['process_speed_adjustment']
            })

            # Stop when the predicted deviation is within tolerance.
            if abs(predicted_deviation_percent) <= tolerance:
                converged = True
                break

            # Feed the prediction back in as the next iteration's state.
            current_params.update({
                'real_time_weight': predicted_weight,
                'current_screw_speed': adjustment_result['new_screw_speed'],
                'current_process_speed': adjustment_result['new_process_speed']
            })

        # Final recommended parameters come from the last iteration.
        final_screw_speed = iteration_history[-1]['adjusted_screw_speed']
        final_process_speed = iteration_history[-1]['adjusted_process_speed']
        final_predicted_weight = iteration_history[-1]['predicted_weight']

        # BUGFIX: when the last prediction failed, predicted_weight is
        # None and the arithmetic below would raise TypeError.  Fall back
        # to the last known real-time weight for the summary.
        if final_predicted_weight is None:
            final_predicted_weight = current_params['real_time_weight']

        # Summary expressed relative to the ORIGINAL (pre-iteration) state.
        final_result = {
            'status': '姝e父鑼冨洿' if abs((final_predicted_weight - original_params['standard_weight']) / original_params['standard_weight'] * 100) <= tolerance else '瓒呬笂闄�' if final_predicted_weight > original_params['upper_limit'] else '瓒呬笅闄�',
            'real_time_weight': original_params['real_time_weight'],  # initial measured weight
            'standard_weight': original_params['standard_weight'],
            'upper_limit': original_params['upper_limit'],
            'lower_limit': original_params['lower_limit'],
            'deviation': original_params['real_time_weight'] - original_params['standard_weight'],  # initial deviation
            'deviation_percentage': (original_params['real_time_weight'] - original_params['standard_weight']) / original_params['standard_weight'] * 100,  # initial deviation %
            'current_screw_speed': original_params['current_screw_speed'],  # initial screw speed
            'current_process_speed': original_params['current_process_speed'],  # initial main speed
            'new_screw_speed': final_screw_speed,  # final recommended screw speed
            'new_process_speed': final_process_speed,  # final recommended main speed
            'screw_speed_adjustment': final_screw_speed - original_params['current_screw_speed'],  # total change
            'process_speed_adjustment': final_process_speed - original_params['current_process_speed'],  # total change
            'screw_speed_adjust_percent': ((final_screw_speed - original_params['current_screw_speed']) / original_params['current_screw_speed']) * 100 if original_params['current_screw_speed'] != 0 else 0,
            'process_speed_adjust_percent': ((final_process_speed - original_params['current_process_speed']) / original_params['current_process_speed']) * 100 if original_params['current_process_speed'] != 0 else 0,
            'predicted_weight': final_predicted_weight  # final predicted weight
        }

        # Recommendation text based on the final predicted deviation.
        final_deviation_percent = (final_predicted_weight - original_params['standard_weight']) / original_params['standard_weight'] * 100
        final_result['recommendation'] = self._generate_recommendation(
            final_deviation_percent,
            final_result['screw_speed_adjust_percent'],
            final_screw_speed,
            final_result['process_speed_adjust_percent'],
            final_process_speed
        )

        # Carry the temperature inputs through for display/prediction.
        final_result['current_screw_temperature'] = original_params['current_screw_temperature']
        final_result['current_rear_barrel_temperature'] = original_params['current_rear_barrel_temperature']
        final_result['current_front_barrel_temperature'] = original_params['current_front_barrel_temperature']
        final_result['current_head_temperature'] = original_params['current_head_temperature']

        # Final predicted deviation, absolute and relative.
        final_predicted_deviation = final_predicted_weight - original_params['standard_weight']
        final_predicted_deviation_percent = (final_predicted_deviation / original_params['standard_weight']) * 100
        final_result['final_predicted_deviation'] = final_predicted_deviation
        final_result['final_predicted_deviation_percent'] = final_predicted_deviation_percent

        return {
            'final_result': final_result,
            'iteration_history': iteration_history,
            'converged': converged,
            'total_iterations': len(iteration_history),
            'initial_params': original_params  # kept for result presentation
        }

    def analyze_historical_adjustments(self, df):
        """Optimize the relation coefficients from historical adjustments.

        :param df: DataFrame with columns real_time_weight, standard_weight,
               current_screw_speed, current_process_speed,
               adjusted_screw_speed, adjusted_process_speed, result_weight
        :return: optimized coefficient dict (defaults on empty/failed input)
        """
        if df is None or df.empty:
            return self.default_coefficients

        try:
            # Work on a copy so the caller's frame is not mutated.
            df = df.copy()

            # Weight deviation, absolute and relative.
            df['weight_deviation'] = df['real_time_weight'] - df['standard_weight']
            df['deviation_percentage'] = (df['weight_deviation'] / df['standard_weight']) * 100

            # Historical adjustment percentages.
            df['screw_speed_adjust_percent'] = ((df['adjusted_screw_speed'] - df['current_screw_speed']) / df['current_screw_speed']) * 100
            df['process_speed_adjust_percent'] = ((df['adjusted_process_speed'] - df['current_process_speed']) / df['current_process_speed']) * 100

            # Observed effect of the adjustments.
            df['adjustment_effect'] = df['result_weight'] - df['real_time_weight']

            # Coefficient estimate: total adjustment % per total deviation %
            # (fall back to the defaults when the denominator is zero).
            screw_speed_coeff = df['screw_speed_adjust_percent'].sum() / df['deviation_percentage'].sum() if df['deviation_percentage'].sum() != 0 else self.default_coefficients['screw_speed']
            process_speed_coeff = df['process_speed_adjust_percent'].sum() / df['deviation_percentage'].sum() if df['deviation_percentage'].sum() != 0 else self.default_coefficients['process_main_speed']

            # Force the expected signs: screw speed positively, main speed
            # negatively correlated with weight.
            optimized_coeffs = {
                'screw_speed': abs(screw_speed_coeff),
                'process_main_speed': -abs(process_speed_coeff)
            }

            return optimized_coeffs
        except Exception as e:
            print(f"鍒嗘瀽鍘嗗彶璋冩暣鏁版嵁澶辫触: {e}")
            return self.default_coefficients
diff --git a/dashboard.py b/dashboard.py
index dc7b0c6..77f0e99 100644
--- a/dashboard.py
+++ b/dashboard.py
@@ -7,6 +7,11 @@
from app.pages.metered_weight_correlation import show_metered_weight_correlation
from app.pages.metered_weight_regression import show_metered_weight_regression
from app.pages.metered_weight_advanced import show_metered_weight_advanced
+from app.pages.metered_weight_deep_learning import show_metered_weight_deep_learning
+from app.pages.metered_weight_steady_state import show_metered_weight_steady_state
+from app.pages.metered_weight_prediction import show_metered_weight_prediction
+from app.pages.metered_weight_forecast import show_metered_weight_forecast
+from app.pages.extruder_parameter_adjustment import show_extruder_parameter_adjustment
# 璁剧疆椤甸潰閰嶇疆
st.set_page_config(
@@ -72,6 +77,41 @@
url_path="metered_weight_advanced"
)
# Streamlit page registrations for the new metered-weight analysis views.
# Each st.Page binds a render callback to a sidebar title, an icon and a
# stable URL path; the page objects are wired into st.navigation(...) below.

# Deep-learning based metered-weight prediction page.
metered_weight_deep_learning_page = st.Page(
    show_metered_weight_deep_learning,
    title="绫抽噸娣卞害瀛︿範棰勬祴",
    icon="馃",
    url_path="metered_weight_deep_learning"
)

# Steady-state identification page for the metered-weight signal.
metered_weight_steady_state_page = st.Page(
    show_metered_weight_steady_state,
    title="绫抽噸绋虫�佽瘑鍒�",
    icon="鈿栵笍",
    url_path="metered_weight_steady_state"
)

# Unified metered-weight prediction page.
metered_weight_prediction_page = st.Page(
    show_metered_weight_prediction,
    title="绫抽噸缁熶竴棰勬祴",
    icon="馃敭",
    url_path="metered_weight_prediction"
)

# Metered-weight forecast / trend analysis page.
metered_weight_forecast_page = st.Page(
    show_metered_weight_forecast,
    title="绫抽噸棰勬祴鍒嗘瀽",
    icon="馃搱",
    url_path="metered_weight_forecast"
)

# Extruder parameter-adjustment advisory page.
extruder_parameter_adjustment_page = st.Page(
    show_extruder_parameter_adjustment,
    title="鎸ゅ嚭鏈哄弬鏁拌皟鑺�",
    icon="鈿欙笍",
    url_path="extruder_parameter_adjustment"
)
+
# 渚ц竟鏍忛〉鑴氫俊鎭�
def show_footer():
st.sidebar.markdown("---")
@@ -79,7 +119,7 @@
# 瀵艰埅閰嶇疆
pg = st.navigation({
- "缁煎悎鍒嗘瀽": [comprehensive_page, metered_weight_page, metered_weight_correlation_page, metered_weight_regression_page, metered_weight_advanced_page],
+ "缁煎悎鍒嗘瀽": [comprehensive_page, metered_weight_page, metered_weight_correlation_page, metered_weight_regression_page, metered_weight_advanced_page, metered_weight_deep_learning_page, metered_weight_steady_state_page, metered_weight_prediction_page, metered_weight_forecast_page, extruder_parameter_adjustment_page],
"鍒嗛」鍒嗘瀽": [sorting_page, extruder_page, main_process_page]
})
--
Gitblit v1.9.3