feat(米重分析): 新增稳态识别和预测功能页面并优化现有模型
新增米重稳态识别、深度学习预测、统一预测和预测分析功能页面
优化现有回归和高级分析页面,增加稳态数据过滤和模型保存功能
重构稳态识别逻辑为公共类,提高代码复用性
| ¶Ô±ÈÐÂÎļþ |
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | import joblib |
| | | import os |
| | | from datetime import datetime |
| | | from app.services.parameter_adjustment_service import ParameterAdjustmentAdvisor |
| | | |
| | | # 页é¢å½æ°å®ä¹ |
| | | def show_extruder_parameter_adjustment(): |
| | | # 页颿 é¢ |
| | | st.title("æ¤åºæºåæ°è°è建议") |
| | | |
| | | # æ·»å æä½æå¼ |
| | | with st.expander("ð æä½æå¼", expanded=True): |
| | | st.markdown(""" |
| | | 欢è¿ä½¿ç¨æ¤åºæºåæ°è°è建议åè½ï¼æ¬åè½å¯ä»¥æ ¹æ®æ¨è¾å
¥çç±³éæ°æ®åå½ååæ°ï¼ä¸ºæ¨æä¾ç§å¦åççåæ°è°æ´å»ºè®®ã |
| | | |
| | | **æä½æ¥éª¤ï¼** |
| | | 1. éæ©ä¸ä¸ªå·²è®ç»å¥½ç模å |
| | | 2. è¾å
¥ç±³éæ åå¼ãä¸ä¸éåå½åæ¤åºæºåæ° |
| | | 3. è¾å
¥å½åå®é
ç±³éæµéå¼ |
| | | 4. ç¹å»"计ç®è°è建议"æé® |
| | | 5. æ¥çç³»ç»çæçåæ°è°æ´å»ºè®® |
| | | |
| | | **注æäºé¡¹ï¼** |
| | | - 请确ä¿è¾å
¥çåæ°å¼å¨è®¾å¤å
许çèå´å
|
| | | - å»ºè®®æ ¹æ®å®é
ç产æ
åµè°æ´æ¨¡ååæ° |
| | | - åå²è°èè®°å½å¯å¨é¡µé¢åºé¨æ¥ç |
| | | """) |
| | | |
| | | # åå§åä¼è¯ç¶æ |
| | | if 'adjustment_history' not in st.session_state: |
| | | st.session_state['adjustment_history'] = [] |
| | | |
| | | # 1. 模åéæ©åºå |
| | | with st.expander("ð æ¨¡åéæ©", expanded=True): |
| | | # å建模åç®å½ï¼å¦æä¸åå¨ï¼ |
| | | model_dir = "saved_models" |
| | | os.makedirs(model_dir, exist_ok=True) |
| | | |
| | | # è·åææå·²ä¿åçæ¨¡åæä»¶ |
| | | model_files = [f for f in os.listdir(model_dir) if f.endswith('.joblib')] |
| | | model_files.sort(reverse=True) # ææ°çæ¨¡åæå¨åé¢ |
| | | |
| | | if not model_files: |
| | | st.warning("å°æªä¿å任使¨¡åï¼è¯·å
è®ç»æ¨¡åå¹¶ä¿åã") |
| | | return |
| | | |
| | | # 模åéæ©ä¸ææ¡ |
| | | selected_model_file = st.selectbox( |
| | | "鿩已ä¿åçæ¨¡å", |
| | | options=model_files, |
| | | help="éæ©è¦ç¨äºé¢æµç模åæä»¶" |
| | | ) |
| | | |
| | | # å 载并æ¾ç¤ºæ¨¡åä¿¡æ¯ |
| | | if selected_model_file: |
| | | model_path = os.path.join(model_dir, selected_model_file) |
| | | model_info = joblib.load(model_path) |
| | | |
| | | # æ¾ç¤ºæ¨¡ååºæ¬ä¿¡æ¯ |
| | | st.subheader("ð æ¨¡åä¿¡æ¯") |
| | | info_cols = st.columns(2) |
| | | |
| | | with info_cols[0]: |
| | | st.metric("模åç±»å", model_info['model_type']) |
| | | st.metric("å建æ¶é´", model_info['created_at'].strftime('%Y-%m-%d %H:%M:%S')) |
| | | st.metric("使ç¨ç¨³ææ°æ®", "æ¯" if model_info.get('use_steady_data', False) else "å¦") |
| | | |
| | | with info_cols[1]: |
| | | st.metric("R² å¾å", f"{model_info['r2_score']:.4f}") |
| | | st.metric("åæ¹è¯¯å·® (MSE)", f"{model_info['mse']:.6f}") |
| | | st.metric("åæ¹æ ¹è¯¯å·® (RMSE)", f"{model_info['rmse']:.6f}") |
| | | |
| | | # æ¾ç¤ºæ¨¡åç¹å¾ |
| | | st.write("ð æ¨¡å使ç¨çç¹å¾:") |
| | | st.code(", ".join(model_info['features'])) |
| | | |
| | | # å¦ææ¯æ·±åº¦å¦ä¹ 模åï¼æ¾ç¤ºåºåé¿åº¦ |
| | | if 'sequence_length' in model_info: |
| | | st.metric("åºåé¿åº¦", model_info['sequence_length']) |
| | | |
| | | # ä¿å模åä¿¡æ¯å°ä¼è¯ç¶æ |
| | | st.session_state['selected_model'] = model_info |
| | | st.session_state['selected_model_file'] = selected_model_file |
| | | |
| | | # 2. åæ°è¾å
¥åºå |
| | | st.subheader("ð åæ°è¾å
¥") |
| | | |
| | | # 2.1 ç±³éæ åå¼ãä¸ä¸éè¾å
¥ |
| | | with st.expander("âï¸ ç±³éæ åä¸ä¸ä¸é", expanded=True): |
| | | weight_cols = st.columns(3) |
| | | |
| | | with weight_cols[0]: |
| | | standard_weight = st.number_input( |
| | | "æ åç±³é", |
| | | key="standard_weight", |
| | | value=5.20, |
| | | min_value=0.01, |
| | | max_value=10.0, |
| | | step=0.0001, |
| | | format="%.4f", |
| | | help="è¾å
¥ç®æ ç±³éæ åå¼" |
| | | ) |
| | | st.caption("åä½: Kg/m") |
| | | |
| | | with weight_cols[1]: |
| | | upper_limit = st.number_input( |
| | | "ç±³éä¸é", |
| | | key="upper_limit", |
| | | value=5.46, |
| | | min_value=standard_weight, |
| | | max_value=10.0, |
| | | step=0.0001, |
| | | format="%.4f", |
| | | help="è¾å
¥ç±³éå
许çä¸éå¼" |
| | | ) |
| | | st.caption("åä½: Kg/m") |
| | | |
| | | with weight_cols[2]: |
| | | lower_limit = st.number_input( |
| | | "ç±³éä¸é", |
| | | key="lower_limit", |
| | | value=5.02, |
| | | min_value=0.01, |
| | | max_value=standard_weight, |
| | | step=0.0001, |
| | | format="%.4f", |
| | | help="è¾å
¥ç±³éå
许çä¸éå¼" |
| | | ) |
| | | st.caption("åä½: Kg/m") |
| | | |
| | | # 2.2 æ¤åºæºå½ååæ°è¾å
¥ |
| | | with st.expander("ð§ æ¤åºæºå½ååæ°", expanded=True): |
| | | param_cols = st.columns(3) |
| | | |
| | | with param_cols[0]: |
| | | current_screw_speed = st.number_input( |
| | | "èºæè½¬é", |
| | | key="current_screw_speed", |
| | | value=230.0, |
| | | min_value=0.0, |
| | | max_value=500.0, |
| | | step=0.1, |
| | | help="è¾å
¥å½åèºæè½¬é" |
| | | ) |
| | | st.caption("åä½: rpm") |
| | | |
| | | current_head_pressure = st.number_input( |
| | | "æºå¤´åå", |
| | | key="current_head_pressure", |
| | | value=0.26, |
| | | min_value=0.0, |
| | | max_value=500.0, |
| | | step=1.0, |
| | | help="è¾å
¥å½åæºå¤´åå" |
| | | ) |
| | | st.caption("åä½: bar") |
| | | |
| | | current_process_speed = st.number_input( |
| | | "æµç¨ä¸»é", |
| | | key="current_process_speed", |
| | | value=6.6, |
| | | min_value=0.0, |
| | | max_value=300.0, |
| | | step=0.1, |
| | | help="è¾å
¥å½åæµç¨ä¸»é" |
| | | ) |
| | | st.caption("åä½: m/min") |
| | | |
| | | with param_cols[1]: |
| | | current_screw_temperature = st.number_input( |
| | | "èºææ¸©åº¦", |
| | | key="current_screw_temperature", |
| | | value=79.9, |
| | | min_value=0.0, |
| | | max_value=300.0, |
| | | step=1.0, |
| | | help="è¾å
¥å½åèºææ¸©åº¦" |
| | | ) |
| | | st.caption("åä½: °C") |
| | | |
| | | current_rear_barrel_temperature = st.number_input( |
| | | "åæºçæ¸©åº¦", |
| | | key="current_rear_barrel_temperature", |
| | | value=79.9, |
| | | min_value=0.0, |
| | | max_value=300.0, |
| | | step=1.0, |
| | | help="è¾å
¥å½ååæºçæ¸©åº¦" |
| | | ) |
| | | st.caption("åä½: °C") |
| | | |
| | | with param_cols[2]: |
| | | current_front_barrel_temperature = st.number_input( |
| | | "åæºçæ¸©åº¦", |
| | | key="current_front_barrel_temperature", |
| | | value=80.1, |
| | | min_value=0.0, |
| | | max_value=300.0, |
| | | step=1.0, |
| | | help="è¾å
¥å½ååæºçæ¸©åº¦" |
| | | ) |
| | | st.caption("åä½: °C") |
| | | |
| | | current_head_temperature = st.number_input( |
| | | "æºå¤´æ¸©åº¦", |
| | | key="current_head_temperature", |
| | | value=95.1, |
| | | min_value=0.0, |
| | | max_value=300.0, |
| | | step=1.0, |
| | | help="è¾å
¥å½åæºå¤´æ¸©åº¦" |
| | | ) |
| | | st.caption("åä½: °C") |
| | | |
| | | # 2.3 å½åå®é
ç±³éæµéå¼è¾å
¥ |
| | | with st.expander("ð å½åå®é
ç±³é", expanded=True): |
| | | actual_weight = st.number_input( |
| | | "å½åå®é
ç±³é", |
| | | key="actual_weight", |
| | | value=5.115, |
| | | min_value=0.01, |
| | | max_value=10.0, |
| | | step=0.0001, |
| | | format="%.4f", |
| | | help="è¾å
¥å½åå®é
æµéçç±³éå¼" |
| | | ) |
| | | st.caption("åä½: Kg/m") |
| | | |
| | | # 3. 计ç®è°è建议 |
| | | st.subheader("ð 计ç®è°è建议") |
| | | |
| | | # æ·»å è¿ä»£è°æ´é项 |
| | | use_iterative_adjustment = st.checkbox("ð 使ç¨è¿ä»£è°æ´", value=False, |
| | | help="å¯ç¨è¿ä»£è°æ´ï¼èªå¨ä¼ååæ°ç´å°é¢æµç±³é满足åå·®è¦æ±") |
| | | |
| | | # è¿ä»£è°æ´åæ°è®¾ç½® |
| | | max_iterations = 5 |
| | | tolerance = 0.5 |
| | | |
| | | if use_iterative_adjustment: |
| | | st.write("### è¿ä»£è°æ´åæ°è®¾ç½®") |
| | | iter_cols = st.columns(2) |
| | | max_iterations = iter_cols[0].number_input("æå¤§è¿ä»£æ¬¡æ°", min_value=1, max_value=20, value=5, step=1) |
| | | tolerance = iter_cols[1].number_input("å
许åå·®ç¾åæ¯(%)", min_value=0.1, max_value=5.0, value=0.5, step=0.1) |
| | | |
| | | if st.button("ð 计ç®è°è建议", key="calculate_adjustment"): |
| | | # åæ°éªè¯ |
| | | validation_errors = [] |
| | | |
| | | if standard_weight <= 0: |
| | | validation_errors.append("æ åç±³éå¿
须大äº0") |
| | | |
| | | if upper_limit <= standard_weight: |
| | | validation_errors.append("ç±³éä¸éå¿
é¡»å¤§äºæ åç±³é") |
| | | |
| | | if lower_limit >= standard_weight: |
| | | validation_errors.append("ç±³éä¸éå¿
é¡»å°äºæ åç±³é") |
| | | |
| | | if current_screw_speed <= 0: |
| | | validation_errors.append("èºæè½¬éå¿
须大äº0") |
| | | |
| | | if current_process_speed <= 0: |
| | | validation_errors.append("æµç¨ä¸»éå¿
须大äº0") |
| | | |
| | | if actual_weight <= 0: |
| | | validation_errors.append("å®é
ç±³éå¿
须大äº0") |
| | | |
| | | if validation_errors: |
| | | st.error("åæ°è¾å
¥é误ï¼") |
| | | for error in validation_errors: |
| | | st.error(f"- {error}") |
| | | else: |
| | | with st.spinner("æ£å¨è®¡ç®è°è建议..."): |
| | | # åå§ååæ°è°èå»ºè®®å¨ |
| | | adjustment_advisor = ParameterAdjustmentAdvisor() |
| | | |
| | | # åå¤åå§åæ° |
| | | initial_params = { |
| | | 'real_time_weight': actual_weight, |
| | | 'standard_weight': standard_weight, |
| | | 'upper_limit': upper_limit, |
| | | 'lower_limit': lower_limit, |
| | | 'current_screw_speed': current_screw_speed, |
| | | 'current_process_speed': current_process_speed, |
| | | 'current_screw_temperature': current_screw_temperature, |
| | | 'current_rear_barrel_temperature': current_rear_barrel_temperature, |
| | | 'current_front_barrel_temperature': current_front_barrel_temperature, |
| | | 'current_head_temperature': current_head_temperature, |
| | | 'current_head_pressure': current_head_pressure |
| | | } |
| | | |
| | | # æ ¹æ®æ¯å¦å¯ç¨è¿ä»£è°æ´æ§è¡ä¸åé»è¾ |
| | | if use_iterative_adjustment and 'selected_model' in st.session_state: |
| | | # 使ç¨è¿ä»£è°æ´ |
| | | iterative_result = adjustment_advisor.iterative_adjustment( |
| | | initial_params=initial_params, |
| | | model_info=st.session_state['selected_model'], |
| | | max_iterations=max_iterations, |
| | | tolerance=tolerance |
| | | ) |
| | | |
| | | # 使ç¨è¿ä»£è°æ´çæç»ç»æ |
| | | adjustment_result = iterative_result['final_result'] |
| | | iteration_history = iterative_result['iteration_history'] |
| | | converged = iterative_result['converged'] |
| | | total_iterations = iterative_result['total_iterations'] |
| | | else: |
| | | # æ£å¸¸è®¡ç®è°æ´å»ºè®® |
| | | adjustment_result = adjustment_advisor.calculate_adjustment( |
| | | real_time_weight=actual_weight, |
| | | standard_weight=standard_weight, |
| | | upper_limit=upper_limit, |
| | | lower_limit=lower_limit, |
| | | current_screw_speed=current_screw_speed, |
| | | current_process_speed=current_process_speed, |
| | | current_screw_temperature=current_screw_temperature, |
| | | current_rear_barrel_temperature=current_rear_barrel_temperature, |
| | | current_front_barrel_temperature=current_front_barrel_temperature, |
| | | current_head_temperature=current_head_temperature |
| | | ) |
| | | |
| | | # 使ç¨éä¸ç模å颿µè°æ´åçç±³é |
| | | predicted_weight = None |
| | | if 'selected_model' in st.session_state: |
| | | selected_model_info = st.session_state['selected_model'] |
| | | predicted_weight = adjustment_advisor.predict_weight( |
| | | model_info=selected_model_info, |
| | | screw_speed=adjustment_result['new_screw_speed'], |
| | | head_pressure=current_head_pressure, |
| | | process_speed=adjustment_result['new_process_speed'], |
| | | screw_temperature=current_screw_temperature, |
| | | rear_barrel_temperature=current_rear_barrel_temperature, |
| | | front_barrel_temperature=current_front_barrel_temperature, |
| | | head_temperature=current_head_temperature |
| | | ) |
| | | |
| | | # å°é¢æµç»ææ·»å å°è°æ´ç»æä¸ |
| | | adjustment_result['predicted_weight'] = predicted_weight |
| | | |
| | | # åå§åè¿ä»£åå²ï¼å¦ææªä½¿ç¨è¿ä»£è°æ´ï¼ |
| | | iteration_history = None |
| | | converged = None |
| | | total_iterations = None |
| | | |
| | | # ä¿åå°åå²è®°å½ |
| | | history_record = { |
| | | 'timestamp': datetime.now(), |
| | | 'model_file': st.session_state.get('selected_model_file', 'æªç¥æ¨¡å'), |
| | | 'standard_weight': standard_weight, |
| | | 'upper_limit': upper_limit, |
| | | 'lower_limit': lower_limit, |
| | | 'actual_weight': actual_weight, |
| | | 'current_screw_speed': current_screw_speed, |
| | | 'current_process_speed': current_process_speed, |
| | | 'current_screw_temperature': current_screw_temperature, |
| | | 'current_rear_barrel_temperature': current_rear_barrel_temperature, |
| | | 'current_front_barrel_temperature': current_front_barrel_temperature, |
| | | 'current_head_temperature': current_head_temperature, |
| | | 'adjustment_result': adjustment_result, |
| | | 'use_iterative_adjustment': use_iterative_adjustment, |
| | | 'iteration_history': iteration_history |
| | | } |
| | | |
| | | # æ·»å å°ä¼è¯ç¶æçåå²è®°å½ |
| | | if 'adjustment_history' not in st.session_state: |
| | | st.session_state['adjustment_history'] = [] |
| | | |
| | | st.session_state['adjustment_history'].append(history_record) |
| | | |
| | | # éå¶åå²è®°å½æ°é |
| | | if len(st.session_state['adjustment_history']) > 100: |
| | | st.session_state['adjustment_history'] = st.session_state['adjustment_history'][-100:] |
| | | |
| | | # 4. ç»æå±ç¤º |
| | | st.success("è°è建议计ç®å®æï¼") |
| | | |
| | | st.subheader("ð è°èå»ºè®®ç»æ") |
| | | |
| | | # 4.1 ç±³éç¶æ |
| | | if adjustment_result['status'] == "æ£å¸¸": |
| | | st.success(f"ç±³éç¶æ: {adjustment_result['status']}") |
| | | else: |
| | | st.warning(f"ç±³éç¶æ: {adjustment_result['status']}") |
| | | |
| | | # 4.2 åå·®ä¿¡æ¯ |
| | | info_cols = st.columns(3) |
| | | info_cols[0].metric("宿¶ç±³é", f"{adjustment_result['real_time_weight']:.4f} Kg/m") |
| | | info_cols[1].metric("æ åç±³é", f"{adjustment_result['standard_weight']:.4f} Kg/m") |
| | | info_cols[2].metric("åå·®ç¾åæ¯", f"{adjustment_result['deviation_percentage']:.2f}%") |
| | | |
| | | # 4.2.1 模å颿µç»æ |
| | | if adjustment_result['predicted_weight'] is not None: |
| | | st.markdown("### ð æ¨¡å颿µç»æ") |
| | | pred_cols = st.columns(3) |
| | | pred_cols[0].metric("è°æ´å颿µç±³é", f"{adjustment_result['predicted_weight']:.4f} Kg/m") |
| | | |
| | | # 计ç®é¢æµåå·® |
| | | predicted_deviation = adjustment_result['predicted_weight'] - adjustment_result['standard_weight'] |
| | | predicted_deviation_percent = (predicted_deviation / adjustment_result['standard_weight']) * 100 |
| | | pred_cols[1].metric("颿µåå·®", f"{predicted_deviation:.4f} Kg/m") |
| | | pred_cols[2].metric("颿µåå·®ç¾åæ¯", f"{predicted_deviation_percent:.2f}%") |
| | | |
| | | # æ¾ç¤ºé¢æµææ |
| | | if abs(predicted_deviation_percent) < 0.5: |
| | | st.success("è°æ´åç±³é颿µå¼æ¥è¿æ åå¼ï¼è°æ´ææè¯å¥½ï¼") |
| | | elif abs(predicted_deviation_percent) < 1.0: |
| | | st.info("è°æ´åç±³é颿µå¼å¨å¯æ¥åèå´å
ã") |
| | | else: |
| | | st.warning("è°æ´åç±³é颿µå¼ä»æè¾å¤§åå·®ï¼å»ºè®®è¿ä¸æ¥å¾®è°ã") |
| | | else: |
| | | st.warning("模å颿µå¤±è´¥ï¼è¯·æ£æ¥æ¨¡åæä»¶æåæ°ã") |
| | | |
| | | # 4.3 å
³é®è°æ´å»ºè®® |
| | | st.markdown("### ð å
³é®è°æ´å»ºè®®") |
| | | st.info(adjustment_result['recommendation']) |
| | | |
| | | # 4.4 åæ°è°æ´å¯¹æ¯ |
| | | st.markdown("### ð åæ°è°æ´å¯¹æ¯") |
| | | |
| | | param_compare_df = pd.DataFrame({ |
| | | 'åæ°åç§°': ['èºæè½¬é', 'æµç¨ä¸»é'], |
| | | 'å½åå¼': [adjustment_result['current_screw_speed'], adjustment_result['current_process_speed']], |
| | | '建议å¼': [adjustment_result['new_screw_speed'], adjustment_result['new_process_speed']], |
| | | 'è°æ´å¹
度': [f"{adjustment_result['screw_speed_adjust_percent']:.2f}%", |
| | | f"{adjustment_result['process_speed_adjust_percent']:.2f}%"] |
| | | }) |
| | | |
| | | # é«äº®æ¾ç¤ºè°æ´å¹
度 |
| | | def highlight_adjustment(val): |
| | | if isinstance(val, str) and '%' in val: |
| | | try: |
| | | percent = float(val.strip('%')) |
| | | if percent > 0: |
| | | return 'background-color: #90EE90' # 绿è²è¡¨ç¤ºå¢å |
| | | elif percent < 0: |
| | | return 'background-color: #FFB6C1' # 红è²è¡¨ç¤ºåå° |
| | | except: |
| | | pass |
| | | return '' |
| | | |
| | | styled_df = param_compare_df.style.applymap(highlight_adjustment, subset=['è°æ´å¹
度']) |
| | | st.dataframe(styled_df, use_container_width=True, hide_index=True) |
| | | |
| | | # 4.5 å¯è§åå¯¹æ¯ |
| | | fig = go.Figure() |
| | | fig.add_trace(go.Bar( |
| | | x=param_compare_df['åæ°åç§°'], |
| | | y=param_compare_df['å½åå¼'], |
| | | name='å½åå¼', |
| | | marker_color='blue' |
| | | )) |
| | | fig.add_trace(go.Bar( |
| | | x=param_compare_df['åæ°åç§°'], |
| | | y=param_compare_df['建议å¼'], |
| | | name='建议å¼', |
| | | marker_color='green' |
| | | )) |
| | | |
| | | fig.update_layout( |
| | | barmode='group', |
| | | title='åæ°è°æ´å¯¹æ¯', |
| | | yaxis_title='æ°å¼', |
| | | height=400 |
| | | ) |
| | | |
| | | st.plotly_chart(fig, use_container_width=True) |
| | | |
| | | # 4.6 è¿ä»£è°æ´ç»æå±ç¤º |
| | | if use_iterative_adjustment and iteration_history: |
| | | st.markdown("### ð è¿ä»£è°æ´åå²") |
| | | |
| | | # æ¾ç¤ºè¿ä»£è°æ´ç¶æ |
| | | if converged: |
| | | st.success(f"â
è¿ä»£è°æ´æåæ¶æï¼ç»è¿ {total_iterations} 次è¿ä»£ï¼é¢æµç±³éåå·®è¾¾å° {tolerance}% 以å
ã") |
| | | else: |
| | | st.warning(f"â ï¸ è¿ä»£è°æ´æªæ¶æï¼ç»è¿ {total_iterations} 次è¿ä»£ï¼é¢æµç±³éå差仿ªè¾¾å° {tolerance}% 以å
ã") |
| | | |
| | | # æ¾ç¤ºè¿ä»£åå²è¡¨æ ¼ |
| | | iter_history_df = pd.DataFrame(iteration_history) |
| | | iter_history_df = iter_history_df[[ |
| | | 'iteration', 'current_screw_speed', 'current_process_speed', |
| | | 'adjusted_screw_speed', 'adjusted_process_speed', |
| | | 'predicted_weight', 'predicted_deviation_percent' |
| | | ]] |
| | | |
| | | # æ ¼å¼åè¡¨æ ¼ |
| | | iter_history_df = iter_history_df.rename(columns={ |
| | | 'iteration': 'è¿ä»£æ¬¡æ°', |
| | | 'current_screw_speed': 'è°æ´åèºæè½¬é', |
| | | 'current_process_speed': 'è°æ´åæµç¨ä¸»é', |
| | | 'adjusted_screw_speed': 'è°æ´åèºæè½¬é', |
| | | 'adjusted_process_speed': 'è°æ´åæµç¨ä¸»é', |
| | | 'predicted_weight': '颿µç±³é', |
| | | 'predicted_deviation_percent': '颿µåå·®ç¾åæ¯(%)' |
| | | }) |
| | | |
| | | st.dataframe(iter_history_df, use_container_width=True) |
| | | |
| | | # è¿ä»£è°æ´å¯è§å |
| | | st.markdown("### ð è¿ä»£è°æ´ææ") |
| | | |
| | | # åå·®ååè¶å¿å¾ |
| | | fig_deviation = go.Figure() |
| | | fig_deviation.add_trace(go.Scatter( |
| | | x=iter_history_df['è¿ä»£æ¬¡æ°'], |
| | | y=iter_history_df['颿µåå·®ç¾åæ¯(%)'], |
| | | mode='lines+markers', |
| | | name='颿µåå·®ç¾åæ¯', |
| | | line=dict(color='blue', width=2), |
| | | marker=dict(size=8) |
| | | )) |
| | | |
| | | # æ·»å åå·®éå¼çº¿ |
| | | fig_deviation.add_trace(go.Scatter( |
| | | x=iter_history_df['è¿ä»£æ¬¡æ°'], |
| | | y=[tolerance] * len(iter_history_df), |
| | | mode='lines', |
| | | name='å
许åå·®ä¸é', |
| | | line=dict(color='red', dash='dash', width=1) |
| | | )) |
| | | |
| | | fig_deviation.add_trace(go.Scatter( |
| | | x=iter_history_df['è¿ä»£æ¬¡æ°'], |
| | | y=[-tolerance] * len(iter_history_df), |
| | | mode='lines', |
| | | name='å
许åå·®ä¸é', |
| | | line=dict(color='red', dash='dash', width=1) |
| | | )) |
| | | |
| | | fig_deviation.update_layout( |
| | | title='è¿ä»£è°æ´åå·®ååè¶å¿', |
| | | xaxis_title='è¿ä»£æ¬¡æ°', |
| | | yaxis_title='颿µåå·®ç¾åæ¯(%)', |
| | | height=400, |
| | | legend=dict(yanchor="top", y=0.99, xanchor="left", x=0.01) |
| | | ) |
| | | |
| | | st.plotly_chart(fig_deviation, use_container_width=True) |
| | | |
| | | # èºæè½¬éåæµç¨ä¸»éååè¶å¿ |
| | | fig_params = go.Figure() |
| | | fig_params.add_trace(go.Scatter( |
| | | x=iter_history_df['è¿ä»£æ¬¡æ°'], |
| | | y=iter_history_df['è°æ´åèºæè½¬é'], |
| | | mode='lines+markers', |
| | | name='è°æ´åèºæè½¬é', |
| | | line=dict(color='blue', width=2), |
| | | marker=dict(size=8) |
| | | )) |
| | | |
| | | fig_params.add_trace(go.Scatter( |
| | | x=iter_history_df['è¿ä»£æ¬¡æ°'], |
| | | y=iter_history_df['è°æ´åèºæè½¬é'], |
| | | mode='lines+markers', |
| | | name='è°æ´åèºæè½¬é', |
| | | line=dict(color='green', width=2), |
| | | marker=dict(size=8) |
| | | )) |
| | | |
| | | fig_params.add_trace(go.Scatter( |
| | | x=iter_history_df['è¿ä»£æ¬¡æ°'], |
| | | y=iter_history_df['è°æ´åæµç¨ä¸»é'], |
| | | mode='lines+markers', |
| | | name='è°æ´åæµç¨ä¸»é', |
| | | line=dict(color='orange', width=2), |
| | | marker=dict(size=8) |
| | | )) |
| | | |
| | | fig_params.add_trace(go.Scatter( |
| | | x=iter_history_df['è¿ä»£æ¬¡æ°'], |
| | | y=iter_history_df['è°æ´åæµç¨ä¸»é'], |
| | | mode='lines+markers', |
| | | name='è°æ´åæµç¨ä¸»é', |
| | | line=dict(color='purple', width=2), |
| | | marker=dict(size=8) |
| | | )) |
| | | |
| | | fig_params.update_layout( |
| | | title='åæ°è°æ´ååè¶å¿', |
| | | xaxis_title='è¿ä»£æ¬¡æ°', |
| | | yaxis_title='æ°å¼', |
| | | height=400, |
| | | legend=dict(yanchor="top", y=0.99, xanchor="right", x=0.99) |
| | | ) |
| | | |
| | | st.plotly_chart(fig_params, use_container_width=True) |
| | | |
| | | # 5. åå²è®°å½å±ç¤º |
| | | st.subheader("ð åå²è°èè®°å½") |
| | | |
| | | if 'adjustment_history' in st.session_state and st.session_state['adjustment_history']: |
| | | # æ¾ç¤ºåå²è®°å½æ°é |
| | | st.write(f"å
± {len(st.session_state['adjustment_history'])} æ¡åå²è®°å½") |
| | | |
| | | # å页æ¾ç¤º |
| | | page_size = 10 |
| | | total_pages = (len(st.session_state['adjustment_history']) + page_size - 1) // page_size |
| | | |
| | | page = st.selectbox( |
| | | "鿩页ç ", |
| | | options=range(1, total_pages + 1), |
| | | key="history_page" |
| | | ) |
| | | |
| | | start_idx = (page - 1) * page_size |
| | | end_idx = start_idx + page_size |
| | | paginated_history = st.session_state['adjustment_history'][start_idx:end_idx] |
| | | |
| | | # ååæ¾ç¤ºï¼ææ°è®°å½å¨åé¢ |
| | | for record in reversed(paginated_history): |
| | | with st.expander(f"è®°å½æ¶é´: {record['timestamp'].strftime('%Y-%m-%d %H:%M:%S')} | 模å: {record['model_file']}"): |
| | | history_cols = st.columns(3) |
| | | |
| | | with history_cols[0]: |
| | | st.write("**ç±³éåæ°**") |
| | | st.write(f"- æ åç±³é: {record['standard_weight']:.4f} Kg/m") |
| | | st.write(f"- ç±³éä¸é: {record['upper_limit']:.4f} Kg/m") |
| | | st.write(f"- ç±³éä¸é: {record['lower_limit']:.4f} Kg/m") |
| | | st.write(f"- å®é
ç±³é: {record['actual_weight']:.4f} Kg/m") |
| | | |
| | | with history_cols[1]: |
| | | st.write("**éåº¦åæ°**") |
| | | st.write(f"- èºæè½¬é: {record['current_screw_speed']:.1f} rpm") |
| | | st.write(f"- æµç¨ä¸»é: {record['current_process_speed']:.1f} m/min") |
| | | |
| | | with history_cols[2]: |
| | | st.write("**æ¸©åº¦åæ°**") |
| | | st.write(f"- èºææ¸©åº¦: {record['current_screw_temperature']:.1f} °C") |
| | | st.write(f"- åæºçæ¸©åº¦: {record['current_rear_barrel_temperature']:.1f} °C") |
| | | st.write(f"- åæºçæ¸©åº¦: {record['current_front_barrel_temperature']:.1f} °C") |
| | | st.write(f"- æºå¤´æ¸©åº¦: {record['current_head_temperature']:.1f} °C") |
| | | |
| | | st.write("**è°æ´å»ºè®®**") |
| | | st.write(record['adjustment_result']['recommendation']) |
| | | else: |
| | | st.info("ææ åå²è°èè®°å½") |
| | | |
| | | # 6. 帮å©è¯´æ |
| | | with st.expander("â 帮å©è¯´æ"): |
| | | st.markdown(""" |
| | | ### åè½è¯´æ |
| | | æ¬åè½æ¨¡åç¨äºæ ¹æ®å½åç±³éæµéå¼åæ¤åºæºåæ°ï¼ä¸ºç¨æ·æä¾ç§å¦åççåæ°è°æ´å»ºè®®ï¼ä»¥å¸®å©ç¨æ·å°ç±³éæ§å¶å¨æ åèå´å
ã |
| | | |
| | | ### 模åéæ© |
| | | - ç³»ç»ä¼èªå¨è¯»å项ç®ç®å½ä¸è®ç»å¥½ç模åæä»¶ |
| | | - 模åæä»¶é符åç³»ç»è¦æ±çæ ¼å¼ï¼å
嫿¨¡ååæ°åè®ç»ä¿¡æ¯ |
| | | - å»ºè®®éæ©R²å¾åè¾é«ã误差è¾å°ç模å |
| | | |
| | | ### åæ°è¾å
¥ |
| | | - ç±³éæ åå¼ï¼æ¨ææçç®æ ç±³éå¼ |
| | | - ç±³éä¸ä¸éï¼å
许çç±³éæ³¢å¨èå´ |
| | | - æ¤åºæºå½ååæ°ï¼å
æ¬èºæè½¬éãæµç¨ä¸»éãæºå¤´åååæ¤åºæºçµæµ |
| | | - å½åå®é
ç±³éï¼å®é
æµéå¾å°çç±³éå¼ |
| | | |
| | | ### ç»æè§£è¯» |
| | | - ç±³éç¶æï¼æ¾ç¤ºå½åç±³éæ¯å¦å¨å
许èå´å
|
| | | - åå·®ç¾åæ¯ï¼å½åç±³é䏿 åç±³éçåå·®ç¾åæ¯ |
| | | - å
³é®è°æ´å»ºè®®ï¼ç³»ç»ç»åºç主è¦è°æ´å»ºè®® |
| | | - åæ°è°æ´å¯¹æ¯ï¼è¯¦ç»å±ç¤ºæ¯ä¸ªåæ°çå½åå¼ã建议å¼åè°æ´å¹
度 |
| | | |
| | | ### 注æäºé¡¹ |
| | | 1. 请确ä¿è¾å
¥çåæ°å¼åç¡®åæ è®¾å¤å½åç¶æ |
| | | 2. è°æ´å»ºè®®ä»
ä¾åèï¼å®é
æä½æ¶è¯·ç»åç°åºç»éª |
| | | 3. 建议å¨è°æ´åæ°åå¯åè§å¯ç±³éåå |
| | | 4. å®ææ´æ°æ¨¡å以æé«å»ºè®®çåç¡®æ§ |
| | | """) |
| | | |
| | | # 页é¢å
¥å£ |
| | | if __name__ == "__main__": |
| | | show_extruder_parameter_adjustment() |
| | |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | import joblib |
| | | import os |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.main_process_service import MainProcessService |
| | |
| | | from sklearn.svm import SVR |
| | | from sklearn.neural_network import MLPRegressor |
| | | |
| | | # 导å
¥ç¨³æè¯å«åè½ |
| | | class SteadyStateDetector: |
| | | def __init__(self): |
| | | pass |
| | | |
| | | def detect_steady_state(self, df, weight_col='ç±³é', window_size=20, std_threshold=0.5, duration_threshold=60): |
| | | """ |
| | | 稳æè¯å«é»è¾ï¼æ è®°ç±³éæ°æ®ä¸çç¨³ææ®µ |
| | | :param df: å
å«ç±³éæ°æ®çæ°æ®æ¡ |
| | | :param weight_col: ç±³éåå |
| | | :param window_size: æ»å¨çªå£å¤§å°ï¼ç§ï¼ |
| | | :param std_threshold: æ åå·®éå¼ |
| | | :param duration_threshold: 稳ææç»æ¶é´éå¼ï¼ç§ï¼ |
| | | :return: å
å«ç¨³ææ è®°çæ°æ®æ¡å稳æä¿¡æ¯ |
| | | """ |
| | | if df is None or df.empty: |
| | | return df, [] |
| | | |
| | | # ç¡®ä¿æ¶é´åæ¯datetimeç±»å |
| | | df['time'] = pd.to_datetime(df['time']) |
| | | |
| | | # è®¡ç®æ»å¨ç»è®¡é |
| | | df['rolling_std'] = df[weight_col].rolling(window=window_size, min_periods=5).std() |
| | | df['rolling_mean'] = df[weight_col].rolling(window=window_size, min_periods=5).mean() |
| | | |
| | | # è®¡ç®æ³¢å¨èå´ |
| | | df['fluctuation_range'] = (df['rolling_std'] / df['rolling_mean']) * 100 |
| | | df['fluctuation_range'] = df['fluctuation_range'].fillna(0) |
| | | |
| | | # æ 记稳æç¹ |
| | | df['is_steady'] = 0 |
| | | steady_condition = ( |
| | | (df['fluctuation_range'] < std_threshold) & |
| | | (df[weight_col] >= 0.1) |
| | | ) |
| | | df.loc[steady_condition, 'is_steady'] = 1 |
| | | |
| | | # è¯å«è¿ç»ç¨³ææ®µ |
| | | steady_segments = [] |
| | | current_segment = {} |
| | | |
| | | for i, row in df.iterrows(): |
| | | if row['is_steady'] == 1: |
| | | if not current_segment: |
| | | current_segment = { |
| | | 'start_time': row['time'], |
| | | 'start_idx': i, |
| | | 'weights': [row[weight_col]] |
| | | } |
| | | else: |
| | | current_segment['weights'].append(row[weight_col]) |
| | | else: |
| | | if current_segment: |
| | | current_segment['end_time'] = df.loc[i-1, 'time'] if i > 0 else df.loc[i, 'time'] |
| | | current_segment['end_idx'] = i-1 |
| | | duration = (current_segment['end_time'] - current_segment['start_time']).total_seconds() |
| | | |
| | | if duration >= duration_threshold: |
| | | weights_array = np.array(current_segment['weights']) |
| | | current_segment['duration'] = duration |
| | | current_segment['mean_weight'] = np.mean(weights_array) |
| | | current_segment['std_weight'] = np.std(weights_array) |
| | | current_segment['min_weight'] = np.min(weights_array) |
| | | current_segment['max_weight'] = np.max(weights_array) |
| | | current_segment['fluctuation_range'] = (current_segment['std_weight'] / current_segment['mean_weight']) * 100 |
| | | |
| | | # 计ç®ç½®ä¿¡åº¦ |
| | | confidence = 100 - (current_segment['fluctuation_range'] / std_threshold) * 50 |
| | | confidence = max(50, min(100, confidence)) |
| | | current_segment['confidence'] = confidence |
| | | |
| | | steady_segments.append(current_segment) |
| | | |
| | | current_segment = {} |
| | | |
| | | # å¤çæåä¸ä¸ªç¨³ææ®µ |
| | | if current_segment: |
| | | current_segment['end_time'] = df['time'].iloc[-1] |
| | | current_segment['end_idx'] = len(df) - 1 |
| | | duration = (current_segment['end_time'] - current_segment['start_time']).total_seconds() |
| | | |
| | | if duration >= duration_threshold: |
| | | weights_array = np.array(current_segment['weights']) |
| | | current_segment['duration'] = duration |
| | | current_segment['mean_weight'] = np.mean(weights_array) |
| | | current_segment['std_weight'] = np.std(weights_array) |
| | | current_segment['min_weight'] = np.min(weights_array) |
| | | current_segment['max_weight'] = np.max(weights_array) |
| | | current_segment['fluctuation_range'] = (current_segment['std_weight'] / current_segment['mean_weight']) * 100 |
| | | |
| | | confidence = 100 - (current_segment['fluctuation_range'] / std_threshold) * 50 |
| | | confidence = max(50, min(100, confidence)) |
| | | current_segment['confidence'] = confidence |
| | | |
| | | steady_segments.append(current_segment) |
| | | |
| | | # 卿°æ®æ¡ä¸æ è®°å®æ´çç¨³ææ®µ |
| | | for segment in steady_segments: |
| | | df.loc[segment['start_idx']:segment['end_idx'], 'is_steady'] = 1 |
| | | |
| | | return df, steady_segments |
| | | |
| | | def show_metered_weight_advanced(): |
| | | # åå§åæå¡ |
| | |
| | | st.session_state['ma_model_type'] = 'RandomForest' |
| | | if 'ma_sequence_length' not in st.session_state: |
| | | st.session_state['ma_sequence_length'] = 10 |
| | | if 'ma_use_steady_data' not in st.session_state: |
| | | st.session_state['ma_use_steady_data'] = True |
| | | if 'ma_steady_window' not in st.session_state: |
| | | st.session_state['ma_steady_window'] = 20 |
| | | if 'ma_steady_threshold' not in st.session_state: |
| | | st.session_state['ma_steady_threshold'] = 0.5 |
| | | |
| | | # é»è®¤ç¹å¾å表ï¼ä¸åå
è®¸ç¨æ·éæ©ï¼ |
| | | default_features = ['èºæè½¬é', 'æºå¤´åå', 'æµç¨ä¸»é', 'èºææ¸©åº¦', |
| | |
| | | options=model_options, |
| | | key="ma_model_type", |
| | | help="éæ©ç¨äºé¢æµç模åç±»å" |
| | | ) |
| | | |
| | | # 稳æè¯å«é
ç½® |
| | | st.markdown("---") |
| | | steady_cols = st.columns(3) |
| | | with steady_cols[0]: |
| | | st.write("âï¸ **稳æè¯å«é
ç½®**") |
| | | st.checkbox( |
| | | "ä»
使ç¨ç¨³ææ°æ®è¿è¡è®ç»", |
| | | value=st.session_state['ma_use_steady_data'], |
| | | key="ma_use_steady_data", |
| | | help="å¯ç¨åï¼åªä½¿ç¨ç±³éç¨³ææ¶æ®µçæ°æ®è¿è¡æ¨¡åè®ç»" |
| | | ) |
| | | |
| | | with steady_cols[1]: |
| | | st.write("ð **稳æåæ°**") |
| | | st.slider( |
| | | "æ»å¨çªå£å¤§å° (ç§)", |
| | | min_value=5, |
| | | max_value=60, |
| | | value=st.session_state['ma_steady_window'], |
| | | step=5, |
| | | key="ma_steady_window", |
| | | help="ç¨äºç¨³æè¯å«çæ»å¨çªå£å¤§å°" |
| | | ) |
| | | |
| | | with steady_cols[2]: |
| | | st.write("ð **稳æéå¼**") |
| | | st.slider( |
| | | "æ³¢å¨éå¼ (%)", |
| | | min_value=0.1, |
| | | max_value=2.0, |
| | | value=st.session_state['ma_steady_threshold'], |
| | | step=0.1, |
| | | key="ma_steady_threshold", |
| | | help="稳æè¯å«çæ³¢å¨èå´éå¼" |
| | | ) |
| | | |
| | | |
| | |
| | | |
| | | # éå½åç±³éå |
| | | df_analysis.rename(columns={'metered_weight': 'ç±³é'}, inplace=True) |
| | | |
| | | # 稳æè¯å« |
| | | steady_detector = SteadyStateDetector() |
| | | |
| | | # è·å稳æè¯å«åæ° |
| | | use_steady_data = st.session_state.get('ma_use_steady_data', True) |
| | | steady_window = st.session_state.get('ma_steady_window', 20) |
| | | steady_threshold = st.session_state.get('ma_steady_threshold', 0.5) |
| | | |
| | | # æ§è¡ç¨³æè¯å« |
| | | df_analysis_with_steady, steady_segments = steady_detector.detect_steady_state( |
| | | df_analysis, |
| | | weight_col='ç±³é', |
| | | window_size=steady_window, |
| | | std_threshold=steady_threshold |
| | | ) |
| | | |
| | | # æ´æ°df_analysis为å
å«ç¨³ææ è®°çæ°æ® |
| | | df_analysis = df_analysis_with_steady |
| | | |
| | | # ç¨³ææ°æ®å¯è§å |
| | | st.subheader("ð ç¨³ææ°æ®åå¸") |
| | | |
| | | # åå»ºç¨³ææ°æ®å¯è§åå¾è¡¨ |
| | | fig_steady = go.Figure() |
| | | |
| | | # æ·»å åå§ç±³éæ²çº¿ |
| | | fig_steady.add_trace(go.Scatter( |
| | | x=df_analysis['time'], |
| | | y=df_analysis['ç±³é'], |
| | | name='åå§ç±³é', |
| | | mode='lines', |
| | | line=dict(color='lightgray', width=1) |
| | | )) |
| | | |
| | | # æ·»å ç¨³ææ°æ®ç¹ |
| | | steady_data_points = df_analysis[df_analysis['is_steady'] == 1] |
| | | fig_steady.add_trace(go.Scatter( |
| | | x=steady_data_points['time'], |
| | | y=steady_data_points['ç±³é'], |
| | | name='稳æç±³é', |
| | | mode='markers', |
| | | marker=dict(color='green', size=3, opacity=0.6) |
| | | )) |
| | | |
| | | # æ·»å éç¨³ææ°æ®ç¹ |
| | | non_steady_data_points = df_analysis[df_analysis['is_steady'] == 0] |
| | | fig_steady.add_trace(go.Scatter( |
| | | x=non_steady_data_points['time'], |
| | | y=non_steady_data_points['ç±³é'], |
| | | name='é稳æç±³é', |
| | | mode='markers', |
| | | marker=dict(color='red', size=3, opacity=0.6) |
| | | )) |
| | | |
| | | # é
ç½®å¾è¡¨å¸å± |
| | | fig_steady.update_layout( |
| | | title="ç±³éæ°æ®ç¨³æåå¸", |
| | | xaxis=dict(title="æ¶é´"), |
| | | yaxis=dict(title="ç±³é (Kg/m)"), |
| | | legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1), |
| | | height=500 |
| | | ) |
| | | |
| | | # æ¾ç¤ºå¾è¡¨ |
| | | st.plotly_chart(fig_steady, use_container_width=True) |
| | | |
| | | # æ¾ç¤ºç¨³æç»è®¡ |
| | | total_data = len(df_analysis) |
| | | steady_data = len(df_analysis[df_analysis['is_steady'] == 1]) |
| | | steady_ratio = (steady_data / total_data * 100) if total_data > 0 else 0 |
| | | |
| | | stats_cols = st.columns(3) |
| | | stats_cols[0].metric("æ»æ°æ®é", total_data) |
| | | stats_cols[1].metric("ç¨³ææ°æ®é", steady_data) |
| | | stats_cols[2].metric("ç¨³ææ°æ®æ¯ä¾", f"{steady_ratio:.1f}%") |
| | | |
| | | # --- åå§æ°æ®è¶å¿å¾ --- |
| | | st.subheader("ð åå§æ°æ®è¶å¿å¾") |
| | |
| | | else: |
| | | try: |
| | | # å夿°æ® |
| | | # æ ¹æ®é
ç½®å³å®æ¯å¦åªä½¿ç¨ç¨³ææ°æ® |
| | | use_steady_data = st.session_state.get('ma_use_steady_data', True) |
| | | if use_steady_data: |
| | | df_filtered = df_analysis[df_analysis['is_steady'] == 1] |
| | | st.info(f"å·²è¿æ»¤éç¨³ææ°æ®ï¼ä½¿ç¨ {len(df_filtered)} æ¡ç¨³ææ°æ®è¿è¡è®ç»") |
| | | else: |
| | | df_filtered = df_analysis.copy() |
| | | |
| | | # é¦å
ç¡®ä¿df_analysis䏿²¡æNaNå¼ |
| | | df_analysis_clean = df_analysis.dropna(subset=default_features + ['ç±³é']) |
| | | df_analysis_clean = df_filtered.dropna(subset=default_features + ['ç±³é']) |
| | | |
| | | # æ£æ¥æ¸
çåçæ°æ®é |
| | | if len(df_analysis_clean) < 30: |
| | |
| | | ) |
| | | st.plotly_chart(fig_importance, width='stretch') |
| | | |
| | | # --- 颿µåè½ --- |
| | | st.subheader("ð® ç±³é颿µ") |
| | | # --- 模åä¿å --- |
| | | st.subheader("� 模åä¿å") |
| | | |
| | | # åå»ºé¢æµè¡¨å |
| | | st.write("è¾å
¥ç¹å¾å¼è¿è¡ç±³é颿µ:") |
| | | predict_cols = st.columns(2) |
| | | input_features = {} |
| | | # å建模åç®å½ï¼å¦æä¸åå¨ï¼ |
| | | model_dir = "saved_models" |
| | | os.makedirs(model_dir, exist_ok=True) |
| | | |
| | | for i, feature in enumerate(default_features): |
| | | with predict_cols[i % 2]: |
| | | # è·åç¹å¾çç»è®¡ä¿¡æ¯ |
| | | min_val = df_analysis_clean[feature].min() |
| | | max_val = df_analysis_clean[feature].max() |
| | | mean_val = df_analysis_clean[feature].mean() |
| | | # å夿¨¡åä¿¡æ¯ |
| | | model_info = { |
| | | 'model': model, |
| | | 'features': feature_columns, |
| | | 'scaler_X': scaler_X if model_type in ['SVR', 'MLP'] else None, |
| | | 'scaler_y': scaler_y if model_type in ['SVR', 'MLP'] else None, |
| | | 'model_type': model_type, |
| | | 'created_at': datetime.now(), |
| | | 'r2_score': r2, |
| | | 'mse': mse, |
| | | 'mae': mae, |
| | | 'rmse': rmse, |
| | | 'use_steady_data': use_steady_data |
| | | } |
| | | |
| | | input_features[feature] = st.number_input( |
| | | f"{feature}", |
| | | key=f"ma_pred_{feature}", |
| | | value=float(mean_val), |
| | | min_value=float(min_val), |
| | | max_value=float(max_val), |
| | | step=0.1 |
| | | ) |
| | | # çææ¨¡åæä»¶å |
| | | model_filename = f"advanced_{model_type.lower()}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.joblib" |
| | | model_path = os.path.join(model_dir, model_filename) |
| | | |
| | | if st.button("颿µç±³é"): |
| | | # åå¤é¢æµæ°æ® |
| | | input_df = pd.DataFrame([input_features]) |
| | | # ä¿å模å |
| | | joblib.dump(model_info, model_path) |
| | | |
| | | # åå¹¶ç¹å¾ |
| | | input_combined = pd.concat([input_df], axis=1) |
| | | |
| | | # 颿µ |
| | | if model_type in ['SVR', 'MLP']: |
| | | input_scaled = scaler_X.transform(input_combined) |
| | | prediction_scaled = model.predict(input_scaled) |
| | | predicted_weight = scaler_y.inverse_transform(prediction_scaled.reshape(-1, 1)).ravel()[0] |
| | | |
| | | else: |
| | | predicted_weight = model.predict(input_combined)[0] |
| | | |
| | | # æ¾ç¤ºé¢æµç»æ |
| | | st.success(f"颿µç±³é: {predicted_weight:.4f} Kg/m") |
| | | st.success(f"模åå·²æåä¿å: {model_filename}") |
| | | st.info(f"ä¿åè·¯å¾: {model_path}") |
| | | |
| | | # --- æ°æ®é¢è§ --- |
| | | st.subheader("ð æ°æ®é¢è§") |
| ¶Ô±ÈÐÂÎļþ |
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | import joblib |
| | | import os |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.main_process_service import MainProcessService |
| | | from sklearn.preprocessing import StandardScaler, MinMaxScaler |
| | | from sklearn.model_selection import train_test_split |
| | | from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error |
| | | |
| | | # 导å
¥ç¨³æè¯å«åè½ |
class SteadyStateDetector:
    """Detects steady-state segments in a metered-weight time series.

    A row is considered steady when the rolling coefficient of variation
    (in percent) stays below ``std_threshold`` and the weight reading is
    above a small floor (0.1, filtering idle/zero readings).  Consecutive
    steady rows are grouped into segments; only segments lasting at least
    ``duration_threshold`` seconds are reported and kept marked.
    """

    def __init__(self):
        # Stateless detector: all configuration is passed per call.
        pass

    def detect_steady_state(self, df, weight_col='ç±³é', window_size=20, std_threshold=0.5, duration_threshold=60):
        """Mark steady-state rows in *df* and extract steady segments.

        :param df: DataFrame with a ``time`` column and *weight_col*.
        :param weight_col: name of the metered-weight column.
        :param window_size: rolling window size in rows (nominally seconds).
        :param std_threshold: fluctuation-range threshold in percent
            (rolling std / rolling mean * 100).
        :param duration_threshold: minimum segment duration in seconds.
        :return: ``(df, steady_segments)`` — *df* gains the helper columns
            ``rolling_std``, ``rolling_mean``, ``fluctuation_range`` and
            ``is_steady`` (mutated in place, as before); the list holds one
            dict per qualifying segment (times, indices, statistics and a
            confidence score clamped to [50, 100]).
        """
        if df is None or df.empty:
            return df, []

        # Ensure the time column is datetime so duration arithmetic works.
        df['time'] = pd.to_datetime(df['time'])

        # Rolling statistics over the configured window.
        df['rolling_std'] = df[weight_col].rolling(window=window_size, min_periods=5).std()
        df['rolling_mean'] = df[weight_col].rolling(window=window_size, min_periods=5).mean()

        # Fluctuation range = coefficient of variation, in percent.
        # Early rows (< min_periods) become NaN and are treated as steady
        # via fillna(0), matching the original behaviour.
        df['fluctuation_range'] = (df['rolling_std'] / df['rolling_mean']) * 100
        df['fluctuation_range'] = df['fluctuation_range'].fillna(0)

        # Flag individual steady rows.
        df['is_steady'] = 0
        steady_condition = (
            (df['fluctuation_range'] < std_threshold) &
            (df[weight_col] >= 0.1)
        )
        df.loc[steady_condition, 'is_steady'] = 1

        def _finalize(segment, end_time, end_idx):
            """Close *segment*; return it when long enough, else None."""
            segment['end_time'] = end_time
            segment['end_idx'] = end_idx
            duration = (segment['end_time'] - segment['start_time']).total_seconds()
            if duration < duration_threshold:
                return None
            weights_array = np.array(segment['weights'])
            segment['duration'] = duration
            segment['mean_weight'] = np.mean(weights_array)
            segment['std_weight'] = np.std(weights_array)
            segment['min_weight'] = np.min(weights_array)
            segment['max_weight'] = np.max(weights_array)
            segment['fluctuation_range'] = (segment['std_weight'] / segment['mean_weight']) * 100
            # Confidence: 100 at zero fluctuation, decreasing linearly,
            # clamped to [50, 100].
            confidence = 100 - (segment['fluctuation_range'] / std_threshold) * 50
            segment['confidence'] = max(50, min(100, confidence))
            return segment

        # Group consecutive steady rows into segments.  Iterate by POSITION
        # rather than by index label: upstream integration drops rows with
        # dropna() and leaves a non-contiguous index, so label arithmetic
        # like df.loc[i-1] could raise KeyError.
        steady_segments = []
        current_segment = {}
        times = df['time']
        weights = df[weight_col]
        flags = df['is_steady']
        labels = df.index

        for pos in range(len(df)):
            if flags.iloc[pos] == 1:
                if not current_segment:
                    current_segment = {
                        'start_time': times.iloc[pos],
                        'start_idx': labels[pos],
                        'weights': [weights.iloc[pos]]
                    }
                else:
                    current_segment['weights'].append(weights.iloc[pos])
            elif current_segment:
                # Segment ends at the previous row (or this row at pos 0).
                end_pos = pos - 1 if pos > 0 else pos
                finished = _finalize(current_segment, times.iloc[end_pos], labels[end_pos])
                if finished is not None:
                    steady_segments.append(finished)
                current_segment = {}

        # Close a segment that runs to the end of the data.
        if current_segment:
            finished = _finalize(current_segment, times.iloc[-1], labels[-1])
            if finished is not None:
                steady_segments.append(finished)

        # Re-mark only the qualifying segments (label-based slice is
        # inclusive at both ends, matching the original behaviour).
        for segment in steady_segments:
            df.loc[segment['start_idx']:segment['end_idx'], 'is_steady'] = 1

        return df, steady_segments
| | | |
# Try to import the deep-learning stack.  The rest of the module checks
# `use_deep_learning` before touching torch, so a missing install only
# degrades the page instead of crashing it.
use_deep_learning = False
try:
    import torch
    import torch.nn as nn
    import torch.optim as optim
    use_deep_learning = True
    # Prefer GPU when available, otherwise fall back to CPU.
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    print(f"使ç¨è®¾å¤: {device}")

    # PyTorch model definitions (only exist when torch is importable).
    class LSTMModel(nn.Module):
        """Stacked LSTM -> ReLU dense head with dropout -> scalar output."""
        def __init__(self, input_dim, hidden_dim=64, num_layers=2):
            super(LSTMModel, self).__init__()
            self.lstm = nn.LSTM(input_dim, hidden_dim, num_layers, batch_first=True)
            self.fc1 = nn.Linear(hidden_dim, 32)
            self.dropout = nn.Dropout(0.2)
            self.fc2 = nn.Linear(32, 1)

        def forward(self, x):
            # x is (batch, seq_len, input_dim) because batch_first=True.
            out, _ = self.lstm(x)
            # Keep only the last time step for the regression head.
            out = out[:, -1, :]
            out = torch.relu(self.fc1(out))
            out = self.dropout(out)
            out = self.fc2(out)
            return out

    class GRUModel(nn.Module):
        """Same head as LSTMModel, but with a GRU backbone."""
        def __init__(self, input_dim, hidden_dim=64, num_layers=2):
            super(GRUModel, self).__init__()
            self.gru = nn.GRU(input_dim, hidden_dim, num_layers, batch_first=True)
            self.fc1 = nn.Linear(hidden_dim, 32)
            self.dropout = nn.Dropout(0.2)
            self.fc2 = nn.Linear(32, 1)

        def forward(self, x):
            out, _ = self.gru(x)
            out = out[:, -1, :]
            out = torch.relu(self.fc1(out))
            out = self.dropout(out)
            out = self.fc2(out)
            return out

    class BiLSTMModel(nn.Module):
        """Bidirectional LSTM variant; head input width is 2 * hidden_dim."""
        def __init__(self, input_dim, hidden_dim=64, num_layers=2):
            super(BiLSTMModel, self).__init__()
            self.bilstm = nn.LSTM(input_dim, hidden_dim, num_layers, batch_first=True, bidirectional=True)
            self.fc1 = nn.Linear(hidden_dim * 2, 32)
            self.dropout = nn.Dropout(0.2)
            self.fc2 = nn.Linear(32, 1)

        def forward(self, x):
            out, _ = self.bilstm(x)
            out = out[:, -1, :]
            out = torch.relu(self.fc1(out))
            out = self.dropout(out)
            out = self.fc2(out)
            return out

    # NOTE(review): st.success/st.warning at import time run on every module
    # load, before any page function — confirm this side effect is intended.
    st.success(f"使ç¨è®¾å¤: {device}")
except ImportError:
    st.warning("æªæ£æµå°PyTorchï¼æ·±åº¦å¦ä¹ 模åå°ä¸å¯ç¨ã请å®è£pytorch以使ç¨LSTM/GRU模åã")
| | | |
def show_metered_weight_deep_learning():
    """Streamlit page: deep-learning (LSTM/GRU/BiLSTM) prediction of
    metered weight from extruder process data.

    Flow: query raw data -> cache in session state -> integrate the three
    sources on a common time axis -> steady-state detection/filtering ->
    sequence-model training and evaluation -> model persistence and CSV
    export.  Degrades to a warning when PyTorch is unavailable.
    """
    # Initialise the data-access services.
    extruder_service = ExtruderService()
    main_process_service = MainProcessService()

    # Page title.
    st.title("ç±³éæ·±åº¦å¦ä¹ 颿µ")

    # Seed session-state defaults so widgets keyed below have values.
    if 'mdl_start_date' not in st.session_state:
        st.session_state['mdl_start_date'] = datetime.now().date() - timedelta(days=7)
    if 'mdl_end_date' not in st.session_state:
        st.session_state['mdl_end_date'] = datetime.now().date()
    if 'mdl_quick_select' not in st.session_state:
        st.session_state['mdl_quick_select'] = "æè¿7天"
    if 'mdl_model_type' not in st.session_state:
        st.session_state['mdl_model_type'] = 'LSTM'
    if 'mdl_sequence_length' not in st.session_state:
        st.session_state['mdl_sequence_length'] = 10
    if 'mdl_time_offset' not in st.session_state:
        st.session_state['mdl_time_offset'] = 0
    if 'mdl_product_variety' not in st.session_state:
        st.session_state['mdl_product_variety'] = 'all'
    if 'mdl_filter_transient' not in st.session_state:
        st.session_state['mdl_filter_transient'] = True

    # Default feature columns (display names produced by integrate_data).
    default_features = ['èºæè½¬é', 'æºå¤´åå', 'æµç¨ä¸»é', 'èºææ¸©åº¦',
                        'åæºçæ¸©åº¦', 'åæºçæ¸©åº¦', 'æºå¤´æ¸©åº¦']

    # Callback: quick-select button pressed — set both dates accordingly.
    def update_dates(qs):
        st.session_state['mdl_quick_select'] = qs
        today = datetime.now().date()
        if qs == "ä»å¤©":
            st.session_state['mdl_start_date'] = today
            st.session_state['mdl_end_date'] = today
        elif qs == "æè¿3天":
            st.session_state['mdl_start_date'] = today - timedelta(days=3)
            st.session_state['mdl_end_date'] = today
        elif qs == "æè¿7天":
            st.session_state['mdl_start_date'] = today - timedelta(days=7)
            st.session_state['mdl_end_date'] = today
        elif qs == "æè¿30天":
            st.session_state['mdl_start_date'] = today - timedelta(days=30)
            st.session_state['mdl_end_date'] = today

    # Callback: manual date edit switches quick-select to "custom".
    def on_date_change():
        st.session_state['mdl_quick_select'] = "èªå®ä¹"

    # Query configuration area.
    with st.expander("ð æ¥è¯¢éç½®", expanded=True):
        # Custom CSS so the button/date columns wrap responsively.
        st.markdown("""
        <style>
        /* force columns to wrap inside the expander */
        [data-testid="stExpander"] [data-testid="column"] {
            flex: 1 1 120px !important;
            min-width: 120px !important;
        }
        /* give the two date-input columns a little more width */
        @media (min-width: 768px) {
            [data-testid="stExpander"] [data-testid="column"]:nth-child(6),
            [data-testid="stExpander"] [data-testid="column"]:nth-child(7) {
                flex: 2 1 180px !important;
                min-width: 180px !important;
            }
        }
        </style>
        """, unsafe_allow_html=True)

        # Layout: five quick-select buttons, two date inputs, one action button.
        cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1])

        options = ["ä»å¤©", "æè¿3天", "æè¿7天", "æè¿30天", "èªå®ä¹"]
        for i, option in enumerate(options):
            with cols[i]:
                # Highlight whichever quick-select option is active.
                button_type = "primary" if st.session_state['mdl_quick_select'] == option else "secondary"
                if st.button(option, key=f"btn_mdl_{option}", width='stretch', type=button_type):
                    update_dates(option)
                    st.rerun()

        with cols[5]:
            start_date = st.date_input(
                "å¼å§æ¥æ",
                label_visibility="collapsed",
                key="mdl_start_date",
                on_change=on_date_change
            )

        with cols[6]:
            end_date = st.date_input(
                "ç»ææ¥æ",
                label_visibility="collapsed",
                key="mdl_end_date",
                on_change=on_date_change
            )

        with cols[7]:
            query_button = st.button("ð å¼å§åæ", key="mdl_query", width='stretch')

        # Advanced configuration.
        st.markdown("---")
        advanced_cols = st.columns(2)

        with advanced_cols[0]:
            st.write("ð¤ **模åéç½®**")
            # Model-type selector only makes sense when torch is present.
            if use_deep_learning:
                model_options = ['LSTM', 'GRU', 'BiLSTM']
                model_type = st.selectbox(
                    "模åç±»å",
                    options=model_options,
                    key="mdl_model_type",
                    help="éæ©ç¨äºé¢æµç深度å¦ä¹ 模åç±»å"
                )

                # Length of the input windows fed to the RNN.
                # NOTE(review): passing both value= and key= for the same
                # session-state key makes Streamlit warn — confirm intended.
                sequence_length = st.slider(
                    "åºåé¿åº¦",
                    min_value=5,
                    max_value=30,
                    value=st.session_state['mdl_sequence_length'],
                    step=1,
                    help="ç¨äºæ·±åº¦å¦ä¹ 模åçæ¶é´åºåé¿åº¦",
                    key="mdl_sequence_length"
                )
            else:
                st.warning("æªæ£æµå°PyTorchï¼æ æ³ä½¿ç¨æ·±åº¦å¦ä¹ 模å")

        with advanced_cols[1]:
            st.write("â±ï¸ **æ¶é´å»¶è¿éç½®**")
            # Forward time shift (minutes) that aligns extruder data with
            # the downstream weighing station.
            time_offset = st.slider(
                "æ¤åºæ°æ®åååç§» (åé)",
                min_value=0,
                max_value=60,
                value=st.session_state['mdl_time_offset'],
                step=1,
                help="ç±äºèé¢ä»æ¤åºå°ç§°ééè¦æ¶é´ï¼å°æ¤åºæºæ°æ®ååç§»å¨ï¼ä½¿å¶ä¸ç±³éæ°æ®å¨æ¶é´è½´ä¸å¯¹é½ãåç§»éä¼å½±å颿µåç¡®æ§ã",
                key="mdl_time_offset"
            )

        # Steady-state detection configuration.
        st.markdown("---")
        steady_cols = st.columns(3)
        with steady_cols[0]:
            st.write("âï¸ **稳æè¯å«éç½®**")
            use_steady_data = st.checkbox(
                "ä»ä½¿ç¨ç¨³ææ°æ®è¿è¡è®ç»",
                value=True,
                key="mdl_use_steady_data",
                help="å¯ç¨åï¼åªä½¿ç¨ç±³éç¨³ææ¶æ®µçæ°æ®è¿è¡æ¨¡åè®ç»å颿µ"
            )

        with steady_cols[1]:
            st.write("ð **稳æåæ°**")
            steady_window = st.slider(
                "æ»å¨çªå£å¤§å° (ç§)",
                min_value=5,
                max_value=60,
                value=20,
                step=5,
                key="mdl_steady_window",
                help="ç¨äºç¨³æè¯å«çæ»å¨çªå£å¤§å°"
            )

        with steady_cols[2]:
            st.write("ð **稳æéå¼**")
            steady_threshold = st.slider(
                "æ³¢å¨éå¼ (%)",
                min_value=0.1,
                max_value=2.0,
                value=0.5,
                step=0.1,
                key="mdl_steady_threshold",
                help="稳æè¯å«çæ³¢å¨èå´éå¼"
            )



    # Expand the selected dates to full-day datetime bounds.
    start_dt = datetime.combine(start_date, datetime.min.time())
    end_dt = datetime.combine(end_date, datetime.max.time())

    # Query handling: fetch raw data and cache it for reruns.
    if query_button:
        with st.spinner("æ£å¨è·åæ°æ®..."):
            # 1. Full extruder dataset.
            df_extruder_full = extruder_service.get_extruder_data(start_dt, end_dt)

            # 2. Main-process control data.
            df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt)

            df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt)

            # Abort early when every source came back empty.
            has_data = any([
                df_extruder_full is not None and not df_extruder_full.empty,
                df_main_speed is not None and not df_main_speed.empty,
                df_temp is not None and not df_temp.empty
            ])

            if not has_data:
                st.warning("æéæ¶é´æ®µåæªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã")
                return

            # Cache in session state so subsequent reruns skip the queries.
            st.session_state['cached_extruder_full'] = df_extruder_full
            st.session_state['cached_main_speed'] = df_main_speed
            st.session_state['cached_temp'] = df_temp
            st.session_state['last_query_start'] = start_dt
            st.session_state['last_query_end'] = end_dt

    # Data processing and analysis — runs whenever cached data exists.
    if all(key in st.session_state for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp']):
        with st.spinner("æ£å¨åææ°æ®..."):
            # Pull the cached frames.
            df_extruder_full = st.session_state['cached_extruder_full']
            df_main_speed = st.session_state['cached_main_speed']
            df_temp = st.session_state['cached_temp']

            # Re-check that at least one source has rows.
            has_data = any([
                df_extruder_full is not None and not df_extruder_full.empty,
                df_main_speed is not None and not df_main_speed.empty,
                df_temp is not None and not df_temp.empty
            ])

            if not has_data:
                st.warning("æéæ¶é´æ®µåæªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã")
                return

            # Integrate the three sources onto one time axis.
            def integrate_data(df_extruder_full, df_main_speed, df_temp, time_offset):
                # Extruder data is the backbone; abort without it.
                if df_extruder_full is None or df_extruder_full.empty:
                    return None

                # Shift extruder timestamps forward by the configured offset.
                offset_delta = timedelta(minutes=time_offset)
                df_extruder_shifted = df_extruder_full.copy()
                df_extruder_shifted['time'] = df_extruder_shifted['time'] + offset_delta

                # Base frame: time, metered weight, screw speed, head pressure.
                df_merged = df_extruder_shifted[['time', 'metered_weight', 'screw_speed_actual', 'head_pressure']].copy()

                # Merge main-process speed (nearest match within 1 minute).
                if df_main_speed is not None and not df_main_speed.empty:
                    df_main_speed_shifted = df_main_speed.copy()
                    df_main_speed_shifted['time'] = df_main_speed_shifted['time'] + offset_delta

                    df_main_speed_shifted = df_main_speed_shifted[['time', 'process_main_speed']]
                    df_merged = pd.merge_asof(
                        df_merged.sort_values('time'),
                        df_main_speed_shifted.sort_values('time'),
                        on='time',
                        direction='nearest',
                        tolerance=pd.Timedelta('1min')
                    )

                # Merge temperature data (nearest match within 1 minute).
                if df_temp is not None and not df_temp.empty:
                    df_temp_shifted = df_temp.copy()
                    df_temp_shifted['time'] = df_temp_shifted['time'] + offset_delta

                    temp_cols = ['time', 'nakata_extruder_screw_display_temp',
                                 'nakata_extruder_rear_barrel_display_temp',
                                 'nakata_extruder_front_barrel_display_temp',
                                 'nakata_extruder_head_display_temp']
                    df_temp_subset = df_temp_shifted[temp_cols].copy()
                    df_merged = pd.merge_asof(
                        df_merged.sort_values('time'),
                        df_temp_subset.sort_values('time'),
                        on='time',
                        direction='nearest',
                        tolerance=pd.Timedelta('1min')
                    )

                # Rename columns to their display names.
                df_merged.rename(columns={
                    'screw_speed_actual': 'èºæè½¬é',
                    'head_pressure': 'æºå¤´åå',
                    'process_main_speed': 'æµç¨ä¸»é',
                    'nakata_extruder_screw_display_temp': 'èºææ¸©åº¦',
                    'nakata_extruder_rear_barrel_display_temp': 'åæºçæ¸©åº¦',
                    'nakata_extruder_front_barrel_display_temp': 'åæºçæ¸©åº¦',
                    'nakata_extruder_head_display_temp': 'æºå¤´æ¸©åº¦'
                }, inplace=True)

                # Drop rows without a metered-weight reading.
                # NOTE(review): this leaves a non-contiguous index — downstream
                # consumers must be positional-safe.
                df_merged.dropna(subset=['metered_weight'], inplace=True)

                return df_merged

            # Run the integration with the configured offset.
            df_analysis = integrate_data(df_extruder_full, df_main_speed, df_temp, st.session_state['mdl_time_offset'])

            if df_analysis is None or df_analysis.empty:
                st.warning("æ°æ®æ´å失败ï¼è¯·æ£æ¥æ°æ®è´¨éæè°æ´æ¶é´èå´ã")
                return

            # Rename the weight column to its display name.
            df_analysis.rename(columns={'metered_weight': 'ç±³é'}, inplace=True)

            # Steady-state detection.
            steady_detector = SteadyStateDetector()

            # Read the detection parameters from session state.
            use_steady_data = st.session_state.get('mdl_use_steady_data', True)
            steady_window = st.session_state.get('mdl_steady_window', 20)
            steady_threshold = st.session_state.get('mdl_steady_threshold', 0.5)

            # Run detection — adds the is_steady flag column.
            df_analysis_with_steady, steady_segments = steady_detector.detect_steady_state(
                df_analysis,
                weight_col='ç±³é',
                window_size=steady_window,
                std_threshold=steady_threshold
            )

            # Replace df_analysis with the flagged frame.
            df_analysis = df_analysis_with_steady



            # Deep-learning prediction analysis.
            st.subheader("ð æ·±åº¦å¦ä¹ 颿µåæ")

            if use_deep_learning:
                # Verify all default features survived the integration.
                missing_features = [f for f in default_features if f not in df_analysis.columns]
                if missing_features:
                    st.warning(f"æ°æ®ä¸ç¼ºå°ä»¥ä¸ç¹å¾: {', '.join(missing_features)}")
                else:
                    # Assemble the modelling frame.
                    required_cols = default_features + ['ç±³é', 'is_steady']
                    combined = df_analysis[required_cols].copy()

                    # Optionally keep only steady-state rows for training.
                    use_steady_data = st.session_state.get('mdl_use_steady_data', True)
                    if use_steady_data:
                        combined = combined[combined['is_steady'] == 1]
                        st.info(f"å·²è¿æ»¤éç¨³ææ°æ®ï¼ä½¿ç¨ {len(combined)} æ¡ç¨³ææ°æ®è¿è¡è®ç»")

                    # Drop any remaining NaN rows.
                    combined_clean = combined.dropna()

                    # Require a minimal sample size before training.
                    if len(combined_clean) < 30:
                        st.warning("æ°æ®éä¸è¶³ï¼æ æ³è¿è¡ææç颿µåæ")
                        if use_steady_data:
                            st.info("建议ï¼å°è¯è°æ´ç¨³æè¯å«åæ°æç¦ç¨'ä»ä½¿ç¨ç¨³ææ°æ®'é项")
                    else:
                        # Steady-state coverage statistics.
                        total_data = len(df_analysis)
                        steady_data = len(combined_clean)
                        steady_ratio = (steady_data / total_data * 100) if total_data > 0 else 0

                        metrics_cols = st.columns(3)
                        with metrics_cols[0]:
                            st.metric("æ»æ°æ®é", total_data)
                        with metrics_cols[1]:
                            st.metric("ç¨³ææ°æ®é", steady_data)
                        with metrics_cols[2]:
                            st.metric("ç¨³ææ°æ®æ¯ä¾", f"{steady_ratio:.1f}%")

                        # Steady-state visualisation.
                        st.markdown("---")
                        st.subheader("ð ç¨³ææ°æ®åå¸")

                        fig_steady = go.Figure()

                        # Raw metered-weight trace.
                        fig_steady.add_trace(go.Scatter(
                            x=df_analysis['time'],
                            y=df_analysis['ç±³é'],
                            name='åå§ç±³é',
                            mode='lines',
                            line=dict(color='lightgray', width=1)
                        ))

                        # Steady points.
                        steady_data_points = df_analysis[df_analysis['is_steady'] == 1]
                        fig_steady.add_trace(go.Scatter(
                            x=steady_data_points['time'],
                            y=steady_data_points['ç±³é'],
                            name='稳æç±³é',
                            mode='markers',
                            marker=dict(color='green', size=3, opacity=0.6)
                        ))

                        # Non-steady points.
                        non_steady_data_points = df_analysis[df_analysis['is_steady'] == 0]
                        fig_steady.add_trace(go.Scatter(
                            x=non_steady_data_points['time'],
                            y=non_steady_data_points['ç±³é'],
                            name='é稳æç±³é',
                            mode='markers',
                            marker=dict(color='red', size=3, opacity=0.6)
                        ))

                        # Chart layout.
                        fig_steady.update_layout(
                            title="ç±³éæ°æ®ç¨³æåå¸",
                            xaxis=dict(title="æ¶é´"),
                            yaxis=dict(title="ç±³é (Kg/m)"),
                            legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
                            height=500
                        )

                        st.plotly_chart(fig_steady, use_container_width=True)

                        # Separate features and target.
                        X_clean = combined_clean[default_features]
                        y_clean = combined_clean['ç±³é']

                        # Build (window, next-value) pairs for the RNNs.
                        def create_sequences(X, y, sequence_length):
                            X_seq = []
                            y_seq = []
                            for i in range(len(X) - sequence_length):
                                X_seq.append(X[i:i+sequence_length])
                                y_seq.append(y[i+sequence_length])
                            return np.array(X_seq), np.array(y_seq)

                        # Standardise features; min-max scale the target.
                        scaler_X = StandardScaler()
                        scaler_y = MinMaxScaler()

                        X_scaled = scaler_X.fit_transform(X_clean)
                        y_scaled = scaler_y.fit_transform(y_clean.values.reshape(-1, 1)).ravel()

                        # Build the sequence dataset.
                        sequence_length = st.session_state['mdl_sequence_length']
                        X_seq, y_seq = create_sequences(X_scaled, y_scaled, sequence_length)

                        # Require enough sequences to train meaningfully.
                        if len(X_seq) < 20:
                            st.warning("åºåæ°æ®éä¸è¶³ï¼æ æ³è¿è¡ææç深度å¦ä¹ è®ç»")
                        else:
                            # Chronological 80/20 train/test split (no shuffling).
                            train_size = int(len(X_seq) * 0.8)
                            X_train_seq, X_test_seq = X_seq[:train_size], X_seq[train_size:]
                            y_train_seq, y_test_seq = y_seq[:train_size], y_seq[train_size:]

                            # Convert to PyTorch tensors on the selected device.
                            X_train_tensor = torch.tensor(X_train_seq, dtype=torch.float32).to(device)
                            y_train_tensor = torch.tensor(y_train_seq, dtype=torch.float32).unsqueeze(1).to(device)
                            X_test_tensor = torch.tensor(X_test_seq, dtype=torch.float32).to(device)
                            y_test_tensor = torch.tensor(y_test_seq, dtype=torch.float32).unsqueeze(1).to(device)

                            # Instantiate the selected model.
                            input_dim = X_scaled.shape[1]

                            if st.session_state['mdl_model_type'] == 'LSTM':
                                model = LSTMModel(input_dim).to(device)
                            elif st.session_state['mdl_model_type'] == 'GRU':
                                model = GRUModel(input_dim).to(device)
                            elif st.session_state['mdl_model_type'] == 'BiLSTM':
                                model = BiLSTMModel(input_dim).to(device)

                            # Loss and optimiser.
                            criterion = nn.MSELoss()
                            optimizer = optim.Adam(model.parameters(), lr=0.001)

                            # Training configuration.
                            num_epochs = 50
                            batch_size = 32  # NOTE(review): unused — the loop below trains full-batch

                            # Progress-reporting widgets.
                            progress_bar = st.progress(0)
                            status_text = st.empty()

                            for epoch in range(num_epochs):
                                model.train()
                                optimizer.zero_grad()

                                # Forward pass (entire training set at once).
                                outputs = model(X_train_tensor)
                                loss = criterion(outputs, y_train_tensor)

                                # Backward pass and parameter update.
                                loss.backward()
                                optimizer.step()

                                # Progress update.
                                progress_bar.progress((epoch + 1) / num_epochs)
                                status_text.text(f"è®ç»ä¸: 第 {epoch + 1}/{num_epochs} è½®, æå¤±: {loss.item():.6f}")

                            # Inference on the held-out test sequences.
                            model.eval()
                            with torch.no_grad():
                                y_pred_scaled_tensor = model(X_test_tensor)
                                y_pred_scaled = y_pred_scaled_tensor.cpu().numpy().ravel()

                            # Invert the target scaling back to Kg/m.
                            y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel()
                            y_test_actual = scaler_y.inverse_transform(y_test_seq.reshape(-1, 1)).ravel()

                            # Evaluation metrics.
                            r2 = r2_score(y_test_actual, y_pred)
                            mse = mean_squared_error(y_test_actual, y_pred)
                            mae = mean_absolute_error(y_test_actual, y_pred)
                            rmse = np.sqrt(mse)

                            # Display model performance.
                            metrics_cols = st.columns(2)
                            with metrics_cols[0]:
                                st.metric("R² å¾å", f"{r2:.4f}")
                                st.metric("åæ¹è¯¯å·® (MSE)", f"{mse:.6f}")
                            with metrics_cols[1]:
                                st.metric("å¹³åç»å¯¹è¯¯å·® (MAE)", f"{mae:.6f}")
                                st.metric("åæ¹æ ¹è¯¯å·® (RMSE)", f"{rmse:.6f}")

                            # Steady-state caveat for the user.
                            use_steady_data = st.session_state.get('mdl_use_steady_data', True)
                            if use_steady_data:
                                st.info("â ï¸ æ¨¡åä»ä½¿ç¨ç¨³ææ°æ®è¿è¡è®ç»ï¼å¨é稳æå·¥åµä¸é¢æµç»æå¯è½ä¸åç¡®")

                            # --- Actual vs predicted comparison ---
                            st.subheader("ð å®éå¼ä¸é¢æµå¼å¯¹æ¯")

                            # Build the comparison frame.
                            compare_df = pd.DataFrame({
                                'å®éå¼': y_test_actual,
                                '颿µå¼': y_pred
                            })
                            compare_df = compare_df.sort_index()

                            # Comparison chart.
                            fig_compare = go.Figure()
                            fig_compare.add_trace(go.Scatter(
                                x=compare_df.index,
                                y=compare_df['å®éå¼'],
                                name='å®éå¼',
                                mode='lines+markers',
                                line=dict(color='blue', width=2)
                            ))
                            fig_compare.add_trace(go.Scatter(
                                x=compare_df.index,
                                y=compare_df['颿µå¼'],
                                name='颿µå¼',
                                mode='lines+markers',
                                line=dict(color='red', width=2, dash='dash')
                            ))
                            fig_compare.update_layout(
                                title=f'æµè¯é: å®éç±³é vs 颿µç±³é ({st.session_state["mdl_model_type"]})',
                                xaxis=dict(title='æ ·æ¬ç´¢å¼'),
                                yaxis=dict(title='ç±³é (Kg/m)'),
                                legend=dict(orientation='h', yanchor='bottom', y=1.02, xanchor='right', x=1),
                                height=400
                            )
                            st.plotly_chart(fig_compare, width='stretch')

                            # --- Residual analysis ---
                            st.subheader("ð æ®å·®åæ")

                            # Residuals = actual - predicted.
                            residuals = y_test_actual - y_pred

                            # Residual scatter plot.
                            fig_residual = go.Figure()
                            fig_residual.add_trace(go.Scatter(
                                x=y_pred,
                                y=residuals,
                                mode='markers',
                                marker=dict(color='green', size=8, opacity=0.6)
                            ))
                            # Zero-residual reference line.
                            fig_residual.add_shape(
                                type="line",
                                x0=y_pred.min(),
                                y0=0,
                                x1=y_pred.max(),
                                y1=0,
                                line=dict(color="red", width=2, dash="dash")
                            )
                            fig_residual.update_layout(
                                title='æ®å·®å¾',
                                xaxis=dict(title='颿µå¼'),
                                yaxis=dict(title='æ®å·®'),
                                height=400
                            )
                            st.plotly_chart(fig_residual, width='stretch')

                            # --- Model persistence ---
                            st.subheader("ð¾ æ¨¡åä¿å")

                            # Ensure the model directory exists.
                            model_dir = "saved_models"
                            os.makedirs(model_dir, exist_ok=True)

                            # Bundle the model with everything needed to reuse it.
                            model_info = {
                                'model': model,
                                'features': default_features,
                                'scaler_X': scaler_X,
                                'scaler_y': scaler_y,
                                'model_type': st.session_state['mdl_model_type'],
                                'sequence_length': sequence_length,
                                'created_at': datetime.now(),
                                'r2_score': r2,
                                'mse': mse,
                                'mae': mae,
                                'rmse': rmse,
                                'use_steady_data': use_steady_data
                            }

                            # Timestamped filename keeps saves unique.
                            model_filename = f"deep_{st.session_state['mdl_model_type'].lower()}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.joblib"
                            model_path = os.path.join(model_dir, model_filename)

                            # Persist with joblib.
                            # NOTE(review): joblib-pickling a live torch nn.Module
                            # ties the file to the installed torch version —
                            # confirm torch.save(state_dict) is not preferred.
                            joblib.dump(model_info, model_path)

                            st.success(f"模åå·²æåä¿å: {model_filename}")
                            st.info(f"ä¿åè·¯å¾: {model_path}")
            else:
                st.warning("æªæ£æµå°PyTorchï¼æ æ³ä½¿ç¨æ·±åº¦å¦ä¹ 颿µåè½ã请确ä¿å·²æ£ç¡®å®è£PyTorchåºã")

            # --- Data preview ---
            st.subheader("ð æ°æ®é¢è§")
            st.dataframe(df_analysis.head(20), width='stretch')

            # --- Data export ---
            st.subheader("ð¾ å¯¼åºæ°æ®")
            # Serialise the integrated dataset to CSV.
            csv = df_analysis.to_csv(index=False)
            # Download button.
            st.download_button(
                label="å¯¼åºæ´ååçæ°æ® (CSV)",
                data=csv,
                file_name=f"metered_weight_deep_learning_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv",
                mime="text/csv",
                help="ç¹å»æé®å¯¼åºæ´ååçç±³éåææ°æ®"
            )

    else:
        # No cached data yet: prompt the user to run a query first.
        st.info("è¯·éæ©æ¶é´èå´å¹¶ç¹å»'å¼å§åæ'æé®è·åæ°æ®ã")
| 对比新文件 (compare: new file) |
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | import joblib |
| | | import os |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.main_process_service import MainProcessService |
| | | |
# Optional dependency: deep-learning model support is enabled only when
# torch imports successfully; otherwise the deep-learning code paths are disabled.
try:
    import torch
    TORCH_AVAILABLE = True
except ImportError:
    TORCH_AVAILABLE = False
| | | |
| | | |
| | | # 稳æè¯å«ç±» |
class SteadyStateDetector:
    """Detects steady-state segments in a metered-weight time series.

    A sample is considered "steady" when its rolling relative fluctuation
    (rolling std / rolling mean, in percent) is below ``std_threshold`` and
    the weight itself is at least 0.1.  Consecutive steady samples lasting
    ``duration_threshold`` seconds or more are reported as segments.
    """

    def detect_steady_state(self, df, weight_col='ç±³é', window_size=20, std_threshold=0.5, duration_threshold=60):
        """
        Mark steady-state stretches of the weight series.

        :param df: DataFrame with a ``time`` column and the weight column.
        :param weight_col: name of the weight column.
        :param window_size: rolling window size (samples; assumed ~1 per second).
        :param std_threshold: relative-fluctuation threshold in percent.
        :param duration_threshold: minimum segment duration in seconds.
        :return: (annotated copy of ``df`` with ``is_steady`` etc., list of
                 steady-segment statistics dicts)
        """
        if df is None or df.empty:
            return df, []

        # Work on a copy: the original implementation mutated the caller's
        # frame in place, which is a surprising side effect.
        work = df.copy()
        work['time'] = pd.to_datetime(work['time'])

        # Rolling statistics; min_periods keeps the warm-up short.
        roll = work[weight_col].rolling(window=window_size, min_periods=5)
        work['rolling_std'] = roll.std()
        work['rolling_mean'] = roll.mean()

        # Relative fluctuation in percent; NaN (warm-up or zero mean) -> 0.
        work['fluctuation_range'] = ((work['rolling_std'] / work['rolling_mean']) * 100).fillna(0)

        # Per-sample steady flag.
        work['is_steady'] = 0
        steady_mask = (work['fluctuation_range'] < std_threshold) & (work[weight_col] >= 0.1)
        work.loc[steady_mask, 'is_steady'] = 1

        # Scan positionally — robust to non-RangeIndex frames, unlike the
        # original label-based `df.loc[i-1, ...]` arithmetic.
        times = list(work['time'])
        weights = list(work[weight_col])
        flags = list(work['is_steady'])

        steady_segments = []
        seg_start = None
        for pos, flag in enumerate(flags):
            if flag == 1:
                if seg_start is None:
                    seg_start = pos
            elif seg_start is not None:
                segment = self._finalize_segment(
                    times, weights, seg_start, pos - 1, std_threshold, duration_threshold)
                if segment is not None:
                    steady_segments.append(segment)
                seg_start = None

        # Close a segment that runs to the end of the data.
        if seg_start is not None:
            segment = self._finalize_segment(
                times, weights, seg_start, len(flags) - 1, std_threshold, duration_threshold)
            if segment is not None:
                steady_segments.append(segment)

        # NOTE(review): as in the original, samples in steady runs shorter
        # than duration_threshold keep is_steady == 1 even though no segment
        # is reported for them — confirm whether that is intended.
        return work, steady_segments

    def _finalize_segment(self, times, weights, start_pos, end_pos, std_threshold, duration_threshold):
        """Build the statistics dict for one candidate steady run.

        Returns None when the run is shorter than ``duration_threshold``
        seconds.  Replaces the two duplicated finalization blocks of the
        original implementation.
        """
        start_time = times[start_pos]
        end_time = times[end_pos]
        duration = (end_time - start_time).total_seconds()
        if duration < duration_threshold:
            return None

        values = np.array(weights[start_pos:end_pos + 1], dtype=float)
        mean_weight = float(np.mean(values))
        std_weight = float(np.std(values))
        fluctuation = (std_weight / mean_weight) * 100

        # Confidence: 100 at zero fluctuation, linearly decreasing, clamped
        # to [50, 100] (same formula as the original).
        confidence = max(50, min(100, 100 - (fluctuation / std_threshold) * 50))

        return {
            'start_time': start_time,
            'end_time': end_time,
            'start_idx': start_pos,
            'end_idx': end_pos,
            'weights': list(values),
            'duration': duration,
            'mean_weight': mean_weight,
            'std_weight': std_weight,
            'min_weight': float(np.min(values)),
            'max_weight': float(np.max(values)),
            'fluctuation_range': fluctuation,
            'confidence': confidence,
        }
| | | |
| | | |
| | | def show_metered_weight_forecast(): |
| | | # åå§åæå¡ |
| | | extruder_service = ExtruderService() |
| | | main_process_service = MainProcessService() |
| | | |
| | | # 页颿 é¢ |
| | | st.title("ç±³é颿µåæ") |
| | | |
| | | # åå§åä¼è¯ç¶æ |
| | | if 'forecast_start_date' not in st.session_state: |
| | | st.session_state['forecast_start_date'] = datetime.now().date() - timedelta(days=7) |
| | | if 'forecast_end_date' not in st.session_state: |
| | | st.session_state['forecast_end_date'] = datetime.now().date() |
| | | if 'forecast_quick_select' not in st.session_state: |
| | | st.session_state['forecast_quick_select'] = "æè¿7天" |
| | | if 'selected_model' not in st.session_state: |
| | | st.session_state['selected_model'] = None |
| | | if 'selected_model_file' not in st.session_state: |
| | | st.session_state['selected_model_file'] = None |
| | | if 'forecast_use_steady_only' not in st.session_state: |
| | | st.session_state['forecast_use_steady_only'] = True |
| | | if 'forecast_steady_window' not in st.session_state: |
| | | st.session_state['forecast_steady_window'] = 20 |
| | | if 'forecast_steady_threshold' not in st.session_state: |
| | | st.session_state['forecast_steady_threshold'] = 1.5 |
| | | |
| | | # å®ä¹åè°å½æ° |
| | | def update_dates(qs): |
| | | st.session_state['forecast_quick_select'] = qs |
| | | today = datetime.now().date() |
| | | if qs == "ä»å¤©": |
| | | st.session_state['forecast_start_date'] = today |
| | | st.session_state['forecast_end_date'] = today |
| | | elif qs == "æè¿3天": |
| | | st.session_state['forecast_start_date'] = today - timedelta(days=3) |
| | | st.session_state['forecast_end_date'] = today |
| | | elif qs == "æè¿7天": |
| | | st.session_state['forecast_start_date'] = today - timedelta(days=7) |
| | | st.session_state['forecast_end_date'] = today |
| | | elif qs == "æè¿30天": |
| | | st.session_state['forecast_start_date'] = today - timedelta(days=30) |
| | | st.session_state['forecast_end_date'] = today |
| | | |
    def on_date_change():
        # Editing a date manually switches the quick-select state to "custom".
        st.session_state['forecast_quick_select'] = "èªå®ä¹"
| | | |
| | | # æ¥è¯¢æ¡ä»¶åºå |
| | | with st.expander("ð æ°æ®éæ©", expanded=True): |
| | | # æ·»å èªå®ä¹ CSS å®ç°ååºå¼æ¢è¡ |
| | | st.markdown(""" |
| | | <style> |
| | | /* 强å¶å容卿¢è¡ */ |
| | | [data-testid="stExpander"] [data-testid="column"] { |
| | | flex: 1 1 120px !important; |
| | | min-width: 120px !important; |
| | | } |
| | | /* éå¯¹æ¥æè¾å
¥æ¡åç¨å¾®å 宽ä¸ç¹ */ |
| | | @media (min-width: 768px) { |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(6), |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(7) { |
| | | flex: 2 1 180px !important; |
| | | min-width: 180px !important; |
| | | } |
| | | } |
| | | </style> |
| | | """, unsafe_allow_html=True) |
| | | |
| | | # å建å¸å± |
| | | cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1]) |
| | | |
| | | options = ["ä»å¤©", "æè¿3天", "æè¿7天", "æè¿30天", "èªå®ä¹"] |
| | | for i, option in enumerate(options): |
| | | with cols[i]: |
| | | # æ ¹æ®å½åéæ©ç¶æå³å®æé®ç±»å |
| | | button_type = "primary" if st.session_state['forecast_quick_select'] == option else "secondary" |
| | | if st.button(option, key=f"btn_forecast_{option}", width='stretch', type=button_type): |
| | | update_dates(option) |
| | | st.rerun() |
| | | |
| | | with cols[5]: |
| | | start_date = st.date_input( |
| | | "å¼å§æ¥æ", |
| | | label_visibility="collapsed", |
| | | key="forecast_start_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[6]: |
| | | end_date = st.date_input( |
| | | "ç»ææ¥æ", |
| | | label_visibility="collapsed", |
| | | key="forecast_end_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[7]: |
| | | query_button = st.button("ð æ¥è¯¢æ°æ®", key="forecast_query", width='stretch') |
| | | |
| | | # 转æ¢ä¸ºdatetime对象 |
| | | start_dt = datetime.combine(start_date, datetime.min.time()) |
| | | end_dt = datetime.combine(end_date, datetime.max.time()) |
| | | |
| | | # 模åéæ©åºå |
| | | with st.expander("ð æ¨¡åéæ©", expanded=True): |
| | | # å建模åç®å½ï¼å¦æä¸åå¨ï¼ |
| | | model_dir = "saved_models" |
| | | os.makedirs(model_dir, exist_ok=True) |
| | | |
| | | # è·åææå·²ä¿åçæ¨¡åæä»¶ |
| | | model_files = [f for f in os.listdir(model_dir) if f.endswith('.joblib')] |
| | | model_files.sort(reverse=True) # ææ°çæ¨¡åæå¨åé¢ |
| | | |
| | | if not model_files: |
| | | st.warning("å°æªä¿å任使¨¡åï¼è¯·å
è®ç»æ¨¡åå¹¶ä¿åã") |
| | | else: |
| | | # 模åéæ©ä¸ææ¡ |
| | | selected_model_file = st.selectbox( |
| | | "鿩已ä¿åçæ¨¡å", |
| | | options=model_files, |
| | | help="éæ©è¦ç¨äºé¢æµç模åæä»¶", |
| | | key="forecast_selected_model" |
| | | ) |
| | | |
| | | # å 载并æ¾ç¤ºæ¨¡åä¿¡æ¯ |
| | | if selected_model_file: |
| | | model_path = os.path.join(model_dir, selected_model_file) |
| | | model_info = joblib.load(model_path) |
| | | |
| | | # æ¾ç¤ºæ¨¡ååºæ¬ä¿¡æ¯ |
| | | st.subheader("ð æ¨¡åä¿¡æ¯") |
| | | info_cols = st.columns(2) |
| | | |
| | | with info_cols[0]: |
| | | st.metric("模åç±»å", model_info['model_type']) |
| | | st.metric("å建æ¶é´", model_info['created_at'].strftime('%Y-%m-%d %H:%M:%S')) |
| | | st.metric("使ç¨ç¨³ææ°æ®", "æ¯" if model_info.get('use_steady_data', False) else "å¦") |
| | | |
| | | with info_cols[1]: |
| | | st.metric("R² å¾å", f"{model_info['r2_score']:.4f}") |
| | | st.metric("åæ¹è¯¯å·® (MSE)", f"{model_info['mse']:.6f}") |
| | | st.metric("åæ¹æ ¹è¯¯å·® (RMSE)", f"{model_info['rmse']:.6f}") |
| | | |
| | | # æ¾ç¤ºæ¨¡åç¹å¾ |
| | | st.write("ð æ¨¡å使ç¨çç¹å¾:") |
| | | st.code(", ".join(model_info['features'])) |
| | | |
| | | # å¦ææ¯æ·±åº¦å¦ä¹ 模åï¼æ¾ç¤ºåºåé¿åº¦ |
| | | if 'sequence_length' in model_info: |
| | | st.metric("åºåé¿åº¦", model_info['sequence_length']) |
| | | |
| | | # ä¿å模åä¿¡æ¯å°ä¼è¯ç¶æ |
| | | st.session_state['selected_model'] = model_info |
| | | st.session_state['selected_model_file'] = selected_model_file |
| | | |
| | | # 稳æè¯å«é
ç½® |
| | | st.markdown("---") |
| | | st.write("âï¸ **稳æè¯å«é
ç½®**") |
| | | |
| | | steady_cols = st.columns(3) |
| | | with steady_cols[0]: |
| | | st.checkbox( |
| | | "ä»
颿µç¨³ææ°æ®", |
| | | value=st.session_state['forecast_use_steady_only'], |
| | | key="forecast_use_steady_only", |
| | | help="å¯ç¨åï¼åªå¯¹å¤äºç¨³ææ¶æ®µçæ°æ®è¿è¡ç±³é颿µ" |
| | | ) |
| | | |
| | | with steady_cols[1]: |
| | | st.slider( |
| | | "æ»å¨çªå£å¤§å° (ç§)", |
| | | min_value=5, |
| | | max_value=60, |
| | | value=st.session_state['forecast_steady_window'], |
| | | step=5, |
| | | key="forecast_steady_window", |
| | | help="ç¨äºç¨³æè¯å«çæ»å¨çªå£å¤§å°" |
| | | ) |
| | | |
| | | with steady_cols[2]: |
| | | st.slider( |
| | | "æ³¢å¨éå¼ (%)", |
| | | min_value=0.1, |
| | | max_value=2.0, |
| | | value=st.session_state['forecast_steady_threshold'], |
| | | step=0.1, |
| | | key="forecast_steady_threshold", |
| | | help="稳æè¯å«çæ³¢å¨èå´éå¼" |
| | | ) |
| | | |
| | | # 颿µåè½åºå |
| | | st.subheader("ð® ç±³é颿µ") |
| | | |
| | | if query_button and st.session_state['selected_model']: |
| | | with st.spinner("æ£å¨è·åæ°æ®å¹¶è¿è¡é¢æµ..."): |
| | | # 1. è·å宿´çæ¤åºæºæ°æ® |
| | | df_extruder_full = extruder_service.get_extruder_data(start_dt, end_dt) |
| | | |
| | | # 2. è·å主æµç¨æ§å¶æ°æ® |
| | | df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt) |
| | | df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt) |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_extruder_full is not None and not df_extruder_full.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | | st.warning("æéæ¶é´æ®µå
æªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã") |
| | | else: |
| | | # æ°æ®æ´åä¸é¢å¤ç |
def integrate_data(df_extruder_full, df_main_speed, df_temp):
    """Merge extruder, main-line speed and temperature frames on time.

    The extruder frame is the master series; the optional speed and
    temperature frames are joined via merge_asof (nearest match within a
    1-minute tolerance).  Returns None when the extruder frame is missing
    or empty; otherwise a frame with rows lacking ``metered_weight`` dropped.
    """
    if df_extruder_full is None or df_extruder_full.empty:
        return None

    # Master set: time, metered weight and the two core extruder signals.
    df_merged = df_extruder_full[['time', 'metered_weight', 'screw_speed_actual', 'head_pressure']].copy()
    # merge_asof requires a sorted datetime key; coerce up front instead of
    # assuming the service already returned datetimes.
    df_merged['time'] = pd.to_datetime(df_merged['time'])

    def _asof(left, right):
        # Nearest-neighbour time join within a 1-minute tolerance.
        right = right.copy()
        right['time'] = pd.to_datetime(right['time'])
        return pd.merge_asof(
            left.sort_values('time'),
            right.sort_values('time'),
            on='time',
            direction='nearest',
            tolerance=pd.Timedelta('1min')
        )

    # Main process speed (optional); guard against a missing column instead
    # of raising KeyError as the original did.
    if df_main_speed is not None and not df_main_speed.empty and 'process_main_speed' in df_main_speed.columns:
        df_merged = _asof(df_merged, df_main_speed[['time', 'process_main_speed']])

    # Temperature channels (optional); merge only the columns actually present.
    if df_temp is not None and not df_temp.empty:
        temp_cols = ['nakata_extruder_screw_display_temp',
                     'nakata_extruder_rear_barrel_display_temp',
                     'nakata_extruder_front_barrel_display_temp',
                     'nakata_extruder_head_display_temp']
        present = [c for c in temp_cols if c in df_temp.columns]
        if present:
            df_merged = _asof(df_merged, df_temp[['time'] + present])

    # Rename to human-readable labels.
    # NOTE(review): the rear/front barrel labels below collided after the
    # file's encoding corruption — restore the distinct originals from VCS.
    df_merged.rename(columns={
        'screw_speed_actual': 'èºæè½¬é',
        'head_pressure': 'æºå¤´åå',
        'process_main_speed': 'æµç¨ä¸»é',
        'nakata_extruder_screw_display_temp': 'èºææ¸©åº¦',
        'nakata_extruder_rear_barrel_display_temp': 'åæºçæ¸©åº¦',
        'nakata_extruder_front_barrel_display_temp': 'åæºçæ¸©åº¦',
        'nakata_extruder_head_display_temp': 'æºå¤´æ¸©åº¦'
    }, inplace=True)

    # Drop rows without the target signal.
    df_merged.dropna(subset=['metered_weight'], inplace=True)

    return df_merged
| | | |
| | | # æ§è¡æ°æ®æ´å |
| | | df_analysis = integrate_data(df_extruder_full, df_main_speed, df_temp) |
| | | |
| | | if df_analysis is None or df_analysis.empty: |
| | | st.warning("æ°æ®æ´å失败ï¼è¯·æ£æ¥æ°æ®è´¨éæè°æ´æ¶é´èå´ã") |
| | | else: |
| | | # éå½åç±³éå |
| | | df_analysis.rename(columns={'metered_weight': 'ç±³é'}, inplace=True) |
| | | |
| | | # 稳æè¯å« |
| | | steady_detector = SteadyStateDetector() |
| | | |
| | | # è·å稳æè¯å«åæ° |
| | | use_steady_only = st.session_state.get('forecast_use_steady_only', True) |
| | | steady_window = st.session_state.get('forecast_steady_window', 20) |
| | | steady_threshold = st.session_state.get('forecast_steady_threshold', 0.5) |
| | | |
| | | # æ§è¡ç¨³æè¯å« |
| | | df_analysis_with_steady, steady_segments = steady_detector.detect_steady_state( |
| | | df_analysis, |
| | | weight_col='ç±³é', |
| | | window_size=steady_window, |
| | | std_threshold=steady_threshold |
| | | ) |
| | | |
| | | # æ´æ°df_analysis为å
å«ç¨³ææ è®°çæ°æ® |
| | | df_analysis = df_analysis_with_steady |
| | | |
| | | # æ¾ç¤ºç¨³æç»è®¡ä¿¡æ¯ |
| | | total_data = len(df_analysis) |
| | | steady_data = len(df_analysis[df_analysis['is_steady'] == 1]) |
| | | steady_ratio = (steady_data / total_data * 100) if total_data > 0 else 0 |
| | | |
| | | st.subheader("ð ç¨³ææ°æ®ç»è®¡") |
| | | stats_cols = st.columns(4) |
| | | stats_cols[0].metric("æ»æ°æ®é", total_data) |
| | | stats_cols[1].metric("ç¨³ææ°æ®é", steady_data) |
| | | stats_cols[2].metric("ç¨³ææ°æ®æ¯ä¾", f"{steady_ratio:.1f}%") |
| | | stats_cols[3].metric("ç¨³ææ®µæ°é", len(steady_segments)) |
| | | |
| | | # è·å模åä¿¡æ¯ |
| | | model_info = st.session_state['selected_model'] |
| | | required_features = model_info['features'] |
| | | |
| | | # æ£æ¥ææå¿
éçç¹å¾æ¯å¦å¨æ°æ®ä¸ |
| | | missing_features = [f for f in required_features if f not in df_analysis.columns] |
| | | if missing_features: |
| | | st.warning(f"æ°æ®ä¸ç¼ºå°ä»¥ä¸ç¹å¾: {', '.join(missing_features)}") |
| | | else: |
| | | # åå¤æææ°æ®ç¨äºæ¾ç¤º |
| | | df_all = df_analysis.dropna(subset=required_features + ['ç±³é']).copy() |
| | | |
| | | if len(df_all) == 0: |
| | | st.warning("没æè¶³å¤çæææ°æ®è¿è¡é¢æµï¼è¯·è°æ´æ¶é´èå´ææ£æ¥æ°æ®è´¨éã") |
| | | else: |
| | | # æ ¹æ®é
ç½®å³å®æ¯å¦åªä½¿ç¨ç¨³ææ°æ®è¿è¡é¢æµ |
| | | if use_steady_only: |
| | | df_pred_steady = df_all[df_all['is_steady'] == 1].copy() |
| | | if len(df_pred_steady) > 0: |
| | | df_pred = df_pred_steady |
| | | st.info(f"å·²å¯ç¨ç¨³æè¿æ»¤ï¼ä½¿ç¨ {len(df_pred)} æ¡ç¨³ææ°æ®è¿è¡é¢æµ") |
| | | else: |
| | | df_pred = df_all.copy() |
| | | st.warning("æªæ¾å°ç¨³ææ°æ®ï¼å°ä½¿ç¨æææ°æ®è¿è¡é¢æµ") |
| | | else: |
| | | df_pred = df_all.copy() |
| | | |
| | | # æ§è¡é¢æµ - åªå¯¹éå®çæ°æ®ï¼ç¨³ææå
¨é¨ï¼è¿è¡é¢æµ |
| | | X_pred = df_pred[required_features] |
| | | predicted_weights = [] |
| | | |
| | | # è·å模å |
| | | model = model_info['model'] |
| | | |
| | | # æ£æ¥æ¨¡åç±»åå¹¶æ§è¡é¢æµ |
| | | if model_info['model_type'] in ['LSTM', 'GRU', 'BiLSTM']: |
| | | # 深度å¦ä¹ 模å颿µ |
| | | if not TORCH_AVAILABLE: |
| | | st.error("PyTorch æªå®è£
ï¼æ æ³ä½¿ç¨æ·±åº¦å¦ä¹ 模åè¿è¡é¢æµã") |
| | | st.stop() |
| | | |
| | | # æ°æ®æ åå |
| | | scaler_X = model_info['scaler_X'] |
| | | scaler_y = model_info['scaler_y'] |
| | | X_scaled = scaler_X.transform(X_pred) |
| | | |
| | | # è·ååºåé¿åº¦ |
| | | sequence_length = model_info['sequence_length'] |
| | | |
| | | # 为深度å¦ä¹ 模åå建åºå |
def create_sequences(data, seq_length):
    """Slice `data` into every contiguous window of length `seq_length`."""
    windows = [data[start:start + seq_length]
               for start in range(len(data) - seq_length + 1)]
    return np.array(windows)
| | | |
| | | X_sequences = create_sequences(X_scaled, sequence_length) |
| | | |
| | | # 转æ¢ä¸ºPyTorchå¼ é |
| | | import torch |
| | | device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') |
| | | X_tensor = torch.tensor(X_sequences, dtype=torch.float32).to(device) |
| | | |
| | | # 颿µ |
| | | model.eval() |
| | | with torch.no_grad(): |
| | | y_pred_scaled_tensor = model(X_tensor) |
| | | y_pred_scaled = y_pred_scaled_tensor.cpu().numpy().ravel() |
| | | |
| | | # åå½ä¸å |
| | | predicted = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel() |
| | | |
| | | # ç±äºåºå颿µï¼æä»¬éè¦å¡«å
åé¢çç¼ºå¤±å¼ |
| | | predicted_weights = [np.nan] * (sequence_length - 1) + list(predicted) |
| | | |
| | | elif model_info['model_type'] in ['SVR', 'MLP']: |
| | | # æ¯æåéæºæå¤å±æç¥å¨é¢æµ |
| | | # æ°æ®æ åå |
| | | scaler_X = model_info['scaler_X'] |
| | | scaler_y = model_info['scaler_y'] |
| | | X_scaled = scaler_X.transform(X_pred) |
| | | |
| | | # 颿µ |
| | | y_pred_scaled = model.predict(X_scaled) |
| | | |
| | | # åå½ä¸å |
| | | predicted_weights = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel() |
| | | |
| | | else: |
| | | # å
¶ä»æ¨¡åï¼å¦éæºæ£®æã梯度æåã线æ§åå½çï¼ |
| | | predicted_weights = model.predict(X_pred) |
| | | |
| | | # å°é¢æµç»ææ·»å å°æ°æ®æ¡ä¸ |
| | | df_pred['颿µç±³é'] = predicted_weights |
| | | |
| | | # ç¡®ä¿æ¶é´åæ¯datetimeç±»å |
| | | df_pred['time'] = pd.to_datetime(df_pred['time']) |
| | | |
| | | # æ°æ®å¯¹æ¯åè½ |
| | | st.subheader("ð 颿µç»æå¯¹æ¯åæ") |
| | | |
| | | # 计ç®é¢æµè¯¯å·® |
| | | df_pred['误差'] = df_pred['颿µç±³é'] - df_pred['ç±³é'] |
| | | df_pred['ç»å¯¹è¯¯å·®'] = abs(df_pred['误差']) |
| | | df_pred['ç¸å¯¹è¯¯å·®'] = (df_pred['ç»å¯¹è¯¯å·®'] / df_pred['ç±³é']) * 100 |
| | | |
| | | # æ¾ç¤ºè¯¯å·®ç»è®¡ä¿¡æ¯ |
| | | error_stats = df_pred.dropna(subset=['颿µç±³é']).describe() |
| | | |
| | | stats_cols = st.columns(3) |
| | | with stats_cols[0]: |
| | | st.metric("å¹³åå®é
ç±³é", f"{error_stats['ç±³é']['mean']:.4f} Kg/m") |
| | | st.metric("å¹³å颿µç±³é", f"{error_stats['颿µç±³é']['mean']:.4f} Kg/m") |
| | | with stats_cols[1]: |
| | | st.metric("å¹³åç»å¯¹è¯¯å·®", f"{error_stats['ç»å¯¹è¯¯å·®']['mean']:.4f} Kg/m") |
| | | st.metric("æå¤§ç»å¯¹è¯¯å·®", f"{error_stats['ç»å¯¹è¯¯å·®']['max']:.4f} Kg/m") |
| | | with stats_cols[2]: |
| | | st.metric("å¹³åç¸å¯¹è¯¯å·®", f"{error_stats['ç¸å¯¹è¯¯å·®']['mean']:.2f}%") |
| | | st.metric("æå¤§ç¸å¯¹è¯¯å·®", f"{error_stats['ç¸å¯¹è¯¯å·®']['max']:.2f}%") |
| | | |
| | | # å¯è§åå±ç¤º |
| | | st.subheader("ð ç±³éè¶å¿å¯¹æ¯") |
| | | |
| | | # å建è¶å¿å¾ - ä½¿ç¨æææ°æ®df_allè¿è¡æ¾ç¤º |
| | | fig = go.Figure() |
| | | |
| | | # ç¡®ä¿æ¶é´åæ¯datetimeç±»å |
| | | df_all['time'] = pd.to_datetime(df_all['time']) |
| | | |
| | | # # æ·»å 宿¶ç±³éæ°æ®ç¹ï¼ç¨³ææ°æ®ç¨èè²ï¼éç¨³ææ°æ®ç¨ç°è²ï¼ |
| | | # if 'is_steady' in df_all.columns: |
| | | # # ç¨³ææ°æ® - 使ç¨ç¹æ¾ç¤º |
| | | # steady_data = df_all[df_all['is_steady'] == 1] |
| | | # non_steady_data = df_all[df_all['is_steady'] == 0] |
| | | |
| | | # if len(steady_data) > 0: |
| | | # fig.add_trace(go.Scatter( |
| | | # x=steady_data['time'], |
| | | # y=steady_data['ç±³é'], |
| | | # name='宿¶ç±³éï¼ç¨³æï¼', |
| | | # mode='markers', |
| | | # marker=dict(color='blue', size=3), |
| | | # hovertemplate='æ¶é´: %{x}<br>宿¶ç±³éï¼ç¨³æï¼: %{y:.4f} Kg/m<extra></extra>' |
| | | # )) |
| | | |
| | | # # éç¨³ææ°æ®ä¹æ¾ç¤ºï¼ä½ä¸è¿è¡é¢æµ |
| | | # if len(non_steady_data) > 0: |
| | | # fig.add_trace(go.Scatter( |
| | | # x=non_steady_data['time'], |
| | | # y=non_steady_data['ç±³é'], |
| | | # name='宿¶ç±³éï¼é稳æï¼', |
| | | # mode='markers', |
| | | # marker=dict(color='lightgray', size=3), |
| | | # hovertemplate='æ¶é´: %{x}<br>宿¶ç±³éï¼é稳æï¼: %{y:.4f} Kg/m<extra></extra>' |
| | | # )) |
| | | # else: |
| | | # å¦ææ²¡æç¨³ææ è®°ï¼æ¾ç¤ºæææ°æ®ç¹ |
| | | fig.add_trace(go.Scatter( |
| | | x=df_all['time'], |
| | | y=df_all['ç±³é'], |
| | | name='宿¶ç±³é', |
| | | mode='lines', |
| | | line=dict(color='blue', width=1.5), |
| | | # hovertemplate='æ¶é´: %{x}<br>宿¶ç±³é: %{y:.4f} Kg/m<extra></extra>' |
| | | )) |
| | | |
| | | # æ·»å 颿µç±³éæ²çº¿ - åªå¯¹é¢æµçæ°æ®ï¼ç¨³ææå
¨é¨ï¼æ¾ç¤º |
| | | fig.add_trace(go.Scatter( |
| | | x=df_pred['time'], |
| | | y=df_pred['颿µç±³é'], |
| | | name='颿µç±³é', |
| | | mode='lines', |
| | | line=dict(color='red', width=2, dash='dash'), |
| | | marker=dict(size=3), |
| | | # hovertemplate='æ¶é´: %{x}<br>颿µç±³é: %{y:.4f} Kg/m<extra></extra>' |
| | | )) |
| | | |
| | | # æ·»å æææ¤åºæºåæ°æ²çº¿ - ä½¿ç¨æææ°æ® |
| | | colors = ['green', 'orange', 'purple', 'brown', 'pink', 'gray', 'olive', 'cyan', 'magenta', 'yellow', 'lime', 'teal'] |
| | | for i, feature in enumerate(required_features): |
| | | # 为æ¯ä¸ªç¹å¾åé
ä¸åçé¢è² |
| | | color = colors[i % len(colors)] |
| | | |
| | | # ç¡®ä¿ç¹å¾åå¨äºæææ°æ®ä¸ |
| | | if feature in df_all.columns: |
| | | fig.add_trace(go.Scatter( |
| | | x=df_all['time'], |
| | | y=df_all[feature], |
| | | name=feature, |
| | | mode='lines', |
| | | line=dict(color=color, width=1.5), |
| | | yaxis=f'y{i+2}', |
| | | # hovertemplate=f'æ¶é´: %{{x}}<br>{feature}: %{{y}}<extra></extra>' |
| | | )) |
| | | |
| | | # é
ç½®å¾è¡¨å¸å± |
| | | layout = { |
| | | 'title': 'ç±³é颿µä¸å®æ¶æ°æ®å¯¹æ¯', |
| | | 'xaxis': { |
| | | 'title': 'æ¶é´', |
| | | 'rangeslider': {'visible': True}, |
| | | 'type': 'date', |
| | | 'tickformat': '%Y-%m-%d %H:%M' |
| | | }, |
| | | 'yaxis': { |
| | | 'title': 'ç±³é (Kg/m)', |
| | | 'title_font': {'color': 'blue'}, |
| | | 'tickfont': {'color': 'blue'}, |
| | | 'side': 'left', |
| | | 'fixedrange': False # å
许yè½´ç¼©æ¾ |
| | | }, |
| | | 'legend': { |
| | | 'orientation': 'h', |
| | | 'yanchor': 'bottom', |
| | | 'y': 1.02, |
| | | 'xanchor': 'right', |
| | | 'x': 1 |
| | | }, |
| | | 'height': 600, |
| | | 'margin': {'l': 100, 'r': 200, 't': 100, 'b': 100}, |
| | | 'hovermode': 'x unified' |
| | | } |
| | | |
| | | # æ·»å é¢å¤çyè½´é
ç½® - 为ææç¹å¾å建yè½´ |
| | | for i, feature in enumerate(required_features): |
| | | layout[f'yaxis{i+2}'] = { |
| | | 'title': feature, |
| | | 'title_font': {'color': colors[i % len(colors)]}, |
| | | 'tickfont': {'color': colors[i % len(colors)]}, |
| | | 'overlaying': 'y', |
| | | 'side': 'right', |
| | | 'anchor': 'free', |
| | | 'position': 1 - (i+1)*0.08, |
| | | 'fixedrange': False # å
许yè½´ç¼©æ¾ |
| | | } |
| | | |
| | | fig.update_layout(layout) |
| | | |
| | | # æ¾ç¤ºè¶å¿å¾ - å¯ç¨å®æ´ç交äºåè½ |
| | | st.plotly_chart(fig, use_container_width=True, config={ |
| | | 'scrollZoom': True, |
| | | 'displayModeBar': True, |
| | | 'modeBarButtonsToAdd': ['pan2d', 'select2d', 'lasso2d', 'resetScale2d'], |
| | | 'displaylogo': False |
| | | }) |
| | | |
| | | # 误差åæå¾ |
| | | st.subheader("ð 颿µè¯¯å·®åæ") |
| | | |
| | | # å建误差åå¸ç´æ¹å¾ |
| | | fig_error = px.histogram(df_pred.dropna(subset=['ç¸å¯¹è¯¯å·®']), x='ç¸å¯¹è¯¯å·®', nbins=50, |
| | | title='颿µç¸å¯¹è¯¯å·®åå¸', |
| | | labels={'ç¸å¯¹è¯¯å·®': 'ç¸å¯¹è¯¯å·® (%)'}) |
| | | fig_error.update_layout( |
| | | xaxis_title='ç¸å¯¹è¯¯å·® (%)', |
| | | yaxis_title='颿¬¡', |
| | | height=400 |
| | | ) |
| | | st.plotly_chart(fig_error, use_container_width=True) |
| | | |
| | | # æ°æ®é¢è§ |
| | | st.subheader("ð æ°æ®é¢è§") |
| | | preview_columns = ['time', 'ç±³é', '颿µç±³é', '误差', 'ç»å¯¹è¯¯å·®', 'ç¸å¯¹è¯¯å·®'] |
| | | if 'is_steady' in df_pred.columns: |
| | | preview_columns.append('is_steady') |
| | | preview_columns.extend(required_features) |
| | | st.dataframe(df_pred[preview_columns].head(20), |
| | | use_container_width=True) |
| | | |
| | | # å¯¼åºæ°æ® |
| | | st.subheader("ð¾ å¯¼åºæ°æ®") |
| | | # å°æ°æ®è½¬æ¢ä¸ºCSVæ ¼å¼ |
| | | csv = df_pred.to_csv(index=False) |
| | | # å建ä¸è½½æé® |
| | | st.download_button( |
| | | label="导åºé¢æµç»ææ°æ® (CSV)", |
| | | data=csv, |
| | | file_name=f"metered_weight_forecast_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv", |
| | | mime="text/csv", |
| | | help="ç¹å»æé®å¯¼åºé¢æµç»ææ°æ®" |
| | | ) |
| | | elif query_button: |
| | | st.warning("请å
éæ©ä¸ä¸ªæ¨¡åã") |
| | | else: |
| | | st.info("è¯·éæ©æ¶é´èå´å模åï¼ç¶åç¹å»'æ¥è¯¢æ°æ®'æé®å¼å§é¢æµåæã") |
| | | |
| | | |
# Page entry point: allows running this Streamlit page directly for debugging.
if __name__ == "__main__":
    show_metered_weight_forecast()
| 对比新文件 (compare: new file) |
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | import joblib |
| | | import os |
| | | from datetime import datetime |
| | | |
# Optional dependency: deep-learning model support is enabled only when
# torch imports successfully; otherwise the deep-learning code paths are disabled.
try:
    import torch
    TORCH_AVAILABLE = True
except ImportError:
    TORCH_AVAILABLE = False
| | | |
| | | # 页é¢å½æ°å®ä¹ |
| | | def show_metered_weight_prediction(): |
| | | # 页颿 é¢ |
| | | st.title("ç±³éç»ä¸é¢æµ") |
| | | |
| | | # åå§åä¼è¯ç¶æ |
| | | if 'selected_model' not in st.session_state: |
| | | st.session_state['selected_model'] = None |
| | | |
| | | # å建模åç®å½ï¼å¦æä¸åå¨ï¼ |
| | | model_dir = "saved_models" |
| | | os.makedirs(model_dir, exist_ok=True) |
| | | |
| | | # è·åææå·²ä¿åçæ¨¡åæä»¶ |
| | | model_files = [f for f in os.listdir(model_dir) if f.endswith('.joblib')] |
| | | model_files.sort(reverse=True) # ææ°çæ¨¡åæå¨åé¢ |
| | | |
| | | # 模åéæ©åºå |
| | | with st.expander("ð éæ©æ¨¡å", expanded=True): |
| | | if not model_files: |
| | | st.warning("å°æªä¿å任使¨¡åï¼è¯·å
è®ç»æ¨¡åå¹¶ä¿åã") |
| | | else: |
| | | # 模åéæ©ä¸ææ¡ |
| | | selected_model_file = st.selectbox( |
| | | "鿩已ä¿åçæ¨¡å", |
| | | options=model_files, |
| | | help="éæ©è¦ç¨äºé¢æµç模åæä»¶" |
| | | ) |
| | | |
| | | # å 载并æ¾ç¤ºæ¨¡åä¿¡æ¯ |
| | | if selected_model_file: |
| | | model_path = os.path.join(model_dir, selected_model_file) |
| | | model_info = joblib.load(model_path) |
| | | |
| | | # æ¾ç¤ºæ¨¡ååºæ¬ä¿¡æ¯ |
| | | st.subheader("ð æ¨¡åä¿¡æ¯") |
| | | info_cols = st.columns(2) |
| | | |
| | | with info_cols[0]: |
| | | st.metric("模åç±»å", model_info['model_type']) |
| | | st.metric("å建æ¶é´", model_info['created_at'].strftime('%Y-%m-%d %H:%M:%S')) |
| | | st.metric("使ç¨ç¨³ææ°æ®", "æ¯" if model_info.get('use_steady_data', False) else "å¦") |
| | | |
| | | with info_cols[1]: |
| | | st.metric("R² å¾å", f"{model_info['r2_score']:.4f}") |
| | | st.metric("åæ¹è¯¯å·® (MSE)", f"{model_info['mse']:.6f}") |
| | | st.metric("åæ¹æ ¹è¯¯å·® (RMSE)", f"{model_info['rmse']:.6f}") |
| | | |
| | | # æ¾ç¤ºæ¨¡åç¹å¾ |
| | | st.write("ð æ¨¡å使ç¨çç¹å¾:") |
| | | st.code(", ".join(model_info['features'])) |
| | | |
| | | # å¦ææ¯æ·±åº¦å¦ä¹ 模åï¼æ¾ç¤ºåºåé¿åº¦ |
| | | if 'sequence_length' in model_info: |
| | | st.metric("åºåé¿åº¦", model_info['sequence_length']) |
| | | |
| | | # ä¿å模åä¿¡æ¯å°ä¼è¯ç¶æ |
| | | st.session_state['selected_model'] = model_info |
| | | st.session_state['selected_model_file'] = selected_model_file |
| | | |
| | | # 颿µåè½åºå |
| | | st.subheader("ð® ç±³é颿µ") |
| | | |
| | | if st.session_state['selected_model']: |
| | | model_info = st.session_state['selected_model'] |
| | | |
| | | # è·å模åéè¦çç¹å¾ |
| | | required_features = model_info['features'] |
| | | |
| | | # åå»ºé¢æµè¡¨å |
| | | st.write("è¾å
¥ç¹å¾å¼è¿è¡ç±³é颿µ:") |
| | | predict_cols = st.columns(2) |
| | | input_features = {} |
| | | |
| | | # æ¾ç¤ºè¾å
¥è¡¨å |
| | | for i, feature in enumerate(required_features): |
| | | with predict_cols[i % 2]: |
| | | input_features[feature] = st.number_input( |
| | | f"{feature}", |
| | | key=f"pred_{feature}", |
| | | value=0.0, |
| | | step=0.0001, |
| | | format="%.4f" |
| | | ) |
| | | |
| | | # 颿µæé® |
| | | if st.button("ð å¼å§é¢æµ"): |
| | | try: |
| | | # åå¤é¢æµæ°æ® |
| | | input_df = pd.DataFrame([input_features]) |
| | | |
| | | # æ ¹æ®æ¨¡åç±»åæ§è¡ä¸åç颿µé»è¾ |
| | | predicted_weight = None |
| | | |
| | | # è·å模å |
| | | model = model_info['model'] |
| | | |
| | | # æ£æ¥æ¨¡åç±»åå¹¶æ§è¡é¢æµ |
| | | if model_info['model_type'] in ['LSTM', 'GRU', 'BiLSTM']: |
| | | # 深度å¦ä¹ 模å颿µ |
| | | if not TORCH_AVAILABLE: |
| | | st.error("PyTorch æªå®è£
ï¼æ æ³ä½¿ç¨æ·±åº¦å¦ä¹ 模åè¿è¡é¢æµã") |
| | | return |
| | | |
| | | # æ°æ®æ åå |
| | | scaler_X = model_info['scaler_X'] |
| | | scaler_y = model_info['scaler_y'] |
| | | input_scaled = scaler_X.transform(input_df) |
| | | |
| | | # è·ååºåé¿åº¦ |
| | | sequence_length = model_info['sequence_length'] |
| | | |
| | | # 为深度å¦ä¹ 模åå建åºå |
| | | input_seq = np.tile(input_scaled, (sequence_length, 1)).reshape(1, sequence_length, -1) |
| | | |
| | | # 转æ¢ä¸ºPyTorchå¼ é |
| | | import torch |
| | | device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') |
| | | input_tensor = torch.tensor(input_seq, dtype=torch.float32).to(device) |
| | | |
| | | # 颿µ |
| | | model.eval() |
| | | with torch.no_grad(): |
| | | y_pred_scaled_tensor = model(input_tensor) |
| | | y_pred_scaled = y_pred_scaled_tensor.cpu().numpy().ravel()[0] |
| | | |
| | | # åå½ä¸å |
| | | predicted_weight = scaler_y.inverse_transform(np.array([[y_pred_scaled]]))[0][0] |
| | | |
| | | elif model_info['model_type'] in ['SVR', 'MLP']: |
| | | # æ¯æåéæºæå¤å±æç¥å¨é¢æµ |
| | | |
| | | # æ°æ®æ åå |
| | | scaler_X = model_info['scaler_X'] |
| | | scaler_y = model_info['scaler_y'] |
| | | input_scaled = scaler_X.transform(input_df) |
| | | |
| | | # 颿µ |
| | | y_pred_scaled = model.predict(input_scaled)[0] |
| | | |
| | | # åå½ä¸å |
| | | predicted_weight = scaler_y.inverse_transform(np.array([[y_pred_scaled]]))[0][0] |
| | | |
| | | else: |
| | | # å
¶ä»æ¨¡åï¼å¦éæºæ£®æã梯度æåã线æ§åå½çï¼ |
| | | predicted_weight = model.predict(input_df)[0] |
| | | |
| | | # æ¾ç¤ºé¢æµç»æ |
| | | st.success(f"颿µç±³é: {predicted_weight:.4f} Kg/m") |
| | | |
| | | |
| | | except Exception as e: |
| | | st.error(f"颿µå¤±è´¥: {str(e)}") |
| | | else: |
| | | st.warning("请å
éæ©ä¸ä¸ªæ¨¡åã") |
| | | |
| | | # 模å管çåºå |
| | | if model_files: |
| | | with st.expander("ðï¸ æ¨¡å管ç", expanded=False): |
| | | st.write("管çå·²ä¿åçæ¨¡åæä»¶:") |
| | | |
| | | # æ¾ç¤ºæææ¨¡åæä»¶ |
| | | for model_file in model_files: |
| | | cols = st.columns([3, 1, 1]) |
| | | cols[0].write(model_file) |
| | | |
| | | # æ¥ç模åä¿¡æ¯æé® |
| | | if cols[1].button("æ¥ç", key=f"view_{model_file}", help="æ¥ç模åä¿¡æ¯"): |
| | | model_path = os.path.join(model_dir, model_file) |
| | | model_info = joblib.load(model_path) |
| | | st.write("模å详ç»ä¿¡æ¯:") |
| | | st.json({ |
| | | 'model_type': model_info['model_type'], |
| | | 'created_at': model_info['created_at'].strftime('%Y-%m-%d %H:%M:%S'), |
| | | 'r2_score': f"{model_info['r2_score']:.4f}", |
| | | 'mse': f"{model_info['mse']:.6f}", |
| | | 'mae': f"{model_info['mae']:.6f}", |
| | | 'rmse': f"{model_info['rmse']:.6f}", |
| | | 'features': model_info['features'], |
| | | 'use_steady_data': model_info.get('use_steady_data', False) |
| | | }) |
| | | |
| | | # å 餿¨¡åæé® |
| | | if cols[2].button("å é¤", key=f"delete_{model_file}", help="å 餿¨¡åæä»¶", type="primary"): |
| | | model_path = os.path.join(model_dir, model_file) |
| | | os.remove(model_path) |
| | | st.success(f"å·²å 餿¨¡å: {model_file}") |
| | | st.rerun() |
| | | |
| | | # 页é¢å
¥å£ |
# Entry point when this page module is executed directly as a script.
# NOTE(review): the page function defined earlier in this file appears to be
# the prediction page; confirm show_metered_weight_prediction is the intended
# callable here (HEAD of the file shows a differently named page function).
if __name__ == "__main__":
    show_metered_weight_prediction()
| | |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | import joblib |
| | | import os |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.main_process_service import MainProcessService |
| | | from sklearn.linear_model import LinearRegression |
| | | from sklearn.model_selection import train_test_split |
| | | from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error |
| | | |
| | | |
| | | # 导å
¥ç¨³æè¯å«åè½ |
class SteadyStateDetector:
    """Identify steady-state segments in a metered-weight time series.

    A point is "steady" when the relative fluctuation (rolling std as a
    percentage of the rolling mean) stays below a threshold and the weight
    is above an idle floor; consecutive steady points that last long enough
    are collected into steady segments with summary statistics.
    """

    def detect_steady_state(self, df, weight_col='ç±³é', window_size=20, std_threshold=0.5, duration_threshold=60):
        """Mark steady-state stretches of the weight column.

        :param df: DataFrame with a ``time`` column and ``weight_col``
        :param weight_col: name of the metered-weight column
        :param window_size: rolling-window size in rows (assumed ~1 row/s)
        :param std_threshold: fluctuation-range threshold (percent of mean)
        :param duration_threshold: minimum segment duration in seconds
        :return: (DataFrame with ``is_steady`` flags, list of segment dicts)
        """
        if df is None or df.empty:
            return df, []

        # Work on a positionally indexed copy. The original implementation
        # mutated the caller's frame and assumed a contiguous RangeIndex
        # (``df.loc[i-1, 'time']``), which raised KeyError for frames whose
        # index had gaps (e.g. after filtering).
        df = df.reset_index(drop=True)

        # Ensure the time column is datetime-typed.
        df['time'] = pd.to_datetime(df['time'])

        # Rolling statistics of the weight signal.
        df['rolling_std'] = df[weight_col].rolling(window=window_size, min_periods=5).std()
        df['rolling_mean'] = df[weight_col].rolling(window=window_size, min_periods=5).mean()

        # Relative fluctuation: std as a percentage of the mean.
        df['fluctuation_range'] = (df['rolling_std'] / df['rolling_mean']) * 100
        df['fluctuation_range'] = df['fluctuation_range'].fillna(0)

        # Point-wise steady flag; weights below 0.1 Kg/m are treated as
        # idle/startup data and excluded.
        df['is_steady'] = 0
        steady_condition = (
            (df['fluctuation_range'] < std_threshold) &
            (df[weight_col] >= 0.1)
        )
        df.loc[steady_condition, 'is_steady'] = 1

        # Group consecutive steady points into segments.
        steady_segments = []
        current_segment = {}

        for i, row in df.iterrows():
            if row['is_steady'] == 1:
                if not current_segment:
                    current_segment = {
                        'start_time': row['time'],
                        'start_idx': i,
                        'weights': [row[weight_col]]
                    }
                else:
                    current_segment['weights'].append(row[weight_col])
            else:
                if current_segment:
                    # Segment ended on the previous row.
                    current_segment['end_time'] = df.loc[i-1, 'time'] if i > 0 else df.loc[i, 'time']
                    current_segment['end_idx'] = i - 1
                    self._finalize_segment(current_segment, std_threshold,
                                           duration_threshold, steady_segments)
                    current_segment = {}

        # Close a segment that runs to the end of the data.
        if current_segment:
            current_segment['end_time'] = df['time'].iloc[-1]
            current_segment['end_idx'] = len(df) - 1
            self._finalize_segment(current_segment, std_threshold,
                                   duration_threshold, steady_segments)

        # Re-mark the full extent of each accepted segment.
        for segment in steady_segments:
            df.loc[segment['start_idx']:segment['end_idx'], 'is_steady'] = 1

        return df, steady_segments

    def _finalize_segment(self, segment, std_threshold, duration_threshold, steady_segments):
        """Attach statistics to a closed segment and keep it if long enough.

        Shared by the mid-stream and end-of-data closing paths (the original
        duplicated this logic verbatim in both places).
        """
        duration = (segment['end_time'] - segment['start_time']).total_seconds()
        if duration < duration_threshold:
            return

        weights_array = np.array(segment['weights'])
        segment['duration'] = duration
        segment['mean_weight'] = np.mean(weights_array)
        segment['std_weight'] = np.std(weights_array)
        segment['min_weight'] = np.min(weights_array)
        segment['max_weight'] = np.max(weights_array)
        segment['fluctuation_range'] = (segment['std_weight'] / segment['mean_weight']) * 100

        # Confidence: 100 at zero fluctuation, falling linearly with the
        # fluctuation/threshold ratio, clamped to [50, 100].
        confidence = 100 - (segment['fluctuation_range'] / std_threshold) * 50
        segment['confidence'] = max(50, min(100, confidence))

        steady_segments.append(segment)
| | | |
| | | |
| | | def show_metered_weight_regression(): |
| | |
| | | 'èºæè½¬é', 'æºå¤´åå', 'æµç¨ä¸»é', 'èºææ¸©åº¦', |
| | | 'åæºçæ¸©åº¦', 'åæºçæ¸©åº¦', 'æºå¤´æ¸©åº¦' |
| | | ] |
| | | if 'mr_use_steady_data' not in st.session_state: |
| | | st.session_state['mr_use_steady_data'] = True |
| | | if 'mr_steady_window' not in st.session_state: |
| | | st.session_state['mr_steady_window'] = 20 |
| | | if 'mr_steady_threshold' not in st.session_state: |
| | | st.session_state['mr_steady_threshold'] = 0.5 |
| | | |
| | | # å®ä¹åè°å½æ° |
| | | def update_dates(qs): |
| | |
| | | st.session_state['mr_time_offset'] = time_offset |
| | | with offset_cols[2]: |
| | | st.write(f"å½ååç§»: {time_offset} åé") |
| | | |
| | | # 稳æè¯å«é
ç½® |
| | | st.markdown("---") |
| | | steady_cols = st.columns(3) |
| | | with steady_cols[0]: |
| | | st.write("âï¸ **稳æè¯å«é
ç½®**") |
| | | st.checkbox( |
| | | "ä»
使ç¨ç¨³ææ°æ®è¿è¡è®ç»", |
| | | value=st.session_state['mr_use_steady_data'], |
| | | key="mr_use_steady_data", |
| | | help="å¯ç¨åï¼åªä½¿ç¨ç±³éç¨³ææ¶æ®µçæ°æ®è¿è¡æ¨¡åè®ç»" |
| | | ) |
| | | |
| | | with steady_cols[1]: |
| | | st.write("ð **稳æåæ°**") |
| | | st.slider( |
| | | "æ»å¨çªå£å¤§å° (ç§)", |
| | | min_value=5, |
| | | max_value=60, |
| | | value=st.session_state['mr_steady_window'], |
| | | step=5, |
| | | key="mr_steady_window", |
| | | help="ç¨äºç¨³æè¯å«çæ»å¨çªå£å¤§å°" |
| | | ) |
| | | |
| | | with steady_cols[2]: |
| | | st.write("ð **稳æéå¼**") |
| | | st.slider( |
| | | "æ³¢å¨éå¼ (%)", |
| | | min_value=0.1, |
| | | max_value=2.0, |
| | | value=st.session_state['mr_steady_threshold'], |
| | | step=0.1, |
| | | key="mr_steady_threshold", |
| | | help="稳æè¯å«çæ³¢å¨èå´éå¼" |
| | | ) |
| | | |
| | | # ç¹å¾éæ© |
| | | st.markdown("---") |
| | |
| | | # éå½åç±³éå |
| | | df_analysis.rename(columns={'metered_weight': 'ç±³é'}, inplace=True) |
| | | |
| | | # 稳æè¯å« |
| | | steady_detector = SteadyStateDetector() |
| | | |
| | | # è·å稳æè¯å«åæ° |
| | | use_steady_data = st.session_state.get('mr_use_steady_data', True) |
| | | steady_window = st.session_state.get('mr_steady_window', 20) |
| | | steady_threshold = st.session_state.get('mr_steady_threshold', 0.5) |
| | | |
| | | # æ§è¡ç¨³æè¯å« |
| | | df_analysis_with_steady, steady_segments = steady_detector.detect_steady_state( |
| | | df_analysis, |
| | | weight_col='ç±³é', |
| | | window_size=steady_window, |
| | | std_threshold=steady_threshold |
| | | ) |
| | | |
| | | # æ´æ°df_analysis为å
å«ç¨³ææ è®°çæ°æ® |
| | | df_analysis = df_analysis_with_steady |
| | | |
| | | # ç¨³ææ°æ®å¯è§å |
| | | st.subheader("ð ç¨³ææ°æ®åå¸") |
| | | |
| | | # åå»ºç¨³ææ°æ®å¯è§åå¾è¡¨ |
| | | fig_steady = go.Figure() |
| | | |
| | | # æ·»å åå§ç±³éæ²çº¿ |
| | | fig_steady.add_trace(go.Scatter( |
| | | x=df_analysis['time'], |
| | | y=df_analysis['ç±³é'], |
| | | name='åå§ç±³é', |
| | | mode='lines', |
| | | line=dict(color='lightgray', width=1) |
| | | )) |
| | | |
| | | # æ·»å ç¨³ææ°æ®ç¹ |
| | | steady_data_points = df_analysis[df_analysis['is_steady'] == 1] |
| | | fig_steady.add_trace(go.Scatter( |
| | | x=steady_data_points['time'], |
| | | y=steady_data_points['ç±³é'], |
| | | name='稳æç±³é', |
| | | mode='markers', |
| | | marker=dict(color='green', size=3, opacity=0.6) |
| | | )) |
| | | |
| | | # æ·»å éç¨³ææ°æ®ç¹ |
| | | non_steady_data_points = df_analysis[df_analysis['is_steady'] == 0] |
| | | fig_steady.add_trace(go.Scatter( |
| | | x=non_steady_data_points['time'], |
| | | y=non_steady_data_points['ç±³é'], |
| | | name='é稳æç±³é', |
| | | mode='markers', |
| | | marker=dict(color='red', size=3, opacity=0.6) |
| | | )) |
| | | |
| | | # é
ç½®å¾è¡¨å¸å± |
| | | fig_steady.update_layout( |
| | | title="ç±³éæ°æ®ç¨³æåå¸", |
| | | xaxis=dict(title="æ¶é´"), |
| | | yaxis=dict(title="ç±³é (Kg/m)"), |
| | | legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1), |
| | | height=500 |
| | | ) |
| | | |
| | | # æ¾ç¤ºå¾è¡¨ |
| | | st.plotly_chart(fig_steady, use_container_width=True) |
| | | |
| | | # æ¾ç¤ºç¨³æç»è®¡ |
| | | total_data = len(df_analysis) |
| | | steady_data = len(df_analysis[df_analysis['is_steady'] == 1]) |
| | | steady_ratio = (steady_data / total_data * 100) if total_data > 0 else 0 |
| | | |
| | | stats_cols = st.columns(3) |
| | | stats_cols[0].metric("æ»æ°æ®é", total_data) |
| | | stats_cols[1].metric("ç¨³ææ°æ®é", steady_data) |
| | | stats_cols[2].metric("ç¨³ææ°æ®æ¯ä¾", f"{steady_ratio:.1f}%") |
| | | |
| | | # --- åå§æ°æ®è¶å¿å¾ --- |
| | | st.subheader("ð åå§æ°æ®è¶å¿å¾") |
| | | |
| | |
| | | st.warning(f"æ°æ®ä¸ç¼ºå°ä»¥ä¸ç¹å¾: {', '.join(missing_features)}") |
| | | else: |
| | | # å夿°æ® |
| | | X = df_analysis[st.session_state['mr_selected_features']] |
| | | y = df_analysis['ç±³é'] |
| | | # æ ¹æ®é
ç½®å³å®æ¯å¦åªä½¿ç¨ç¨³ææ°æ® |
| | | use_steady_data = st.session_state.get('mr_use_steady_data', True) |
| | | if use_steady_data: |
| | | df_filtered = df_analysis[df_analysis['is_steady'] == 1] |
| | | st.info(f"å·²è¿æ»¤éç¨³ææ°æ®ï¼ä½¿ç¨ {len(df_filtered)} æ¡ç¨³ææ°æ®è¿è¡è®ç»") |
| | | else: |
| | | df_filtered = df_analysis.copy() |
| | | |
| | | X = df_filtered[st.session_state['mr_selected_features']] |
| | | y = df_filtered['ç±³é'] |
| | | |
| | | # æ¸
çæ°æ®ä¸çNaNå¼ |
| | | combined = pd.concat([X, y], axis=1) |
| | |
| | | }) |
| | | st.dataframe(coef_df, use_container_width=True) |
| | | |
| | | # --- 颿µåè½ --- |
| | | st.subheader("ð® ç±³é颿µ") |
| | | # --- 模åä¿ååè½ --- |
| | | st.subheader("ð¾ æ¨¡åä¿å") |
| | | |
| | | # åå»ºé¢æµè¡¨å |
| | | st.write("è¾å
¥ç¹å¾å¼è¿è¡ç±³é颿µ:") |
| | | predict_cols = st.columns(2) |
| | | input_features = {} |
| | | |
| | | for i, feature in enumerate(st.session_state['mr_selected_features']): |
| | | with predict_cols[i % 2]: |
| | | # è·åç¹å¾çç»è®¡ä¿¡æ¯ |
| | | min_val = df_analysis[feature].min() |
| | | max_val = df_analysis[feature].max() |
| | | mean_val = df_analysis[feature].mean() |
| | | |
| | | input_features[feature] = st.number_input( |
| | | f"{feature}", |
| | | key=f"pred_{feature}", |
| | | value=float(mean_val), |
| | | min_value=float(min_val), |
| | | max_value=float(max_val), |
| | | step=0.1 |
| | | # å建模åä¿å表å |
| | | st.write("ä¿åè®ç»å¥½ç模åæé:") |
| | | model_name = st.text_input( |
| | | "模ååç§°", |
| | | value=f"linear_regression_{datetime.now().strftime('%Y%m%d_%H%M%S')}", |
| | | help="请è¾å
¥æ¨¡ååç§°ï¼æ¨¡åå°ä¿å为该åç§°ç.joblibæä»¶" |
| | | ) |
| | | |
| | | if st.button("颿µç±³é"): |
| | | # åå¤é¢æµæ°æ® |
| | | input_data = [[input_features[feature] for feature in st.session_state['mr_selected_features']]] |
| | | # 颿µ |
| | | predicted_weight = model.predict(input_data)[0] |
| | | # æ¾ç¤ºé¢æµç»æ |
| | | st.success(f"颿µç±³é: {predicted_weight:.4f} Kg/m") |
| | | if st.button("ä¿å模å"): |
| | | # ç¡®ä¿æ¨¡åç®å½åå¨ |
| | | model_dir = "saved_models" |
| | | os.makedirs(model_dir, exist_ok=True) |
| | | |
| | | # ä¿å模å |
| | | model_path = os.path.join(model_dir, f"{model_name}.joblib") |
| | | try: |
| | | # ä¿å模åæéåç¸å
³ä¿¡æ¯ |
| | | model_info = { |
| | | 'model': model, |
| | | 'features': st.session_state['mr_selected_features'], |
| | | 'scaler': None, # 线æ§åå½ä¸éè¦æ åå¨ |
| | | 'model_type': 'linear_regression', |
| | | 'created_at': datetime.now(), |
| | | 'r2_score': r2, |
| | | 'mse': mse, |
| | | 'mae': mae, |
| | | 'rmse': rmse, |
| | | 'use_steady_data': use_steady_data |
| | | } |
| | | joblib.dump(model_info, model_path) |
| | | st.success(f"模åå·²æåä¿åå°: {model_path}") |
| | | except Exception as e: |
| | | st.error(f"模åä¿å失败: {e}") |
| | | |
| | | # --- æ°æ®é¢è§ --- |
| | | st.subheader("ð æ°æ®é¢è§") |
| 对比新文件
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.data_processing_service import DataProcessingService |
| | | |
class SteadyStateDetector:
    """Steady-state detection for the metered-weight analysis page.

    ``preprocess_data`` fills gaps and adds rolling statistics;
    ``detect_steady_state`` flags steady points and groups them into
    segments; ``get_steady_state_metrics`` summarizes the segments.
    """

    def __init__(self):
        # Kept for API compatibility; not used by the methods below.
        self.data_processor = DataProcessingService()

    def preprocess_data(self, df, weight_col='metered_weight', window_size=20):
        """Prepare raw data: fill missing values and add rolling statistics.

        :param df: raw DataFrame
        :param weight_col: metered-weight column name
        :param window_size: rolling-window size in rows
        :return: preprocessed copy of ``df`` (original left untouched)
        """
        if df is None or df.empty:
            return df

        # Copy so the caller's frame is not modified.
        df_processed = df.copy()

        # Fill missing weights forward, then backward for a leading gap.
        df_processed[weight_col] = df_processed[weight_col].ffill().bfill()

        # No outlier replacement or smoothing is applied: the "smoothed"
        # column is intentionally the raw signal.
        df_processed['smoothed_weight'] = df_processed[weight_col]

        # Rolling statistics of the raw weight.
        df_processed['rolling_std'] = df_processed[weight_col].rolling(window=window_size, min_periods=1).std()
        df_processed['rolling_mean'] = df_processed[weight_col].rolling(window=window_size, min_periods=1).mean()

        return df_processed

    def detect_steady_state(self, df, weight_col='smoothed_weight', window_size=20, std_threshold=0.5, duration_threshold=60):
        """Flag steady points and collect steady segments.

        :param df: preprocessed DataFrame (see ``preprocess_data``)
        :param weight_col: weight column to analyze
        :param window_size: rolling-window size in rows (assumed ~1 row/s)
        :param std_threshold: fluctuation-range threshold (percent of mean)
        :param duration_threshold: minimum segment duration in seconds
        :return: (DataFrame with ``is_steady`` flags, list of segment dicts)
        """
        if df is None or df.empty:
            return df, []

        # Positionally indexed copy: the original mutated the caller's
        # frame and assumed a contiguous RangeIndex via df.loc[i-1, ...],
        # which raised KeyError on frames with index gaps.
        df = df.reset_index(drop=True)

        # Ensure the time column is datetime-typed.
        df['time'] = pd.to_datetime(df['time'])

        # Seconds between consecutive samples (kept as a diagnostic column).
        df['time_diff'] = df['time'].diff().dt.total_seconds().fillna(0)

        # Initialize the steady flag.
        df['is_steady'] = 0

        # Window statistics. Fix: honor ``weight_col`` — the original
        # hard-coded 'smoothed_weight' here and below, making the
        # parameter dead.
        df['window_std'] = df[weight_col].rolling(window=window_size, min_periods=5).std()
        df['window_mean'] = df[weight_col].rolling(window=window_size, min_periods=5).mean()

        # Fluctuation range as a percentage of the window mean.
        df['fluctuation_range'] = (df['window_std'] / df['window_mean']) * 100
        df['fluctuation_range'] = df['fluctuation_range'].fillna(0)

        # Point-wise flag; weights below 0.1 kg/m are idle/startup data.
        df.loc[(df['fluctuation_range'] < std_threshold) & (df[weight_col] >= 0.1), 'is_steady'] = 1

        # Group consecutive steady points into segments.
        steady_segments = []
        current_segment = {}

        for i, row in df.iterrows():
            if row['is_steady'] == 1:
                if not current_segment:
                    # A new steady segment starts here.
                    current_segment = {
                        'start_time': row['time'],
                        'start_idx': i,
                        'weights': [row[weight_col]]
                    }
                else:
                    # Continue the current steady segment.
                    current_segment['weights'].append(row[weight_col])
            else:
                if current_segment:
                    # Segment ended on the previous row.
                    current_segment['end_time'] = df.loc[i-1, 'time'] if i > 0 else df.loc[i, 'time']
                    current_segment['end_idx'] = i - 1
                    self._finalize_segment(current_segment, std_threshold,
                                           duration_threshold, steady_segments)
                    current_segment = {}

        # Close a segment that runs to the end of the data.
        if current_segment:
            current_segment['end_time'] = df['time'].iloc[-1]
            current_segment['end_idx'] = len(df) - 1
            self._finalize_segment(current_segment, std_threshold,
                                   duration_threshold, steady_segments)

        # Re-mark the full extent of each accepted segment.
        for segment in steady_segments:
            df.loc[segment['start_idx']:segment['end_idx'], 'is_steady'] = 1

        return df, steady_segments

    def _finalize_segment(self, segment, std_threshold, duration_threshold, steady_segments):
        """Attach statistics to a closed segment and keep it if long enough.

        Shared by the mid-stream and end-of-data closing paths (the original
        duplicated this logic verbatim in both places).
        """
        duration = (segment['end_time'] - segment['start_time']).total_seconds()
        if duration < duration_threshold:
            return

        weights_array = np.array(segment['weights'])
        segment['duration'] = duration
        segment['mean_weight'] = np.mean(weights_array)
        segment['std_weight'] = np.std(weights_array)
        segment['min_weight'] = np.min(weights_array)
        segment['max_weight'] = np.max(weights_array)
        segment['fluctuation_range'] = (segment['std_weight'] / segment['mean_weight']) * 100

        # Confidence: 100 at zero fluctuation, falling linearly with the
        # fluctuation/threshold ratio, clamped to [50, 100].
        confidence = 100 - (segment['fluctuation_range'] / std_threshold) * 50
        segment['confidence'] = max(50, min(100, confidence))

        steady_segments.append(segment)

    def get_steady_state_metrics(self, steady_segments):
        """Aggregate quality metrics over the detected steady segments.

        :param steady_segments: list of segment dicts from
            ``detect_steady_state``
        :return: metrics dict (empty dict when there are no segments)
        """
        if not steady_segments:
            return {}

        durations = [seg['duration'] for seg in steady_segments]

        return {
            'total_steady_segments': len(steady_segments),
            'average_steady_duration': np.mean(durations),
            'average_fluctuation_range': np.mean([seg['fluctuation_range'] for seg in steady_segments]),
            'average_confidence': np.mean([seg['confidence'] for seg in steady_segments]),
            'total_steady_duration': sum(durations)
        }
| | | |
def show_metered_weight_steady_state():
    """Streamlit page: steady-state analysis of the metered-weight signal.

    Lets the user pick a date range and detection parameters, fetches
    extruder data, runs SteadyStateDetector, and renders the results:
    a chart with steady regions, summary metrics, a segment table,
    CSV export, and a data preview.
    """
    # Initialize the data service and the steady-state detector.
    extruder_service = ExtruderService()
    steady_state_detector = SteadyStateDetector()

    # Page title.
    st.title("ç±³é稳æè¯å«åæ")

    # Seed session-state defaults on first visit.
    if 'ss_start_date' not in st.session_state:
        st.session_state['ss_start_date'] = datetime.now().date() - timedelta(days=1)
    if 'ss_end_date' not in st.session_state:
        st.session_state['ss_end_date'] = datetime.now().date()
    if 'ss_quick_select' not in st.session_state:
        st.session_state['ss_quick_select'] = "æè¿24å°æ¶"
    if 'ss_window_size' not in st.session_state:
        st.session_state['ss_window_size'] = 20
    if 'ss_std_threshold' not in st.session_state:
        st.session_state['ss_std_threshold'] = 1.5
    if 'ss_duration_threshold' not in st.session_state:
        st.session_state['ss_duration_threshold'] = 60

    # Callback: quick-select buttons rewrite the stored date range.
    def update_dates(qs):
        st.session_state['ss_quick_select'] = qs
        today = datetime.now().date()
        if qs == "ä»å¤©":
            st.session_state['ss_start_date'] = today
            st.session_state['ss_end_date'] = today
        elif qs == "æè¿24å°æ¶":
            st.session_state['ss_start_date'] = today - timedelta(days=1)
            st.session_state['ss_end_date'] = today
        elif qs == "æè¿7天":
            st.session_state['ss_start_date'] = today - timedelta(days=7)
            st.session_state['ss_end_date'] = today
        elif qs == "æè¿30天":
            st.session_state['ss_start_date'] = today - timedelta(days=30)
            st.session_state['ss_end_date'] = today

    # Callback: manually editing a date switches quick-select to "custom".
    def on_date_change():
        st.session_state['ss_quick_select'] = "èªå®ä¹"

    # --- Query configuration section ---
    with st.expander("ð æ¥è¯¢éç½®", expanded=True):
        # Layout: five quick-select buttons, two date inputs, one query button.
        cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1])

        options = ["ä»å¤©", "æè¿24å°æ¶", "æè¿7天", "æè¿30天", "èªå®ä¹"]
        for i, option in enumerate(options):
            with cols[i]:
                # Highlight the currently active quick-select option.
                button_type = "primary" if st.session_state['ss_quick_select'] == option else "secondary"
                if st.button(option, key=f"btn_ss_{option}", width='stretch', type=button_type):
                    update_dates(option)
                    st.rerun()

        with cols[5]:
            start_date = st.date_input(
                "å¼å§æ¥æ",
                label_visibility="collapsed",
                key="ss_start_date",
                on_change=on_date_change
            )

        with cols[6]:
            end_date = st.date_input(
                "ç»ææ¥æ",
                label_visibility="collapsed",
                key="ss_end_date",
                on_change=on_date_change
            )

        with cols[7]:
            query_button = st.button("ð å¼å§åæ", key="ss_query", width='stretch')

        # --- Steady-state parameter configuration ---
        st.markdown("---")
        param_cols = st.columns(3)

        with param_cols[0]:
            st.write("âï¸ **稳æåæ°éç½®**")
            # NOTE(review): passing both value= and key= for a key that is
            # already in session_state triggers a Streamlit warning — confirm
            # this is intended (same pattern on the two sliders below).
            window_size = st.slider(
                "æ»å¨çªå£å¤§å° (ç§)",
                min_value=5,
                max_value=60,
                value=st.session_state['ss_window_size'],
                step=5,
                key="ss_window_size",
                help="ç¨äºå¹³æ»æ°æ®å计ç®ç»è®¡ç¹å¾çæ»å¨çªå£å¤§å°"
            )

        with param_cols[1]:
            st.write("ð **æ³¢å¨éå¼éç½®**")
            std_threshold = st.slider(
                "æ åå·®éå¼",
                min_value=0.1,
                max_value=2.0,
                value=st.session_state['ss_std_threshold'],
                step=0.1,
                key="ss_std_threshold",
                help="ç±³éæ³¢å¨çæ åå·®éå¼ï¼ä½äºæ¤å¼è§ä¸ºç¨³æ"
            )

        with param_cols[2]:
            st.write("â±ï¸ **æç»æ¶é´éç½®**")
            duration_threshold = st.slider(
                "稳ææç»æ¶é´ (ç§)",
                min_value=30,
                max_value=300,
                value=st.session_state['ss_duration_threshold'],
                step=10,
                key="ss_duration_threshold",
                help="稳ææç»çæå°æ¶é´ï¼ä½äºæ¤å¼ä¸è§ä¸ºç¨³ææ®µ"
            )

    # Expand the selected dates to full-day datetime bounds.
    start_dt = datetime.combine(start_date, datetime.min.time())
    end_dt = datetime.combine(end_date, datetime.max.time())

    # On query: fetch data and cache it in session state so the analysis
    # below survives parameter-only reruns.
    if query_button:
        with st.spinner("æ£å¨è·åæ°æ®..."):
            # Fetch extruder data for the selected window.
            df_extruder = extruder_service.get_extruder_data(start_dt, end_dt)

            if df_extruder is None or df_extruder.empty:
                st.warning("æéæ¶é´æ®µåæªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã")
                return

            # Cache the raw data and the query bounds.
            st.session_state['cached_extruder_ss'] = df_extruder
            st.session_state['last_query_start_ss'] = start_dt
            st.session_state['last_query_end_ss'] = end_dt

    # Analysis runs whenever cached data exists (query or parameter rerun).
    if 'cached_extruder_ss' in st.session_state:
        with st.spinner("æ£å¨åææ°æ®..."):
            # Retrieve the cached raw data.
            df_extruder = st.session_state['cached_extruder_ss']

            # Preprocess: fill gaps, add smoothed/rolling columns.
            df_processed = steady_state_detector.preprocess_data(df_extruder, window_size=st.session_state['ss_window_size'])

            # Run steady-state detection with the configured parameters.
            df_with_steady, steady_segments = steady_state_detector.detect_steady_state(
                df_processed,
                window_size=st.session_state['ss_window_size'],
                std_threshold=st.session_state['ss_std_threshold'],
                duration_threshold=st.session_state['ss_duration_threshold']
            )

            # Aggregate segment metrics for the summary row.
            steady_metrics = steady_state_detector.get_steady_state_metrics(steady_segments)

            # Coerce types defensively before plotting.
            df_with_steady['time'] = pd.to_datetime(df_with_steady['time'])
            df_with_steady['metered_weight'] = pd.to_numeric(df_with_steady['metered_weight'], errors='coerce')
            df_with_steady['smoothed_weight'] = pd.to_numeric(df_with_steady['smoothed_weight'], errors='coerce')

            # Drop rows the coercion above turned into NaN.
            df_with_steady = df_with_steady.dropna(subset=['time', 'metered_weight', 'smoothed_weight'])

            # --- Visualization section ---
            st.subheader("ð ç±³é稳æè¯å«ç»æ")

            fig = go.Figure()

            # Raw weight trace (faint background line).
            fig.add_trace(go.Scatter(
                x=df_with_steady['time'],
                y=df_with_steady['metered_weight'],
                name='åå§ç±³é',
                mode='lines',
                opacity=0.6,
                line=dict(color='lightgray', width=1)
            ))

            # "Smoothed" weight trace (currently identical to raw — see
            # preprocess_data).
            fig.add_trace(go.Scatter(
                x=df_with_steady['time'],
                y=df_with_steady['smoothed_weight'],
                name='å¹³æ»ç±³é',
                mode='lines',
                line=dict(color='blue', width=2)
            ))

            # Shade each detected steady segment as a green rectangle.
            for segment in steady_segments:
                fig.add_shape(
                    type="rect",
                    x0=segment['start_time'],
                    y0=segment['min_weight'] * 0.95,
                    x1=segment['end_time'],
                    y1=segment['max_weight'] * 1.05,
                    fillcolor="rgba(0, 255, 0, 0.2)",
                    line=dict(color="rgba(0, 200, 0, 0.5)", width=1),
                    name="稳æåºå"
                )

            # Chart layout.
            fig.update_layout(
                title="ç±³é稳æè¯å«ç»æ",
                xaxis=dict(title="æ¶é´"),
                yaxis=dict(title="ç±³é (Kg/m)"),
                legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
                height=600
            )

            st.plotly_chart(fig, use_container_width=True)

            # --- Summary metrics row ---
            st.subheader("ð 稳æç»è®¡ææ ")
            metrics_cols = st.columns(5)

            with metrics_cols[0]:
                st.metric(
                    "ç¨³ææ®µæ»æ°",
                    steady_metrics.get('total_steady_segments', 0),
                    help="è¯å«å°çç¨³ææ®µæ°é"
                )

            with metrics_cols[1]:
                st.metric(
                    "å¹³åç¨³ææ¶é¿",
                    f"{steady_metrics.get('average_steady_duration', 0):.2f} ç§",
                    help="ææç¨³ææ®µçå¹³åæç»æ¶é´"
                )

            with metrics_cols[2]:
                st.metric(
                    "平忳¢å¨èå´",
                    f"{steady_metrics.get('average_fluctuation_range', 0):.2f}%",
                    help="ç¨³ææ®µåç±³éç平忳¢å¨èå´ï¼ç¸å¯¹äºåå¼çç¾åæ¯ï¼"
                )

            with metrics_cols[3]:
                st.metric(
                    "å¹³å置信度",
                    f"{steady_metrics.get('average_confidence', 0):.1f}%",
                    help="稳æè¯å«ç»æçå¹³å置信度"
                )

            with metrics_cols[4]:
                # Stored in seconds; shown in minutes.
                st.metric(
                    "æ»ç¨³ææ¶é¿",
                    f"{steady_metrics.get('total_steady_duration', 0)/60:.2f} åé",
                    help="ææç¨³ææ®µçæ»æç»æ¶é´"
                )

            # --- Segment detail table ---
            st.subheader("ð ç¨³ææ®µè¯¦æ")
            if steady_segments:
                steady_df = pd.DataFrame(steady_segments)

                # Only show the summary columns (not weights/indices).
                display_cols = ['start_time', 'end_time', 'duration', 'mean_weight', 'std_weight', 'fluctuation_range', 'confidence']
                steady_df_display = steady_df[display_cols].copy()

                # Human-readable formatting with units.
                steady_df_display['duration'] = steady_df_display['duration'].apply(lambda x: f"{x:.1f} ç§")
                steady_df_display['mean_weight'] = steady_df_display['mean_weight'].apply(lambda x: f"{x:.4f} Kg/m")
                steady_df_display['std_weight'] = steady_df_display['std_weight'].apply(lambda x: f"{x:.4f} Kg/m")
                steady_df_display['fluctuation_range'] = steady_df_display['fluctuation_range'].apply(lambda x: f"{x:.2f}%")
                steady_df_display['confidence'] = steady_df_display['confidence'].apply(lambda x: f"{x:.1f}%")

                st.dataframe(steady_df_display, use_container_width=True)

                # --- CSV export ---
                st.subheader("ð¾ å¯¼åºæ°æ®")

                export_df = df_with_steady[['time', 'metered_weight', 'smoothed_weight', 'is_steady']].copy()
                export_csv = export_df.to_csv(index=False)

                st.download_button(
                    label="导åºç¨³æè¯å«ç»æ (CSV)",
                    data=export_csv,
                    file_name=f"metered_weight_steady_state_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv",
                    mime="text/csv",
                    help="ç¹å»æé®å¯¼åºç±³é稳æè¯å«ç»ææ°æ®"
                )
            else:
                st.info("æªè¯å«å°ä»»ä½ç¨³ææ®µï¼è¯·å°è¯è°æ´ç¨³æåæ°éç½®ã")

            # --- Data preview (first 20 rows) ---
            st.subheader("ð æ°æ®é¢è§")
            st.dataframe(df_with_steady[['time', 'metered_weight', 'smoothed_weight', 'is_steady', 'fluctuation_range']].head(20), use_container_width=True)
    else:
        # No cached data yet: prompt the user to run a query first.
        st.info("è¯·éæ©æ¶é´èå´å¹¶ç¹å»'å¼å§åæ'æé®è·åæ°æ®ã")
| ¶Ô±ÈÐÂÎļþ |
| | |
| | | import pandas as pd |
| | | import numpy as np |
| | | |
# Optional dependency: try to import torch; if it is not installed, bind the
# name to None so deep-learning prediction paths can detect its absence and
# degrade gracefully instead of raising at import time.
try:
    import torch
except ImportError:
    # torch is unavailable in this environment; callers must check for None.
    torch = None
| | | |
class ParameterAdjustmentAdvisor:
    """
    Extruder parameter adjustment advisor.

    Compares the real-time metered weight (Kg/m) against the standard
    metered weight and recommends how to adjust the screw speed and the
    process main (line) speed so the real-time weight converges to the
    standard value.
    """

    # Reference temperature (°C) for the temperature influence factor.
    # NOTE(review): tune to the actual process — 80 °C is a placeholder.
    _REFERENCE_TEMPERATURE = 80.0

    def __init__(self):
        # Relation coefficients: percentage a parameter is adjusted for each
        # 1% of metered-weight deviation.  The sign is applied in
        # calculate_adjustment (adjustment opposes the deviation).  These can
        # be refined from production data via analyze_historical_adjustments().
        self.default_coefficients = {
            'screw_speed': 0.1,         # +1% weight deviation -> -0.1% screw speed
            'process_main_speed': -0.1  # +1% weight deviation -> +0.1% process main speed
        }

        # Hard parameter limits; adjust to the actual equipment envelope.
        self.default_limits = {
            'screw_speed': {'min': 30, 'max': 500},
            'process_main_speed': {'min': 0, 'max': 200}
        }

        # Maximum magnitude of a single adjustment (percent of the current
        # value), to avoid over-correcting in one step.
        self.max_adjustment_percentage = {
            'screw_speed': 15.0,        # screw speed: at most 15% per step
            'process_main_speed': 10.0  # process main speed: at most 10% per step
        }

    def _compute_temperature_factor(self, screw_temperature, head_temperature):
        """
        Compute the temperature influence factor for the adjustment.

        Higher temperatures improve melt flow, so the same screw speed
        extrudes more material.  The factor scales the recommended adjustment
        by ±5% per 10 °C of deviation from the reference temperature and is
        clamped to [0.8, 1.2].  Returns 1.0 when either temperature is None.
        """
        if screw_temperature is None or head_temperature is None:
            return 1.0
        avg_temperature = (screw_temperature + head_temperature) / 2
        temperature_deviation = avg_temperature - self._REFERENCE_TEMPERATURE
        factor = 1.0 + (temperature_deviation / 10.0) * 0.05
        return max(0.8, min(1.2, factor))

    def calculate_adjustment(self, real_time_weight, standard_weight, upper_limit, lower_limit,
                             current_screw_speed, current_process_speed,
                             current_screw_temperature=None, current_rear_barrel_temperature=None,
                             current_front_barrel_temperature=None, current_head_temperature=None,
                             coefficients=None, limits=None):
        """
        Compute a single-step adjustment recommendation.

        :param real_time_weight: real-time metered weight (Kg/m)
        :param standard_weight: standard metered weight (Kg/m)
        :param upper_limit: metered-weight upper limit (Kg/m)
        :param lower_limit: metered-weight lower limit (Kg/m)
        :param current_screw_speed: current screw speed (rpm)
        :param current_process_speed: current process main speed (m/min)
        :param current_screw_temperature: current screw temperature (°C), optional
        :param current_rear_barrel_temperature: rear barrel temperature (°C);
            currently unused by the factor model, kept for interface stability
        :param current_front_barrel_temperature: front barrel temperature (°C);
            currently unused by the factor model, kept for interface stability
        :param current_head_temperature: current head temperature (°C), optional
        :param coefficients: optional custom relation coefficients
        :param limits: optional custom parameter limits
        :return: dict describing the deviation, status, new set-points and a
            textual recommendation
        """
        # Fall back to the defaults when no custom configuration is supplied.
        coeffs = coefficients if coefficients else self.default_coefficients
        param_limits = limits if limits else self.default_limits

        # Metered-weight deviation (guard against a zero standard weight).
        weight_deviation = real_time_weight - standard_weight
        deviation_percentage = (weight_deviation / standard_weight) * 100 if standard_weight != 0 else 0

        # Classify the real-time weight relative to the configured limits.
        if real_time_weight > upper_limit:
            status = "超上限"
        elif real_time_weight < lower_limit:
            status = "超下限"
        else:
            status = "正常范围"

        temperature_factor = self._compute_temperature_factor(
            current_screw_temperature, current_head_temperature)

        # Raw adjustment percentages: deviation% * coefficient * temperature
        # factor.  The leading minus sign makes the adjustment oppose the
        # deviation (weight too high -> slow the screw down, speed the line up).
        screw_speed_adjustment_percent = -deviation_percentage * coeffs['screw_speed'] * temperature_factor
        process_speed_adjustment_percent = -deviation_percentage * coeffs['process_main_speed'] * temperature_factor

        # Clamp each adjustment to its maximum single-step magnitude.
        max_screw = self.max_adjustment_percentage['screw_speed']
        max_process = self.max_adjustment_percentage['process_main_speed']
        screw_speed_adjustment_percent = max(-max_screw, min(max_screw, screw_speed_adjustment_percent))
        process_speed_adjustment_percent = max(-max_process, min(max_process, process_speed_adjustment_percent))

        # Convert percentages into absolute deltas and candidate set-points.
        new_screw_speed = current_screw_speed + (screw_speed_adjustment_percent / 100) * current_screw_speed
        new_process_speed = current_process_speed + (process_speed_adjustment_percent / 100) * current_process_speed

        # Keep the new set-points inside the hard equipment limits.
        new_screw_speed = max(param_limits['screw_speed']['min'],
                              min(param_limits['screw_speed']['max'], new_screw_speed))
        new_process_speed = max(param_limits['process_main_speed']['min'],
                                min(param_limits['process_main_speed']['max'], new_process_speed))

        # Bug fix: recompute the absolute adjustments from the clamped
        # set-points so the reported delta always equals new minus current
        # (the previous version could report a pre-clamp delta that was
        # inconsistent with the returned set-point).
        screw_speed_adjustment = new_screw_speed - current_screw_speed
        process_speed_adjustment = new_process_speed - current_process_speed

        # Effective adjustment percentages after all clamping.
        screw_speed_adjust_percent = ((new_screw_speed - current_screw_speed) / current_screw_speed) * 100 if current_screw_speed != 0 else 0
        process_speed_adjust_percent = ((new_process_speed - current_process_speed) / current_process_speed) * 100 if current_process_speed != 0 else 0

        # Always produce a recommendation, even inside the normal range, so
        # the operator can steer the weight as close as possible to standard.
        recommendation = self._generate_recommendation(
            deviation_percentage,
            screw_speed_adjust_percent,
            new_screw_speed,
            process_speed_adjust_percent,
            new_process_speed
        )

        return {
            'status': status,
            'real_time_weight': real_time_weight,
            'standard_weight': standard_weight,
            'upper_limit': upper_limit,
            'lower_limit': lower_limit,
            'deviation': weight_deviation,
            'deviation_percentage': deviation_percentage,
            'current_screw_speed': current_screw_speed,
            'current_process_speed': current_process_speed,
            'new_screw_speed': new_screw_speed,
            'new_process_speed': new_process_speed,
            'screw_speed_adjustment': screw_speed_adjustment,
            'process_speed_adjustment': process_speed_adjustment,
            'screw_speed_adjust_percent': screw_speed_adjust_percent,
            'process_speed_adjust_percent': process_speed_adjust_percent,
            'recommendation': recommendation
        }

    def _generate_recommendation(self, deviation_percentage, screw_speed_adjust_percent,
                                 new_screw_speed, process_speed_adjust_percent, new_process_speed):
        """
        Build the operator-facing recommendation text (Chinese UI strings).

        :param deviation_percentage: metered-weight deviation percentage
        :param screw_speed_adjust_percent: screw-speed adjustment percentage
        :param new_screw_speed: adjusted screw speed (rpm)
        :param process_speed_adjust_percent: process-main-speed adjustment percentage
        :param new_process_speed: adjusted process main speed (m/min)
        :return: recommendation text
        """
        abs_deviation = abs(deviation_percentage)
        screw_dir = '降低' if screw_speed_adjust_percent < 0 else '提高'
        process_dir = '降低' if process_speed_adjust_percent < 0 else '提高'

        if abs_deviation < 0.5:
            # Deviation is tiny: suggest only a fine trim toward standard.
            return f"米重偏差很小 ({abs_deviation:.2f}%)，接近标准值。\n" \
                   f"建议微调以进一步接近标准值：\n" \
                   f"1. 将螺杆转速调整至 {new_screw_speed:.1f} rpm " \
                   f"( {screw_dir} {abs(screw_speed_adjust_percent):.2f}% )\n" \
                   f"2. 将流程主速调整至 {new_process_speed:.1f} m/min " \
                   f"( {process_dir} {abs(process_speed_adjust_percent):.2f}% )"
        elif deviation_percentage > 0:
            # Weight too high: bring it down.
            return f"米重偏差 {abs_deviation:.2f}%（偏高），建议调整：\n" \
                   f"1. 将螺杆转速从当前值调整至 {new_screw_speed:.1f} rpm " \
                   f"( {screw_dir} {abs(screw_speed_adjust_percent):.2f}% )\n" \
                   f"2. 将流程主速从当前值调整至 {new_process_speed:.1f} m/min " \
                   f"( {process_dir} {abs(process_speed_adjust_percent):.2f}% )"
        else:
            # Weight too low: bring it up.
            return f"米重偏差 {abs_deviation:.2f}%（偏低），建议调整：\n" \
                   f"1. 将螺杆转速从当前值调整至 {new_screw_speed:.1f} rpm " \
                   f"( {screw_dir} {abs(screw_speed_adjust_percent):.2f}% )\n" \
                   f"2. 将流程主速从当前值调整至 {new_process_speed:.1f} m/min " \
                   f"( {process_dir} {abs(process_speed_adjust_percent):.2f}% )"

    def predict_weight(self, model_info, screw_speed, head_pressure, process_speed,
                       screw_temperature, rear_barrel_temperature,
                       front_barrel_temperature, head_temperature):
        """
        Predict the metered weight with a previously trained model.

        :param model_info: dict with the model, its 'model_type', the trained
            'features' list and, where applicable, fitted 'scaler_X'/'scaler_y'
            and 'sequence_length'
        :param screw_speed: screw speed (rpm)
        :param head_pressure: head pressure (bar)
        :param process_speed: process main speed (m/min)
        :param screw_temperature: screw temperature (°C)
        :param rear_barrel_temperature: rear barrel temperature (°C)
        :param front_barrel_temperature: front barrel temperature (°C)
        :param head_temperature: head temperature (°C)
        :return: predicted metered weight (Kg/m), or None on any failure
        """
        try:
            required_features = model_info['features']

            # Feature names must match the (Chinese) column names the model
            # was trained with; the column order is imposed by
            # required_features when the DataFrame is built below.
            input_features = {
                '螺杆转速': screw_speed,
                '机头压力': head_pressure,
                '流程主速': process_speed,
                '螺杆温度': screw_temperature,
                '后机筒温度': rear_barrel_temperature,
                '前机筒温度': front_barrel_temperature,
                '机头温度': head_temperature
            }
            input_df = pd.DataFrame([input_features])[required_features]

            model = model_info['model']
            model_type = model_info['model_type']

            if model_type in ['LSTM', 'GRU', 'BiLSTM']:
                # Deep-learning models need torch, scaling and a sequence input.
                if torch is None:
                    print("PyTorch not available, cannot predict with deep learning models")
                    return None

                scaler_X = model_info['scaler_X']
                scaler_y = model_info['scaler_y']
                input_scaled = scaler_X.transform(input_df)

                # Build a constant sequence by repeating the single sample
                # sequence_length times: shape (1, seq_len, n_features).
                sequence_length = model_info['sequence_length']
                input_seq = np.tile(input_scaled, (sequence_length, 1)).reshape(1, sequence_length, -1)

                device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
                input_tensor = torch.tensor(input_seq, dtype=torch.float32).to(device)

                # Inference only: eval mode + no gradient tracking.
                model.eval()
                with torch.no_grad():
                    y_pred_scaled = model(input_tensor).cpu().numpy().ravel()[0]

                # Map the scaled prediction back to Kg/m.
                return scaler_y.inverse_transform(np.array([[y_pred_scaled]]))[0][0]

            if model_type in ['SVR', 'MLP', 'GradientBoosting']:
                # Models trained on scaled features/targets.
                scaler_X = model_info['scaler_X']
                scaler_y = model_info['scaler_y']
                y_pred_scaled = model.predict(scaler_X.transform(input_df))[0]
                return scaler_y.inverse_transform(np.array([[y_pred_scaled]]))[0][0]

            # Other models (random forest, linear regression, ...) predict
            # directly on the raw features.
            return model.predict(input_df)[0]
        except Exception as e:
            # Best-effort API: report and return None instead of raising;
            # callers treat None as "prediction unavailable".
            print(f"模型预测失败: {e}")
            import traceback
            traceback.print_exc()
            return None

    def _history_entry(self, iteration_index, current_params, adjustment_result,
                       predicted_weight, predicted_deviation, predicted_deviation_percent):
        """Build one iteration-history record for iterative_adjustment."""
        return {
            'iteration': iteration_index + 1,
            'current_screw_speed': current_params['current_screw_speed'],
            'current_process_speed': current_params['current_process_speed'],
            'adjusted_screw_speed': adjustment_result['new_screw_speed'],
            'adjusted_process_speed': adjustment_result['new_process_speed'],
            'predicted_weight': predicted_weight,
            'predicted_deviation': predicted_deviation,
            'predicted_deviation_percent': predicted_deviation_percent,
            'screw_speed_adjustment': adjustment_result['screw_speed_adjustment'],
            'process_speed_adjustment': adjustment_result['process_speed_adjustment']
        }

    def iterative_adjustment(self, initial_params, model_info, max_iterations=5, tolerance=0.5):
        """
        Iteratively adjust the parameters, re-predicting the resulting weight
        with the model, until the prediction is within tolerance of standard.

        :param initial_params: dict with the keys real_time_weight,
            standard_weight, upper_limit, lower_limit, current_screw_speed,
            current_process_speed, current_screw_temperature,
            current_rear_barrel_temperature, current_front_barrel_temperature,
            current_head_temperature, current_head_pressure
        :param model_info: model info dict (see predict_weight)
        :param max_iterations: maximum number of iterations
        :param tolerance: allowed weight-deviation percentage
        :return: dict with final_result, iteration_history, converged,
            total_iterations and initial_params
        """
        iteration_history = []
        converged = False

        # Keep the untouched inputs for the final summary; iterate on a copy.
        original_params = initial_params.copy()
        current_params = initial_params.copy()

        for i in range(max_iterations):
            # Rule-based adjustment for the current state.
            adjustment_result = self.calculate_adjustment(
                real_time_weight=current_params['real_time_weight'],
                standard_weight=current_params['standard_weight'],
                upper_limit=current_params['upper_limit'],
                lower_limit=current_params['lower_limit'],
                current_screw_speed=current_params['current_screw_speed'],
                current_process_speed=current_params['current_process_speed'],
                current_screw_temperature=current_params['current_screw_temperature'],
                current_rear_barrel_temperature=current_params['current_rear_barrel_temperature'],
                current_front_barrel_temperature=current_params['current_front_barrel_temperature'],
                current_head_temperature=current_params['current_head_temperature']
            )

            # Model-predicted weight after applying the adjustment.
            predicted_weight = self.predict_weight(
                model_info=model_info,
                screw_speed=adjustment_result['new_screw_speed'],
                head_pressure=current_params['current_head_pressure'],
                process_speed=adjustment_result['new_process_speed'],
                screw_temperature=current_params['current_screw_temperature'],
                rear_barrel_temperature=current_params['current_rear_barrel_temperature'],
                front_barrel_temperature=current_params['current_front_barrel_temperature'],
                head_temperature=current_params['current_head_temperature']
            )

            if predicted_weight is None:
                # Prediction failed: record the attempt and stop iterating.
                print("模型预测失败，终止迭代调整")
                iteration_history.append(self._history_entry(
                    i, current_params, adjustment_result, None, None, None))
                break

            # Predicted deviation (guard against a zero standard weight).
            standard = current_params['standard_weight']
            predicted_deviation = predicted_weight - standard
            predicted_deviation_percent = (predicted_deviation / standard) * 100 if standard != 0 else 0

            iteration_history.append(self._history_entry(
                i, current_params, adjustment_result,
                predicted_weight, predicted_deviation, predicted_deviation_percent))

            if abs(predicted_deviation_percent) <= tolerance:
                converged = True
                break

            # Feed the prediction back in as the next iteration's state.
            current_params.update({
                'real_time_weight': predicted_weight,
                'current_screw_speed': adjustment_result['new_screw_speed'],
                'current_process_speed': adjustment_result['new_process_speed']
            })

        final_screw_speed = iteration_history[-1]['adjusted_screw_speed']
        final_process_speed = iteration_history[-1]['adjusted_process_speed']
        final_predicted_weight = iteration_history[-1]['predicted_weight']

        # Bug fix: the last prediction may be None (model failure).  Fall back
        # to the last known real-time weight so the arithmetic below cannot
        # raise a TypeError.
        if final_predicted_weight is None:
            final_predicted_weight = current_params['real_time_weight']

        standard = original_params['standard_weight']

        # Bug fix: classify against the configured limits.  The previous
        # chained conditional could report "超下限" for a weight that was
        # merely outside tolerance but still above the lower limit.
        if final_predicted_weight > original_params['upper_limit']:
            final_status = '超上限'
        elif final_predicted_weight < original_params['lower_limit']:
            final_status = '超下限'
        else:
            final_status = '正常范围'

        # Summary computed from the ORIGINAL state to the FINAL set-points.
        final_result = {
            'status': final_status,
            'real_time_weight': original_params['real_time_weight'],
            'standard_weight': standard,
            'upper_limit': original_params['upper_limit'],
            'lower_limit': original_params['lower_limit'],
            'deviation': original_params['real_time_weight'] - standard,
            'deviation_percentage': (original_params['real_time_weight'] - standard) / standard * 100 if standard != 0 else 0,
            'current_screw_speed': original_params['current_screw_speed'],
            'current_process_speed': original_params['current_process_speed'],
            'new_screw_speed': final_screw_speed,
            'new_process_speed': final_process_speed,
            'screw_speed_adjustment': final_screw_speed - original_params['current_screw_speed'],
            'process_speed_adjustment': final_process_speed - original_params['current_process_speed'],
            'screw_speed_adjust_percent': ((final_screw_speed - original_params['current_screw_speed']) / original_params['current_screw_speed']) * 100 if original_params['current_screw_speed'] != 0 else 0,
            'process_speed_adjust_percent': ((final_process_speed - original_params['current_process_speed']) / original_params['current_process_speed']) * 100 if original_params['current_process_speed'] != 0 else 0,
            'predicted_weight': final_predicted_weight
        }

        # Recommendation text based on the final predicted deviation.
        final_deviation_percent = (final_predicted_weight - standard) / standard * 100 if standard != 0 else 0
        final_result['recommendation'] = self._generate_recommendation(
            final_deviation_percent,
            final_result['screw_speed_adjust_percent'],
            final_screw_speed,
            final_result['process_speed_adjust_percent'],
            final_process_speed
        )

        # Echo the temperatures back for downstream prediction/display.
        for key in ('current_screw_temperature', 'current_rear_barrel_temperature',
                    'current_front_barrel_temperature', 'current_head_temperature'):
            final_result[key] = original_params[key]

        final_result['final_predicted_deviation'] = final_predicted_weight - standard
        final_result['final_predicted_deviation_percent'] = final_deviation_percent

        return {
            'final_result': final_result,
            'iteration_history': iteration_history,
            'converged': converged,
            'total_iterations': len(iteration_history),
            'initial_params': original_params
        }

    def analyze_historical_adjustments(self, df):
        """
        Estimate relation coefficients from historical adjustment data.

        :param df: DataFrame with columns real_time_weight, standard_weight,
            current_screw_speed, current_process_speed, adjusted_screw_speed,
            adjusted_process_speed, result_weight
        :return: optimized coefficients dict; falls back to the defaults on
            empty input or any failure
        """
        if df is None or df.empty:
            return self.default_coefficients

        try:
            # Bug fix: work on a copy so the caller's DataFrame is not
            # mutated (the previous version added helper columns in place).
            df = df.copy()

            # Per-row deviation and applied adjustment percentages.
            df['weight_deviation'] = df['real_time_weight'] - df['standard_weight']
            df['deviation_percentage'] = (df['weight_deviation'] / df['standard_weight']) * 100
            df['screw_speed_adjust_percent'] = ((df['adjusted_screw_speed'] - df['current_screw_speed']) / df['current_screw_speed']) * 100
            df['process_speed_adjust_percent'] = ((df['adjusted_process_speed'] - df['current_process_speed']) / df['current_process_speed']) * 100
            df['adjustment_effect'] = df['result_weight'] - df['real_time_weight']

            # Crude aggregate estimate:
            # coefficient = total adjustment percent / total deviation percent.
            deviation_sum = df['deviation_percentage'].sum()
            if deviation_sum != 0:
                screw_speed_coeff = df['screw_speed_adjust_percent'].sum() / deviation_sum
                process_speed_coeff = df['process_speed_adjust_percent'].sum() / deviation_sum
            else:
                screw_speed_coeff = self.default_coefficients['screw_speed']
                process_speed_coeff = self.default_coefficients['process_main_speed']

            # Force the conventional signs: screw speed is positively related
            # to the metered weight, the process main speed negatively.
            return {
                'screw_speed': abs(screw_speed_coeff),
                'process_main_speed': -abs(process_speed_coeff)
            }
        except Exception as e:
            print(f"分析历史调整数据失败: {e}")
            return self.default_coefficients
| | |
| | | from app.pages.metered_weight_correlation import show_metered_weight_correlation |
| | | from app.pages.metered_weight_regression import show_metered_weight_regression |
| | | from app.pages.metered_weight_advanced import show_metered_weight_advanced |
| | | from app.pages.metered_weight_deep_learning import show_metered_weight_deep_learning |
| | | from app.pages.metered_weight_steady_state import show_metered_weight_steady_state |
| | | from app.pages.metered_weight_prediction import show_metered_weight_prediction |
| | | from app.pages.metered_weight_forecast import show_metered_weight_forecast |
| | | from app.pages.extruder_parameter_adjustment import show_extruder_parameter_adjustment |
| | | |
| | | # 设置页é¢é
ç½® |
| | | st.set_page_config( |
| | |
| | | url_path="metered_weight_advanced" |
| | | ) |
| | | |
# Register each feature page with Streamlit's multipage navigation:
# st.Page binds a render function to a sidebar title, an icon and a stable
# URL path.
# NOTE(review): the title/icon strings below appear mojibake-encoded in this
# view — verify the source file is saved/read as UTF-8 so the Chinese titles
# and emoji icons render correctly.

# Deep-learning based metered-weight prediction page.
metered_weight_deep_learning_page = st.Page(
    show_metered_weight_deep_learning,
    title="ç±³éæ·±åº¦å¦ä¹ 颿µ",
    icon="ð§ ",
    url_path="metered_weight_deep_learning"
)

# Steady-state identification page for metered-weight data.
metered_weight_steady_state_page = st.Page(
    show_metered_weight_steady_state,
    title="ç±³é稳æè¯å«",
    icon="âï¸",
    url_path="metered_weight_steady_state"
)

# Unified metered-weight prediction page.
metered_weight_prediction_page = st.Page(
    show_metered_weight_prediction,
    title="ç±³éç»ä¸é¢æµ",
    icon="ð®",
    url_path="metered_weight_prediction"
)

# Metered-weight forecast analysis page.
metered_weight_forecast_page = st.Page(
    show_metered_weight_forecast,
    title="ç±³é颿µåæ",
    icon="ð",
    url_path="metered_weight_forecast"
)

# Extruder parameter adjustment advisor page.
extruder_parameter_adjustment_page = st.Page(
    show_extruder_parameter_adjustment,
    title="æ¤åºæºåæ°è°è",
    icon="âï¸",
    url_path="extruder_parameter_adjustment"
)
| | | |
| | | # ä¾§è¾¹æ 页èä¿¡æ¯ |
| | | def show_footer(): |
| | | st.sidebar.markdown("---") |
| | |
| | | |
# Navigation configuration: map sidebar section labels to their pages.
# Bug fix: the previous dict declared the first key twice; Python keeps only
# the LAST value for a duplicate dict key, so the shorter first page list was
# dead code and has been removed — only the complete list remains.
pg = st.navigation({
    "ç»¼ååæ": [comprehensive_page, metered_weight_page, metered_weight_correlation_page, metered_weight_regression_page, metered_weight_advanced_page, metered_weight_deep_learning_page, metered_weight_steady_state_page, metered_weight_prediction_page, metered_weight_forecast_page, extruder_parameter_adjustment_page],
    "å项åæ": [sorting_page, extruder_page, main_process_page]
})
| | | |