feat: 添加米重分析模块并优化综合看板功能
- 新增米重综合分析、相关性分析、回归分析和高级预测分析页面
- 在综合看板中添加时间偏移功能以对齐上下游数据
- 优化图表交互功能,添加统一悬停模式和缩放控制
- 更新数据库配置和依赖项,添加scikit-learn和pytorch
- 改进数据查询逻辑,添加裁切计数字段
- 修复数据展示问题,调整单位显示和过滤异常值
| | |
| | | # Database Configuration |
| | | DB_HOST=localhost |
| | | DB_PORT=5433 |
| | | DB_HOST=192.168.21.6 |
| | | DB_PORT=5432 |
| | | DB_NAME=aics |
| | | DB_USER=aics |
| | | DB_PASSWORD=123456lb |
| | | DB_PASSWORD=123456 |
| | |
| | | 本项目采用MIT许可证。 |
| | | 本项目采用MIT许可证。 |
| | | |
| | | |
| | | 本地测试----postgresql数据库------------ |
| | | 用户：postgres |
| | | 密码：123456 |
| | | |
| | | 数据库：aics |
| | | 用户名：aics |
| | | 密码：123456lb |
| | | ----------------------------------------- |
| | | |
| | | |
| | | æ£æ°é¡¹ç®ç°åº---postgresql------------- |
| | | ç¨æ·ï¼postgres |
| | | å¯ç ï¼123456 |
| | | |
| | | æ°æ®åºï¼aics |
| | | ç¨æ·åï¼aics |
| | | å¯ç ï¼lanbaoit-123 |
| | | ------------------------------------------ |
| | |
| | | main_process_service = MainProcessService() |
| | | |
| | | # 页颿 é¢ |
| | | st.title("å¤ç»´ç»¼ååæ") |
| | | st.title("æ¡é综ååæ") |
| | | |
| | | # åå§åä¼è¯ç¶æç¨äºæ¥æåæ¥ |
| | | if 'comp_start_date' not in st.session_state: |
| | |
| | | st.session_state['comp_end_date'] = datetime.now().date() |
| | | if 'comp_quick_select' not in st.session_state: |
| | | st.session_state['comp_quick_select'] = "æè¿7天" |
| | | if 'time_offset' not in st.session_state: |
| | | st.session_state['time_offset'] = 0 |
| | | |
| | | # å®ä¹åè°å½æ° |
| | | def update_dates(qs): |
| | |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[7]: |
| | | query_button = st.button("ð æ¥è¯¢", key="comp_query", width='stretch') |
| | | # å¨ç¬¬äºè¡æ·»å æ¶é´åç§»é
ç½® |
| | | st.markdown("---") |
| | | offset_cols = st.columns([2, 4, 2]) |
| | | with offset_cols[0]: |
| | | st.write("â±ï¸ **ç产对é½é
ç½®**") |
| | | with offset_cols[1]: |
| | | time_offset = st.slider( |
| | | "æ¤åº/主æµç¨æ°æ®åååç§» (åé)", |
| | | min_value=0, |
| | | max_value=60, |
| | | value=st.session_state['time_offset'], |
| | | help="ç±äºèé¢ä»æ¤åºå°åæ£éè¦æ¶é´ï¼å°ä¸æ¸¸æ°æ®ååç§»å¨ï¼ä½¿å
¶ä¸åæ£ç£
秤ä¸çééæ°æ®å¨æ¶é´è½´ä¸å¯¹é½ã" |
| | | ) |
| | | st.session_state['time_offset'] = time_offset |
| | | with offset_cols[2]: |
| | | query_button = st.button("ð å¼å§åæ", key="comp_query", width='stretch') |
| | | |
| | | # 转æ¢ä¸ºdatetime对象 |
| | | start_dt = datetime.combine(start_date, datetime.min.time()) |
| | |
| | | # æ¥è¯¢å¤ç |
| | | if query_button: |
| | | with st.spinner("æ£å¨èå夿ºæ°æ®..."): |
| | | # 1. è·å忣ç£
ç§¤æ°æ® |
| | | # è·ååç§»é |
| | | offset_delta = timedelta(minutes=st.session_state['time_offset']) |
| | | |
| | | # 1. è·å忣ç£
ç§¤æ°æ® (ä½ä¸ºåºåï¼ä¸åç§») |
| | | df_sorting = sorting_service.get_sorting_scale_data(start_dt, end_dt) |
| | | # 2. è·åæ¤åºæºæ°æ® |
| | | |
| | | # 2. è·åæ¤åºæºæ°æ® (åºç¨åç§») |
| | | df_extruder = extruder_service.get_extruder_data(start_dt, end_dt) |
| | | # 3. è·å主æµç¨æ§å¶æ°æ® |
| | | if df_extruder is not None and not df_extruder.empty: |
| | | df_extruder['time'] = df_extruder['time'] + offset_delta |
| | | |
| | | # 3. è·å主æµç¨æ§å¶æ°æ® (åºç¨åç§») |
| | | df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt) |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | df_main_speed['time'] = df_main_speed['time'] + offset_delta |
| | | |
| | | df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt) |
| | | if df_temp is not None and not df_temp.empty: |
| | | df_temp['time'] = df_temp['time'] + offset_delta |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_sorting is not None and not df_sorting.empty, |
| | | df_extruder is not None and not df_extruder.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty |
| | | df_temp is not None and not df_temp.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | |
| | | |
| | | # æ·»å æ¤åºæºç±³é |
| | | if df_extruder is not None and not df_extruder.empty: |
| | | fig.add_trace(go.Scatter( |
| | | x=df_extruder['time'], |
| | | y=df_extruder['metered_weight'], |
| | | name='æ¤åºæºç±³é (g/m)', |
| | | mode='lines', |
| | | line=dict(color='green', width=1.5), |
| | | yaxis='y2' |
| | | )) |
| | | # fig.add_trace(go.Scatter( |
| | | # x=df_extruder['time'], |
| | | # y=df_extruder['metered_weight'], |
| | | # name='æ¤åºæºç±³é (Kg/m)', |
| | | # mode='lines', |
| | | # line=dict(color='green', width=1.5), |
| | | # yaxis='y2' |
| | | # )) |
| | | # æ·»å æ¤åºæºå®é
转é |
| | | fig.add_trace(go.Scatter( |
| | | x=df_extruder['time'], |
| | |
| | | line=dict(color='red', width=1.5), |
| | | yaxis='y3' # å
±ç¨é度轴 |
| | | )) |
| | | |
| | | # æ·»å è£åè®¡æ° |
| | | if 'cutting_count' in df_main_speed.columns: |
| | | fig.add_trace(go.Scatter( |
| | | x=df_main_speed['time'], |
| | | y=df_main_speed['cutting_count'], |
| | | name='è£å计æ°', |
| | | mode='lines', |
| | | line=dict(color='purple', width=1.5), |
| | | yaxis='y5' |
| | | )) |
| | | |
| | | # æ·»å æ¸©åº¦è®¾å®å¼ |
| | | if df_temp is not None and not df_temp.empty: |
| | | temp_fields = { |
| | | 'nakata_extruder_screw_set_temp': 'èºæè®¾å® (°C)', |
| | | 'nakata_extruder_rear_barrel_set_temp': 'åæºçè®¾å® (°C)', |
| | | 'nakata_extruder_front_barrel_set_temp': 'åæºçè®¾å® (°C)', |
| | | 'nakata_extruder_head_set_temp': 'æºå¤´è®¾å® (°C)' |
| | | 'nakata_extruder_screw_display_temp': 'èºææ¾ç¤º (°C)', |
| | | 'nakata_extruder_rear_barrel_display_temp': 'åæºçæ¾ç¤º (°C)', |
| | | 'nakata_extruder_front_barrel_display_temp': 'åæºçæ¾ç¤º (°C)', |
| | | 'nakata_extruder_head_display_temp': 'æºå¤´æ¾ç¤º (°C)' |
| | | |
| | | } |
| | | colors = ['#FF4B4B', '#FF8C00', '#FFD700', '#DA70D6'] |
| | | for i, (field, label) in enumerate(temp_fields.items()): |
| | |
| | | |
| | | # 设置å¤åæ è½´å¸å± |
| | | fig.update_layout( |
| | | title='å¤ç»´ç»¼åè¶å¿åæ', |
| | | title='æ¡é综åè¶å¿åæ', |
| | | xaxis=dict( |
| | | title='æ¶é´', |
| | | rangeslider=dict(visible=True), |
| | |
| | | tickfont=dict(color='blue') |
| | | ), |
| | | yaxis2=dict( |
| | | title='ç±³é (g/m)', |
| | | title='ç±³é (Kg/m)', |
| | | title_font=dict(color='green'), |
| | | tickfont=dict(color='green'), |
| | | overlaying='y', |
| | |
| | | anchor='free', |
| | | position=0.15 |
| | | ), |
| | | yaxis5=dict( |
| | | title='è£å计æ°', |
| | | title_font=dict(color='purple'), |
| | | tickfont=dict(color='purple'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.7 |
| | | ), |
| | | legend=dict( |
| | | orientation="h", |
| | | yanchor="bottom", |
| | |
| | | st.plotly_chart(fig, width='stretch', config={'scrollZoom': True}) |
| | | |
| | | # æ°æ®æè¦ |
| | | st.subheader("ð æ°æ®æè¦") |
| | | summary_cols = st.columns(4) |
| | | # st.subheader("ð æ°æ®æè¦") |
| | | # summary_cols = st.columns(4) |
| | | |
| | | with summary_cols[0]: |
| | | if df_sorting is not None and not df_sorting.empty: |
| | | st.metric("å¹³åéé", f"{df_sorting['weight'].mean():.2f} kg") |
| | | # with summary_cols[0]: |
| | | # if df_sorting is not None and not df_sorting.empty: |
| | | # st.metric("å¹³åéé", f"{df_sorting['weight'].mean():.2f} kg") |
| | | |
| | | with summary_cols[1]: |
| | | if df_extruder is not None and not df_extruder.empty: |
| | | st.metric("å¹³åç±³é", f"{df_extruder['metered_weight'].mean():.2f} g/m") |
| | | # with summary_cols[1]: |
| | | # if df_extruder is not None and not df_extruder.empty: |
| | | # st.metric("å¹³åç±³é", f"{df_extruder['metered_weight'].mean():.2f} Kg/m") |
| | | |
| | | with summary_cols[2]: |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | st.metric("å¹³å主é", f"{df_main_speed['process_main_speed'].mean():.2f} M/Min") |
| | | # with summary_cols[2]: |
| | | # if df_main_speed is not None and not df_main_speed.empty: |
| | | # st.metric("å¹³å主é", f"{df_main_speed['process_main_speed'].mean():.2f} M/Min") |
| | | |
| | | with summary_cols[3]: |
| | | if df_temp is not None and not df_temp.empty: |
| | | st.metric("å¹³åèºææ¸©æ§", f"{df_temp['nakata_extruder_screw_set_temp'].mean():.1f} °C") |
| | | # with summary_cols[3]: |
| | | # if df_temp is not None and not df_temp.empty: |
| | | # st.metric("å¹³åèºææ¸©æ§", f"{df_temp['nakata_extruder_screw_set_temp'].mean():.1f} °C") |
| | |
| | | fig_speed = px.line(df_speed, x='time', y='process_main_speed', |
| | | title="æµç¨ä¸»é度 (M/Min)", |
| | | labels={'time': 'æ¶é´', 'process_main_speed': '主é度 (M/Min)'}) |
| | | fig_speed.update_layout(xaxis=dict(rangeslider=dict(visible=True), type='date')) |
| | | st.plotly_chart(fig_speed, width='stretch', config={'scrollZoom': True}) |
| | | fig_speed.update_layout( |
| | | xaxis=dict(rangeslider=dict(visible=True), type='date'), |
| | | yaxis=dict(fixedrange=False), |
| | | hovermode='x unified', |
| | | dragmode='zoom', |
| | | ) |
| | | st.plotly_chart(fig_speed, width='stretch', config={ |
| | | 'scrollZoom': True, |
| | | 'modeBarButtonsToAdd': ['zoom2d', 'zoomIn2d', 'zoomOut2d'], |
| | | 'doubleClick': 'reset', |
| | | 'displayModeBar': True, |
| | | 'toImageButtonOptions': {'format': 'png'} |
| | | }) |
| | | else: |
| | | st.info("该æ¶é´æ®µå
æ 主éåº¦æ°æ®") |
| | | |
| | |
| | | title="çµæºçº¿é (M/Min)", |
| | | xaxis_title="æ¶é´", |
| | | yaxis_title="线é (M/Min)", |
| | | xaxis=dict(rangeslider=dict(visible=True), type='date') |
| | | xaxis=dict(rangeslider=dict(visible=True), type='date'), |
| | | yaxis=dict(fixedrange=False), |
| | | hovermode='x unified', |
| | | dragmode='zoom' |
| | | ) |
| | | st.plotly_chart(fig_motor, width='stretch', config={'scrollZoom': True}) |
| | | st.plotly_chart(fig_motor, width='stretch', config={ |
| | | 'scrollZoom': True, |
| | | 'modeBarButtonsToAdd': ['zoom2d', 'zoomIn2d', 'zoomOut2d'], |
| | | 'doubleClick': 'reset', |
| | | 'displayModeBar': True |
| | | }) |
| | | else: |
| | | st.info("该æ¶é´æ®µå
æ çµæºçæ§æ°æ®") |
| | | |
| | |
| | | title="ä¸ç°æ¤åºæºæ¸©åº¦ (°C)", |
| | | xaxis_title="æ¶é´", |
| | | yaxis_title="温度 (°C)", |
| | | xaxis=dict(rangeslider=dict(visible=True), type='date') |
| | | xaxis=dict(rangeslider=dict(visible=True), type='date'), |
| | | yaxis=dict(fixedrange=False), |
| | | hovermode='x unified', |
| | | dragmode='zoom' |
| | | ) |
| | | st.plotly_chart(fig_temp, width='stretch', config={'scrollZoom': True}) |
| | | st.plotly_chart(fig_temp, width='stretch', config={ |
| | | 'scrollZoom': True, |
| | | 'modeBarButtonsToAdd': ['zoom2d', 'zoomIn2d', 'zoomOut2d'], |
| | | 'doubleClick': 'reset', |
| | | 'displayModeBar': True |
| | | }) |
| | | else: |
| | | st.info("该æ¶é´æ®µå
æ æ¸©åº¦æ§å¶æ°æ®") |
| 对比新文件 |
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.main_process_service import MainProcessService |
| | | from sklearn.preprocessing import StandardScaler, MinMaxScaler |
| | | from sklearn.model_selection import train_test_split |
| | | from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error |
| | | from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor |
| | | from sklearn.svm import SVR |
| | | from sklearn.neural_network import MLPRegressor |
| | | |
| | | # å°è¯å¯¼å
¥æ·±åº¦å¦ä¹ åº |
| | | use_deep_learning = False |
| | | try: |
| | | |
| | | from tensorflow.keras.models import Sequential |
| | | from tensorflow.keras.layers import LSTM, GRU, Dense, Dropout, Bidirectional |
| | | from tensorflow.keras.optimizers import Adam |
| | | use_deep_learning = True |
| | | except ImportError: |
| | | st.warning("æªæ£æµå°TensorFlow/Kerasï¼æ·±åº¦å¦ä¹ 模åå°ä¸å¯ç¨ã请å®è£
tensorflow以使ç¨LSTM/GRU模åã") |
| | | |
| | | |
| | | def show_metered_weight_advanced(): |
| | | # åå§åæå¡ |
| | | extruder_service = ExtruderService() |
| | | main_process_service = MainProcessService() |
| | | |
| | | # 页颿 é¢ |
| | | st.title("ç±³éé«çº§é¢æµåæ") |
| | | |
| | | # åå§åä¼è¯ç¶æ |
| | | if 'ma_start_date' not in st.session_state: |
| | | st.session_state['ma_start_date'] = datetime.now().date() - timedelta(days=7) |
| | | if 'ma_end_date' not in st.session_state: |
| | | st.session_state['ma_end_date'] = datetime.now().date() |
| | | if 'ma_quick_select' not in st.session_state: |
| | | st.session_state['ma_quick_select'] = "æè¿7天" |
| | | if 'ma_model_type' not in st.session_state: |
| | | st.session_state['ma_model_type'] = 'RandomForest' |
| | | if 'ma_sequence_length' not in st.session_state: |
| | | st.session_state['ma_sequence_length'] = 10 |
| | | |
| | | # é»è®¤ç¹å¾å表ï¼ä¸åå
è®¸ç¨æ·éæ©ï¼ |
| | | default_features = ['èºæè½¬é', 'æºå¤´åå', 'æµç¨ä¸»é', 'èºææ¸©åº¦', |
| | | 'åæºçæ¸©åº¦', 'åæºçæ¸©åº¦', 'æºå¤´æ¸©åº¦'] |
| | | |
| | | # å®ä¹åè°å½æ° |
def update_dates(qs):
    """Apply a quick-select preset: record the choice, move the date range, and invalidate cached query results."""
    st.session_state['ma_quick_select'] = qs
    today = datetime.now().date()
    # Each preset label maps to how many days back the range starts ("today" is a zero-day span).
    span_days = {"ä»å¤©": 0, "æè¿3天": 3, "æè¿7天": 7, "æè¿30天": 30}
    days = span_days.get(qs)
    if days is not None:
        st.session_state['ma_start_date'] = today - timedelta(days=days)
        st.session_state['ma_end_date'] = today
    # The cached frames and the analysis flag belong to the previous range — discard them.
    stale_keys = ('cached_extruder_full', 'cached_main_speed', 'cached_temp',
                  'last_query_start', 'last_query_end', 'analysis_completed')
    for stale in stale_keys:
        if stale in st.session_state:
            del st.session_state[stale]
| | | |
def on_date_change():
    """Handle a manual date edit: switch the preset to custom and invalidate cached query results."""
    st.session_state['ma_quick_select'] = "èªå®ä¹"
    # Cached frames were fetched for the old range — drop them so the next query refetches.
    for stale in ('cached_extruder_full', 'cached_main_speed', 'cached_temp',
                  'last_query_start', 'last_query_end', 'analysis_completed'):
        if stale in st.session_state:
            del st.session_state[stale]
| | | |
| | | # æ¥è¯¢æ¡ä»¶åºå |
| | | with st.expander("ð æ¥è¯¢é
ç½®", expanded=True): |
| | | # æ·»å èªå®ä¹ CSS å®ç°ååºå¼æ¢è¡ |
| | | st.markdown(""" |
| | | <style> |
| | | /* 强å¶å容卿¢è¡ */ |
| | | [data-testid="stExpander"] [data-testid="column"] { |
| | | flex: 1 1 120px !important; |
| | | min-width: 120px !important; |
| | | } |
| | | /* éå¯¹æ¥æè¾å
¥æ¡åç¨å¾®å 宽ä¸ç¹ */ |
| | | @media (min-width: 768px) { |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(6), |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(7) { |
| | | flex: 2 1 180px !important; |
| | | min-width: 180px !important; |
| | | } |
| | | } |
| | | </style> |
| | | """, unsafe_allow_html=True) |
| | | |
| | | # å建å¸å± |
| | | cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1]) |
| | | |
| | | options = ["ä»å¤©", "æè¿3天", "æè¿7天", "æè¿30天", "èªå®ä¹"] |
| | | for i, option in enumerate(options): |
| | | with cols[i]: |
| | | # æ ¹æ®å½åéæ©ç¶æå³å®æé®ç±»å |
| | | button_type = "primary" if st.session_state['ma_quick_select'] == option else "secondary" |
| | | if st.button(option, key=f"btn_ma_{option}", width='stretch', type=button_type): |
| | | update_dates(option) |
| | | st.rerun() |
| | | |
| | | with cols[5]: |
| | | start_date = st.date_input( |
| | | "å¼å§æ¥æ", |
| | | label_visibility="collapsed", |
| | | key="ma_start_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[6]: |
| | | end_date = st.date_input( |
| | | "ç»ææ¥æ", |
| | | label_visibility="collapsed", |
| | | key="ma_end_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[7]: |
| | | query_button = st.button("ð å¼å§åæ", key="ma_query", width='stretch') |
| | | |
| | | # 模åé
ç½® |
| | | st.markdown("---") |
| | | st.write("ð¤ **模åé
ç½®**") |
| | | model_cols = st.columns(2) |
| | | |
| | | with model_cols[0]: |
| | | # 模åç±»åéæ© |
| | | model_options = ['RandomForest', 'GradientBoosting', 'SVR', 'MLP'] |
| | | if use_deep_learning: |
| | | model_options.extend(['LSTM', 'GRU', 'BiLSTM']) |
| | | |
| | | model_type = st.selectbox( |
| | | "模åç±»å", |
| | | options=model_options, |
| | | key="ma_model_type", |
| | | help="éæ©ç¨äºé¢æµç模åç±»å" |
| | | ) |
| | | |
| | | with model_cols[1]: |
| | | # åºåé¿åº¦ï¼ä»
éç¨äºæ·±åº¦å¦ä¹ 模åï¼ |
| | | if model_type in ['LSTM', 'GRU', 'BiLSTM']: |
| | | sequence_length = st.slider( |
| | | "åºåé¿åº¦", |
| | | min_value=5, |
| | | max_value=30, |
| | | value=st.session_state['ma_sequence_length'], |
| | | step=1, |
| | | help="ç¨äºæ·±åº¦å¦ä¹ 模åçæ¶é´åºåé¿åº¦", |
| | | key="ma_sequence_length" |
| | | ) |
| | | else: |
| | | st.session_state['ma_sequence_length'] = 10 |
| | | st.write("åºåé¿åº¦: 10 (é»è®¤ï¼ä»
éç¨äºæ·±åº¦å¦ä¹ 模å)") |
| | | |
| | | # 转æ¢ä¸ºdatetime对象 |
| | | start_dt = datetime.combine(start_date, datetime.min.time()) |
| | | end_dt = datetime.combine(end_date, datetime.max.time()) |
| | | |
| | | # æ¥è¯¢å¤ç |
| | | if query_button: |
| | | with st.spinner("æ£å¨è·åæ°æ®..."): |
| | | # 1. è·å宿´çæ¤åºæºæ°æ® |
| | | df_extruder_full = extruder_service.get_extruder_data(start_dt, end_dt) |
| | | |
| | | # 2. è·å主æµç¨æ§å¶æ°æ® |
| | | df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt) |
| | | |
| | | df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt) |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_extruder_full is not None and not df_extruder_full.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | | st.warning("æéæ¶é´æ®µå
æªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã") |
| | | # æ¸
é¤ç¼åæ°æ® |
| | | for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp', 'last_query_start', 'last_query_end']: |
| | | if key in st.session_state: |
| | | del st.session_state[key] |
| | | return |
| | | |
| | | # ç¼åæ°æ®å°ä¼è¯ç¶æ |
| | | st.session_state['cached_extruder_full'] = df_extruder_full |
| | | st.session_state['cached_main_speed'] = df_main_speed |
| | | st.session_state['cached_temp'] = df_temp |
| | | st.session_state['last_query_start'] = start_dt |
| | | st.session_state['last_query_end'] = end_dt |
| | | # 设置åæå®ææ å¿ |
| | | st.session_state['analysis_completed'] = True |
| | | |
| | | # æ°æ®å¤çååæ |
| | | if all(key in st.session_state for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp']) and st.session_state.get('analysis_completed', False): |
| | | with st.spinner("æ£å¨åææ°æ®..."): |
| | | # è·åç¼åæ°æ® |
| | | df_extruder_full = st.session_state['cached_extruder_full'] |
| | | df_main_speed = st.session_state['cached_main_speed'] |
| | | df_temp = st.session_state['cached_temp'] |
| | | |
| | | |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_extruder_full is not None and not df_extruder_full.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | | st.warning("æéæ¶é´æ®µå
æªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã") |
| | | return |
| | | |
| | | # æ°æ®æ´åä¸é¢å¤ç |
def integrate_data(df_extruder_full, df_main_speed, df_temp):
    """Align extruder, main-process and temperature frames on nearest timestamp (±1 min) into one analysis frame.

    Returns None when the extruder frame is missing or empty — it is the
    backbone every other source is joined onto.
    """
    if df_extruder_full is None or df_extruder_full.empty:
        return None

    merged = df_extruder_full[['time', 'metered_weight', 'screw_speed_actual', 'head_pressure']].copy()
    join_tol = pd.Timedelta('1min')

    def _nearest_join(base, other):
        # merge_asof requires both sides sorted by the join key.
        return pd.merge_asof(
            base.sort_values('time'),
            other.sort_values('time'),
            on='time',
            direction='nearest',
            tolerance=join_tol
        )

    # Attach the main-process speed, when available.
    if df_main_speed is not None and not df_main_speed.empty:
        merged = _nearest_join(merged, df_main_speed[['time', 'process_main_speed']])

    # Attach the four displayed extruder temperatures, when available.
    if df_temp is not None and not df_temp.empty:
        temp_subset = df_temp[['time',
                               'nakata_extruder_screw_display_temp',
                               'nakata_extruder_rear_barrel_display_temp',
                               'nakata_extruder_front_barrel_display_temp',
                               'nakata_extruder_head_display_temp']].copy()
        merged = _nearest_join(merged, temp_subset)

    # Human-readable column names — must match the default_features labels used elsewhere.
    merged = merged.rename(columns={
        'screw_speed_actual': 'èºæè½¬é',
        'head_pressure': 'æºå¤´åå',
        'process_main_speed': 'æµç¨ä¸»é',
        'nakata_extruder_screw_display_temp': 'èºææ¸©åº¦',
        'nakata_extruder_rear_barrel_display_temp': 'åæºçæ¸©åº¦',
        'nakata_extruder_front_barrel_display_temp': 'åæºçæ¸©åº¦',
        'nakata_extruder_head_display_temp': 'æºå¤´æ¸©åº¦'
    })

    # Rows without the prediction target are useless downstream.
    return merged.dropna(subset=['metered_weight'])
| | | |
| | | # æ§è¡æ°æ®æ´å |
| | | df_analysis = integrate_data(df_extruder_full, df_main_speed, df_temp) |
| | | |
| | | if df_analysis is None or df_analysis.empty: |
| | | st.warning("æ°æ®æ´å失败ï¼è¯·æ£æ¥æ°æ®è´¨éæè°æ´æ¶é´èå´ã") |
| | | return |
| | | |
| | | # éå½åç±³éå |
| | | df_analysis.rename(columns={'metered_weight': 'ç±³é'}, inplace=True) |
| | | |
| | | # --- åå§æ°æ®è¶å¿å¾ --- |
| | | st.subheader("ð åå§æ°æ®è¶å¿å¾") |
| | | |
| | | # å建è¶å¿å¾ |
| | | fig_trend = go.Figure() |
| | | |
| | | # æ·»å ç±³éæ°æ® |
| | | if df_extruder_full is not None and not df_extruder_full.empty: |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_full['time'], |
| | | y=df_extruder_full['metered_weight'], |
| | | name='ç±³é (Kg/m)', |
| | | mode='lines', |
| | | line=dict(color='blue', width=2) |
| | | )) |
| | | |
| | | # æ·»å èºæè½¬é |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_full['time'], |
| | | y=df_extruder_full['screw_speed_actual'], |
| | | name='èºæè½¬é (RPM)', |
| | | mode='lines', |
| | | line=dict(color='green', width=1.5), |
| | | yaxis='y2' |
| | | )) |
| | | |
| | | # æ·»å æºå¤´åå |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_full['time'], |
| | | y=df_extruder_full['head_pressure'], |
| | | name='æºå¤´åå', |
| | | mode='lines', |
| | | line=dict(color='orange', width=1.5), |
| | | yaxis='y3' |
| | | )) |
| | | |
| | | # æ·»å æµç¨ä¸»é |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_main_speed['time'], |
| | | y=df_main_speed['process_main_speed'], |
| | | name='æµç¨ä¸»é (M/Min)', |
| | | mode='lines', |
| | | line=dict(color='red', width=1.5), |
| | | yaxis='y4' |
| | | )) |
| | | |
| | | # æ·»å æ¸©åº¦æ°æ® |
| | | if df_temp is not None and not df_temp.empty: |
| | | # èºææ¸©åº¦ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_temp['time'], |
| | | y=df_temp['nakata_extruder_screw_display_temp'], |
| | | name='èºææ¸©åº¦ (°C)', |
| | | mode='lines', |
| | | line=dict(color='purple', width=1), |
| | | yaxis='y5' |
| | | )) |
| | | |
| | | # é
ç½®è¶å¿å¾å¸å± |
| | | fig_trend.update_layout( |
| | | title='åå§æ°æ®è¶å¿', |
| | | xaxis=dict( |
| | | title='æ¶é´', |
| | | rangeslider=dict(visible=True), |
| | | type='date' |
| | | ), |
| | | yaxis=dict( |
| | | title='ç±³é (Kg/m)', |
| | | title_font=dict(color='blue'), |
| | | tickfont=dict(color='blue') |
| | | ), |
| | | yaxis2=dict( |
| | | title='èºæè½¬é (RPM)', |
| | | title_font=dict(color='green'), |
| | | tickfont=dict(color='green'), |
| | | overlaying='y', |
| | | side='right' |
| | | ), |
| | | yaxis3=dict( |
| | | title='æºå¤´åå', |
| | | title_font=dict(color='orange'), |
| | | tickfont=dict(color='orange'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.85 |
| | | ), |
| | | yaxis4=dict( |
| | | title='æµç¨ä¸»é (M/Min)', |
| | | title_font=dict(color='red'), |
| | | tickfont=dict(color='red'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.75 |
| | | ), |
| | | yaxis5=dict( |
| | | title='温度 (°C)', |
| | | title_font=dict(color='purple'), |
| | | tickfont=dict(color='purple'), |
| | | overlaying='y', |
| | | side='left', |
| | | anchor='free', |
| | | position=0.15 |
| | | ), |
| | | legend=dict( |
| | | orientation="h", |
| | | yanchor="bottom", |
| | | y=1.02, |
| | | xanchor="right", |
| | | x=1 |
| | | ), |
| | | height=600, |
| | | margin=dict(l=100, r=200, t=100, b=100), |
| | | hovermode='x unified' |
| | | ) |
| | | |
| | | # æ¾ç¤ºè¶å¿å¾ |
| | | st.plotly_chart(fig_trend, width='stretch', config={'scrollZoom': True}) |
| | | |
| | | # --- é«çº§é¢æµåæ --- |
| | | st.subheader("ð é«çº§é¢æµåæ") |
| | | |
| | | # æ£æ¥ææé»è®¤ç¹å¾æ¯å¦å¨æ°æ®ä¸ |
| | | missing_features = [f for f in default_features if f not in df_analysis.columns] |
| | | if missing_features: |
| | | st.warning(f"æ°æ®ä¸ç¼ºå°ä»¥ä¸ç¹å¾: {', '.join(missing_features)}") |
| | | else: |
| | | try: |
| | | # å夿°æ® |
| | | # é¦å
ç¡®ä¿df_analysis䏿²¡æNaNå¼ |
| | | df_analysis_clean = df_analysis.dropna(subset=default_features + ['ç±³é']) |
| | | |
| | | # æ£æ¥æ¸
çåçæ°æ®é |
| | | if len(df_analysis_clean) < 30: |
| | | st.warning("æ°æ®éä¸è¶³ï¼æ æ³è¿è¡ææç颿µåæ") |
| | | else: |
| | | # å建ä¸ä¸ªæ°çDataFrameæ¥å卿æç¹å¾åç®æ åé |
| | | all_features = df_analysis_clean[default_features + ['ç±³é']].copy() |
| | | |
| | | # æ·»å æ¶é´ç¸å
³ç¹å¾ |
| | | if 'time' in df_analysis_clean.columns: |
| | | all_features['hour'] = df_analysis_clean['time'].dt.hour |
| | | all_features['minute'] = df_analysis_clean['time'].dt.minute |
| | | all_features['second'] = df_analysis_clean['time'].dt.second |
| | | all_features['time_of_day'] = all_features['hour'] * 3600 + all_features['minute'] * 60 + all_features['second'] |
| | | else: |
| | | all_features['hour'] = 0 |
| | | all_features['minute'] = 0 |
| | | all_features['second'] = 0 |
| | | all_features['time_of_day'] = 0 |
| | | |
| | | # æ·»å æ»åç¹å¾ |
| | | for feature in default_features: |
| | | for lag in [1, 2, 3]: |
| | | all_features[f'{feature}_lag{lag}'] = all_features[feature].shift(lag) |
| | | all_features[f'{feature}_diff{lag}'] = all_features[feature].diff(lag) |
| | | |
| | | # æ·»å æ»å¨ç»è®¡ç¹å¾ |
| | | for feature in default_features: |
| | | all_features[f'{feature}_rolling_mean'] = all_features[feature].rolling(window=5).mean() |
| | | all_features[f'{feature}_rolling_std'] = all_features[feature].rolling(window=5).std() |
| | | all_features[f'{feature}_rolling_min'] = all_features[feature].rolling(window=5).min() |
| | | all_features[f'{feature}_rolling_max'] = all_features[feature].rolling(window=5).max() |
| | | |
| | | # æ¸
çææNaNå¼ |
| | | all_features_clean = all_features.dropna() |
| | | |
| | | # æ£æ¥æ¸
çåçæ°æ®é |
| | | if len(all_features_clean) < 20: |
| | | st.warning("ç¹å¾å·¥ç¨åæ°æ®éä¸è¶³ï¼æ æ³è¿è¡ææç颿µåæ") |
| | | else: |
| | | # å离ç¹å¾åç®æ åé |
| | | feature_columns = [col for col in all_features_clean.columns if col != 'ç±³é'] |
| | | X_final = all_features_clean[feature_columns] |
| | | y_final = all_features_clean['ç±³é'] |
| | | |
| | | # æ£æ¥æç»æ°æ®é |
| | | if len(X_final) >= 20: |
| | | # åå²è®ç»éåæµè¯é |
| | | X_train, X_test, y_train, y_test = train_test_split(X_final, y_final, test_size=0.2, random_state=42) |
| | | |
| | | # æ°æ®æ åå |
| | | scaler_X = StandardScaler() |
| | | scaler_y = MinMaxScaler() |
| | | |
| | | X_train_scaled = scaler_X.fit_transform(X_train) |
| | | X_test_scaled = scaler_X.transform(X_test) |
| | | y_train_scaled = scaler_y.fit_transform(y_train.values.reshape(-1, 1)).ravel() |
| | | y_test_scaled = scaler_y.transform(y_test.values.reshape(-1, 1)).ravel() |
| | | |
| | | # 模åè®ç» |
| | | model = None |
| | | y_pred = None |
| | | |
| | | if model_type == 'RandomForest': |
| | | # éæºæ£®æåå½ |
| | | model = RandomForestRegressor(n_estimators=100, random_state=42) |
| | | model.fit(X_train, y_train) |
| | | y_pred = model.predict(X_test) |
| | | |
| | | elif model_type == 'GradientBoosting': |
| | | # 梯度æååå½ |
| | | model = GradientBoostingRegressor(n_estimators=100, random_state=42) |
| | | model.fit(X_train, y_train) |
| | | y_pred = model.predict(X_test) |
| | | |
| | | elif model_type == 'SVR': |
| | | # æ¯æåéåå½ |
| | | model = SVR(kernel='rbf', C=1.0, gamma='scale') |
| | | model.fit(X_train_scaled, y_train_scaled) |
| | | y_pred_scaled = model.predict(X_test_scaled) |
| | | y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel() |
| | | |
| | | elif model_type == 'MLP': |
| | | # å¤å±æç¥å¨åå½ |
| | | model = MLPRegressor(hidden_layer_sizes=(100, 50), max_iter=500, random_state=42) |
| | | model.fit(X_train_scaled, y_train_scaled) |
| | | y_pred_scaled = model.predict(X_test_scaled) |
| | | y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel() |
| | | |
| | | elif use_deep_learning and model_type in ['LSTM', 'GRU', 'BiLSTM']: |
| | | # å夿¶é´åºåæ°æ® |
| | | sequence_length = st.session_state['ma_sequence_length'] |
| | | |
def create_sequences(X, y, seq_length):
    """Build overlapping sliding windows of X with the value of y just after each window as the target.

    Returns a pair of arrays shaped (n_windows, seq_length, n_features) and
    (n_windows,); both are empty when there are not enough samples to form
    even one window plus its target.
    """
    # X and y may differ in length — only the common prefix is usable.
    usable = min(len(X), len(y))
    if usable <= seq_length:
        return np.array([]), np.array([])
    X_window_src = X[:usable]
    y_window_src = y[:usable]
    n_windows = usable - seq_length
    windows = [X_window_src[i:i + seq_length] for i in range(n_windows)]
    targets = [y_window_src[i + seq_length] for i in range(n_windows)]
    return np.array(windows), np.array(targets)
| | | |
| | | # 为深度å¦ä¹ 模åå建åºå |
| | | X_train_seq, y_train_seq = create_sequences(X_train_scaled, y_train_scaled, sequence_length) |
| | | X_test_seq, y_test_seq = create_sequences(X_test_scaled, y_test_scaled, sequence_length) |
| | | |
| | | # ç¡®ä¿åºåæ°æ®é¿åº¦ä¸è´ |
| | | if len(X_train_seq) != len(y_train_seq): |
| | | min_len_train = min(len(X_train_seq), len(y_train_seq)) |
| | | X_train_seq = X_train_seq[:min_len_train] |
| | | y_train_seq = y_train_seq[:min_len_train] |
| | | |
| | | if len(X_test_seq) != len(y_test_seq): |
| | | min_len_test = min(len(X_test_seq), len(y_test_seq)) |
| | | X_test_seq = X_test_seq[:min_len_test] |
| | | y_test_seq = y_test_seq[:min_len_test] |
| | | |
| | | # æ£æ¥å建çåºåæ¯å¦ä¸ºç©º |
| | | if len(X_train_seq) == 0 or len(y_train_seq) == 0: |
| | | st.warning(f"æ°æ®éä¸è¶³ï¼æ æ³å建ææçLSTMåºåãéè¦è³å° {sequence_length + 1} ä¸ªæ ·æ¬ï¼å½ååªæ {min(len(X_train_scaled), len(y_train_scaled))} ä¸ªæ ·æ¬ã") |
| | | # 使ç¨éæºæ£®æä½ä¸ºå¤é模å |
| | | model = RandomForestRegressor(n_estimators=100, random_state=42) |
| | | model.fit(X_train, y_train) |
| | | y_pred = model.predict(X_test) |
| | | else: |
| | | # æå»ºæ·±åº¦å¦ä¹ 模å |
| | | input_shape = (sequence_length, X_train_scaled.shape[1]) |
| | | |
| | | deep_model = Sequential() |
| | | |
| | | if model_type == 'LSTM': |
| | | deep_model.add(LSTM(64, return_sequences=True, input_shape=input_shape)) |
| | | deep_model.add(LSTM(32, return_sequences=False)) |
| | | elif model_type == 'GRU': |
| | | deep_model.add(GRU(64, return_sequences=True, input_shape=input_shape)) |
| | | deep_model.add(GRU(32, return_sequences=False)) |
| | | elif model_type == 'BiLSTM': |
| | | deep_model.add(Bidirectional(LSTM(64, return_sequences=True), input_shape=input_shape)) |
| | | deep_model.add(Bidirectional(LSTM(32, return_sequences=False))) |
| | | |
| | | deep_model.add(Dense(32, activation='relu')) |
| | | deep_model.add(Dropout(0.2)) |
| | | deep_model.add(Dense(1)) |
| | | |
| | | # ç¼è¯æ¨¡å |
| | | deep_model.compile(optimizer=Adam(learning_rate=0.001), loss='mean_squared_error') |
| | | |
| | | # è®ç»æ¨¡å |
| | | # ç¡®ä¿X_train_seqåy_train_seqé¿åº¦ä¸è´ |
| | | min_len_train = min(len(X_train_seq), len(y_train_seq)) |
| | | min_len_test = min(len(X_test_seq), len(y_test_seq)) |
| | | |
| | | if min_len_train > 0 and min_len_test > 0: |
| | | X_train_seq_trimmed = X_train_seq[:min_len_train] |
| | | y_train_seq_trimmed = y_train_seq[:min_len_train] |
| | | X_test_seq_trimmed = X_test_seq[:min_len_test] |
| | | y_test_seq_trimmed = y_test_seq[:min_len_test] |
| | | |
| | | history = deep_model.fit( |
| | | X_train_seq_trimmed, y_train_seq_trimmed, |
| | | validation_data=(X_test_seq_trimmed, y_test_seq_trimmed), |
| | | epochs=50, |
| | | batch_size=32, |
| | | verbose=0 |
| | | ) |
| | | else: |
| | | st.warning("æ°æ®éä¸è¶³ï¼æ æ³è®ç»æ·±åº¦å¦ä¹ 模å") |
| | | # 使ç¨éæºæ£®æä½ä¸ºå¤é模å |
| | | model = RandomForestRegressor(n_estimators=100, random_state=42) |
| | | model.fit(X_train, y_train) |
| | | y_pred = model.predict(X_test) |
| | | # ç¡®ä¿y_teståy_predé¿åº¦ä¸è´ |
| | | min_len = min(len(y_test), len(y_pred)) |
| | | if min_len > 0: |
| | | y_test_trimmed = y_test[:min_len] |
| | | y_pred_trimmed = y_pred[:min_len] |
| | | else: |
| | | y_test_trimmed = y_test |
| | | y_pred_trimmed = y_pred |
| | | |
| | | # 颿µ |
| | | if 'X_test_seq_trimmed' in locals(): |
| | | y_pred_scaled = deep_model.predict(X_test_seq_trimmed).ravel() |
| | | else: |
| | | y_pred_scaled = deep_model.predict(X_test_seq).ravel() |
| | | y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel() |
| | | |
| | | # ä¿å模å |
| | | model = deep_model |
| | | |
| | | # 计ç®è¯ä¼°ææ |
| | | # ç¡®ä¿y_teståy_predé¿åº¦ä¸è´ |
| | | min_len = min(len(y_test), len(y_pred)) |
| | | if min_len > 0: |
| | | y_test_trimmed = y_test[:min_len] |
| | | y_pred_trimmed = y_pred[:min_len] |
| | | r2 = r2_score(y_test_trimmed, y_pred_trimmed) |
| | | mse = mean_squared_error(y_test_trimmed, y_pred_trimmed) |
| | | mae = mean_absolute_error(y_test_trimmed, y_pred_trimmed) |
| | | rmse = np.sqrt(mse) |
| | | else: |
| | | r2 = 0 |
| | | mse = 0 |
| | | mae = 0 |
| | | rmse = 0 |
| | | |
| | | # æ¾ç¤ºæ¨¡åæ§è½ |
| | | metrics_cols = st.columns(2) |
| | | with metrics_cols[0]: |
| | | st.metric("R² å¾å", f"{r2:.4f}") |
| | | st.metric("åæ¹è¯¯å·® (MSE)", f"{mse:.6f}") |
| | | with metrics_cols[1]: |
| | | st.metric("å¹³åç»å¯¹è¯¯å·® (MAE)", f"{mae:.6f}") |
| | | st.metric("åæ¹æ ¹è¯¯å·® (RMSE)", f"{rmse:.6f}") |
| | | |
| | | # --- å®é
å¼ä¸é¢æµå¼å¯¹æ¯ --- |
| | | st.subheader("ð å®é
å¼ä¸é¢æµå¼å¯¹æ¯") |
| | | |
| | | # åå»ºå¯¹æ¯æ°æ® |
| | | compare_df = pd.DataFrame({ |
| | | 'å®é
å¼': y_test_trimmed, |
| | | '颿µå¼': y_pred_trimmed |
| | | }) |
| | | compare_df = compare_df.sort_index() |
| | | |
| | | # å建对æ¯å¾ |
| | | fig_compare = go.Figure() |
| | | fig_compare.add_trace(go.Scatter( |
| | | x=compare_df.index, |
| | | y=compare_df['å®é
å¼'], |
| | | name='å®é
å¼', |
| | | mode='lines+markers', |
| | | line=dict(color='blue', width=2) |
| | | )) |
| | | fig_compare.add_trace(go.Scatter( |
| | | x=compare_df.index, |
| | | y=compare_df['颿µå¼'], |
| | | name='颿µå¼', |
| | | mode='lines+markers', |
| | | line=dict(color='red', width=2, dash='dash') |
| | | )) |
| | | fig_compare.update_layout( |
| | | title=f'æµè¯é: å®é
ç±³é vs 颿µç±³é ({model_type})', |
| | | xaxis=dict(title='æ¶é´'), |
| | | yaxis=dict(title='ç±³é (Kg/m)'), |
| | | legend=dict(orientation='h', yanchor='bottom', y=1.02, xanchor='right', x=1), |
| | | height=400 |
| | | ) |
| | | st.plotly_chart(fig_compare, width='stretch') |
| | | |
| | | # --- æ®å·®åæ --- |
| | | st.subheader("ð æ®å·®åæ") |
| | | |
| | | # è®¡ç®æ®å·® |
| | | residuals = y_test_trimmed - y_pred_trimmed |
| | | |
| | | # å建æ®å·®å¾ |
| | | fig_residual = go.Figure() |
| | | fig_residual.add_trace(go.Scatter( |
| | | x=y_pred, |
| | | y=residuals, |
| | | mode='markers', |
| | | marker=dict(color='green', size=8, opacity=0.6) |
| | | )) |
| | | fig_residual.add_shape( |
| | | type="line", |
| | | x0=y_pred.min(), |
| | | y0=0, |
| | | x1=y_pred.max(), |
| | | y1=0, |
| | | line=dict(color="red", width=2, dash="dash") |
| | | ) |
| | | fig_residual.update_layout( |
| | | title='æ®å·®å¾', |
| | | xaxis=dict(title='颿µå¼'), |
| | | yaxis=dict(title='æ®å·®'), |
| | | height=400 |
| | | ) |
| | | st.plotly_chart(fig_residual, width='stretch') |
| | | |
| | | # --- ç¹å¾éè¦æ§ï¼å¦ææ¨¡åæ¯æï¼ --- |
| | | if model_type in ['RandomForest', 'GradientBoosting']: |
| | | st.subheader("âï¸ ç¹å¾éè¦æ§åæ") |
| | | |
| | | # 计ç®ç¹å¾éè¦æ§ |
| | | feature_importance = pd.DataFrame({ |
| | | 'ç¹å¾': X_train.columns, |
| | | 'éè¦æ§': model.feature_importances_ |
| | | }) |
| | | feature_importance = feature_importance.sort_values('éè¦æ§', ascending=False) |
| | | |
| | | # å建ç¹å¾éè¦æ§å¾ |
| | | fig_importance = px.bar( |
| | | feature_importance, |
| | | x='ç¹å¾', |
| | | y='éè¦æ§', |
| | | title='ç¹å¾éè¦æ§', |
| | | color='éè¦æ§', |
| | | color_continuous_scale='viridis' |
| | | ) |
| | | fig_importance.update_layout( |
| | | xaxis=dict(tickangle=-45), |
| | | height=400 |
| | | ) |
| | | st.plotly_chart(fig_importance, width='stretch') |
| | | |
| | | # --- 颿µåè½ --- |
| | | st.subheader("ð® ç±³é颿µ") |
| | | |
| | | # åå»ºé¢æµè¡¨åï¼ä½¿ç¨formå
è£
以鲿¢è¾å
¥æ¶è§¦åéæ°åæ |
| | | with st.form(key="prediction_form"): |
| | | st.write("è¾å
¥ç¹å¾å¼è¿è¡ç±³é颿µ:") |
| | | predict_cols = st.columns(2) |
| | | input_features = {} |
| | | |
| | | for i, feature in enumerate(default_features): |
| | | with predict_cols[i % 2]: |
| | | # è·åç¹å¾çç»è®¡ä¿¡æ¯ |
| | | min_val = df_analysis_clean[feature].min() |
| | | max_val = df_analysis_clean[feature].max() |
| | | mean_val = df_analysis_clean[feature].mean() |
| | | |
| | | input_features[feature] = st.number_input( |
| | | f"{feature}", |
| | | key=f"ma_pred_{feature}", |
| | | value=float(mean_val), |
| | | min_value=float(min_val), |
| | | max_value=float(max_val), |
| | | step=0.1 |
| | | ) |
| | | |
| | | # 颿µæé® |
| | | predict_button = st.form_submit_button("颿µç±³é") |
| | | |
| | | if predict_button: |
| | | # åå¤é¢æµæ°æ® |
| | | input_df = pd.DataFrame([input_features]) |
| | | |
| | | # æ·»å æ¶é´ç¹å¾ï¼ä½¿ç¨å½åæ¶é´ï¼ |
| | | current_time = datetime.now() |
| | | time_features_input = pd.DataFrame({ |
| | | 'hour': [current_time.hour], |
| | | 'minute': [current_time.minute], |
| | | 'second': [current_time.second], |
| | | 'time_of_day': [current_time.hour * 3600 + current_time.minute * 60 + current_time.second] |
| | | }) |
| | | |
| | | # æ·»å æ»åç¹å¾ï¼ä½¿ç¨è¾å
¥å¼ä½ä¸ºæ¿ä»£ï¼ |
| | | for feature in default_features: |
| | | for lag in [1, 2, 3]: |
| | | time_features_input[f'{feature}_lag{lag}'] = input_features[feature] |
| | | time_features_input[f'{feature}_diff{lag}'] = 0.0 |
| | | |
| | | # æ·»å æ»å¨ç»è®¡ç¹å¾ï¼ä½¿ç¨è¾å
¥å¼ä½ä¸ºæ¿ä»£ï¼ |
| | | for feature in default_features: |
| | | time_features_input[f'{feature}_rolling_mean'] = input_features[feature] |
| | | time_features_input[f'{feature}_rolling_std'] = 0.0 |
| | | time_features_input[f'{feature}_rolling_min'] = input_features[feature] |
| | | time_features_input[f'{feature}_rolling_max'] = input_features[feature] |
| | | |
| | | # åå¹¶ç¹å¾ |
| | | input_combined = pd.concat([input_df, time_features_input], axis=1) |
| | | |
| | | # 颿µ |
| | | if model_type in ['SVR', 'MLP']: |
| | | input_scaled = scaler_X.transform(input_combined) |
| | | prediction_scaled = model.predict(input_scaled) |
| | | predicted_weight = scaler_y.inverse_transform(prediction_scaled.reshape(-1, 1)).ravel()[0] |
| | | elif use_deep_learning and model_type in ['LSTM', 'GRU', 'BiLSTM']: |
| | | # 为深度å¦ä¹ 模åå建åºå |
| | | input_scaled = scaler_X.transform(input_combined) |
| | | # éå¤è¾å
¥ä»¥å建åºå |
| | | sequence_length = st.session_state['ma_sequence_length'] |
| | | input_seq = np.tile(input_scaled, (sequence_length, 1)).reshape(1, sequence_length, -1) |
| | | prediction_scaled = model.predict(input_seq).ravel()[0] |
| | | predicted_weight = scaler_y.inverse_transform(prediction_scaled.reshape(-1, 1)).ravel()[0] |
| | | else: |
| | | predicted_weight = model.predict(input_combined)[0] |
| | | |
| | | # æ¾ç¤ºé¢æµç»æ |
| | | st.success(f"颿µç±³é: {predicted_weight:.4f} Kg/m") |
| | | |
| | | # --- æ°æ®é¢è§ --- |
| | | st.subheader("ð æ°æ®é¢è§") |
| | | st.dataframe(df_analysis.head(20), width='stretch') |
| | | |
| | | # --- å¯¼åºæ°æ® --- |
| | | st.subheader("ð¾ å¯¼åºæ°æ®") |
| | | # å°æ°æ®è½¬æ¢ä¸ºCSVæ ¼å¼ |
| | | csv = df_analysis.to_csv(index=False) |
| | | # å建ä¸è½½æé® |
| | | st.download_button( |
| | | label="å¯¼åºæ´ååçæ°æ® (CSV)", |
| | | data=csv, |
| | | file_name=f"metered_weight_advanced_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv", |
| | | mime="text/csv", |
| | | help="ç¹å»æé®å¯¼åºæ´ååçç±³éåææ°æ®" |
| | | ) |
| | | except Exception as e: |
| | | st.error(f"模åè®ç»æé¢æµå¤±è´¥: {str(e)}") |
| | | |
| | | else: |
| | | # æç¤ºç¨æ·ç¹å»å¼å§åææé® |
| | | st.info("è¯·éæ©æ¶é´èå´å¹¶ç¹å»'å¼å§åæ'æé®è·åæ°æ®ã") |
| 对比新文件 (new file for comparison)
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.main_process_service import MainProcessService |
| | | from sklearn.preprocessing import StandardScaler, MinMaxScaler |
| | | from sklearn.model_selection import train_test_split |
| | | from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error |
| | | from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor |
| | | from sklearn.svm import SVR |
| | | from sklearn.neural_network import MLPRegressor |
| | | |
| | | # Try to import the optional deep-learning library (PyTorch) |
| | | use_deep_learning = False |
| | | try: |
| | | |
| | | import torch |
| | | import torch.nn as nn |
| | | import torch.optim as optim |
| | | use_deep_learning = True |
| | | # æ£æµGPUæ¯å¦å¯ç¨ |
| | | device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') |
| | | st.success(f"使ç¨è®¾å¤: {device}") |
| | | except ImportError: |
| | | st.warning("æªæ£æµå°PyTorchï¼æ·±åº¦å¦ä¹ 模åå°ä¸å¯ç¨ã请å®è£
pytorch以使ç¨LSTM/GRU模åã") |
| | | |
| | | |
| | | # PyTorch deep learning model definitions |
class LSTMModel(nn.Module):
    """Stacked LSTM regressor: encodes a sequence and maps the final
    hidden state to a single scalar prediction.

    Args:
        input_dim: number of features per timestep.
        hidden_dim: LSTM hidden state size (default 64).
        num_layers: number of stacked LSTM layers (default 2).
    """

    def __init__(self, input_dim, hidden_dim=64, num_layers=2):
        super().__init__()
        # batch_first=True -> input is (batch, seq_len, input_dim)
        self.lstm = nn.LSTM(input_dim, hidden_dim, num_layers, batch_first=True)
        self.fc1 = nn.Linear(hidden_dim, 32)
        self.dropout = nn.Dropout(0.2)
        self.fc2 = nn.Linear(32, 1)

    def forward(self, x):
        """Return a (batch, 1) tensor of predictions for input x of
        shape (batch, seq_len, input_dim)."""
        seq_out, _ = self.lstm(x)
        last_step = seq_out[:, -1, :]  # keep only the final timestep
        hidden = self.dropout(torch.relu(self.fc1(last_step)))
        return self.fc2(hidden)
| | | |
class GRUModel(nn.Module):
    """Stacked GRU regressor: encodes a sequence and maps the final
    hidden state to a single scalar prediction.

    Args:
        input_dim: number of features per timestep.
        hidden_dim: GRU hidden state size (default 64).
        num_layers: number of stacked GRU layers (default 2).
    """

    def __init__(self, input_dim, hidden_dim=64, num_layers=2):
        super().__init__()
        # batch_first=True -> input is (batch, seq_len, input_dim)
        self.gru = nn.GRU(input_dim, hidden_dim, num_layers, batch_first=True)
        self.fc1 = nn.Linear(hidden_dim, 32)
        self.dropout = nn.Dropout(0.2)
        self.fc2 = nn.Linear(32, 1)

    def forward(self, x):
        """Return a (batch, 1) tensor of predictions for input x of
        shape (batch, seq_len, input_dim)."""
        seq_out, _ = self.gru(x)
        last_step = seq_out[:, -1, :]  # keep only the final timestep
        hidden = self.dropout(torch.relu(self.fc1(last_step)))
        return self.fc2(hidden)
| | | |
class BiLSTMModel(nn.Module):
    """Bidirectional stacked LSTM regressor.

    The output of the last timestep carries the concatenated forward and
    backward hidden states (hence the fc1 input of hidden_dim * 2), which
    is projected down to a single scalar prediction.

    Args:
        input_dim: number of features per timestep.
        hidden_dim: per-direction LSTM hidden state size (default 64).
        num_layers: number of stacked LSTM layers (default 2).
    """

    def __init__(self, input_dim, hidden_dim=64, num_layers=2):
        super().__init__()
        # batch_first=True -> input is (batch, seq_len, input_dim)
        self.bilstm = nn.LSTM(input_dim, hidden_dim, num_layers,
                              batch_first=True, bidirectional=True)
        self.fc1 = nn.Linear(hidden_dim * 2, 32)
        self.dropout = nn.Dropout(0.2)
        self.fc2 = nn.Linear(32, 1)

    def forward(self, x):
        """Return a (batch, 1) tensor of predictions for input x of
        shape (batch, seq_len, input_dim)."""
        seq_out, _ = self.bilstm(x)
        last_step = seq_out[:, -1, :]  # both directions at the final timestep
        hidden = self.dropout(torch.relu(self.fc1(last_step)))
        return self.fc2(hidden)
| | | |
| | | def show_metered_weight_advanced(): |
| | | # åå§åæå¡ |
| | | extruder_service = ExtruderService() |
| | | main_process_service = MainProcessService() |
| | | |
| | | # 页颿 é¢ |
| | | st.title("ç±³éé«çº§é¢æµåæ") |
| | | |
| | | # åå§åä¼è¯ç¶æ |
| | | if 'ma_start_date' not in st.session_state: |
| | | st.session_state['ma_start_date'] = datetime.now().date() - timedelta(days=7) |
| | | if 'ma_end_date' not in st.session_state: |
| | | st.session_state['ma_end_date'] = datetime.now().date() |
| | | if 'ma_quick_select' not in st.session_state: |
| | | st.session_state['ma_quick_select'] = "æè¿7天" |
| | | if 'ma_model_type' not in st.session_state: |
| | | st.session_state['ma_model_type'] = 'RandomForest' |
| | | if 'ma_sequence_length' not in st.session_state: |
| | | st.session_state['ma_sequence_length'] = 10 |
| | | |
| | | # é»è®¤ç¹å¾å表ï¼ä¸åå
è®¸ç¨æ·éæ©ï¼ |
| | | default_features = ['èºæè½¬é', 'æºå¤´åå', 'æµç¨ä¸»é', 'èºææ¸©åº¦', |
| | | 'åæºçæ¸©åº¦', 'åæºçæ¸©åº¦', 'æºå¤´æ¸©åº¦'] |
| | | |
| | | # å®ä¹åè°å½æ° |
| | | def update_dates(qs): |
| | | st.session_state['ma_quick_select'] = qs |
| | | today = datetime.now().date() |
| | | if qs == "ä»å¤©": |
| | | st.session_state['ma_start_date'] = today |
| | | st.session_state['ma_end_date'] = today |
| | | elif qs == "æè¿3天": |
| | | st.session_state['ma_start_date'] = today - timedelta(days=3) |
| | | st.session_state['ma_end_date'] = today |
| | | elif qs == "æè¿7天": |
| | | st.session_state['ma_start_date'] = today - timedelta(days=7) |
| | | st.session_state['ma_end_date'] = today |
| | | elif qs == "æè¿30天": |
| | | st.session_state['ma_start_date'] = today - timedelta(days=30) |
| | | st.session_state['ma_end_date'] = today |
| | | |
| | | def on_date_change(): |
| | | st.session_state['ma_quick_select'] = "èªå®ä¹" |
| | | |
| | | # æ¥è¯¢æ¡ä»¶åºå |
| | | with st.expander("ð æ¥è¯¢é
ç½®", expanded=True): |
| | | # æ·»å èªå®ä¹ CSS å®ç°ååºå¼æ¢è¡ |
| | | st.markdown(""" |
| | | <style> |
| | | /* 强å¶å容卿¢è¡ */ |
| | | [data-testid="stExpander"] [data-testid="column"] { |
| | | flex: 1 1 120px !important; |
| | | min-width: 120px !important; |
| | | } |
| | | /* éå¯¹æ¥æè¾å
¥æ¡åç¨å¾®å 宽ä¸ç¹ */ |
| | | @media (min-width: 768px) { |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(6), |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(7) { |
| | | flex: 2 1 180px !important; |
| | | min-width: 180px !important; |
| | | } |
| | | } |
| | | </style> |
| | | """, unsafe_allow_html=True) |
| | | |
| | | # å建å¸å± |
| | | cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1]) |
| | | |
| | | options = ["ä»å¤©", "æè¿3天", "æè¿7天", "æè¿30天", "èªå®ä¹"] |
| | | for i, option in enumerate(options): |
| | | with cols[i]: |
| | | # æ ¹æ®å½åéæ©ç¶æå³å®æé®ç±»å |
| | | button_type = "primary" if st.session_state['ma_quick_select'] == option else "secondary" |
| | | if st.button(option, key=f"btn_ma_{option}", width='stretch', type=button_type): |
| | | update_dates(option) |
| | | st.rerun() |
| | | |
| | | with cols[5]: |
| | | start_date = st.date_input( |
| | | "å¼å§æ¥æ", |
| | | label_visibility="collapsed", |
| | | key="ma_start_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[6]: |
| | | end_date = st.date_input( |
| | | "ç»ææ¥æ", |
| | | label_visibility="collapsed", |
| | | key="ma_end_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[7]: |
| | | query_button = st.button("ð å¼å§åæ", key="ma_query", width='stretch') |
| | | |
| | | # 模åé
ç½® |
| | | st.markdown("---") |
| | | st.write("ð¤ **模åé
ç½®**") |
| | | model_cols = st.columns(2) |
| | | |
| | | with model_cols[0]: |
| | | # 模åç±»åéæ© |
| | | model_options = ['RandomForest', 'GradientBoosting', 'SVR', 'MLP'] |
| | | if use_deep_learning: |
| | | model_options.extend(['LSTM', 'GRU', 'BiLSTM']) |
| | | |
| | | model_type = st.selectbox( |
| | | "模åç±»å", |
| | | options=model_options, |
| | | key="ma_model_type", |
| | | help="éæ©ç¨äºé¢æµç模åç±»å" |
| | | ) |
| | | |
| | | with model_cols[1]: |
| | | # åºåé¿åº¦ï¼ä»
éç¨äºæ·±åº¦å¦ä¹ 模åï¼ |
| | | if model_type in ['LSTM', 'GRU', 'BiLSTM']: |
| | | sequence_length = st.slider( |
| | | "åºåé¿åº¦", |
| | | min_value=5, |
| | | max_value=30, |
| | | value=st.session_state['ma_sequence_length'], |
| | | step=1, |
| | | help="ç¨äºæ·±åº¦å¦ä¹ 模åçæ¶é´åºåé¿åº¦", |
| | | key="ma_sequence_length" |
| | | ) |
| | | else: |
| | | st.session_state['ma_sequence_length'] = 10 |
| | | st.write("åºåé¿åº¦: 10 (é»è®¤ï¼ä»
éç¨äºæ·±åº¦å¦ä¹ 模å)") |
| | | |
| | | # 转æ¢ä¸ºdatetime对象 |
| | | start_dt = datetime.combine(start_date, datetime.min.time()) |
| | | end_dt = datetime.combine(end_date, datetime.max.time()) |
| | | |
| | | # æ¥è¯¢å¤ç |
| | | if query_button: |
| | | with st.spinner("æ£å¨è·åæ°æ®..."): |
| | | # 1. è·å宿´çæ¤åºæºæ°æ® |
| | | df_extruder_full = extruder_service.get_extruder_data(start_dt, end_dt) |
| | | |
| | | # 2. è·å主æµç¨æ§å¶æ°æ® |
| | | df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt) |
| | | |
| | | df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt) |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_extruder_full is not None and not df_extruder_full.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | | st.warning("æéæ¶é´æ®µå
æªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã") |
| | | # æ¸
é¤ç¼åæ°æ® |
| | | for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp', 'last_query_start', 'last_query_end']: |
| | | if key in st.session_state: |
| | | del st.session_state[key] |
| | | return |
| | | |
| | | # ç¼åæ°æ®å°ä¼è¯ç¶æ |
| | | st.session_state['cached_extruder_full'] = df_extruder_full |
| | | st.session_state['cached_main_speed'] = df_main_speed |
| | | st.session_state['cached_temp'] = df_temp |
| | | st.session_state['last_query_start'] = start_dt |
| | | st.session_state['last_query_end'] = end_dt |
| | | |
| | | # æ°æ®å¤çååæ |
| | | if all(key in st.session_state for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp']): |
| | | with st.spinner("æ£å¨åææ°æ®..."): |
| | | # è·åç¼åæ°æ® |
| | | df_extruder_full = st.session_state['cached_extruder_full'] |
| | | df_main_speed = st.session_state['cached_main_speed'] |
| | | df_temp = st.session_state['cached_temp'] |
| | | |
| | | |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_extruder_full is not None and not df_extruder_full.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | | st.warning("æéæ¶é´æ®µå
æªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã") |
| | | return |
| | | |
| | | # æ°æ®æ´åä¸é¢å¤ç |
| | | def integrate_data(df_extruder_full, df_main_speed, df_temp): |
| | | # ç¡®ä¿æ¤åºæºæ°æ®åå¨ |
| | | if df_extruder_full is None or df_extruder_full.empty: |
| | | return None |
| | | |
| | | # å建åªå
å«ç±³éåæ¶é´çä¸»æ°æ®é |
| | | df_merged = df_extruder_full[['time', 'metered_weight', 'screw_speed_actual', 'head_pressure']].copy() |
| | | |
| | | |
| | | # æ´å主æµç¨æ°æ® |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | df_main_speed = df_main_speed[['time', 'process_main_speed']] |
| | | df_merged = pd.merge_asof( |
| | | df_merged.sort_values('time'), |
| | | df_main_speed.sort_values('time'), |
| | | on='time', |
| | | direction='nearest', |
| | | tolerance=pd.Timedelta('1min') |
| | | ) |
| | | |
| | | # æ´åæ¸©åº¦æ°æ® |
| | | if df_temp is not None and not df_temp.empty: |
| | | temp_cols = ['time', 'nakata_extruder_screw_display_temp', |
| | | 'nakata_extruder_rear_barrel_display_temp', |
| | | 'nakata_extruder_front_barrel_display_temp', |
| | | 'nakata_extruder_head_display_temp'] |
| | | df_temp_subset = df_temp[temp_cols].copy() |
| | | df_merged = pd.merge_asof( |
| | | df_merged.sort_values('time'), |
| | | df_temp_subset.sort_values('time'), |
| | | on='time', |
| | | direction='nearest', |
| | | tolerance=pd.Timedelta('1min') |
| | | ) |
| | | |
| | | # éå½åå以æé«å¯è¯»æ§ |
| | | df_merged.rename(columns={ |
| | | 'screw_speed_actual': 'èºæè½¬é', |
| | | 'head_pressure': 'æºå¤´åå', |
| | | 'process_main_speed': 'æµç¨ä¸»é', |
| | | 'nakata_extruder_screw_display_temp': 'èºææ¸©åº¦', |
| | | 'nakata_extruder_rear_barrel_display_temp': 'åæºçæ¸©åº¦', |
| | | 'nakata_extruder_front_barrel_display_temp': 'åæºçæ¸©åº¦', |
| | | 'nakata_extruder_head_display_temp': 'æºå¤´æ¸©åº¦' |
| | | }, inplace=True) |
| | | |
| | | # æ¸
çæ°æ® |
| | | df_merged.dropna(subset=['metered_weight'], inplace=True) |
| | | |
| | | return df_merged |
| | | |
| | | # æ§è¡æ°æ®æ´å |
| | | df_analysis = integrate_data(df_extruder_full, df_main_speed, df_temp) |
| | | |
| | | if df_analysis is None or df_analysis.empty: |
| | | st.warning("æ°æ®æ´å失败ï¼è¯·æ£æ¥æ°æ®è´¨éæè°æ´æ¶é´èå´ã") |
| | | return |
| | | |
| | | # éå½åç±³éå |
| | | df_analysis.rename(columns={'metered_weight': 'ç±³é'}, inplace=True) |
| | | |
| | | # --- åå§æ°æ®è¶å¿å¾ --- |
| | | st.subheader("ð åå§æ°æ®è¶å¿å¾") |
| | | |
| | | # å建è¶å¿å¾ |
| | | fig_trend = go.Figure() |
| | | |
| | | # æ·»å ç±³éæ°æ® |
| | | if df_extruder_full is not None and not df_extruder_full.empty: |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_full['time'], |
| | | y=df_extruder_full['metered_weight'], |
| | | name='ç±³é (Kg/m)', |
| | | mode='lines', |
| | | line=dict(color='blue', width=2) |
| | | )) |
| | | |
| | | # æ·»å èºæè½¬é |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_full['time'], |
| | | y=df_extruder_full['screw_speed_actual'], |
| | | name='èºæè½¬é (RPM)', |
| | | mode='lines', |
| | | line=dict(color='green', width=1.5), |
| | | yaxis='y2' |
| | | )) |
| | | |
| | | # æ·»å æºå¤´åå |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_full['time'], |
| | | y=df_extruder_full['head_pressure'], |
| | | name='æºå¤´åå', |
| | | mode='lines', |
| | | line=dict(color='orange', width=1.5), |
| | | yaxis='y3' |
| | | )) |
| | | |
| | | # æ·»å æµç¨ä¸»é |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_main_speed['time'], |
| | | y=df_main_speed['process_main_speed'], |
| | | name='æµç¨ä¸»é (M/Min)', |
| | | mode='lines', |
| | | line=dict(color='red', width=1.5), |
| | | yaxis='y4' |
| | | )) |
| | | |
| | | # æ·»å æ¸©åº¦æ°æ® |
| | | if df_temp is not None and not df_temp.empty: |
| | | # èºææ¸©åº¦ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_temp['time'], |
| | | y=df_temp['nakata_extruder_screw_display_temp'], |
| | | name='èºææ¸©åº¦ (°C)', |
| | | mode='lines', |
| | | line=dict(color='purple', width=1), |
| | | yaxis='y5' |
| | | )) |
| | | |
| | | # é
ç½®è¶å¿å¾å¸å± |
| | | fig_trend.update_layout( |
| | | title='åå§æ°æ®è¶å¿', |
| | | xaxis=dict( |
| | | title='æ¶é´', |
| | | rangeslider=dict(visible=True), |
| | | type='date' |
| | | ), |
| | | yaxis=dict( |
| | | title='ç±³é (Kg/m)', |
| | | title_font=dict(color='blue'), |
| | | tickfont=dict(color='blue') |
| | | ), |
| | | yaxis2=dict( |
| | | title='èºæè½¬é (RPM)', |
| | | title_font=dict(color='green'), |
| | | tickfont=dict(color='green'), |
| | | overlaying='y', |
| | | side='right' |
| | | ), |
| | | yaxis3=dict( |
| | | title='æºå¤´åå', |
| | | title_font=dict(color='orange'), |
| | | tickfont=dict(color='orange'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.85 |
| | | ), |
| | | yaxis4=dict( |
| | | title='æµç¨ä¸»é (M/Min)', |
| | | title_font=dict(color='red'), |
| | | tickfont=dict(color='red'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.75 |
| | | ), |
| | | yaxis5=dict( |
| | | title='温度 (°C)', |
| | | title_font=dict(color='purple'), |
| | | tickfont=dict(color='purple'), |
| | | overlaying='y', |
| | | side='left', |
| | | anchor='free', |
| | | position=0.15 |
| | | ), |
| | | legend=dict( |
| | | orientation="h", |
| | | yanchor="bottom", |
| | | y=1.02, |
| | | xanchor="right", |
| | | x=1 |
| | | ), |
| | | height=600, |
| | | margin=dict(l=100, r=200, t=100, b=100), |
| | | hovermode='x unified' |
| | | ) |
| | | |
| | | # æ¾ç¤ºè¶å¿å¾ |
| | | st.plotly_chart(fig_trend, width='stretch', config={'scrollZoom': True}) |
| | | |
| | | # --- é«çº§é¢æµåæ --- |
| | | st.subheader("ð é«çº§é¢æµåæ") |
| | | |
| | | # æ£æ¥ææé»è®¤ç¹å¾æ¯å¦å¨æ°æ®ä¸ |
| | | missing_features = [f for f in default_features if f not in df_analysis.columns] |
| | | if missing_features: |
| | | st.warning(f"æ°æ®ä¸ç¼ºå°ä»¥ä¸ç¹å¾: {', '.join(missing_features)}") |
| | | else: |
| | | # å夿°æ® |
| | | X = df_analysis[default_features] |
| | | y = df_analysis['ç±³é'] |
| | | |
| | | # æ¸
çæ°æ®ä¸çNaNå¼ |
| | | combined = pd.concat([X, y], axis=1) |
| | | combined_clean = combined.dropna() |
| | | |
| | | # æ£æ¥æ¸
çåçæ°æ®é |
| | | if len(combined_clean) < 30: |
| | | st.warning("æ°æ®éä¸è¶³ï¼æ æ³è¿è¡ææç颿µåæ") |
| | | else: |
| | | # éæ°å离Xåy |
| | | X_clean = combined_clean[default_features] |
| | | y_clean = combined_clean['ç±³é'] |
| | | |
| | | # ç¹å¾å·¥ç¨ï¼æ·»å æ¶é´ç¸å
³ç¹å¾ |
| | | # ç¡®ä¿ä½¿ç¨æ¶é´åä½ä¸ºç´¢å¼ |
| | | if 'time' in combined_clean.columns: |
| | | # å°timeåè®¾ç½®ä¸ºç´¢å¼ |
| | | combined_clean = combined_clean.set_index('time') |
| | | |
| | | # å建æ¶é´ç¹å¾ |
| | | time_features = pd.DataFrame(index=combined_clean.index) |
| | | time_features['hour'] = combined_clean.index.hour |
| | | time_features['minute'] = combined_clean.index.minute |
| | | time_features['second'] = combined_clean.index.second |
| | | time_features['time_of_day'] = time_features['hour'] * 3600 + time_features['minute'] * 60 + time_features['second'] |
| | | else: |
| | | # å¦ææ²¡ætimeåï¼åå»ºç©ºçæ¶é´ç¹å¾ |
| | | time_features = pd.DataFrame(index=combined_clean.index) |
| | | time_features['hour'] = 0 |
| | | time_features['minute'] = 0 |
| | | time_features['second'] = 0 |
| | | time_features['time_of_day'] = 0 |
| | | |
| | | # æ·»å æ»åç¹å¾ |
| | | for feature in default_features: |
| | | for lag in [1, 2, 3]: |
| | | time_features[f'{feature}_lag{lag}'] = X_clean[feature].shift(lag) |
| | | time_features[f'{feature}_diff{lag}'] = X_clean[feature].diff(lag) |
| | | |
| | | # æ·»å æ»å¨ç»è®¡ç¹å¾ |
| | | for feature in default_features: |
| | | time_features[f'{feature}_rolling_mean'] = X_clean[feature].rolling(window=5).mean() |
| | | time_features[f'{feature}_rolling_std'] = X_clean[feature].rolling(window=5).std() |
| | | time_features[f'{feature}_rolling_min'] = X_clean[feature].rolling(window=5).min() |
| | | time_features[f'{feature}_rolling_max'] = X_clean[feature].rolling(window=5).max() |
| | | |
| | | # æ¸
çæ»åç¹å¾åæ»å¨ç»è®¡ç¹å¾äº§ççNaNå¼ |
| | | time_features.dropna(inplace=True) |
| | | |
| | | # 对é½Xåy |
| | | common_index = time_features.index.intersection(y_clean.index) |
| | | X_final = pd.concat([X_clean.loc[common_index], time_features.loc[common_index]], axis=1) |
| | | y_final = y_clean.loc[common_index] |
| | | |
| | | # æ£æ¥æç»æ°æ®é |
| | | if len(X_final) < 20: |
| | | st.warning("ç¹å¾å·¥ç¨åæ°æ®éä¸è¶³ï¼æ æ³è¿è¡ææç颿µåæ") |
| | | else: |
| | | # åå²è®ç»éåæµè¯é |
| | | X_train, X_test, y_train, y_test = train_test_split(X_final, y_final, test_size=0.2, random_state=42) |
| | | |
| | | # æ°æ®æ åå |
| | | scaler_X = StandardScaler() |
| | | scaler_y = MinMaxScaler() |
| | | |
| | | X_train_scaled = scaler_X.fit_transform(X_train) |
| | | X_test_scaled = scaler_X.transform(X_test) |
| | | y_train_scaled = scaler_y.fit_transform(y_train.values.reshape(-1, 1)).ravel() |
| | | y_test_scaled = scaler_y.transform(y_test.values.reshape(-1, 1)).ravel() |
| | | |
| | | # 模åè®ç» |
| | | model = None |
| | | y_pred = None |
| | | |
| | | try: |
| | | if model_type == 'RandomForest': |
| | | # éæºæ£®æåå½ |
| | | model = RandomForestRegressor(n_estimators=100, random_state=42) |
| | | model.fit(X_train, y_train) |
| | | y_pred = model.predict(X_test) |
| | | |
| | | elif model_type == 'GradientBoosting': |
| | | # 梯度æååå½ |
| | | model = GradientBoostingRegressor(n_estimators=100, random_state=42) |
| | | model.fit(X_train, y_train) |
| | | y_pred = model.predict(X_test) |
| | | |
| | | elif model_type == 'SVR': |
| | | # æ¯æåéåå½ |
| | | model = SVR(kernel='rbf', C=1.0, gamma='scale') |
| | | model.fit(X_train_scaled, y_train_scaled) |
| | | y_pred_scaled = model.predict(X_test_scaled) |
| | | y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel() |
| | | |
| | | elif model_type == 'MLP': |
| | | # å¤å±æç¥å¨åå½ |
| | | model = MLPRegressor(hidden_layer_sizes=(100, 50), max_iter=500, random_state=42) |
| | | model.fit(X_train_scaled, y_train_scaled) |
| | | y_pred_scaled = model.predict(X_test_scaled) |
| | | y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel() |
| | | |
| | | elif use_deep_learning and model_type in ['LSTM', 'GRU', 'BiLSTM']: |
| | | # å夿¶é´åºåæ°æ® |
| | | sequence_length = st.session_state['ma_sequence_length'] |
| | | |
def create_sequences(X, y, seq_length):
    """Slice aligned arrays into overlapping windows for sequence models.

    Sample i pairs the window X[i : i + seq_length] with the target
    y[i + seq_length] (i.e. predict the value right after the window).
    X and y are first aligned to their common length so ragged inputs
    cannot produce out-of-range targets.

    Returns:
        (X_seq, y_seq) as numpy arrays; both empty when there is not
        enough data for even one window plus its target.
    """
    usable = min(len(X), len(y))
    # Need at least seq_length inputs plus one following target value.
    if usable <= seq_length:
        return np.array([]), np.array([])
    n_windows = usable - seq_length
    windows = [X[i:i + seq_length] for i in range(n_windows)]
    targets = [y[i + seq_length] for i in range(n_windows)]
    return np.array(windows), np.array(targets)
| | | |
| | | # 为深度å¦ä¹ 模åå建åºå |
| | | X_train_seq, y_train_seq = create_sequences(X_train_scaled, y_train_scaled, sequence_length) |
| | | X_test_seq, y_test_seq = create_sequences(X_test_scaled, y_test_scaled, sequence_length) |
| | | |
| | | # æ£æ¥å建çåºåæ¯å¦ä¸ºç©º |
| | | if len(X_train_seq) == 0 or len(y_train_seq) == 0: |
| | | st.warning(f"æ°æ®éä¸è¶³ï¼æ æ³å建ææçLSTMåºåãéè¦è³å° {sequence_length + 1} ä¸ªæ ·æ¬ï¼å½ååªæ {min(len(X_train_scaled), len(y_train_scaled))} ä¸ªæ ·æ¬ã") |
| | | # 使ç¨éæºæ£®æä½ä¸ºå¤é模å |
| | | model = RandomForestRegressor(n_estimators=100, random_state=42) |
| | | model.fit(X_train, y_train) |
| | | y_pred = model.predict(X_test) |
| | | else: |
| | | # 转æ¢ä¸ºPyTorchå¼ éå¹¶ç§»å¨å°è®¾å¤ |
| | | X_train_tensor = torch.tensor(X_train_seq, dtype=torch.float32).to(device) |
| | | y_train_tensor = torch.tensor(y_train_seq, dtype=torch.float32).unsqueeze(1).to(device) |
| | | X_test_tensor = torch.tensor(X_test_seq, dtype=torch.float32).to(device) |
| | | y_test_tensor = torch.tensor(y_test_seq, dtype=torch.float32).unsqueeze(1).to(device) |
| | | |
| | | # æå»ºPyTorch模åå¹¶ç§»å¨å°è®¾å¤ |
| | | input_dim = X_train_scaled.shape[1] |
| | | |
| | | if model_type == 'LSTM': |
| | | deep_model = LSTMModel(input_dim).to(device) |
| | | elif model_type == 'GRU': |
| | | deep_model = GRUModel(input_dim).to(device) |
| | | elif model_type == 'BiLSTM': |
| | | deep_model = BiLSTMModel(input_dim).to(device) |
| | | |
| | | # å®ä¹æå¤±å½æ°åä¼åå¨ |
| | | criterion = nn.MSELoss() |
| | | optimizer = optim.Adam(deep_model.parameters(), lr=0.001) |
| | | |
| | | # æ¾ç¤ºä½¿ç¨çè®¾å¤ |
| | | st.info(f"使ç¨è®¾å¤: {device}") |
| | | |
| | | # è®ç»æ¨¡å |
| | | num_epochs = 50 |
| | | batch_size = 32 |
| | | |
| | | for epoch in range(num_epochs): |
| | | deep_model.train() |
| | | optimizer.zero_grad() |
| | | |
| | | # ååä¼ æ |
| | | outputs = deep_model(X_train_tensor) |
| | | loss = criterion(outputs, y_train_tensor) |
| | | |
| | | # ååä¼ æåä¼å |
| | | loss.backward() |
| | | optimizer.step() |
| | | |
| | | # 颿µ |
| | | deep_model.eval() |
| | | with torch.no_grad(): |
| | | y_pred_scaled_tensor = deep_model(X_test_tensor) |
| | | y_pred_scaled = y_pred_scaled_tensor.numpy().ravel() |
| | | y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel() |
| | | |
| | | # å°y_test_seq转æ¢ååå§å°ºåº¦ |
| | | y_test_actual = scaler_y.inverse_transform(y_test_seq.reshape(-1, 1)).ravel() |
| | | |
| | | # ä¿å模å |
| | | model = deep_model |
| | | |
| | | # 计ç®è¯ä¼°ææ |
| | | if model_type in ['LSTM', 'GRU', 'BiLSTM']: |
| | | # 使ç¨è½¬æ¢åçy_test_seqä½ä¸ºçå®å¼ |
| | | r2 = r2_score(y_test_actual, y_pred) |
| | | mse = mean_squared_error(y_test_actual, y_pred) |
| | | mae = mean_absolute_error(y_test_actual, y_pred) |
| | | rmse = np.sqrt(mse) |
| | | else: |
| | | # 使ç¨åå§çy_testä½ä¸ºçå®å¼ |
| | | r2 = r2_score(y_test, y_pred) |
| | | mse = mean_squared_error(y_test, y_pred) |
| | | mae = mean_absolute_error(y_test, y_pred) |
| | | rmse = np.sqrt(mse) |
| | | |
| | | # æ¾ç¤ºæ¨¡åæ§è½ |
| | | metrics_cols = st.columns(2) |
| | | with metrics_cols[0]: |
| | | st.metric("R² å¾å", f"{r2:.4f}") |
| | | st.metric("åæ¹è¯¯å·® (MSE)", f"{mse:.6f}") |
| | | with metrics_cols[1]: |
| | | st.metric("å¹³åç»å¯¹è¯¯å·® (MAE)", f"{mae:.6f}") |
| | | st.metric("åæ¹æ ¹è¯¯å·® (RMSE)", f"{rmse:.6f}") |
| | | |
| | | # --- å®é
å¼ä¸é¢æµå¼å¯¹æ¯ --- |
| | | st.subheader("ð å®é
å¼ä¸é¢æµå¼å¯¹æ¯") |
| | | |
| | | # åå»ºå¯¹æ¯æ°æ® |
| | | if model_type in ['LSTM', 'GRU', 'BiLSTM']: |
| | | # 使ç¨è½¬æ¢åçy_test_actual |
| | | compare_df = pd.DataFrame({ |
| | | 'å®é
å¼': y_test_actual, |
| | | '颿µå¼': y_pred |
| | | }) |
| | | else: |
| | | # 使ç¨åå§çy_test |
| | | compare_df = pd.DataFrame({ |
| | | 'å®é
å¼': y_test, |
| | | '颿µå¼': y_pred |
| | | }) |
| | | compare_df = compare_df.sort_index() |
| | | |
| | | # å建对æ¯å¾ |
| | | fig_compare = go.Figure() |
| | | fig_compare.add_trace(go.Scatter( |
| | | x=compare_df.index, |
| | | y=compare_df['å®é
å¼'], |
| | | name='å®é
å¼', |
| | | mode='lines+markers', |
| | | line=dict(color='blue', width=2) |
| | | )) |
| | | fig_compare.add_trace(go.Scatter( |
| | | x=compare_df.index, |
| | | y=compare_df['颿µå¼'], |
| | | name='颿µå¼', |
| | | mode='lines+markers', |
| | | line=dict(color='red', width=2, dash='dash') |
| | | )) |
| | | fig_compare.update_layout( |
| | | title=f'æµè¯é: å®é
ç±³é vs 颿µç±³é ({model_type})', |
| | | xaxis=dict(title='æ¶é´'), |
| | | yaxis=dict(title='ç±³é (Kg/m)'), |
| | | legend=dict(orientation='h', yanchor='bottom', y=1.02, xanchor='right', x=1), |
| | | height=400 |
| | | ) |
| | | st.plotly_chart(fig_compare, width='stretch') |
| | | |
| | | # --- æ®å·®åæ --- |
| | | st.subheader("ð æ®å·®åæ") |
| | | |
| | | # è®¡ç®æ®å·® |
| | | if model_type in ['LSTM', 'GRU', 'BiLSTM']: |
| | | # 使ç¨è½¬æ¢åçy_test_actual |
| | | residuals = y_test_actual - y_pred |
| | | else: |
| | | # 使ç¨åå§çy_test |
| | | residuals = y_test - y_pred |
| | | |
| | | # å建æ®å·®å¾ |
| | | fig_residual = go.Figure() |
| | | fig_residual.add_trace(go.Scatter( |
| | | x=y_pred, |
| | | y=residuals, |
| | | mode='markers', |
| | | marker=dict(color='green', size=8, opacity=0.6) |
| | | )) |
| | | fig_residual.add_shape( |
| | | type="line", |
| | | x0=y_pred.min(), |
| | | y0=0, |
| | | x1=y_pred.max(), |
| | | y1=0, |
| | | line=dict(color="red", width=2, dash="dash") |
| | | ) |
| | | fig_residual.update_layout( |
| | | title='æ®å·®å¾', |
| | | xaxis=dict(title='颿µå¼'), |
| | | yaxis=dict(title='æ®å·®'), |
| | | height=400 |
| | | ) |
| | | st.plotly_chart(fig_residual, width='stretch') |
| | | |
| | | # --- ç¹å¾éè¦æ§ï¼å¦ææ¨¡åæ¯æï¼ --- |
| | | if model_type in ['RandomForest', 'GradientBoosting']: |
| | | st.subheader("âï¸ ç¹å¾éè¦æ§åæ") |
| | | |
| | | # 计ç®ç¹å¾éè¦æ§ |
| | | feature_importance = pd.DataFrame({ |
| | | 'ç¹å¾': X_train.columns, |
| | | 'éè¦æ§': model.feature_importances_ |
| | | }) |
| | | feature_importance = feature_importance.sort_values('éè¦æ§', ascending=False) |
| | | |
| | | # å建ç¹å¾éè¦æ§å¾ |
| | | fig_importance = px.bar( |
| | | feature_importance, |
| | | x='ç¹å¾', |
| | | y='éè¦æ§', |
| | | title='ç¹å¾éè¦æ§', |
| | | color='éè¦æ§', |
| | | color_continuous_scale='viridis' |
| | | ) |
| | | fig_importance.update_layout( |
| | | xaxis=dict(tickangle=-45), |
| | | height=400 |
| | | ) |
| | | st.plotly_chart(fig_importance, width='stretch') |
| | | |
| | | # --- 颿µåè½ --- |
| | | st.subheader("ð® ç±³é颿µ") |
| | | |
| | | # åå»ºé¢æµè¡¨å |
| | | st.write("è¾å
¥ç¹å¾å¼è¿è¡ç±³é颿µ:") |
| | | predict_cols = st.columns(2) |
| | | input_features = {} |
| | | |
| | | for i, feature in enumerate(default_features): |
| | | with predict_cols[i % 2]: |
| | | # è·åç¹å¾çç»è®¡ä¿¡æ¯ |
| | | min_val = X_clean[feature].min() |
| | | max_val = X_clean[feature].max() |
| | | mean_val = X_clean[feature].mean() |
| | | |
| | | input_features[feature] = st.number_input( |
| | | f"{feature}", |
| | | key=f"ma_pred_{feature}", |
| | | value=float(mean_val), |
| | | min_value=float(min_val), |
| | | max_value=float(max_val), |
| | | step=0.1 |
| | | ) |
| | | |
| | | if st.button("颿µç±³é"): |
| | | # åå¤é¢æµæ°æ® |
| | | input_df = pd.DataFrame([input_features]) |
| | | |
| | | # æ·»å æ¶é´ç¹å¾ï¼ä½¿ç¨å½åæ¶é´ï¼ |
| | | current_time = datetime.now() |
| | | time_features_input = pd.DataFrame({ |
| | | 'hour': [current_time.hour], |
| | | 'minute': [current_time.minute], |
| | | 'second': [current_time.second], |
| | | 'time_of_day': [current_time.hour * 3600 + current_time.minute * 60 + current_time.second] |
| | | }) |
| | | |
| | | # æ·»å æ»åç¹å¾ï¼ä½¿ç¨è¾å
¥å¼ä½ä¸ºæ¿ä»£ï¼ |
| | | for feature in default_features: |
| | | for lag in [1, 2, 3]: |
| | | time_features_input[f'{feature}_lag{lag}'] = input_features[feature] |
| | | time_features_input[f'{feature}_diff{lag}'] = 0.0 |
| | | |
| | | # æ·»å æ»å¨ç»è®¡ç¹å¾ï¼ä½¿ç¨è¾å
¥å¼ä½ä¸ºæ¿ä»£ï¼ |
| | | for feature in default_features: |
| | | time_features_input[f'{feature}_rolling_mean'] = input_features[feature] |
| | | time_features_input[f'{feature}_rolling_std'] = 0.0 |
| | | time_features_input[f'{feature}_rolling_min'] = input_features[feature] |
| | | time_features_input[f'{feature}_rolling_max'] = input_features[feature] |
| | | |
| | | # åå¹¶ç¹å¾ |
| | | input_combined = pd.concat([input_df, time_features_input], axis=1) |
| | | |
| | | # 颿µ |
| | | if model_type in ['SVR', 'MLP']: |
| | | input_scaled = scaler_X.transform(input_combined) |
| | | prediction_scaled = model.predict(input_scaled) |
| | | predicted_weight = scaler_y.inverse_transform(prediction_scaled.reshape(-1, 1)).ravel()[0] |
| | | elif use_deep_learning and model_type in ['LSTM', 'GRU', 'BiLSTM']: |
| | | # 为深度å¦ä¹ 模åå建åºå |
| | | input_scaled = scaler_X.transform(input_combined) |
| | | # éå¤è¾å
¥ä»¥å建åºå |
| | | input_seq = np.tile(input_scaled, (sequence_length, 1)).reshape(1, sequence_length, -1) |
| | | # 转æ¢ä¸ºPyTorchå¼ éå¹¶ç§»å¨å°è®¾å¤ |
| | | input_tensor = torch.tensor(input_seq, dtype=torch.float32).to(device) |
| | | # 颿µ |
| | | model.eval() |
| | | with torch.no_grad(): |
| | | prediction_scaled_tensor = model(input_tensor) |
| | | prediction_scaled = prediction_scaled_tensor.cpu().numpy().ravel()[0] |
| | | predicted_weight = scaler_y.inverse_transform(prediction_scaled.reshape(-1, 1)).ravel()[0] |
| | | else: |
| | | predicted_weight = model.predict(input_combined)[0] |
| | | |
| | | # æ¾ç¤ºé¢æµç»æ |
| | | st.success(f"颿µç±³é: {predicted_weight:.4f} Kg/m") |
| | | |
| | | # --- æ°æ®é¢è§ --- |
| | | st.subheader("ð æ°æ®é¢è§") |
| | | st.dataframe(df_analysis.head(20), width='stretch') |
| | | |
| | | # --- å¯¼åºæ°æ® --- |
| | | st.subheader("ð¾ å¯¼åºæ°æ®") |
| | | # å°æ°æ®è½¬æ¢ä¸ºCSVæ ¼å¼ |
| | | csv = df_analysis.to_csv(index=False) |
| | | # å建ä¸è½½æé® |
| | | st.download_button( |
| | | label="å¯¼åºæ´ååçæ°æ® (CSV)", |
| | | data=csv, |
| | | file_name=f"metered_weight_advanced_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv", |
| | | mime="text/csv", |
| | | help="ç¹å»æé®å¯¼åºæ´ååçç±³éåææ°æ®" |
| | | ) |
| | | except Exception as e: |
| | | st.error(f"模åè®ç»æé¢æµå¤±è´¥: {str(e)}") |
| | | |
| | | else: |
| | | # æç¤ºç¨æ·ç¹å»å¼å§åææé® |
| | | st.info("è¯·éæ©æ¶é´èå´å¹¶ç¹å»'å¼å§åæ'æé®è·åæ°æ®ã") |
| 对比新文件 (diff: new file)
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.main_process_service import MainProcessService |
| | | from sklearn.preprocessing import StandardScaler, MinMaxScaler |
| | | from sklearn.model_selection import train_test_split |
| | | from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error |
| | | from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor |
| | | from sklearn.svm import SVR |
| | | from sklearn.neural_network import MLPRegressor |
| | | |
| | | |
| | | |
| | | |
def show_metered_weight_advanced():
    """Render the "advanced metered-weight prediction" Streamlit page.

    Workflow:
      1. Let the user pick a date range (quick presets or custom dates) and a
         regression model type (RandomForest / GradientBoosting / SVR / MLP).
      2. Fetch extruder, main-process and temperature data for that range and
         cache the frames in ``st.session_state``.
      3. Merge the three streams on nearest timestamps (1-minute tolerance),
         train the selected model to predict metered weight ("米重", Kg/m),
         and show evaluation metrics, actual-vs-predicted and residual charts,
         feature importances (tree models), an interactive single-point
         prediction form, a data preview and a CSV export.

    NOTE(review): the original source was mojibake (UTF-8 shown as Latin-1,
    with some bytes lost); the Chinese UI strings below are reconstructed
    from the decodable byte sequences — confirm wording against the live app.
    """
    # Initialize the data-access services.
    extruder_service = ExtruderService()
    main_process_service = MainProcessService()

    # Page title.
    st.title("米重高级预测分析")

    # Initialize session state (date range, quick-select preset, model config).
    if 'ma_start_date' not in st.session_state:
        st.session_state['ma_start_date'] = datetime.now().date() - timedelta(days=7)
    if 'ma_end_date' not in st.session_state:
        st.session_state['ma_end_date'] = datetime.now().date()
    if 'ma_quick_select' not in st.session_state:
        st.session_state['ma_quick_select'] = "最近7天"
    if 'ma_model_type' not in st.session_state:
        st.session_state['ma_model_type'] = 'RandomForest'
    if 'ma_sequence_length' not in st.session_state:
        # Kept for compatibility with the deep-learning variant of this page.
        st.session_state['ma_sequence_length'] = 10

    # Fixed feature list (not user-selectable on this page).
    default_features = ['螺杆转速', '机头压力', '流程主速', '螺杆温度',
                        '后机筒温度', '前机筒温度', '机头温度']

    def update_dates(qs):
        # Quick-select callback: apply the chosen preset to the date range.
        st.session_state['ma_quick_select'] = qs
        today = datetime.now().date()
        if qs == "今天":
            st.session_state['ma_start_date'] = today
            st.session_state['ma_end_date'] = today
        elif qs == "最近3天":
            st.session_state['ma_start_date'] = today - timedelta(days=3)
            st.session_state['ma_end_date'] = today
        elif qs == "最近7天":
            st.session_state['ma_start_date'] = today - timedelta(days=7)
            st.session_state['ma_end_date'] = today
        elif qs == "最近30天":
            st.session_state['ma_start_date'] = today - timedelta(days=30)
            st.session_state['ma_end_date'] = today

    def on_date_change():
        # Manual date edits switch the quick-select highlight to "custom".
        st.session_state['ma_quick_select'] = "自定义"

    # --- Query configuration area ---
    with st.expander("🔍 查询配置", expanded=True):
        # Custom CSS so the button / date-input columns wrap responsively.
        st.markdown("""
        <style>
        /* Force column containers to wrap */
        [data-testid="stExpander"] [data-testid="column"] {
            flex: 1 1 120px !important;
            min-width: 120px !important;
        }
        /* Give the date-input columns a bit more width on wide screens */
        @media (min-width: 768px) {
            [data-testid="stExpander"] [data-testid="column"]:nth-child(6),
            [data-testid="stExpander"] [data-testid="column"]:nth-child(7) {
                flex: 2 1 180px !important;
                min-width: 180px !important;
            }
        }
        </style>
        """, unsafe_allow_html=True)

        # Layout: five preset buttons, two date inputs, one query button.
        cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1])

        options = ["今天", "最近3天", "最近7天", "最近30天", "自定义"]
        for i, option in enumerate(options):
            with cols[i]:
                # Highlight the currently active preset.
                button_type = "primary" if st.session_state['ma_quick_select'] == option else "secondary"
                if st.button(option, key=f"btn_ma_{option}", width='stretch', type=button_type):
                    update_dates(option)
                    st.rerun()

        with cols[5]:
            start_date = st.date_input(
                "开始日期",
                label_visibility="collapsed",
                key="ma_start_date",
                on_change=on_date_change
            )

        with cols[6]:
            end_date = st.date_input(
                "结束日期",
                label_visibility="collapsed",
                key="ma_end_date",
                on_change=on_date_change
            )

        with cols[7]:
            query_button = st.button("🔍 开始分析", key="ma_query", width='stretch')

        # --- Model configuration ---
        st.markdown("---")
        st.write("🤖 **模型配置**")
        model_cols = st.columns(2)

        with model_cols[0]:
            # Model type selection.
            model_options = ['RandomForest', 'GradientBoosting', 'SVR', 'MLP']
            model_type = st.selectbox(
                "模型类型",
                options=model_options,
                key="ma_model_type",
                help="选择用于预测的模型类型"
            )

    # Expand the selected dates to full-day datetime bounds.
    start_dt = datetime.combine(start_date, datetime.min.time())
    end_dt = datetime.combine(end_date, datetime.max.time())

    # --- Query handling: fetch the three data streams and cache them ---
    if query_button:
        with st.spinner("正在获取数据..."):
            # 1. Full extruder data (metered weight, screw speed, head pressure).
            df_extruder_full = extruder_service.get_extruder_data(start_dt, end_dt)

            # 2. Main-process control data.
            df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt)

            # 3. Temperature control data.
            df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt)

            # Check that at least one stream returned rows.
            has_data = any([
                df_extruder_full is not None and not df_extruder_full.empty,
                df_main_speed is not None and not df_main_speed.empty,
                df_temp is not None and not df_temp.empty
            ])

            if not has_data:
                st.warning("所选时间段内未找到任何数据,请尝试调整查询条件。")
                # Drop any stale cached frames so the page resets cleanly.
                for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp',
                            'last_query_start', 'last_query_end']:
                    if key in st.session_state:
                        del st.session_state[key]
                return

            # Cache the query results in session state so reruns (widget
            # interactions) don't hit the database again.
            st.session_state['cached_extruder_full'] = df_extruder_full
            st.session_state['cached_main_speed'] = df_main_speed
            st.session_state['cached_temp'] = df_temp
            st.session_state['last_query_start'] = start_dt
            st.session_state['last_query_end'] = end_dt

    # --- Data processing and analysis (runs whenever cached data exists) ---
    if all(key in st.session_state for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp']):
        with st.spinner("正在分析数据..."):
            # Pull the cached frames.
            df_extruder_full = st.session_state['cached_extruder_full']
            df_main_speed = st.session_state['cached_main_speed']
            df_temp = st.session_state['cached_temp']

            # Re-check in case the cache holds empty frames.
            has_data = any([
                df_extruder_full is not None and not df_extruder_full.empty,
                df_main_speed is not None and not df_main_speed.empty,
                df_temp is not None and not df_temp.empty
            ])

            if not has_data:
                st.warning("所选时间段内未找到任何数据,请尝试调整查询条件。")
                return

            def integrate_data(df_extruder_full, df_main_speed, df_temp):
                # Merge the three streams onto the extruder timeline.
                # Extruder data is mandatory — it carries the target column.
                if df_extruder_full is None or df_extruder_full.empty:
                    return None

                # Base frame: time, target and the two extruder features.
                df_merged = df_extruder_full[['time', 'metered_weight',
                                              'screw_speed_actual', 'head_pressure']].copy()

                # Attach the main-process speed by nearest timestamp
                # (at most 1 minute apart).
                if df_main_speed is not None and not df_main_speed.empty:
                    df_main_speed = df_main_speed[['time', 'process_main_speed']]
                    df_merged = pd.merge_asof(
                        df_merged.sort_values('time'),
                        df_main_speed.sort_values('time'),
                        on='time',
                        direction='nearest',
                        tolerance=pd.Timedelta('1min')
                    )

                # Attach the four temperature channels the same way.
                if df_temp is not None and not df_temp.empty:
                    temp_cols = ['time', 'nakata_extruder_screw_display_temp',
                                 'nakata_extruder_rear_barrel_display_temp',
                                 'nakata_extruder_front_barrel_display_temp',
                                 'nakata_extruder_head_display_temp']
                    df_temp_subset = df_temp[temp_cols].copy()
                    df_merged = pd.merge_asof(
                        df_merged.sort_values('time'),
                        df_temp_subset.sort_values('time'),
                        on='time',
                        direction='nearest',
                        tolerance=pd.Timedelta('1min')
                    )

                # Rename columns to the human-readable feature names used
                # throughout the page (must match `default_features`).
                df_merged.rename(columns={
                    'screw_speed_actual': '螺杆转速',
                    'head_pressure': '机头压力',
                    'process_main_speed': '流程主速',
                    'nakata_extruder_screw_display_temp': '螺杆温度',
                    'nakata_extruder_rear_barrel_display_temp': '后机筒温度',
                    'nakata_extruder_front_barrel_display_temp': '前机筒温度',
                    'nakata_extruder_head_display_temp': '机头温度'
                }, inplace=True)

                # Rows without a target value are useless for training.
                df_merged.dropna(subset=['metered_weight'], inplace=True)

                return df_merged

            # Run the integration.
            df_analysis = integrate_data(df_extruder_full, df_main_speed, df_temp)

            if df_analysis is None or df_analysis.empty:
                st.warning("数据整合失败,请检查数据质量或调整时间范围。")
                return

            # Rename the target column to its display name.
            df_analysis.rename(columns={'metered_weight': '米重'}, inplace=True)

            # --- Raw data trend chart ---
            st.subheader("📈 原始数据趋势图")

            fig_trend = go.Figure()

            # Metered weight, screw speed and head pressure (extruder stream).
            if df_extruder_full is not None and not df_extruder_full.empty:
                fig_trend.add_trace(go.Scatter(
                    x=df_extruder_full['time'],
                    y=df_extruder_full['metered_weight'],
                    name='米重 (Kg/m)',
                    mode='lines',
                    line=dict(color='blue', width=2)
                ))

                fig_trend.add_trace(go.Scatter(
                    x=df_extruder_full['time'],
                    y=df_extruder_full['screw_speed_actual'],
                    name='螺杆转速 (RPM)',
                    mode='lines',
                    line=dict(color='green', width=1.5),
                    yaxis='y2'
                ))

                fig_trend.add_trace(go.Scatter(
                    x=df_extruder_full['time'],
                    y=df_extruder_full['head_pressure'],
                    name='机头压力',
                    mode='lines',
                    line=dict(color='orange', width=1.5),
                    yaxis='y3'
                ))

            # Main-process speed.
            if df_main_speed is not None and not df_main_speed.empty:
                fig_trend.add_trace(go.Scatter(
                    x=df_main_speed['time'],
                    y=df_main_speed['process_main_speed'],
                    name='流程主速 (M/Min)',
                    mode='lines',
                    line=dict(color='red', width=1.5),
                    yaxis='y4'
                ))

            # Screw temperature (representative temperature channel).
            if df_temp is not None and not df_temp.empty:
                fig_trend.add_trace(go.Scatter(
                    x=df_temp['time'],
                    y=df_temp['nakata_extruder_screw_display_temp'],
                    name='螺杆温度 (°C)',
                    mode='lines',
                    line=dict(color='purple', width=1),
                    yaxis='y5'
                ))

            # Multi-axis layout: one y-axis per physical quantity.
            fig_trend.update_layout(
                title='原始数据趋势',
                xaxis=dict(
                    title='时间',
                    rangeslider=dict(visible=True),
                    type='date'
                ),
                yaxis=dict(
                    title='米重 (Kg/m)',
                    title_font=dict(color='blue'),
                    tickfont=dict(color='blue')
                ),
                yaxis2=dict(
                    title='螺杆转速 (RPM)',
                    title_font=dict(color='green'),
                    tickfont=dict(color='green'),
                    overlaying='y',
                    side='right'
                ),
                yaxis3=dict(
                    title='机头压力',
                    title_font=dict(color='orange'),
                    tickfont=dict(color='orange'),
                    overlaying='y',
                    side='right',
                    anchor='free',
                    position=0.85
                ),
                yaxis4=dict(
                    title='流程主速 (M/Min)',
                    title_font=dict(color='red'),
                    tickfont=dict(color='red'),
                    overlaying='y',
                    side='right',
                    anchor='free',
                    position=0.75
                ),
                yaxis5=dict(
                    title='温度 (°C)',
                    title_font=dict(color='purple'),
                    tickfont=dict(color='purple'),
                    overlaying='y',
                    side='left',
                    anchor='free',
                    position=0.15
                ),
                legend=dict(
                    orientation="h",
                    yanchor="bottom",
                    y=1.02,
                    xanchor="right",
                    x=1
                ),
                height=600,
                margin=dict(l=100, r=200, t=100, b=100),
                hovermode='x unified'
            )

            st.plotly_chart(fig_trend, width='stretch', config={'scrollZoom': True})

            # --- Advanced prediction analysis ---
            st.subheader("📊 高级预测分析")

            # All configured features must be present after integration.
            missing_features = [f for f in default_features if f not in df_analysis.columns]
            if missing_features:
                st.warning(f"数据中缺少以下特征: {', '.join(missing_features)}")
            else:
                try:
                    # Drop rows with NaN in any feature or the target.
                    df_analysis_clean = df_analysis.dropna(subset=default_features + ['米重'])

                    if len(df_analysis_clean) < 30:
                        st.warning("数据量不足,无法进行有效的预测分析")
                    else:
                        # Assemble features + target in one frame, then drop
                        # any remaining NaNs so X and y stay aligned.
                        all_features = df_analysis_clean[default_features + ['米重']].copy()
                        all_features_clean = all_features.dropna()

                        if len(all_features_clean) < 20:
                            st.warning("特征工程后数据量不足,无法进行有效的预测分析")
                        else:
                            # Split features from target.
                            feature_columns = [col for col in all_features_clean.columns if col != '米重']
                            X_final = all_features_clean[feature_columns]
                            y_final = all_features_clean['米重']

                            # Defensive re-check (always true after the guard above).
                            if len(X_final) >= 20:
                                # Train / test split.
                                X_train, X_test, y_train, y_test = train_test_split(
                                    X_final, y_final, test_size=0.2, random_state=42)

                                # Scaling: SVR and MLP need standardized inputs
                                # and a scaled target; tree models use raw data.
                                scaler_X = StandardScaler()
                                scaler_y = MinMaxScaler()

                                X_train_scaled = scaler_X.fit_transform(X_train)
                                X_test_scaled = scaler_X.transform(X_test)
                                y_train_scaled = scaler_y.fit_transform(y_train.values.reshape(-1, 1)).ravel()
                                y_test_scaled = scaler_y.transform(y_test.values.reshape(-1, 1)).ravel()

                                # Model training.
                                model = None
                                y_pred = None

                                if model_type == 'RandomForest':
                                    model = RandomForestRegressor(n_estimators=100, random_state=42)
                                    model.fit(X_train, y_train)
                                    y_pred = model.predict(X_test)

                                elif model_type == 'GradientBoosting':
                                    model = GradientBoostingRegressor(n_estimators=100, random_state=42)
                                    model.fit(X_train, y_train)
                                    y_pred = model.predict(X_test)

                                elif model_type == 'SVR':
                                    model = SVR(kernel='rbf', C=1.0, gamma='scale')
                                    model.fit(X_train_scaled, y_train_scaled)
                                    y_pred_scaled = model.predict(X_test_scaled)
                                    y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel()

                                elif model_type == 'MLP':
                                    model = MLPRegressor(hidden_layer_sizes=(100, 50),
                                                         max_iter=500, random_state=42)
                                    model.fit(X_train_scaled, y_train_scaled)
                                    y_pred_scaled = model.predict(X_test_scaled)
                                    y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel()

                                # Evaluation metrics. Trim to a common length
                                # (defensive); FIX: the trimmed arrays are now
                                # defined on both branches so the chart code
                                # below can never hit a NameError.
                                min_len = min(len(y_test), len(y_pred))
                                if min_len > 0:
                                    y_test_trimmed = y_test[:min_len]
                                    y_pred_trimmed = y_pred[:min_len]
                                    r2 = r2_score(y_test_trimmed, y_pred_trimmed)
                                    mse = mean_squared_error(y_test_trimmed, y_pred_trimmed)
                                    mae = mean_absolute_error(y_test_trimmed, y_pred_trimmed)
                                    rmse = np.sqrt(mse)
                                else:
                                    y_test_trimmed = y_test
                                    y_pred_trimmed = y_pred
                                    r2 = 0
                                    mse = 0
                                    mae = 0
                                    rmse = 0

                                # Model performance.
                                metrics_cols = st.columns(2)
                                with metrics_cols[0]:
                                    st.metric("R² 得分", f"{r2:.4f}")
                                    st.metric("均方误差 (MSE)", f"{mse:.6f}")
                                with metrics_cols[1]:
                                    st.metric("平均绝对误差 (MAE)", f"{mae:.6f}")
                                    st.metric("均方根误差 (RMSE)", f"{rmse:.6f}")

                                # --- Actual vs predicted ---
                                st.subheader("📈 实际值与预测值对比")

                                compare_df = pd.DataFrame({
                                    '实际值': y_test_trimmed,
                                    '预测值': y_pred_trimmed
                                })
                                compare_df = compare_df.sort_index()

                                fig_compare = go.Figure()
                                fig_compare.add_trace(go.Scatter(
                                    x=compare_df.index,
                                    y=compare_df['实际值'],
                                    name='实际值',
                                    mode='lines+markers',
                                    line=dict(color='blue', width=2)
                                ))
                                fig_compare.add_trace(go.Scatter(
                                    x=compare_df.index,
                                    y=compare_df['预测值'],
                                    name='预测值',
                                    mode='lines+markers',
                                    line=dict(color='red', width=2, dash='dash')
                                ))
                                fig_compare.update_layout(
                                    title=f'测试集: 实际米重 vs 预测米重 ({model_type})',
                                    xaxis=dict(title='时间'),
                                    yaxis=dict(title='米重 (Kg/m)'),
                                    legend=dict(orientation='h', yanchor='bottom',
                                                y=1.02, xanchor='right', x=1),
                                    height=400
                                )
                                st.plotly_chart(fig_compare, width='stretch')

                                # --- Residual analysis ---
                                st.subheader("📉 残差分析")

                                residuals = y_test_trimmed - y_pred_trimmed

                                # FIX: use the trimmed predictions on the x-axis
                                # so x and y always have the same length.
                                fig_residual = go.Figure()
                                fig_residual.add_trace(go.Scatter(
                                    x=y_pred_trimmed,
                                    y=residuals,
                                    mode='markers',
                                    marker=dict(color='green', size=8, opacity=0.6)
                                ))
                                fig_residual.add_shape(
                                    type="line",
                                    x0=y_pred_trimmed.min(),
                                    y0=0,
                                    x1=y_pred_trimmed.max(),
                                    y1=0,
                                    line=dict(color="red", width=2, dash="dash")
                                )
                                fig_residual.update_layout(
                                    title='残差图',
                                    xaxis=dict(title='预测值'),
                                    yaxis=dict(title='残差'),
                                    height=400
                                )
                                st.plotly_chart(fig_residual, width='stretch')

                                # --- Feature importance (tree models only) ---
                                if model_type in ['RandomForest', 'GradientBoosting']:
                                    st.subheader("⚙️ 特征重要性分析")

                                    feature_importance = pd.DataFrame({
                                        '特征': X_train.columns,
                                        '重要性': model.feature_importances_
                                    })
                                    feature_importance = feature_importance.sort_values('重要性', ascending=False)

                                    fig_importance = px.bar(
                                        feature_importance,
                                        x='特征',
                                        y='重要性',
                                        title='特征重要性',
                                        color='重要性',
                                        color_continuous_scale='viridis'
                                    )
                                    fig_importance.update_layout(
                                        xaxis=dict(tickangle=-45),
                                        height=400
                                    )
                                    st.plotly_chart(fig_importance, width='stretch')

                                # --- Interactive single-point prediction ---
                                st.subheader("🔮 米重预测")

                                st.write("输入特征值进行米重预测:")
                                predict_cols = st.columns(2)
                                input_features = {}

                                for i, feature in enumerate(default_features):
                                    with predict_cols[i % 2]:
                                        # Bound each input by the observed range;
                                        # default to the mean.
                                        min_val = df_analysis_clean[feature].min()
                                        max_val = df_analysis_clean[feature].max()
                                        mean_val = df_analysis_clean[feature].mean()

                                        input_features[feature] = st.number_input(
                                            f"{feature}",
                                            key=f"ma_pred_{feature}",
                                            value=float(mean_val),
                                            min_value=float(min_val),
                                            max_value=float(max_val),
                                            step=0.1
                                        )

                                if st.button("预测米重"):
                                    # One-row frame with the same column order
                                    # as the training features.
                                    input_df = pd.DataFrame([input_features])

                                    if model_type in ['SVR', 'MLP']:
                                        # Scaled models: transform in, inverse-transform out.
                                        input_scaled = scaler_X.transform(input_df)
                                        prediction_scaled = model.predict(input_scaled)
                                        predicted_weight = scaler_y.inverse_transform(
                                            prediction_scaled.reshape(-1, 1)).ravel()[0]
                                    else:
                                        predicted_weight = model.predict(input_df)[0]

                                    st.success(f"预测米重: {predicted_weight:.4f} Kg/m")

                                # --- Data preview ---
                                st.subheader("📋 数据预览")
                                st.dataframe(df_analysis.head(20), width='stretch')

                                # --- Export ---
                                st.subheader("💾 导出数据")
                                csv = df_analysis.to_csv(index=False)
                                st.download_button(
                                    label="导出整合后的数据 (CSV)",
                                    data=csv,
                                    file_name=f"metered_weight_advanced_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv",
                                    mime="text/csv",
                                    help="点击按钮导出整合后的米重分析数据"
                                )
                except Exception as e:
                    # Surface training/prediction failures in the UI instead
                    # of crashing the whole page.
                    st.error(f"模型训练或预测失败: {str(e)}")

    else:
        # No cached data yet — prompt the user to run a query.
        st.info("请选择时间范围并点击'开始分析'按钮获取数据。")
| 对比新文件 (diff: new file)
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.main_process_service import MainProcessService |
| | | |
| | | def show_metered_weight_correlation(): |
| | | # åå§åæå¡ |
| | | extruder_service = ExtruderService() |
| | | main_process_service = MainProcessService() |
| | | |
| | | # 页颿 é¢ |
| | | st.title("ç±³éç¸å
³æ§åæ") |
| | | |
| | | # åå§åä¼è¯ç¶æç¨äºæ¥æåæ¥ |
| | | if 'mc_start_date' not in st.session_state: |
| | | st.session_state['mc_start_date'] = datetime.now().date() - timedelta(days=7) |
| | | if 'mc_end_date' not in st.session_state: |
| | | st.session_state['mc_end_date'] = datetime.now().date() |
| | | if 'mc_quick_select' not in st.session_state: |
| | | st.session_state['mc_quick_select'] = "æè¿7天" |
| | | if 'mc_time_offset' not in st.session_state: |
| | | st.session_state['mc_time_offset'] = 0.0 |
| | | |
| | | # å®ä¹åè°å½æ° |
| | | def update_dates(qs): |
| | | st.session_state['mc_quick_select'] = qs |
| | | today = datetime.now().date() |
| | | if qs == "ä»å¤©": |
| | | st.session_state['mc_start_date'] = today |
| | | st.session_state['mc_end_date'] = today |
| | | elif qs == "æè¿3天": |
| | | st.session_state['mc_start_date'] = today - timedelta(days=3) |
| | | st.session_state['mc_end_date'] = today |
| | | elif qs == "æè¿7天": |
| | | st.session_state['mc_start_date'] = today - timedelta(days=7) |
| | | st.session_state['mc_end_date'] = today |
| | | elif qs == "æè¿30天": |
| | | st.session_state['mc_start_date'] = today - timedelta(days=30) |
| | | st.session_state['mc_end_date'] = today |
| | | |
| | | def on_date_change(): |
| | | st.session_state['mc_quick_select'] = "èªå®ä¹" |
| | | |
| | | # æ¥è¯¢æ¡ä»¶åºå |
| | | with st.expander("ð æ¥è¯¢é
ç½®", expanded=True): |
| | | # æ·»å èªå®ä¹ CSS å®ç°ååºå¼æ¢è¡ |
| | | st.markdown(""" |
| | | <style> |
| | | /* 强å¶å容卿¢è¡ */ |
| | | [data-testid="stExpander"] [data-testid="column"] { |
| | | flex: 1 1 120px !important; |
| | | min-width: 120px !important; |
| | | } |
| | | /* éå¯¹æ¥æè¾å
¥æ¡åç¨å¾®å 宽ä¸ç¹ */ |
| | | @media (min-width: 768px) { |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(6), |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(7) { |
| | | flex: 2 1 180px !important; |
| | | min-width: 180px !important; |
| | | } |
| | | } |
| | | </style> |
| | | """, unsafe_allow_html=True) |
| | | |
| | | # å建å¸å± |
| | | cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1]) |
| | | |
| | | options = ["ä»å¤©", "æè¿3天", "æè¿7天", "æè¿30天", "èªå®ä¹"] |
| | | for i, option in enumerate(options): |
| | | with cols[i]: |
| | | # æ ¹æ®å½åéæ©ç¶æå³å®æé®ç±»å |
| | | button_type = "primary" if st.session_state['mc_quick_select'] == option else "secondary" |
| | | if st.button(option, key=f"btn_mc_{option}", width='stretch', type=button_type): |
| | | update_dates(option) |
| | | st.rerun() |
| | | |
| | | with cols[5]: |
| | | start_date = st.date_input( |
| | | "å¼å§æ¥æ", |
| | | label_visibility="collapsed", |
| | | key="mc_start_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[6]: |
| | | end_date = st.date_input( |
| | | "ç»ææ¥æ", |
| | | label_visibility="collapsed", |
| | | key="mc_end_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[7]: |
| | | query_button = st.button("ð å¼å§åæ", key="mc_query", width='stretch') |
| | | |
| | | # æ°æ®å¯¹é½è°æ´ |
| | | st.markdown("---") |
| | | offset_cols = st.columns([2, 4, 2]) |
| | | with offset_cols[0]: |
| | | st.write("â±ï¸ **æ°æ®å¯¹é½è°æ´**") |
| | | with offset_cols[1]: |
| | | time_offset = st.slider( |
| | | "æ¶é´åç§» (åé)", |
| | | min_value=0.0, |
| | | max_value=5.0, |
| | | value=st.session_state['mc_time_offset'], |
| | | step=0.1, |
| | | help="è°æ´ä¸»æµç¨åæ¸©åº¦æ°æ®çæ¶é´åç§»ï¼ä½¿å
¶ä¸æ¤åºæºç±³éæ°æ®å¯¹é½ã" |
| | | ) |
| | | st.session_state['mc_time_offset'] = time_offset |
| | | with offset_cols[2]: |
| | | st.write(f"å½ååç§»: {time_offset} åé") |
| | | |
| | | # 转æ¢ä¸ºdatetime对象 |
| | | start_dt = datetime.combine(start_date, datetime.min.time()) |
| | | end_dt = datetime.combine(end_date, datetime.max.time()) |
| | | |
| | | # æ¥è¯¢å¤ç - ä»
è·åæ°æ®å¹¶ç¼åå°ä¼è¯ç¶æ |
| | | if query_button: |
| | | with st.spinner("æ£å¨è·åæ°æ®..."): |
| | | # 1. è·å宿´çæ¤åºæºæ°æ®ï¼å
å«æææ¶é´ç¹çèºæè½¬éåæºå¤´ååï¼ |
| | | df_extruder_full = extruder_service.get_extruder_data(start_dt, end_dt) |
| | | |
| | | # 2. è·å主æµç¨æ§å¶æ°æ® |
| | | df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt) |
| | | |
| | | df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt) |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_extruder_full is not None and not df_extruder_full.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | | st.warning("æéæ¶é´æ®µå
æªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã") |
| | | # æ¸
é¤ç¼åæ°æ® |
| | | for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp', 'last_query_start', 'last_query_end']: |
| | | if key in st.session_state: |
| | | del st.session_state[key] |
| | | return |
| | | |
| | | # ç¼åæ°æ®å°ä¼è¯ç¶æ |
| | | st.session_state['cached_extruder_full'] = df_extruder_full |
| | | st.session_state['cached_main_speed'] = df_main_speed |
| | | st.session_state['cached_temp'] = df_temp |
| | | st.session_state['last_query_start'] = start_dt |
| | | st.session_state['last_query_end'] = end_dt |
| | | |
| | | # æ°æ®å¤çåå¾è¡¨æ¸²æ - æ¯æ¬¡åºç¨éæ°è¿è¡æ¶æ§è¡ï¼å
æ¬è°æ´æ¶é´åç§»æ¶ï¼ |
| | | if all(key in st.session_state for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp']): |
| | | with st.spinner("æ£å¨åææ°æ®ç¸å
³æ§..."): |
| | | # è·åç¼åæ°æ® |
| | | df_extruder_full = st.session_state['cached_extruder_full'] |
| | | df_main_speed = st.session_state['cached_main_speed'] |
| | | df_temp = st.session_state['cached_temp'] |
| | | |
| | | # è·åå½åæ¶é´åç§»é |
| | | offset_delta = timedelta(minutes=st.session_state['mc_time_offset']) |
| | | |
| | | # å¤çæ°æ® |
| | | if df_extruder_full is not None and not df_extruder_full.empty: |
| | | # è¿æ»¤æºå¤´åå大äº2çå¼ |
| | | df_extruder_filtered = df_extruder_full[df_extruder_full['head_pressure'] <= 2] |
| | | |
| | | # ä¸ºç±³éæ°æ®å建åç§»åçæ¶é´åï¼åªå¯¹ç±³éæ°æ®è¿è¡æ¶é´åç§»ï¼ |
| | | df_extruder_filtered['weight_time'] = df_extruder_filtered['time'] - offset_delta |
| | | else: |
| | | df_extruder_filtered = None |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_extruder_filtered is not None and not df_extruder_filtered.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | | st.warning("æéæ¶é´æ®µå
æªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã") |
| | | return |
| | | |
| | | # æ°æ®æ´åä¸é¢å¤ç |
| | | def integrate_data(df_extruder_filtered, df_main_speed, df_temp): |
| | | # ç¡®ä¿æ¤åºæºæ°æ®åå¨ |
| | | if df_extruder_filtered is None or df_extruder_filtered.empty: |
| | | return None |
| | | |
| | | # å建åªå
å«ç±³éååç§»æ¶é´çä¸»æ°æ®é |
| | | df_weight = df_extruder_filtered[['weight_time', 'metered_weight']].copy() |
| | | df_weight.rename(columns={'weight_time': 'time'}, inplace=True) # å°weight_timeéå½å为timeä½ä¸ºåºåæ¶é´ |
| | | |
| | | # å建å
å«èºæè½¬éååå§æ¶é´ç宿´æ°æ®é |
| | | # 注æï¼è¿é使ç¨å®æ´çèºæè½¬éæ°æ®ï¼èä¸ä»
ä»
æ¯ä¸ç±³é对åºçæ°æ®ç¹ |
| | | df_screw = df_extruder_filtered[['time', 'screw_speed_actual']].copy() |
| | | |
| | | # å建å
嫿ºå¤´ååååå§æ¶é´ç宿´æ°æ®é |
| | | # 注æï¼è¿é使ç¨å®æ´çæºå¤´ååæ°æ®ï¼èä¸ä»
ä»
æ¯ä¸ç±³é对åºçæ°æ®ç¹ |
| | | df_pressure = df_extruder_filtered[['time', 'head_pressure']].copy() |
| | | |
| | | # 使ç¨åç§»åçç±³éæ¶é´æ´åèºæè½¬éæ°æ® |
| | | # å
³é®ï¼ä½¿ç¨merge_asofæ ¹æ®åç§»åçç±³éæ¶é´æ¥æ¾ææ¥è¿çèºæè½¬éæ°æ® |
| | | df_merged = pd.merge_asof( |
| | | df_weight.sort_values('time'), |
| | | df_screw.sort_values('time'), |
| | | on='time', |
| | | direction='nearest', |
| | | tolerance=pd.Timedelta('1min') |
| | | ) |
| | | |
| | | # 使ç¨åç§»åçç±³éæ¶é´æ´åæºå¤´ååæ°æ® |
| | | # å
³é®ï¼ä½¿ç¨merge_asofæ ¹æ®åç§»åçç±³éæ¶é´æ¥æ¾ææ¥è¿çæºå¤´ååæ°æ® |
| | | df_merged = pd.merge_asof( |
| | | df_merged.sort_values('time'), |
| | | df_pressure.sort_values('time'), |
| | | on='time', |
| | | direction='nearest', |
| | | tolerance=pd.Timedelta('1min') |
| | | ) |
| | | |
| | | # æ´å主æµç¨æ°æ® |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | df_main_speed = df_main_speed[['time', 'process_main_speed']] |
| | | df_merged = pd.merge_asof( |
| | | df_merged.sort_values('time'), |
| | | df_main_speed.sort_values('time'), |
| | | on='time', |
| | | direction='nearest', |
| | | tolerance=pd.Timedelta('1min') |
| | | ) |
| | | |
| | | # æ´åæ¸©åº¦æ°æ® |
| | | if df_temp is not None and not df_temp.empty: |
| | | temp_cols = ['time', 'nakata_extruder_screw_display_temp', |
| | | 'nakata_extruder_rear_barrel_display_temp', |
| | | 'nakata_extruder_front_barrel_display_temp', |
| | | 'nakata_extruder_head_display_temp'] |
| | | df_temp_subset = df_temp[temp_cols].copy() |
| | | df_merged = pd.merge_asof( |
| | | df_merged.sort_values('time'), |
| | | df_temp_subset.sort_values('time'), |
| | | on='time', |
| | | direction='nearest', |
| | | tolerance=pd.Timedelta('1min') |
| | | ) |
| | | |
| | | # éå½åå以æé«å¯è¯»æ§ |
| | | df_merged.rename(columns={ |
| | | 'screw_speed_actual': 'èºæè½¬é', |
| | | 'head_pressure': 'æºå¤´åå', |
| | | 'process_main_speed': 'æµç¨ä¸»é', |
| | | 'nakata_extruder_screw_display_temp': 'èºææ¸©åº¦', |
| | | 'nakata_extruder_rear_barrel_display_temp': 'åæºçæ¸©åº¦', |
| | | 'nakata_extruder_front_barrel_display_temp': 'åæºçæ¸©åº¦', |
| | | 'nakata_extruder_head_display_temp': 'æºå¤´æ¸©åº¦' |
| | | }, inplace=True) |
| | | |
| | | # æ¸
çæ°æ® |
| | | df_merged.dropna(subset=['metered_weight'], inplace=True) |
| | | |
| | | return df_merged |
| | | |
| | | # æ§è¡æ°æ®æ´å |
| | | df_analysis = integrate_data(df_extruder_filtered, df_main_speed, df_temp) |
| | | |
| | | if df_analysis is None or df_analysis.empty: |
| | | st.warning("æ°æ®æ´å失败ï¼è¯·æ£æ¥æ°æ®è´¨éæè°æ´æ¶é´èå´ã") |
| | | return |
| | | |
| | | # éå½åç±³éå |
| | | df_analysis.rename(columns={'metered_weight': 'ç±³é'}, inplace=True) |
| | | |
| | | # --- åå§æ°æ®è¶å¿å¾ --- |
| | | st.subheader("ð åå§æ°æ®è¶å¿å¾") |
| | | |
| | | # å建è¶å¿å¾ |
| | | fig_trend = go.Figure() |
| | | |
| | | # æ·»å ç±³éæ°æ®ï¼ä½¿ç¨åç§»åçæ¶é´ï¼ |
| | | if df_extruder_filtered is not None and not df_extruder_filtered.empty: |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_filtered['weight_time'], # 使ç¨åç§»åçæ¶é´ |
| | | y=df_extruder_filtered['metered_weight'], |
| | | name='ç±³é (Kg/m) [å·²åç§»]', |
| | | mode='lines', |
| | | line=dict(color='blue', width=2) |
| | | )) |
| | | |
| | | # æ·»å èºæè½¬éï¼ä½¿ç¨åå§æ¶é´ï¼ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_filtered['time'], # 使ç¨åå§æ¶é´ |
| | | y=df_extruder_filtered['screw_speed_actual'], |
| | | name='èºæè½¬é (RPM)', |
| | | mode='lines', |
| | | line=dict(color='green', width=1.5), |
| | | yaxis='y2' |
| | | )) |
| | | |
| | | # æ·»å æºå¤´ååï¼ä½¿ç¨åå§æ¶é´ï¼å·²è¿æ»¤å¤§äº2çå¼ï¼ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_filtered['time'], # 使ç¨åå§æ¶é´ |
| | | y=df_extruder_filtered['head_pressure'], |
| | | name='æºå¤´åå (â¤2)', |
| | | mode='lines', |
| | | line=dict(color='orange', width=1.5), |
| | | yaxis='y3' |
| | | )) |
| | | |
| | | # æ·»å æµç¨ä¸»é |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_main_speed['time'], |
| | | y=df_main_speed['process_main_speed'], |
| | | name='æµç¨ä¸»é (M/Min)', |
| | | mode='lines', |
| | | line=dict(color='red', width=1.5), |
| | | yaxis='y4' |
| | | )) |
| | | |
| | | # æ·»å æ¸©åº¦æ°æ® |
| | | if df_temp is not None and not df_temp.empty: |
| | | # èºææ¸©åº¦ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_temp['time'], |
| | | y=df_temp['nakata_extruder_screw_display_temp'], |
| | | name='èºææ¸©åº¦ (°C)', |
| | | mode='lines', |
| | | line=dict(color='purple', width=1), |
| | | yaxis='y5' |
| | | )) |
| | | # åæºçæ¸©åº¦ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_temp['time'], |
| | | y=df_temp['nakata_extruder_rear_barrel_display_temp'], |
| | | name='åæºçæ¸©åº¦ (°C)', |
| | | mode='lines', |
| | | line=dict(color='pink', width=1), |
| | | yaxis='y5' |
| | | )) |
| | | # åæºçæ¸©åº¦ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_temp['time'], |
| | | y=df_temp['nakata_extruder_front_barrel_display_temp'], |
| | | name='åæºçæ¸©åº¦ (°C)', |
| | | mode='lines', |
| | | line=dict(color='brown', width=1), |
| | | yaxis='y5' |
| | | )) |
| | | # æºå¤´æ¸©åº¦ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_temp['time'], |
| | | y=df_temp['nakata_extruder_head_display_temp'], |
| | | name='æºå¤´æ¸©åº¦ (°C)', |
| | | mode='lines', |
| | | line=dict(color='gray', width=1), |
| | | yaxis='y5' |
| | | )) |
| | | |
| | | # é
ç½®è¶å¿å¾å¸å± |
| | | fig_trend.update_layout( |
| | | title=f'åå§æ°æ®è¶å¿ (ç±³éåååç§» {st.session_state["mc_time_offset"]} åé)', |
| | | xaxis=dict( |
| | | title='æ¶é´', |
| | | rangeslider=dict(visible=True), |
| | | type='date' |
| | | ), |
| | | yaxis=dict( |
| | | title='ç±³é (Kg/m)', |
| | | title_font=dict(color='blue'), |
| | | tickfont=dict(color='blue') |
| | | ), |
| | | yaxis2=dict( |
| | | title='èºæè½¬é (RPM)', |
| | | title_font=dict(color='green'), |
| | | tickfont=dict(color='green'), |
| | | overlaying='y', |
| | | side='right' |
| | | ), |
| | | yaxis3=dict( |
| | | title='æºå¤´åå', |
| | | title_font=dict(color='orange'), |
| | | tickfont=dict(color='orange'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.85 |
| | | ), |
| | | yaxis4=dict( |
| | | title='æµç¨ä¸»é (M/Min)', |
| | | title_font=dict(color='red'), |
| | | tickfont=dict(color='red'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.75 |
| | | ), |
| | | yaxis5=dict( |
| | | title='温度 (°C)', |
| | | title_font=dict(color='purple'), |
| | | tickfont=dict(color='purple'), |
| | | overlaying='y', |
| | | side='left', |
| | | anchor='free', |
| | | position=0.15 |
| | | ), |
| | | legend=dict( |
| | | orientation="h", |
| | | yanchor="bottom", |
| | | y=1.02, |
| | | xanchor="right", |
| | | x=1 |
| | | ), |
| | | height=600, |
| | | margin=dict(l=100, r=200, t=100, b=100), |
| | | hovermode='x unified', |
| | | dragmode='select', |
| | | ) |
| | | |
| | | # æ¾ç¤ºè¶å¿å¾ |
| | | selection = st.plotly_chart(fig_trend, width='stretch', config={'scrollZoom': True}, on_select='rerun' ) |
| | | |
| | | # è°è¯è¾åº |
| | | # st.write("åå§ selection 对象:", selection) |
| | | |
| | | # å®ä¹åæå |
| | | analysis_cols = ['ç±³é', 'èºæè½¬é', 'æºå¤´åå', 'æµç¨ä¸»é', 'èºææ¸©åº¦', 'åæºçæ¸©åº¦', 'åæºçæ¸©åº¦', 'æºå¤´æ¸©åº¦'] |
| | | |
| | | # å®ä¹è¦åæçåæ° |
| | | params = [ |
| | | ('èºæè½¬é', 'RPM'), |
| | | ('æºå¤´åå', ''), |
| | | ('æµç¨ä¸»é', 'M/Min'), |
| | | ('èºææ¸©åº¦', '°C'), |
| | | ('åæºçæ¸©åº¦', '°C'), |
| | | ('åæºçæ¸©åº¦', '°C'), |
| | | ('æºå¤´æ¸©åº¦', '°C') |
| | | ] |
| | | |
| | | # æ£ç¡®æå |
| | | selected_data = None |
| | | if selection.selection and selection.selection.box: |
| | | boxs = selection.selection.box |
| | | # è·åé䏿¡çxè½´èå´ |
| | | x_range = boxs[0]['x'][0], boxs[0]['x'][1] |
| | | st.write("xè½´èå´:", x_range) |
| | | |
| | | # è¿æ»¤åºå¨xè½´èå´å
çæ°æ® |
| | | # 注æï¼è¿ééè¦ä½¿ç¨df_analysisçtimeåè¿è¡è¿æ»¤ |
| | | # é¦å
éè¦ç¡®ä¿df_analysisætimeå |
| | | if 'time' in df_analysis.columns: |
| | | selected_data = df_analysis[ |
| | | (df_analysis['time'] >= x_range[0]) & |
| | | (df_analysis['time'] <= x_range[1]) |
| | | ].copy() # 使ç¨copy()é¿å
åçè¦å |
| | | st.write(f"éä¸èå´å
çæ°æ®ç¹æ°é: {len(selected_data)}") |
| | | # æ¾ç¤ºå¯ç¨çååï¼å¸®å©è°è¯ |
| | | st.write("å¯ç¨åå:", list(selected_data.columns)) |
| | | else: |
| | | st.warning("æ°æ®ä¸ç¼ºå°timeåï¼æ æ³è¿è¡èå´è¿æ»¤") |
| | | |
| | | else: |
| | | st.info("请使ç¨ç©å½¢æ¡éå·¥å
·éæ©æ¶é´èå´ï¼å·²èªå¨å¯ç¨éæ©æ¨¡å¼ï¼") |
| | | |
| | | # æ·»å ç»èåææé® |
| | | if selected_data is not None and not selected_data.empty: |
| | | if st.button("ð ç»èåæ"): |
| | | st.subheader("ð æ¡éèå´ç»èåæ") |
| | | |
| | | # 计ç®éä¸èå´å
çç¸å
³ç³»æ°ç©éµ |
| | | selected_corr_matrix = selected_data[analysis_cols].corr() |
| | | |
| | | # å建éä¸èå´ççåå¾ |
| | | selected_fig_heatmap = px.imshow( |
| | | selected_corr_matrix, |
| | | text_auto=True, |
| | | aspect="auto", |
| | | title="æ¡éèå´åæ°ç¸å
³æ§ç©éµ", |
| | | color_continuous_scale=["#0000FF", "#FFFFFF", "#FF0000"], |
| | | color_continuous_midpoint=0, |
| | | labels=dict(color="ç¸å
³ç³»æ°") |
| | | ) |
| | | |
| | | # èªå®ä¹å¸å± |
| | | selected_fig_heatmap.update_layout( |
| | | height=400, |
| | | margin=dict(l=80, r=80, t=80, b=80), |
| | | xaxis=dict(tickangle=-45), |
| | | yaxis=dict(tickangle=0) |
| | | ) |
| | | |
| | | # æ¾ç¤ºéä¸èå´ççåå¾ |
| | | st.plotly_chart(selected_fig_heatmap, width='stretch') |
| | | |
| | | # æ¾ç¤ºéä¸èå´çåæ°ä¸ç±³éæ£ç¹å¾ |
| | | st.subheader("ð æ¡éèå´åæ°ä¸ç±³éæ£ç¹å¾") |
| | | |
| | | # å建éä¸èå´çæ£ç¹å¾ |
| | | for i in range(0, len(params), 2): |
| | | row_cols = st.columns(2) |
| | | for j in range(2): |
| | | if i + j < len(params): |
| | | param_name, unit = params[i + j] |
| | | with row_cols[j]: |
| | | if param_name in selected_data.columns: |
| | | # 计ç®ç¸å
³ç³»æ°ï¼æ·»å é误å¤çï¼ |
| | | try: |
| | | # è¿æ»¤æNaNå¼ |
| | | valid_data = selected_data[[param_name, 'ç±³é']].dropna() |
| | | if len(valid_data) >= 2: # è³å°éè¦2ä¸ªæ°æ®ç¹ |
| | | corr_coef = np.corrcoef(valid_data['ç±³é'], valid_data[param_name])[0, 1] |
| | | else: |
| | | corr_coef = None |
| | | except Exception as e: |
| | | corr_coef = None |
| | | |
| | | # å建æ£ç¹å¾ |
| | | fig_scatter = px.scatter( |
| | | selected_data, |
| | | x=param_name, |
| | | y='ç±³é', |
| | | title=f"{param_name} vs ç±³éï¼æ¡éèå´ï¼", |
| | | labels={param_name: f"{param_name} ({unit})" if unit else param_name, 'ç±³é': 'ç±³é (Kg/m)'} |
| | | ) |
| | | |
| | | # æ·»å è¶å¿çº¿ï¼æ·»å é误å¤çï¼ |
| | | try: |
| | | # è¿æ»¤æNaNå¼ |
| | | valid_data = selected_data[[param_name, 'ç±³é']].dropna() |
| | | if len(valid_data) >= 2: # è³å°éè¦2ä¸ªæ°æ®ç¹ |
| | | trend_line = np.poly1d(np.polyfit(valid_data[param_name], valid_data['ç±³é'], 1))(valid_data[param_name]) |
| | | fig_scatter.add_trace(go.Scatter( |
| | | x=valid_data[param_name], |
| | | y=trend_line, |
| | | mode='lines', |
| | | name='è¶å¿çº¿', |
| | | line=dict(color='red', width=2) |
| | | )) |
| | | except Exception as e: |
| | | # 妿è¶å¿çº¿è®¡ç®å¤±è´¥ï¼è·³è¿æ·»å è¶å¿çº¿ |
| | | pass |
| | | |
| | | # æ·»å ç¸å
³ç³»æ°æ³¨éï¼æ·»å é误å¤çï¼ |
| | | if corr_coef is not None: |
| | | fig_scatter.add_annotation( |
| | | x=0.05, y=0.95, |
| | | xref='paper', yref='paper', |
| | | text=f"ç¸å
³ç³»æ°: {corr_coef:.4f}", |
| | | showarrow=False, |
| | | font=dict(size=12, color="black"), |
| | | bgcolor="white", |
| | | bordercolor="black", |
| | | borderwidth=1 |
| | | ) |
| | | else: |
| | | fig_scatter.add_annotation( |
| | | x=0.05, y=0.95, |
| | | xref='paper', yref='paper', |
| | | text="ç¸å
³ç³»æ°: æ æ³è®¡ç®", |
| | | showarrow=False, |
| | | font=dict(size=12, color="black"), |
| | | bgcolor="white", |
| | | bordercolor="black", |
| | | borderwidth=1 |
| | | ) |
| | | |
| | | # æ¾ç¤ºæ£ç¹å¾ |
| | | st.plotly_chart(fig_scatter, use_container_width=True) |
| | | else: |
| | | st.warning(f"æ°æ®ä¸ç¼ºå° {param_name} å") |
| | | |
| | | # æ¾ç¤ºéä¸èå´çæ°æ®æè¦ |
| | | st.subheader("ð æ¡éèå´æ°æ®æè¦") |
| | | selected_summary_cols = st.columns(4) |
| | | |
| | | with selected_summary_cols[0]: |
| | | if 'ç±³é' in selected_data.columns: |
| | | st.metric("å¹³åç±³é", f"{selected_data['ç±³é'].mean():.2f} Kg/m") |
| | | |
| | | with selected_summary_cols[1]: |
| | | if 'èºæè½¬é' in selected_data.columns: |
| | | st.metric("å¹³åèºæè½¬é", f"{selected_data['èºæè½¬é'].mean():.2f} RPM") |
| | | |
| | | with selected_summary_cols[2]: |
| | | if 'æµç¨ä¸»é' in selected_data.columns: |
| | | st.metric("平念ç¨ä¸»é", f"{selected_data['æµç¨ä¸»é'].mean():.2f} M/Min") |
| | | |
| | | with selected_summary_cols[3]: |
| | | if 'æºå¤´åå' in selected_data.columns: |
| | | st.metric("å¹³åæºå¤´åå", f"{selected_data['æºå¤´åå'].mean():.2f}") |
| | | |
| | | # æ¾ç¤ºéä¸èå´çæ°æ®é¢è§ |
| | | st.subheader("ð æ¡éèå´æ°æ®é¢è§") |
| | | st.dataframe(selected_data[analysis_cols].head(10), use_container_width=True) |
| | | |
| | | # --- ç¸å
³æ§ç©éµçåå¾ --- |
| | | st.subheader("ð ç¸å
³æ§ç©éµçåå¾") |
| | | |
| | | # éå½åç±³éå |
| | | df_analysis.rename(columns={'metered_weight': 'ç±³é'}, inplace=True) |
| | | |
| | | # 计ç®ç¸å
³ç³»æ°ç©éµ |
| | | corr_matrix = df_analysis[analysis_cols].corr() |
| | | |
| | | # å建çåå¾ |
| | | fig_heatmap = px.imshow( |
| | | corr_matrix, |
| | | text_auto=True, |
| | | aspect="auto", |
| | | title="åæ°ç¸å
³æ§ç©éµ", |
| | | color_continuous_scale=["#0000FF", "#FFFFFF", "#FF0000"], |
| | | color_continuous_midpoint=0, |
| | | labels=dict(color="ç¸å
³ç³»æ°") |
| | | ) |
| | | |
| | | # èªå®ä¹å¸å± |
| | | fig_heatmap.update_layout( |
| | | height=500, |
| | | margin=dict(l=100, r=100, t=100, b=100), |
| | | xaxis=dict(tickangle=-45), |
| | | yaxis=dict(tickangle=0) |
| | | ) |
| | | |
| | | # æ¾ç¤ºçåå¾ |
| | | st.plotly_chart(fig_heatmap, width='stretch') |
| | | |
| | | # --- åæ°ä¸ç±³éæ£ç¹å¾ --- |
| | | st.subheader("ð åæ°ä¸ç±³éæ£ç¹å¾") |
| | | |
| | | # å建æ£ç¹å¾ |
| | | for i in range(0, len(params), 2): |
| | | row_cols = st.columns(2) |
| | | for j in range(2): |
| | | if i + j < len(params): |
| | | param_name, unit = params[i + j] |
| | | with row_cols[j]: |
| | | if param_name in df_analysis.columns: |
| | | # 计ç®ç¸å
³ç³»æ°ï¼æ·»å é误å¤çï¼ |
| | | try: |
| | | # è¿æ»¤æNaNå¼ |
| | | valid_data = df_analysis[[param_name, 'ç±³é']].dropna() |
| | | if len(valid_data) >= 2: # è³å°éè¦2ä¸ªæ°æ®ç¹ |
| | | corr_coef = np.corrcoef(valid_data['ç±³é'], valid_data[param_name])[0, 1] |
| | | else: |
| | | corr_coef = None |
| | | except Exception as e: |
| | | corr_coef = None |
| | | |
| | | # å建æ£ç¹å¾ |
| | | fig_scatter = px.scatter( |
| | | df_analysis, |
| | | x=param_name, |
| | | y='ç±³é', |
| | | title=f"{param_name} vs ç±³é", |
| | | labels={param_name: f"{param_name} ({unit})" if unit else param_name, 'ç±³é': 'ç±³é (Kg/m)'} |
| | | ) |
| | | |
| | | # æ·»å è¶å¿çº¿ï¼æ·»å é误å¤çï¼ |
| | | try: |
| | | # è¿æ»¤æNaNå¼ |
| | | valid_data = df_analysis[[param_name, 'ç±³é']].dropna() |
| | | if len(valid_data) >= 2: # è³å°éè¦2ä¸ªæ°æ®ç¹ |
| | | trend_line = np.poly1d(np.polyfit(valid_data[param_name], valid_data['ç±³é'], 1))(valid_data[param_name]) |
| | | fig_scatter.add_trace(go.Scatter( |
| | | x=valid_data[param_name], |
| | | y=trend_line, |
| | | mode='lines', |
| | | name='è¶å¿çº¿', |
| | | line=dict(color='red', width=2) |
| | | )) |
| | | except Exception as e: |
| | | # 妿è¶å¿çº¿è®¡ç®å¤±è´¥ï¼è·³è¿æ·»å è¶å¿çº¿ |
| | | pass |
| | | |
| | | # æ·»å ç¸å
³ç³»æ°æ³¨éï¼æ·»å é误å¤çï¼ |
| | | if corr_coef is not None: |
| | | fig_scatter.add_annotation( |
| | | x=0.05, y=0.95, |
| | | xref='paper', yref='paper', |
| | | text=f"ç¸å
³ç³»æ°: {corr_coef:.4f}", |
| | | showarrow=False, |
| | | font=dict(size=12, color="black"), |
| | | bgcolor="white", |
| | | bordercolor="black", |
| | | borderwidth=1 |
| | | ) |
| | | else: |
| | | fig_scatter.add_annotation( |
| | | x=0.05, y=0.95, |
| | | xref='paper', yref='paper', |
| | | text="ç¸å
³ç³»æ°: æ æ³è®¡ç®", |
| | | showarrow=False, |
| | | font=dict(size=12, color="black"), |
| | | bgcolor="white", |
| | | bordercolor="black", |
| | | borderwidth=1 |
| | | ) |
| | | |
| | | # æ¾ç¤ºæ£ç¹å¾ |
| | | st.plotly_chart(fig_scatter, use_container_width=True) |
| | | else: |
| | | st.warning(f"æ°æ®ä¸ç¼ºå° {param_name} å") |
| | | |
| | | # --- ç¸å
³æ§ç»è®¡è¡¨æ ¼ --- |
| | | st.subheader("ð ç¸å
³æ§ç»è®¡") |
| | | |
| | | # è®¡ç®æ¯ä¸ªåæ°ä¸ç±³éçç¸å
³ç³»æ°ï¼æ·»å é误å¤çï¼ |
| | | corr_stats = [] |
| | | for param_name, _ in params: |
| | | if param_name in df_analysis.columns: |
| | | try: |
| | | # è¿æ»¤æNaNå¼ |
| | | valid_data = df_analysis[[param_name, 'ç±³é']].dropna() |
| | | if len(valid_data) >= 2: # è³å°éè¦2ä¸ªæ°æ®ç¹ |
| | | corr_coef = np.corrcoef(valid_data['ç±³é'], valid_data[param_name])[0, 1] |
| | | corr_stats.append({ |
| | | 'åæ°': param_name, |
| | | 'ç¸å
³ç³»æ°': corr_coef, |
| | | 'ç¸å
³ç¨åº¦': '强' if abs(corr_coef) > 0.7 else 'ä¸ç' if abs(corr_coef) > 0.3 else 'å¼±' |
| | | }) |
| | | else: |
| | | corr_stats.append({ |
| | | 'åæ°': param_name, |
| | | 'ç¸å
³ç³»æ°': None, |
| | | 'ç¸å
³ç¨åº¦': 'æ æ³è®¡ç®' |
| | | }) |
| | | except Exception as e: |
| | | corr_stats.append({ |
| | | 'åæ°': param_name, |
| | | 'ç¸å
³ç³»æ°': None, |
| | | 'ç¸å
³ç¨åº¦': 'æ æ³è®¡ç®' |
| | | }) |
| | | |
| | | # å建ç»è®¡è¡¨æ ¼ |
| | | corr_df = pd.DataFrame(corr_stats) |
| | | # æç¸å
³ç³»æ°ç»å¯¹å¼æåºï¼å¤çNoneå¼ï¼ |
| | | try: |
| | | # 计ç®ç¸å
³ç³»æ°ç»å¯¹å¼ï¼å¯¹äºNoneå¼ä½¿ç¨-1ï¼è¿æ ·ä¼æå¨æåï¼ |
| | | corr_df['ç¸å
³ç³»æ°ç»å¯¹å¼'] = corr_df['ç¸å
³ç³»æ°'].apply(lambda x: abs(x) if x is not None else -1) |
| | | corr_df.sort_values('ç¸å
³ç³»æ°ç»å¯¹å¼', ascending=False, inplace=True) |
| | | corr_df.drop('ç¸å
³ç³»æ°ç»å¯¹å¼', axis=1, inplace=True) |
| | | except Exception as e: |
| | | # 妿æåºå¤±è´¥ï¼ä¿æåå§é¡ºåº |
| | | pass |
| | | |
| | | # æ¾ç¤ºè¡¨æ ¼ |
| | | st.dataframe(corr_df, use_container_width=True) |
| | | |
| | | # --- æ°æ®æè¦ --- |
| | | # st.subheader("ð æ°æ®æè¦") |
| | | # summary_cols = st.columns(4) |
| | | |
| | | # with summary_cols[0]: |
| | | # if 'ç±³é' in df_analysis.columns: |
| | | # st.metric("å¹³åç±³é", f"{df_analysis['ç±³é'].mean():.2f} Kg/m") |
| | | |
| | | # with summary_cols[1]: |
| | | # if 'èºæè½¬é' in df_analysis.columns: |
| | | # st.metric("å¹³åèºæè½¬é", f"{df_analysis['èºæè½¬é'].mean():.2f} RPM") |
| | | |
| | | # with summary_cols[2]: |
| | | # if 'æµç¨ä¸»é' in df_analysis.columns: |
| | | # st.metric("平念ç¨ä¸»é", f"{df_analysis['æµç¨ä¸»é'].mean():.2f} M/Min") |
| | | |
| | | # with summary_cols[3]: |
| | | # if 'æºå¤´åå' in df_analysis.columns: |
| | | # st.metric("å¹³åæºå¤´åå", f"{df_analysis['æºå¤´åå'].mean():.2f}") |
| | | |
| | | # --- æ°æ®é¢è§ --- |
| | | st.subheader("ð æ°æ®é¢è§") |
| | | st.dataframe(df_analysis[analysis_cols].head(20), use_container_width=True) |
| | | else: |
| | | # æç¤ºç¨æ·ç¹å»å¼å§åææé® |
| | | st.info("è¯·éæ©æ¶é´èå´å¹¶ç¹å»'å¼å§åæ'æé®è·åæ°æ®ã") |
| ¶Ô±ÈÐÂÎļþ |
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.main_process_service import MainProcessService |
| | | |
| | | def show_metered_weight_dashboard(): |
| | | # åå§åæå¡ |
| | | extruder_service = ExtruderService() |
| | | main_process_service = MainProcessService() |
| | | |
| | | # 页颿 é¢ |
| | | st.title("ç±³é综ååæ") |
| | | |
| | | # åå§åä¼è¯ç¶æç¨äºæ¥æåæ¥ |
| | | if 'mw_start_date' not in st.session_state: |
| | | st.session_state['mw_start_date'] = datetime.now().date() - timedelta(days=7) |
| | | if 'mw_end_date' not in st.session_state: |
| | | st.session_state['mw_end_date'] = datetime.now().date() |
| | | if 'mw_quick_select' not in st.session_state: |
| | | st.session_state['mw_quick_select'] = "æè¿7天" |
| | | |
| | | # å®ä¹åè°å½æ° |
| | | def update_dates(qs): |
| | | st.session_state['mw_quick_select'] = qs |
| | | today = datetime.now().date() |
| | | if qs == "ä»å¤©": |
| | | st.session_state['mw_start_date'] = today |
| | | st.session_state['mw_end_date'] = today |
| | | elif qs == "æè¿3天": |
| | | st.session_state['mw_start_date'] = today - timedelta(days=3) |
| | | st.session_state['mw_end_date'] = today |
| | | elif qs == "æè¿7天": |
| | | st.session_state['mw_start_date'] = today - timedelta(days=7) |
| | | st.session_state['mw_end_date'] = today |
| | | elif qs == "æè¿30天": |
| | | st.session_state['mw_start_date'] = today - timedelta(days=30) |
| | | st.session_state['mw_end_date'] = today |
| | | |
| | | def on_date_change(): |
| | | st.session_state['mw_quick_select'] = "èªå®ä¹" |
| | | |
| | | # æ¥è¯¢æ¡ä»¶åºå |
| | | with st.expander("ð æ¥è¯¢é
ç½®", expanded=True): |
| | | # æ·»å èªå®ä¹ CSS å®ç°ååºå¼æ¢è¡ |
| | | st.markdown(""" |
| | | <style> |
| | | /* 强å¶å容卿¢è¡ */ |
| | | [data-testid="stExpander"] [data-testid="column"] { |
| | | flex: 1 1 120px !important; |
| | | min-width: 120px !important; |
| | | } |
| | | /* éå¯¹æ¥æè¾å
¥æ¡åç¨å¾®å 宽ä¸ç¹ */ |
| | | @media (min-width: 768px) { |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(6), |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(7) { |
| | | flex: 2 1 180px !important; |
| | | min-width: 180px !important; |
| | | } |
| | | } |
| | | </style> |
| | | """, unsafe_allow_html=True) |
| | | |
| | | # å建å¸å± |
| | | cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1]) |
| | | |
| | | options = ["ä»å¤©", "æè¿3天", "æè¿7天", "æè¿30天", "èªå®ä¹"] |
| | | for i, option in enumerate(options): |
| | | with cols[i]: |
| | | # æ ¹æ®å½åéæ©ç¶æå³å®æé®ç±»å |
| | | button_type = "primary" if st.session_state['mw_quick_select'] == option else "secondary" |
| | | if st.button(option, key=f"btn_mw_{option}", width='stretch', type=button_type): |
| | | update_dates(option) |
| | | st.rerun() |
| | | |
| | | with cols[5]: |
| | | start_date = st.date_input( |
| | | "å¼å§æ¥æ", |
| | | label_visibility="collapsed", |
| | | key="mw_start_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[6]: |
| | | end_date = st.date_input( |
| | | "ç»ææ¥æ", |
| | | label_visibility="collapsed", |
| | | key="mw_end_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[7]: |
| | | query_button = st.button("ð å¼å§åæ", key="mw_query", width='stretch') |
| | | |
| | | # 转æ¢ä¸ºdatetime对象 |
| | | start_dt = datetime.combine(start_date, datetime.min.time()) |
| | | end_dt = datetime.combine(end_date, datetime.max.time()) |
| | | |
| | | # æ¥è¯¢å¤ç |
| | | if query_button: |
| | | with st.spinner("æ£å¨èåæ°æ®..."): |
| | | # 1. è·åæ¤åºæºæ°æ® |
| | | df_extruder = extruder_service.get_extruder_data(start_dt, end_dt) |
| | | # å¤çæºå¤´ååï¼å»é¤è¶
è¿2çå¼ |
| | | if df_extruder is not None and not df_extruder.empty: |
| | | df_extruder = df_extruder[df_extruder['head_pressure'] <= 2] |
| | | |
| | | # 2. è·å主æµç¨æ§å¶æ°æ® |
| | | df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt) |
| | | df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt) |
| | | # è·åçµæºè¿è¡çè§æ°æ® |
| | | df_motor = main_process_service.get_motor_monitoring_data(start_dt, end_dt) |
| | | # å¤ççµæºçº¿éæ°æ®ï¼é¤ä»¥10 |
| | | if df_motor is not None and not df_motor.empty: |
| | | df_motor['m1_line_speed'] = df_motor['m1_line_speed'] / 10 |
| | | df_motor['m2_line_speed'] = df_motor['m2_line_speed'] / 10 |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_extruder is not None and not df_extruder.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty, |
| | | df_motor is not None and not df_motor.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | | st.warning("æéæ¶é´æ®µå
æªæ¾å°ä»»ä½æ°æ®ï¼è¯·å°è¯è°æ´æ¥è¯¢æ¡ä»¶ã") |
| | | return |
| | | |
| | | # --- å¾è¡¨1: ç±³éä¸å®é
åæ°åæ --- |
| | | st.subheader("ð ç±³éä¸å®é
åæ°åæ") |
| | | fig1 = go.Figure() |
| | | |
| | | # æ·»å ç±³é |
| | | if df_extruder is not None and not df_extruder.empty: |
| | | fig1.add_trace(go.Scatter( |
| | | x=df_extruder['time'], |
| | | y=df_extruder['metered_weight'], |
| | | name='ç±³é (Kg/m)', |
| | | mode='lines', |
| | | line=dict(color='blue', width=2), |
| | | yaxis='y1' |
| | | )) |
| | | |
| | | # æ·»å æ¤åºæºå®é
转é |
| | | fig1.add_trace(go.Scatter( |
| | | x=df_extruder['time'], |
| | | y=df_extruder['screw_speed_actual'], |
| | | name='æ¤åºæºå®é
转é (RPM)', |
| | | mode='lines', |
| | | line=dict(color='green', width=1.5), |
| | | yaxis='y2' |
| | | )) |
| | | |
| | | # æ·»å æ¤åºæºæºå¤´åå |
| | | fig1.add_trace(go.Scatter( |
| | | x=df_extruder['time'], |
| | | y=df_extruder['head_pressure'], |
| | | name='æ¤åºæºæºå¤´åå', |
| | | mode='lines', |
| | | line=dict(color='orange', width=1.5), |
| | | yaxis='y3' |
| | | )) |
| | | |
| | | # æ·»å æµç¨ä¸»é |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | fig1.add_trace(go.Scatter( |
| | | x=df_main_speed['time'], |
| | | y=df_main_speed['process_main_speed'], |
| | | name='æµç¨ä¸»é (M/Min)', |
| | | mode='lines', |
| | | line=dict(color='red', width=1.5), |
| | | yaxis='y4' # åç¬çé度轴 |
| | | )) |
| | | |
| | | # æ·»å æ¸©åº¦æ¾ç¤ºå¼ |
| | | if df_temp is not None and not df_temp.empty: |
| | | temp_display_fields = { |
| | | 'nakata_extruder_screw_display_temp': 'èºææ¾ç¤º (°C)', |
| | | 'nakata_extruder_rear_barrel_display_temp': 'åæºçæ¾ç¤º (°C)', |
| | | 'nakata_extruder_front_barrel_display_temp': 'åæºçæ¾ç¤º (°C)', |
| | | 'nakata_extruder_head_display_temp': 'æºå¤´æ¾ç¤º (°C)', |
| | | } |
| | | for field, label in temp_display_fields.items(): |
| | | fig1.add_trace(go.Scatter( |
| | | x=df_temp['time'], |
| | | y=df_temp[field], |
| | | name=label, |
| | | mode='lines', |
| | | line=dict(width=1), |
| | | yaxis='y5' |
| | | )) |
| | | |
| | | # æ·»å çµæºçº¿éæ°æ® |
| | | if df_motor is not None and not df_motor.empty: |
| | | fig1.add_trace(go.Scatter( |
| | | x=df_motor['time'], |
| | | y=df_motor['m1_line_speed'], |
| | | name='æåºä¸æ®µçº¿é (M/Min)', |
| | | mode='lines', |
| | | line=dict(color='cyan', width=1.5), |
| | | yaxis='y4' |
| | | )) |
| | | fig1.add_trace(go.Scatter( |
| | | x=df_motor['time'], |
| | | y=df_motor['m2_line_speed'], |
| | | name='æåºäºæ®µçº¿é (M/Min)', |
| | | mode='lines', |
| | | line=dict(color='teal', width=1.5), |
| | | yaxis='y4' |
| | | )) |
| | | |
| | | # 设置å¾è¡¨1å¸å± |
| | | fig1.update_layout( |
| | | title='ç±³éä¸å®é
åæ°è¶å¿åæ', |
| | | xaxis=dict( |
| | | title='æ¶é´', |
| | | rangeslider=dict(visible=True), |
| | | type='date' |
| | | ), |
| | | yaxis=dict( |
| | | title='ç±³é (Kg/m)', |
| | | title_font=dict(color='blue'), |
| | | tickfont=dict(color='blue') |
| | | ), |
| | | yaxis2=dict( |
| | | title='æ¤åºæºè½¬é (RPM)', |
| | | title_font=dict(color='green'), |
| | | tickfont=dict(color='green'), |
| | | overlaying='y', |
| | | side='right' |
| | | ), |
| | | yaxis3=dict( |
| | | title='æºå¤´åå', |
| | | title_font=dict(color='orange'), |
| | | tickfont=dict(color='orange'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.85 |
| | | ), |
| | | yaxis4=dict( |
| | | title='æµç¨ä¸»é (M/Min)', |
| | | title_font=dict(color='red'), |
| | | tickfont=dict(color='red'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.75 |
| | | ), |
| | | yaxis5=dict( |
| | | title='温度æ¾ç¤º (°C)', |
| | | title_font=dict(color='purple'), |
| | | tickfont=dict(color='purple'), |
| | | overlaying='y', |
| | | side='left', |
| | | anchor='free', |
| | | position=0.15 |
| | | ), |
| | | yaxis6=dict( |
| | | title='æåºçº¿é (M/Min)', |
| | | title_font=dict(color='cyan'), |
| | | tickfont=dict(color='cyan'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.65 |
| | | ), |
| | | legend=dict( |
| | | orientation="h", |
| | | yanchor="bottom", |
| | | y=1.02, |
| | | xanchor="right", |
| | | x=1 |
| | | ), |
| | | height=600, |
| | | margin=dict(l=100, r=200, t=100, b=100), |
| | | hovermode='x unified' |
| | | ) |
| | | |
| | | # æ¾ç¤ºå¾è¡¨1 |
| | | st.plotly_chart(fig1, width='stretch', config={'scrollZoom': True}) |
| | | |
| | | # --- å¾è¡¨2: ç±³éä¸è®¾å®åæ°åæ --- |
| | | st.subheader("ð ç±³éä¸è®¾å®åæ°åæ") |
| | | fig2 = go.Figure() |
| | | |
| | | # æ·»å ç±³é |
| | | if df_extruder is not None and not df_extruder.empty: |
| | | fig2.add_trace(go.Scatter( |
| | | x=df_extruder['time'], |
| | | y=df_extruder['metered_weight'], |
| | | name='ç±³é (Kg/m)', |
| | | mode='lines', |
| | | line=dict(color='blue', width=2), |
| | | yaxis='y1' |
| | | )) |
| | | |
| | | # æ·»å æ¤åºæºè®¾å®è½¬é |
| | | fig2.add_trace(go.Scatter( |
| | | x=df_extruder['time'], |
| | | y=df_extruder['screw_speed_set'], |
| | | name='æ¤åºæºè®¾å®è½¬é (RPM)', |
| | | mode='lines', |
| | | line=dict(color='green', width=1.5, dash='dash'), |
| | | yaxis='y2' |
| | | )) |
| | | |
| | | # æ·»å æ¤åºæºæºå¤´åå |
| | | fig2.add_trace(go.Scatter( |
| | | x=df_extruder['time'], |
| | | y=df_extruder['head_pressure'], |
| | | name='æ¤åºæºæºå¤´åå', |
| | | mode='lines', |
| | | line=dict(color='orange', width=1.5), |
| | | yaxis='y3' |
| | | )) |
| | | |
| | | # æ·»å æµç¨ä¸»é |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | fig2.add_trace(go.Scatter( |
| | | x=df_main_speed['time'], |
| | | y=df_main_speed['process_main_speed'], |
| | | name='æµç¨ä¸»é (M/Min)', |
| | | mode='lines', |
| | | line=dict(color='red', width=1.5), |
| | | yaxis='y4' # åç¬çé度轴 |
| | | )) |
| | | |
| | | # æ·»å æ¸©åº¦è®¾å®å¼ |
| | | if df_temp is not None and not df_temp.empty: |
| | | temp_set_fields = { |
| | | 'nakata_extruder_screw_set_temp': 'èºæè®¾å® (°C)', |
| | | 'nakata_extruder_rear_barrel_set_temp': 'åæºçè®¾å® (°C)', |
| | | 'nakata_extruder_front_barrel_set_temp': 'åæºçè®¾å® (°C)', |
| | | 'nakata_extruder_head_set_temp': 'æºå¤´è®¾å® (°C)', |
| | | } |
| | | for field, label in temp_set_fields.items(): |
| | | fig2.add_trace(go.Scatter( |
| | | x=df_temp['time'], |
| | | y=df_temp[field], |
| | | name=label, |
| | | mode='lines', |
| | | line=dict(width=1, dash='dash'), |
| | | yaxis='y5' |
| | | )) |
| | | |
| | | # 设置å¾è¡¨2å¸å± |
| | | fig2.update_layout( |
| | | title='ç±³éä¸è®¾å®åæ°è¶å¿åæ', |
| | | xaxis=dict( |
| | | title='æ¶é´', |
| | | rangeslider=dict(visible=True), |
| | | type='date' |
| | | ), |
| | | yaxis=dict( |
| | | title='ç±³é (Kg/m)', |
| | | title_font=dict(color='blue'), |
| | | tickfont=dict(color='blue') |
| | | ), |
| | | yaxis2=dict( |
| | | title='æ¤åºæºè½¬é (RPM)', |
| | | title_font=dict(color='green'), |
| | | tickfont=dict(color='green'), |
| | | overlaying='y', |
| | | side='right' |
| | | ), |
| | | yaxis3=dict( |
| | | title='æºå¤´åå', |
| | | title_font=dict(color='orange'), |
| | | tickfont=dict(color='orange'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.85 |
| | | ), |
| | | yaxis4=dict( |
| | | title='æµç¨ä¸»é (M/Min)', |
| | | title_font=dict(color='red'), |
| | | tickfont=dict(color='red'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.75 |
| | | ), |
| | | yaxis5=dict( |
| | | title='æ¸©åº¦è®¾å® (°C)', |
| | | title_font=dict(color='purple'), |
| | | tickfont=dict(color='purple'), |
| | | overlaying='y', |
| | | side='left', |
| | | anchor='free', |
| | | position=0.15 |
| | | ), |
| | | legend=dict( |
| | | orientation="h", |
| | | yanchor="bottom", |
| | | y=1.02, |
| | | xanchor="right", |
| | | x=1 |
| | | ), |
| | | height=600, |
| | | margin=dict(l=100, r=150, t=100, b=100), |
| | | hovermode='x unified' |
| | | ) |
| | | |
| | | # æ¾ç¤ºå¾è¡¨2 |
| | | st.plotly_chart(fig2, width='stretch', config={'scrollZoom': True}) |
| | | |
| | | # æ°æ®æè¦ |
| | | # st.subheader("ð æ°æ®æè¦") |
| | | # summary_cols = st.columns(4) |
| | | |
| | | # with summary_cols[0]: |
| | | # if df_extruder is not None and not df_extruder.empty: |
| | | # st.metric("å¹³åç±³é", f"{df_extruder['metered_weight'].mean():.2f} Kg/m") |
| | | |
| | | # with summary_cols[1]: |
| | | # if df_extruder is not None and not df_extruder.empty: |
| | | # st.metric("å¹³å设å®è½¬é", f"{df_extruder['screw_speed_set'].mean():.2f} RPM") |
| | | |
| | | # with summary_cols[2]: |
| | | # if df_extruder is not None and not df_extruder.empty: |
| | | # st.metric("平均实际转速", f"{df_extruder['screw_speed_actual'].mean():.2f} RPM")
| | | |
| | | # with summary_cols[3]: |
| | | # if df_extruder is not None and not df_extruder.empty: |
| | | # st.metric("å¹³åæºå¤´åå", f"{df_extruder['head_pressure'].mean():.2f}") |
| 对比新文件
| | |
| | | import streamlit as st |
| | | import plotly.express as px |
| | | import plotly.graph_objects as go |
| | | import pandas as pd |
| | | import numpy as np |
| | | from datetime import datetime, timedelta |
| | | from app.services.extruder_service import ExtruderService |
| | | from app.services.main_process_service import MainProcessService |
| | | from sklearn.linear_model import LinearRegression |
| | | from sklearn.model_selection import train_test_split |
| | | from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error |
| | | |
| | | |
| | | def show_metered_weight_regression(): |
| | | # åå§åæå¡ |
| | | extruder_service = ExtruderService() |
| | | main_process_service = MainProcessService() |
| | | |
| | | # 页颿 é¢ |
| | | st.title("米重多元线性回归分析")
| | | |
| | | # åå§åä¼è¯ç¶æç¨äºæ¥æåæ¥ |
| | | if 'mr_start_date' not in st.session_state: |
| | | st.session_state['mr_start_date'] = datetime.now().date() - timedelta(days=7) |
| | | if 'mr_end_date' not in st.session_state: |
| | | st.session_state['mr_end_date'] = datetime.now().date() |
| | | if 'mr_quick_select' not in st.session_state: |
| | | st.session_state['mr_quick_select'] = "æè¿7天" |
| | | if 'mr_time_offset' not in st.session_state: |
| | | st.session_state['mr_time_offset'] = 0.0 |
| | | if 'mr_selected_features' not in st.session_state: |
| | | st.session_state['mr_selected_features'] = [ |
| | | 'èºæè½¬é', 'æºå¤´åå', 'æµç¨ä¸»é', 'èºææ¸©åº¦', |
| | | 'åæºçæ¸©åº¦', 'åæºçæ¸©åº¦', 'æºå¤´æ¸©åº¦' |
| | | ] |
| | | |
| | | # å®ä¹åè°å½æ° |
| | | def update_dates(qs): |
| | | st.session_state['mr_quick_select'] = qs |
| | | today = datetime.now().date() |
| | | if qs == "ä»å¤©": |
| | | st.session_state['mr_start_date'] = today |
| | | st.session_state['mr_end_date'] = today |
| | | elif qs == "æè¿3天": |
| | | st.session_state['mr_start_date'] = today - timedelta(days=3) |
| | | st.session_state['mr_end_date'] = today |
| | | elif qs == "æè¿7天": |
| | | st.session_state['mr_start_date'] = today - timedelta(days=7) |
| | | st.session_state['mr_end_date'] = today |
| | | elif qs == "æè¿30天": |
| | | st.session_state['mr_start_date'] = today - timedelta(days=30) |
| | | st.session_state['mr_end_date'] = today |
| | | |
| | | def on_date_change(): |
| | | st.session_state['mr_quick_select'] = "èªå®ä¹" |
| | | |
| | | # æ¥è¯¢æ¡ä»¶åºå |
| | | with st.expander("ð æ¥è¯¢é
ç½®", expanded=True): |
| | | # æ·»å èªå®ä¹ CSS å®ç°ååºå¼æ¢è¡ |
| | | st.markdown(""" |
| | | <style> |
| | | /* 强å¶å容卿¢è¡ */ |
| | | [data-testid="stExpander"] [data-testid="column"] { |
| | | flex: 1 1 120px !important; |
| | | min-width: 120px !important; |
| | | } |
| | | /* éå¯¹æ¥æè¾å
¥æ¡åç¨å¾®å 宽ä¸ç¹ */ |
| | | @media (min-width: 768px) { |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(6), |
| | | [data-testid="stExpander"] [data-testid="column"]:nth-child(7) { |
| | | flex: 2 1 180px !important; |
| | | min-width: 180px !important; |
| | | } |
| | | } |
| | | </style> |
| | | """, unsafe_allow_html=True) |
| | | |
| | | # å建å¸å± |
| | | cols = st.columns([1, 1, 1, 1, 1, 1.5, 1.5, 1]) |
| | | |
| | | options = ["ä»å¤©", "æè¿3天", "æè¿7天", "æè¿30天", "èªå®ä¹"] |
| | | for i, option in enumerate(options): |
| | | with cols[i]: |
| | | # æ ¹æ®å½åéæ©ç¶æå³å®æé®ç±»å |
| | | button_type = "primary" if st.session_state['mr_quick_select'] == option else "secondary" |
| | | if st.button(option, key=f"btn_mr_{option}", width='stretch', type=button_type): |
| | | update_dates(option) |
| | | st.rerun() |
| | | |
| | | with cols[5]: |
| | | start_date = st.date_input( |
| | | "å¼å§æ¥æ", |
| | | label_visibility="collapsed", |
| | | key="mr_start_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[6]: |
| | | end_date = st.date_input( |
| | | "ç»ææ¥æ", |
| | | label_visibility="collapsed", |
| | | key="mr_end_date", |
| | | on_change=on_date_change |
| | | ) |
| | | |
| | | with cols[7]: |
| | | query_button = st.button("ð å¼å§åæ", key="mr_query", width='stretch') |
| | | |
| | | # æ°æ®å¯¹é½è°æ´ |
| | | st.markdown("---") |
| | | offset_cols = st.columns([2, 4, 2]) |
| | | with offset_cols[0]: |
| | | st.write("â±ï¸ **æ°æ®å¯¹é½è°æ´**") |
| | | with offset_cols[1]: |
| | | time_offset = st.slider( |
| | | "æ¶é´åç§» (åé)", |
| | | min_value=0.0, |
| | | max_value=5.0, |
| | | value=st.session_state['mr_time_offset'], |
| | | step=0.1, |
| | | help="调整主流程和温度数据的时间偏移,使其与挤出机米重数据对齐。"
| | | ) |
| | | st.session_state['mr_time_offset'] = time_offset |
| | | with offset_cols[2]: |
| | | st.write(f"å½ååç§»: {time_offset} åé") |
| | | |
| | | # ç¹å¾éæ© |
| | | st.markdown("---") |
| | | st.write("ð **ç¹å¾éæ©**") |
| | | feature_cols = st.columns(2) |
| | | all_features = [ |
| | | 'èºæè½¬é', 'æºå¤´åå', 'æµç¨ä¸»é', 'èºææ¸©åº¦', |
| | | 'åæºçæ¸©åº¦', 'åæºçæ¸©åº¦', 'æºå¤´æ¸©åº¦' |
| | | ] |
| | | for i, feature in enumerate(all_features): |
| | | with feature_cols[i % 2]: |
| | | st.session_state['mr_selected_features'] = [ |
| | | f for f in st.session_state['mr_selected_features'] if f in all_features |
| | | ] |
| | | if st.checkbox( |
| | | feature, |
| | | key=f"feat_{feature}", |
| | | value=feature in st.session_state['mr_selected_features'] |
| | | ): |
| | | if feature not in st.session_state['mr_selected_features']: |
| | | st.session_state['mr_selected_features'].append(feature) |
| | | else: |
| | | if feature in st.session_state['mr_selected_features']: |
| | | st.session_state['mr_selected_features'].remove(feature) |
| | | |
| | | if not st.session_state['mr_selected_features']: |
| | | st.warning("è³å°éè¦éæ©ä¸ä¸ªç¹å¾åé") |
| | | |
| | | # 转æ¢ä¸ºdatetime对象 |
| | | start_dt = datetime.combine(start_date, datetime.min.time()) |
| | | end_dt = datetime.combine(end_date, datetime.max.time()) |
| | | |
| | | # æ¥è¯¢å¤ç - ä»
è·åæ°æ®å¹¶ç¼åå°ä¼è¯ç¶æ |
| | | if query_button: |
| | | with st.spinner("æ£å¨è·åæ°æ®..."): |
| | | # 1. è·å宿´çæ¤åºæºæ°æ®ï¼å
å«æææ¶é´ç¹çèºæè½¬éåæºå¤´ååï¼ |
| | | df_extruder_full = extruder_service.get_extruder_data(start_dt, end_dt) |
| | | |
| | | # 2. è·å主æµç¨æ§å¶æ°æ® |
| | | df_main_speed = main_process_service.get_cutting_setting_data(start_dt, end_dt) |
| | | |
| | | df_temp = main_process_service.get_temperature_control_data(start_dt, end_dt) |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_extruder_full is not None and not df_extruder_full.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | | st.warning("所选时间段内未找到任何数据,请尝试调整查询条件。")
| | | # æ¸
é¤ç¼åæ°æ® |
| | | for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp', 'last_query_start', 'last_query_end']: |
| | | if key in st.session_state: |
| | | del st.session_state[key] |
| | | return |
| | | |
| | | # ç¼åæ°æ®å°ä¼è¯ç¶æ |
| | | st.session_state['cached_extruder_full'] = df_extruder_full |
| | | st.session_state['cached_main_speed'] = df_main_speed |
| | | st.session_state['cached_temp'] = df_temp |
| | | st.session_state['last_query_start'] = start_dt |
| | | st.session_state['last_query_end'] = end_dt |
| | | |
| | | # æ°æ®å¤çåå¾è¡¨æ¸²æ - æ¯æ¬¡åºç¨éæ°è¿è¡æ¶æ§è¡ï¼å
æ¬è°æ´æ¶é´åç§»æ¶ï¼ |
| | | if all(key in st.session_state for key in ['cached_extruder_full', 'cached_main_speed', 'cached_temp']): |
| | | with st.spinner("æ£å¨åææ°æ®..."): |
| | | # è·åç¼åæ°æ® |
| | | df_extruder_full = st.session_state['cached_extruder_full'] |
| | | df_main_speed = st.session_state['cached_main_speed'] |
| | | df_temp = st.session_state['cached_temp'] |
| | | |
| | | # è·åå½åæ¶é´åç§»é |
| | | offset_delta = timedelta(minutes=st.session_state['mr_time_offset']) |
| | | |
| | | # å¤çæ°æ® |
| | | if df_extruder_full is not None and not df_extruder_full.empty: |
| | | # è¿æ»¤æºå¤´åå大äº2çå¼ |
| | | df_extruder_filtered = df_extruder_full[df_extruder_full['head_pressure'] <= 2] |
| | | |
| | | # ä¸ºç±³éæ°æ®å建åç§»åçæ¶é´åï¼åªå¯¹ç±³éæ°æ®è¿è¡æ¶é´åç§»ï¼ |
| | | df_extruder_filtered['weight_time'] = df_extruder_filtered['time'] - offset_delta |
| | | else: |
| | | df_extruder_filtered = None |
| | | |
| | | # æ£æ¥æ¯å¦ææ°æ® |
| | | has_data = any([ |
| | | df_extruder_filtered is not None and not df_extruder_filtered.empty, |
| | | df_main_speed is not None and not df_main_speed.empty, |
| | | df_temp is not None and not df_temp.empty |
| | | ]) |
| | | |
| | | if not has_data: |
| | | st.warning("所选时间段内未找到任何数据,请尝试调整查询条件。")
| | | return |
| | | |
| | | # æ°æ®æ´åä¸é¢å¤ç |
| | | def integrate_data(df_extruder_filtered, df_main_speed, df_temp): |
| | | # ç¡®ä¿æ¤åºæºæ°æ®åå¨ |
| | | if df_extruder_filtered is None or df_extruder_filtered.empty: |
| | | return None |
| | | |
| | | # å建åªå
å«ç±³éååç§»æ¶é´çä¸»æ°æ®é |
| | | df_weight = df_extruder_filtered[['weight_time', 'metered_weight']].copy() |
| | | df_weight.rename(columns={'weight_time': 'time'}, inplace=True) # å°weight_timeéå½å为timeä½ä¸ºåºåæ¶é´ |
| | | |
| | | # å建å
å«èºæè½¬éååå§æ¶é´ç宿´æ°æ®é |
| | | df_screw = df_extruder_filtered[['time', 'screw_speed_actual']].copy() |
| | | |
| | | # å建å
嫿ºå¤´ååååå§æ¶é´ç宿´æ°æ®é |
| | | df_pressure = df_extruder_filtered[['time', 'head_pressure']].copy() |
| | | |
| | | # 使ç¨åç§»åçç±³éæ¶é´æ´åèºæè½¬éæ°æ® |
| | | df_merged = pd.merge_asof( |
| | | df_weight.sort_values('time'), |
| | | df_screw.sort_values('time'), |
| | | on='time', |
| | | direction='nearest', |
| | | tolerance=pd.Timedelta('1min') |
| | | ) |
| | | |
| | | # 使ç¨åç§»åçç±³éæ¶é´æ´åæºå¤´ååæ°æ® |
| | | df_merged = pd.merge_asof( |
| | | df_merged.sort_values('time'), |
| | | df_pressure.sort_values('time'), |
| | | on='time', |
| | | direction='nearest', |
| | | tolerance=pd.Timedelta('1min') |
| | | ) |
| | | |
| | | # æ´å主æµç¨æ°æ® |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | df_main_speed = df_main_speed[['time', 'process_main_speed']] |
| | | df_merged = pd.merge_asof( |
| | | df_merged.sort_values('time'), |
| | | df_main_speed.sort_values('time'), |
| | | on='time', |
| | | direction='nearest', |
| | | tolerance=pd.Timedelta('1min') |
| | | ) |
| | | |
| | | # æ´åæ¸©åº¦æ°æ® |
| | | if df_temp is not None and not df_temp.empty: |
| | | temp_cols = ['time', 'nakata_extruder_screw_display_temp', |
| | | 'nakata_extruder_rear_barrel_display_temp', |
| | | 'nakata_extruder_front_barrel_display_temp', |
| | | 'nakata_extruder_head_display_temp'] |
| | | df_temp_subset = df_temp[temp_cols].copy() |
| | | df_merged = pd.merge_asof( |
| | | df_merged.sort_values('time'), |
| | | df_temp_subset.sort_values('time'), |
| | | on='time', |
| | | direction='nearest', |
| | | tolerance=pd.Timedelta('1min') |
| | | ) |
| | | |
| | | # éå½åå以æé«å¯è¯»æ§ |
| | | df_merged.rename(columns={ |
| | | 'screw_speed_actual': 'èºæè½¬é', |
| | | 'head_pressure': 'æºå¤´åå', |
| | | 'process_main_speed': 'æµç¨ä¸»é', |
| | | 'nakata_extruder_screw_display_temp': 'èºææ¸©åº¦', |
| | | 'nakata_extruder_rear_barrel_display_temp': 'åæºçæ¸©åº¦', |
| | | 'nakata_extruder_front_barrel_display_temp': 'åæºçæ¸©åº¦', |
| | | 'nakata_extruder_head_display_temp': 'æºå¤´æ¸©åº¦' |
| | | }, inplace=True) |
| | | |
| | | # æ¸
çæ°æ® |
| | | df_merged.dropna(subset=['metered_weight'], inplace=True) |
| | | |
| | | return df_merged |
| | | |
| | | # æ§è¡æ°æ®æ´å |
| | | df_analysis = integrate_data(df_extruder_filtered, df_main_speed, df_temp) |
| | | |
| | | if df_analysis is None or df_analysis.empty: |
| | | st.warning("æ°æ®æ´å失败ï¼è¯·æ£æ¥æ°æ®è´¨éæè°æ´æ¶é´èå´ã") |
| | | return |
| | | |
| | | # éå½åç±³éå |
| | | df_analysis.rename(columns={'metered_weight': 'ç±³é'}, inplace=True) |
| | | |
| | | # --- åå§æ°æ®è¶å¿å¾ --- |
| | | st.subheader("ð åå§æ°æ®è¶å¿å¾") |
| | | |
| | | # å建è¶å¿å¾ |
| | | fig_trend = go.Figure() |
| | | |
| | | # æ·»å ç±³éæ°æ®ï¼ä½¿ç¨åç§»åçæ¶é´ï¼ |
| | | if df_extruder_filtered is not None and not df_extruder_filtered.empty: |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_filtered['weight_time'], # 使ç¨åç§»åçæ¶é´ |
| | | y=df_extruder_filtered['metered_weight'], |
| | | name='ç±³é (Kg/m) [å·²åç§»]', |
| | | mode='lines', |
| | | line=dict(color='blue', width=2) |
| | | )) |
| | | |
| | | # æ·»å èºæè½¬éï¼ä½¿ç¨åå§æ¶é´ï¼ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_filtered['time'], # 使ç¨åå§æ¶é´ |
| | | y=df_extruder_filtered['screw_speed_actual'], |
| | | name='èºæè½¬é (RPM)', |
| | | mode='lines', |
| | | line=dict(color='green', width=1.5), |
| | | yaxis='y2' |
| | | )) |
| | | |
| | | # æ·»å æºå¤´ååï¼ä½¿ç¨åå§æ¶é´ï¼å·²è¿æ»¤å¤§äº2çå¼ï¼ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_extruder_filtered['time'], # 使ç¨åå§æ¶é´ |
| | | y=df_extruder_filtered['head_pressure'], |
| | | name='æºå¤´åå (â¤2)', |
| | | mode='lines', |
| | | line=dict(color='orange', width=1.5), |
| | | yaxis='y3' |
| | | )) |
| | | |
| | | # æ·»å æµç¨ä¸»é |
| | | if df_main_speed is not None and not df_main_speed.empty: |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_main_speed['time'], |
| | | y=df_main_speed['process_main_speed'], |
| | | name='æµç¨ä¸»é (M/Min)', |
| | | mode='lines', |
| | | line=dict(color='red', width=1.5), |
| | | yaxis='y4' |
| | | )) |
| | | |
| | | # æ·»å æ¸©åº¦æ°æ® |
| | | if df_temp is not None and not df_temp.empty: |
| | | # èºææ¸©åº¦ |
| | | fig_trend.add_trace(go.Scatter( |
| | | x=df_temp['time'], |
| | | y=df_temp['nakata_extruder_screw_display_temp'], |
| | | name='èºææ¸©åº¦ (°C)', |
| | | mode='lines', |
| | | line=dict(color='purple', width=1), |
| | | yaxis='y5' |
| | | )) |
| | | |
| | | # é
ç½®è¶å¿å¾å¸å± |
| | | fig_trend.update_layout( |
| | | title=f'åå§æ°æ®è¶å¿ (ç±³éåååç§» {st.session_state["mr_time_offset"]} åé)', |
| | | xaxis=dict( |
| | | title='æ¶é´', |
| | | rangeslider=dict(visible=True), |
| | | type='date' |
| | | ), |
| | | yaxis=dict( |
| | | title='ç±³é (Kg/m)', |
| | | title_font=dict(color='blue'), |
| | | tickfont=dict(color='blue') |
| | | ), |
| | | yaxis2=dict( |
| | | title='èºæè½¬é (RPM)', |
| | | title_font=dict(color='green'), |
| | | tickfont=dict(color='green'), |
| | | overlaying='y', |
| | | side='right' |
| | | ), |
| | | yaxis3=dict( |
| | | title='æºå¤´åå', |
| | | title_font=dict(color='orange'), |
| | | tickfont=dict(color='orange'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.85 |
| | | ), |
| | | yaxis4=dict( |
| | | title='æµç¨ä¸»é (M/Min)', |
| | | title_font=dict(color='red'), |
| | | tickfont=dict(color='red'), |
| | | overlaying='y', |
| | | side='right', |
| | | anchor='free', |
| | | position=0.75 |
| | | ), |
| | | yaxis5=dict( |
| | | title='温度 (°C)', |
| | | title_font=dict(color='purple'), |
| | | tickfont=dict(color='purple'), |
| | | overlaying='y', |
| | | side='left', |
| | | anchor='free', |
| | | position=0.15 |
| | | ), |
| | | legend=dict( |
| | | orientation="h", |
| | | yanchor="bottom", |
| | | y=1.02, |
| | | xanchor="right", |
| | | x=1 |
| | | ), |
| | | height=600, |
| | | margin=dict(l=100, r=200, t=100, b=100), |
| | | hovermode='x unified' |
| | | ) |
| | | |
| | | # æ¾ç¤ºè¶å¿å¾ |
| | | st.plotly_chart(fig_trend, width='stretch', config={'scrollZoom': True}) |
| | | |
| | | # --- 多元线性回归分析 ---
| | | st.subheader("ð å¤å
线æ§åå½åæ") |
| | | |
| | | # æ£æ¥æ¯å¦éæ©äºç¹å¾ |
| | | if not st.session_state['mr_selected_features']: |
| | | st.warning("请è³å°éæ©ä¸ä¸ªç¹å¾åéè¿è¡åå½åæ") |
| | | else: |
| | | # æ£æ¥ææéæ©çç¹å¾æ¯å¦å¨æ°æ®ä¸ |
| | | missing_features = [f for f in st.session_state['mr_selected_features'] if f not in df_analysis.columns] |
| | | if missing_features: |
| | | st.warning(f"æ°æ®ä¸ç¼ºå°ä»¥ä¸ç¹å¾: {', '.join(missing_features)}") |
| | | else: |
| | | # å夿°æ® |
| | | X = df_analysis[st.session_state['mr_selected_features']] |
| | | y = df_analysis['ç±³é'] |
| | | |
| | | # æ¸
çæ°æ®ä¸çNaNå¼ |
| | | combined = pd.concat([X, y], axis=1) |
| | | combined_clean = combined.dropna() |
| | | |
| | | # æ£æ¥æ¸
çåçæ°æ®é |
| | | if len(combined_clean) < 10: |
| | | st.warning("数据量不足或包含过多NaN值,无法进行有效的回归分析")
| | | else: |
| | | # éæ°å离Xåy |
| | | X_clean = combined_clean[st.session_state['mr_selected_features']] |
| | | y_clean = combined_clean['ç±³é'] |
| | | |
| | | # åå²è®ç»éåæµè¯é |
| | | X_train, X_test, y_train, y_test = train_test_split(X_clean, y_clean, test_size=0.2, random_state=42) |
| | | |
| | | # è®ç»æ¨¡å |
| | | model = LinearRegression() |
| | | model.fit(X_train, y_train) |
| | | |
| | | # 颿µ |
| | | y_pred = model.predict(X_test) |
| | | y_train_pred = model.predict(X_train) |
| | | |
| | | # 计ç®è¯ä¼°ææ |
| | | r2 = r2_score(y_test, y_pred) |
| | | mse = mean_squared_error(y_test, y_pred) |
| | | mae = mean_absolute_error(y_test, y_pred) |
| | | rmse = np.sqrt(mse) |
| | | |
| | | # æ¾ç¤ºæ¨¡åæ§è½ |
| | | metrics_cols = st.columns(2) |
| | | with metrics_cols[0]: |
| | | st.metric("R² å¾å", f"{r2:.4f}") |
| | | st.metric("åæ¹è¯¯å·® (MSE)", f"{mse:.6f}") |
| | | with metrics_cols[1]: |
| | | st.metric("å¹³åç»å¯¹è¯¯å·® (MAE)", f"{mae:.6f}") |
| | | st.metric("åæ¹æ ¹è¯¯å·® (RMSE)", f"{rmse:.6f}") |
| | | |
| | | # --- å®é
å¼ä¸é¢æµå¼å¯¹æ¯ --- |
| | | st.subheader("ð å®é
å¼ä¸é¢æµå¼å¯¹æ¯") |
| | | |
| | | # åå»ºå¯¹æ¯æ°æ® |
| | | compare_df = pd.DataFrame({ |
| | | 'å®é
å¼': y_test, |
| | | '颿µå¼': y_pred |
| | | }) |
| | | compare_df = compare_df.sort_index() |
| | | |
| | | # å建对æ¯å¾ |
| | | fig_compare = go.Figure() |
| | | fig_compare.add_trace(go.Scatter( |
| | | x=compare_df.index, |
| | | y=compare_df['å®é
å¼'], |
| | | name='å®é
å¼', |
| | | mode='lines+markers', |
| | | line=dict(color='blue', width=2) |
| | | )) |
| | | fig_compare.add_trace(go.Scatter( |
| | | x=compare_df.index, |
| | | y=compare_df['颿µå¼'], |
| | | name='颿µå¼', |
| | | mode='lines+markers', |
| | | line=dict(color='red', width=2, dash='dash') |
| | | )) |
| | | fig_compare.update_layout( |
| | | title='æµè¯é: å®é
ç±³é vs 颿µç±³é', |
| | | xaxis=dict(title='æ ·æ¬ç´¢å¼'), |
| | | yaxis=dict(title='ç±³é (Kg/m)'), |
| | | legend=dict(orientation='h', yanchor='bottom', y=1.02, xanchor='right', x=1), |
| | | height=400 |
| | | ) |
| | | st.plotly_chart(fig_compare, width='stretch') |
| | | |
| | | # --- æ®å·®åæ --- |
| | | st.subheader("ð æ®å·®åæ") |
| | | |
| | | # è®¡ç®æ®å·® |
| | | residuals = y_test - y_pred |
| | | |
| | | # å建æ®å·®å¾ |
| | | fig_residual = go.Figure() |
| | | fig_residual.add_trace(go.Scatter( |
| | | x=y_pred, |
| | | y=residuals, |
| | | mode='markers', |
| | | marker=dict(color='green', size=8, opacity=0.6) |
| | | )) |
| | | fig_residual.add_shape( |
| | | type="line", |
| | | x0=y_pred.min(), |
| | | y0=0, |
| | | x1=y_pred.max(), |
| | | y1=0, |
| | | line=dict(color="red", width=2, dash="dash") |
| | | ) |
| | | fig_residual.update_layout( |
| | | title='æ®å·®å¾', |
| | | xaxis=dict(title='颿µå¼'), |
| | | yaxis=dict(title='æ®å·®'), |
| | | height=400 |
| | | ) |
| | | st.plotly_chart(fig_residual, width='stretch') |
| | | |
| | | # --- ç¹å¾éè¦æ§ --- |
| | | st.subheader("âï¸ ç¹å¾éè¦æ§åæ") |
| | | |
| | | # 计ç®ç¹å¾éè¦æ§ï¼åºäºç³»æ°ç»å¯¹å¼ï¼ |
| | | feature_importance = pd.DataFrame({ |
| | | 'ç¹å¾': st.session_state['mr_selected_features'], |
| | | 'ç³»æ°': model.coef_, |
| | | 'éè¦æ§': np.abs(model.coef_) |
| | | }) |
| | | feature_importance = feature_importance.sort_values('éè¦æ§', ascending=False) |
| | | |
| | | # å建ç¹å¾éè¦æ§å¾ |
| | | fig_importance = px.bar( |
| | | feature_importance, |
| | | x='ç¹å¾', |
| | | y='éè¦æ§', |
| | | title='ç¹å¾éè¦æ§ï¼åºäºç³»æ°ç»å¯¹å¼ï¼', |
| | | color='éè¦æ§', |
| | | color_continuous_scale='viridis' |
| | | ) |
| | | fig_importance.update_layout( |
| | | xaxis=dict(tickangle=-45), |
| | | height=400 |
| | | ) |
| | | st.plotly_chart(fig_importance, width='stretch') |
| | | |
| | | # æ¾ç¤ºç³»æ°è¡¨ |
| | | st.write("### 模åç³»æ°") |
| | | coef_df = pd.DataFrame({ |
| | | 'ç¹å¾': ['æªè·'] + st.session_state['mr_selected_features'], |
| | | 'ç³»æ°': [model.intercept_] + list(model.coef_) |
| | | }) |
| | | st.dataframe(coef_df, use_container_width=True) |
| | | |
| | | # --- 颿µåè½ --- |
| | | st.subheader("ð® ç±³é颿µ") |
| | | |
| | | # åå»ºé¢æµè¡¨å |
| | | st.write("输入特征值进行米重预测:")
| | | predict_cols = st.columns(2) |
| | | input_features = {} |
| | | |
| | | for i, feature in enumerate(st.session_state['mr_selected_features']): |
| | | with predict_cols[i % 2]: |
| | | # è·åç¹å¾çç»è®¡ä¿¡æ¯ |
| | | min_val = df_analysis[feature].min() |
| | | max_val = df_analysis[feature].max() |
| | | mean_val = df_analysis[feature].mean() |
| | | |
| | | input_features[feature] = st.number_input( |
| | | f"{feature}", |
| | | key=f"pred_{feature}", |
| | | value=float(mean_val), |
| | | min_value=float(min_val), |
| | | max_value=float(max_val), |
| | | step=0.1 |
| | | ) |
| | | |
| | | if st.button("颿µç±³é"): |
| | | # åå¤é¢æµæ°æ® |
| | | input_data = [[input_features[feature] for feature in st.session_state['mr_selected_features']]] |
| | | # 颿µ |
| | | predicted_weight = model.predict(input_data)[0] |
| | | # æ¾ç¤ºé¢æµç»æ |
| | | st.success(f"颿µç±³é: {predicted_weight:.4f} Kg/m") |
| | | |
| | | # --- æ°æ®é¢è§ --- |
| | | st.subheader("ð æ°æ®é¢è§") |
| | | st.dataframe(df_analysis.head(20), use_container_width=True) |
| | | else: |
| | | # æç¤ºç¨æ·ç¹å»å¼å§åææé® |
| | | st.info("è¯·éæ©æ¶é´èå´å¹¶ç¹å»'å¼å§åæ'æé®è·åæ°æ®ã") |
| | |
| | | rangeslider=dict(visible=True) |
| | | ), |
| | | yaxis=dict(fixedrange=False), |
| | | hovermode='x unified', |
| | | dragmode='zoom' |
| | | |
| | | ) |
| | | |
| | | # é
ç½®å¾è¡¨åæ° |
| | |
| | | try: |
| | | # è¯å«æå¼ç¹ |
| | | extreme_points = self.identify_local_maxima(df) |
| | | |
| | | # print("è¯å«æå¼ç¹:", extreme_points) |
| | | # è¯å«é¶æ®µæå¤§å¼ |
| | | phase_maxima = self.identify_phase_maxima(df) |
| | | # phase_maxima = self.identify_phase_maxima(df) |
| | | # print("è¯å«é¶æ®µæå¤§å¼:", phase_maxima) |
| | | |
| | | # è®¡ç®æ¯ä¸ªæå¼ç¹çåæ ¼ç |
| | | if not extreme_points.empty: |
| | | extreme_points['pass_rate'] = extreme_points.apply(self.calculate_pass_rate, axis=1) |
| | | |
| | | # è®¡ç®æ´ä½åæ ¼ç |
| | | overall_pass_rate = self.calculate_overall_pass_rate(df) |
| | | overall_pass_rate = self.calculate_overall_pass_rate(extreme_points) |
| | | |
| | | return { |
| | | 'extreme_points': extreme_points, |
| | | 'phase_maxima': phase_maxima, |
| | | 'phase_maxima': pd.DataFrame(), |
| | | 'overall_pass_rate': overall_pass_rate |
| | | } |
| | | except Exception as e: |
| | |
| | | self.db.connect() |
| | | |
| | | query = """ |
| | | SELECT time, process_main_speed |
| | | SELECT time, process_main_speed, cutting_count |
| | | FROM public.aics_main_process_cutting_setting |
| | | WHERE time BETWEEN %s AND %s |
| | | ORDER BY time ASC |
| | |
| | | from app.pages.extruder_dashboard import show_extruder_dashboard |
| | | from app.pages.main_process_dashboard import show_main_process_dashboard |
| | | from app.pages.comprehensive_dashboard import show_comprehensive_dashboard |
| | | from app.pages.metered_weight_dashboard import show_metered_weight_dashboard |
| | | from app.pages.metered_weight_correlation import show_metered_weight_correlation |
| | | from app.pages.metered_weight_regression import show_metered_weight_regression |
| | | from app.pages.metered_weight_advanced import show_metered_weight_advanced |
| | | |
| | | # 设置页é¢é
ç½® |
| | | st.set_page_config( |
| | |
| | | |
| | | comprehensive_page = st.Page( |
| | | show_comprehensive_dashboard, |
| | | title="综ååæ", |
| | | title="æ¡é综ååæ", |
| | | icon="ð", |
| | | url_path="comprehensive" |
| | | ) |
| | | |
| | | metered_weight_page = st.Page( |
| | | show_metered_weight_dashboard, |
| | | title="ç±³é综ååæ", |
| | | icon="ð", |
| | | url_path="metered_weight" |
| | | ) |
| | | |
| | | metered_weight_correlation_page = st.Page( |
| | | show_metered_weight_correlation, |
| | | title="米重相关性分析",
| | | icon="ð", |
| | | url_path="metered_weight_correlation" |
| | | ) |
| | | |
| | | metered_weight_regression_page = st.Page( |
| | | show_metered_weight_regression, |
| | | title="米重多元线性回归分析",
| | | icon="ð", |
| | | url_path="metered_weight_regression" |
| | | ) |
| | | |
| | | metered_weight_advanced_page = st.Page( |
| | | show_metered_weight_advanced, |
| | | title="ç±³éé«çº§é¢æµåæ", |
| | | icon="ð¤", |
| | | url_path="metered_weight_advanced" |
| | | ) |
| | | |
| | | # ä¾§è¾¹æ 页èä¿¡æ¯ |
| | |
| | | |
| | | # 导èªé
ç½® |
| | | pg = st.navigation({ |
| | | "综ååæ": [comprehensive_page], |
| | | "综ååæ": [comprehensive_page, metered_weight_page, metered_weight_correlation_page, metered_weight_regression_page, metered_weight_advanced_page], |
| | | "å项åæ": [sorting_page, extruder_page, main_process_page] |
| | | }) |
| | | |
| | |
| | | psycopg2-binary |
| | | pandas |
| | | plotly |
| | | python-dotenv |
| | | python-dotenv |
| | | scikit-learn |
| | | torch
| | | torchvision |