
Thermal runaway alarm

qingfeng 2 years ago
parent commit c4fdc686b3

+ 0 - 7
LIB/FRONTEND/FaultDetection/main_pred.py

@@ -151,17 +151,10 @@ if __name__ == "__main__":
     model_list=[]
     model2_list=[]
     for group in ['MGMLX','PK504','PK502','PK500','MGMCL']:
-<<<<<<< HEAD
         scaler = pickle.load(open('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/FaultDetection/V1_0_2/train_out/scalerV_'+group+'_10.pkl', 'rb'))
         scaler2 = pickle.load(open('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/FaultDetection/V1_0_2/train_out/scalerT_'+group+'_10.pkl', 'rb'))
         model = load_model('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/FaultDetection/V1_0_2/train_out/modelV_'+group+'_10.h5')
         model2 = load_model('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/FaultDetection/V1_0_2/train_out/modelT_'+group+'_10.h5')
-=======
-        scaler = pickle.load(open('LIB/MIDDLE/FaultDetection/V1_0_2/train_out/scalerV_'+group+'_10.pkl', 'rb'))
-        scaler2 = pickle.load(open('LIB/MIDDLE/FaultDetection/V1_0_2/train_out/scalerT_'+group+'_10.pkl', 'rb'))
-        model = load_model('LIB/MIDDLE/FaultDetection/V1_0_2/train_out/modelV_'+group+'_10.h5')
-        model2 = load_model('LIB/MIDDLE/FaultDetection/V1_0_2/train_out/modelT_'+group+'_10.h5')
->>>>>>> dev
         scaler_list.append(scaler)
         scaler2_list.append(scaler2)
         model_list.append(model)
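
Note: the conflict is resolved in favor of the absolute D:/deploy paths used on the deployment host. A minimal sketch of how the same per-group artifacts could be loaded without hard-coding the drive, assuming a hypothetical MODEL_DIR environment variable and the existing file-naming scheme:

    import os
    import pickle
    from keras.models import load_model

    # Base directory for trained artifacts; falls back to the deployed path.
    MODEL_DIR = os.environ.get(
        'MODEL_DIR',
        'D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/FaultDetection/V1_0_2/train_out')

    scaler_list, scaler2_list, model_list, model2_list = [], [], [], []
    for group in ['MGMLX', 'PK504', 'PK502', 'PK500', 'MGMCL']:
        # Voltage (V) and temperature (T) scalers and models for each battery group.
        with open(os.path.join(MODEL_DIR, 'scalerV_{}_10.pkl'.format(group)), 'rb') as f:
            scaler_list.append(pickle.load(f))
        with open(os.path.join(MODEL_DIR, 'scalerT_{}_10.pkl'.format(group)), 'rb') as f:
            scaler2_list.append(pickle.load(f))
        model_list.append(load_model(os.path.join(MODEL_DIR, 'modelV_{}_10.h5'.format(group))))
        model2_list.append(load_model(os.path.join(MODEL_DIR, 'modelT_{}_10.h5'.format(group))))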

+ 167 - 0
LIB/FRONTEND/ThermoRunaway/main_pred.py

@@ -0,0 +1,167 @@
+
+from LIB.MIDDLE.ThermoRunaway.V1_0_2.Trunaway import *
+import pymysql
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager
+dbManager = DBManager.DBManager()
+from sqlalchemy import create_engine
+from urllib import parse
+import datetime, time
+from apscheduler.schedulers.blocking import BlockingScheduler
+import traceback
+import pickle
+from keras.models import load_model
+import logging
+import logging.handlers
+import os
+import re
+
+#................................... Fault detection function ......................................................................................................................
+def diag_cal():
+    global SNnums
+
+    start=time.time()
+    now_time=datetime.datetime.now()
+    start_time=now_time-datetime.timedelta(hours=6)
+    start_time=start_time.strftime('%Y-%m-%d %H:%M:%S')
+    end_time=now_time.strftime('%Y-%m-%d %H:%M:%S')
+
+    # Database configuration
+    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
+    port=3306
+    db='safety_platform'
+    user='qx_read'
+    password='Qx@123456'
+
+    # Read open records (end_time not yet set) from the results database ......................................................
+    param='product_id,start_time,end_time,diff_min,SOC,loss_sum,loss_max,diffV,downV,diffdownV'
+    tablename='thermo_runaway'
+    mysql = pymysql.connect (host=host, user=user, password=password, port=port, database=db)
+    cursor = mysql.cursor()
+    sql =  "select {} from {} where end_time='0000-00-00 00:00:00'".format(param,tablename)
+    cursor.execute(sql)
+    res = cursor.fetchall()
+    df_diag_ram= pd.DataFrame(res,columns=param.split(','))
+    
+
+    db_res_engine = create_engine(
+        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
+            user, parse.quote_plus(password), host, port, db
+        ))
+    
+
+    
+
+    # Call the main routine for each SN ................................................................................................................................................................
+    for sn in SNnums:
+        try:
+            group=sn[:5]
+            df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms'])
+            data_bms = df_data['bms']
+            data_bms['sn']=sn
+            if len(data_bms)>0:
+                logger.info("SN: {} starting data preprocessing".format(sn))
+                data_bms=delete(data_bms)
+                data_stand=data_groups(data_bms,sn,start_time,end_time)
+                df_stand=split(data_stand)   
+                res=pd.DataFrame()
+                if len(df_stand)>0:
+                    # Load the scaler (mean & variance) produced by training
+                    logger.info("SN: {} starting model prediction".format(sn))
+                    scaler = scaler_dict[group]
+                    # Load the model produced by training for this group
+                    model = model_dict[group]
+                    res,diff=res_output(df_stand,scaler,model,group,end_time)
+
+                    df_diag_ram_sn=df_diag_ram[df_diag_ram['product_id']==sn]
+                    if not df_diag_ram_sn.empty:   # open results already exist for this sn
+                        new_res,update_res=arrange(res,df_diag_ram_sn,start_time,diff)
+                        if len(update_res)>0:
+                            cursor.execute("DELETE FROM thermo_runaway WHERE end_time = '0000-00-00 00:00:00' and product_id='{}'".format(sn))
+                            mysql.commit()
+                            update_res.to_sql("thermo_runaway",con=db_res_engine, if_exists="append",index=False)
+                        # Write newly added results to the results database ................................................................
+                        if len(new_res)>0:
+                            new_res.to_sql("thermo_runaway",con=db_res_engine, if_exists="append",index=False)
+                    else:
+                        res.to_sql("thermo_runaway",con=db_res_engine, if_exists="append",index=False)
+
+            # end=time.time()
+            # print(end-start)  
+                
+        except Exception as e:
+            logger.error(str(e))
+            logger.error(traceback.format_exc())
+
+    cursor.close()
+    mysql.close()
+
+#............................................... Main entry point: sets up the scheduled job .......................................................................................................................
+if __name__ == "__main__":
+    
+    # Logging
+    log_path = 'log/'
+    if not os.path.exists(log_path):
+        os.makedirs(log_path)
+    logger = logging.getLogger("main")
+    logger.setLevel(logging.DEBUG)
+    
+    # Rotate log files by date (one file per day)
+    fh = logging.handlers.TimedRotatingFileHandler(filename='{}/main_info.log'.format(log_path), when="D", interval=1, backupCount=30,
+                                                    encoding="utf-8")
+    formatter = logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s")
+    fh.suffix = "%Y-%m-%d_%H-%M-%S"
+    fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
+    fh.setFormatter(formatter)
+    fh.setLevel(logging.INFO)
+    logger.addHandler(fh)
+
+    fh = logging.handlers.TimedRotatingFileHandler(filename='{}/main_error.log'.format(log_path), when="D", interval=1, backupCount=30,
+                                                    encoding="utf-8")
+    formatter = logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s")
+    fh.suffix = "%Y-%m-%d_%H-%M-%S"
+    fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
+    fh.setFormatter(formatter)
+    fh.setLevel(logging.ERROR)
+    logger.addHandler(fh)
+
+    logger.info("pid is {}".format(os.getpid()))
+    
+    # Refresh the SN list
+    host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
+    port=3306
+    db='qixiang_oss'
+    user='qixiang_oss'
+    password='Qixiang2021'
+    conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
+    cursor = conn.cursor()
+    cursor.execute("select sn, imei, add_time from app_device where status in (1,2,3)")
+    res = cursor.fetchall()
+    df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])
+    df_sn = df_sn.reset_index(drop=True)
+    conn.close()
+    
+    SNnums = list(df_sn['sn'])
+    
+    scaler_list=[]
+    model_list=[]
+    for group in ['PK504','MGMCL','PK500']:
+        scaler=pickle.load(open('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/ThermoRunaway/V1_0_2/train_out/scaler_'+group+'_05.pkl', 'rb'))
+        model=load_model('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/ThermoRunaway/V1_0_2/train_out/model_'+group+'_05.h5')
+        scaler_list.append(scaler)
+        model_list.append(model)
+    scaler_dict={'PK504':scaler_list[0],'MGMCL':scaler_list[1],'PK500':scaler_list[2]}
+    model_dict={'PK504':model_list[0],'MGMCL':model_list[1],'PK500':model_list[2]}
+
+    diag_cal()
+    # Scheduled job .......................................................................................................................................................................
+    scheduler = BlockingScheduler()
+    scheduler.add_job(diag_cal, 'interval', hours=6)
+
+    try:  
+        scheduler.start()
+    except Exception as e:
+        scheduler.shutdown()
+        logger.error(str(e))
+        logger.error(traceback.format_exc())
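
Note: scaler_dict and model_dict are built by positional index into scaler_list/model_list, so the dict literals must list the groups in exactly the loop order. A sketch of an equivalent approach that keys the artifacts by group as they are loaded (same files, same behavior assumed):

    import os
    import pickle
    from keras.models import load_model

    TRAIN_OUT = 'D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/ThermoRunaway/V1_0_2/train_out'

    scaler_dict, model_dict = {}, {}
    for group in ['PK504', 'MGMCL', 'PK500']:
        # One scaler and one model per battery group.
        with open(os.path.join(TRAIN_OUT, 'scaler_{}_05.pkl'.format(group)), 'rb') as f:
            scaler_dict[group] = pickle.load(f)
        model_dict[group] = load_model(os.path.join(TRAIN_OUT, 'model_{}_05.h5'.format(group)))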

+ 3 - 0
LIB/FRONTEND/ThermoRunaway/run.bat

@@ -0,0 +1,3 @@
+cd /d D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\ThermoRunaway
+title cal_ThermoRunaway
+D:\env\py_pro\python.exe D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\ThermoRunaway\main_pred.py