zhuxi 2 years ago
parent commit 698075d868
1 changed file with 93 additions and 37 deletions
LIB/MIDDLE/FaultDetection/V1_0_2/main_pred.py  +93 -37

@@ -1,5 +1,4 @@
-
-from LIB.MIDDLE.FaultDetection.V1_0_1.CoreAlgo.aelstm import *
+from LIB.MIDDLE.FaultDetection.V1_0_2.aelstm import *
 import pymysql
 import datetime
 import pandas as pd
@@ -9,10 +8,13 @@ from sqlalchemy import create_engine
 from urllib import parse
 import datetime, time
 from apscheduler.schedulers.blocking import BlockingScheduler
-from LIB.MIDDLE.CellStateEstimation.Common import log
 import traceback
 import pickle
 from keras.models import load_model
+import logging
+import logging.handlers
+import os
+import re
 
 
 #...................................Fault detection function......................................................................................................................
@@ -33,11 +35,11 @@ def diag_cal():
     password='Qx@123456'
 
     #Read data from the result database......................................................
-    param='product_id,start_time,end_time,diff_min,SOC[%],AnoScoreV_sum_max,AnoScoreV_max_max,AnoScoreT_sum_max,AnoScoreT_max_max'
+    param='product_id,start_time,end_time,diff_min,SOC,AnoScoreV_sum_max,AnoScoreV_max_max,AnoScoreT_sum_max,AnoScoreT_max_max'
     tablename='fault_detection'
     mysql = pymysql.connect (host=host, user=user, password=password, port=port, database=db)
     cursor = mysql.cursor()
-    sql =  "select %s from %s where time_end='0000-00-00 00:00:00'" %(param,tablename)
+    sql =  "select {} from {} where end_time='0000-00-00 00:00:00'".format(param,tablename)
     cursor.execute(sql)
     res = cursor.fetchall()
     df_diag_ram= pd.DataFrame(res,columns=param.split(','))
@@ -47,10 +49,25 @@ def diag_cal():
         "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
             user, parse.quote_plus(password), host, port, db
         ))
-
-    mylog=log.Mylog('log_info_charge.txt','error')
-    mylog.logcfg()
-
+    
+    scaler_list=[]
+    scaler2_list=[]
+    model_list=[]
+    model2_list=[]
+    for group in ['MGMLX','PK504','PK502','PK500','MGMCL']:
+        scaler = pickle.load(open('LIB/MIDDLE/FaultDetection/V1_0_2/train_out/scalerV_'+group+'_10.pkl', 'rb'))
+        scaler2 = pickle.load(open('LIB/MIDDLE/FaultDetection/V1_0_2/train_out/scalerT_'+group+'_10.pkl', 'rb'))
+        model = load_model('LIB/MIDDLE/FaultDetection/V1_0_2/train_out/modelV_'+group+'_10.h5')
+        model2 = load_model('LIB/MIDDLE/FaultDetection/V1_0_2/train_out/modelT_'+group+'_10.h5')
+        scaler_list.append(scaler)
+        scaler2_list.append(scaler2)
+        model_list.append(model)
+        model2_list.append(model2)
+    scaler_dict={'MGMLX':scaler_list[0],'PK504':scaler_list[1],'PK502':scaler_list[2],'PK500':scaler_list[3],'MGMCL':scaler_list[4]}
+    scaler2_dict={'MGMLX':scaler2_list[0],'PK504':scaler2_list[1],'PK502':scaler2_list[2],'PK500':scaler2_list[3],'MGMCL':scaler2_list[4]}
+    model_dict={'MGMLX':model_list[0],'PK504':model_list[1],'PK502':model_list[2],'PK500':model_list[3],'MGMCL':model_list[4]}
+    model2_dict={'MGMLX':model2_list[0],'PK504':model2_list[1],'PK502':model2_list[2],'PK500':model2_list[3],'MGMCL':model2_list[4]}
+    
 
     #Call the main routine for each SN................................................................................................................................................................
     for sn in SNnums:
@@ -60,38 +77,40 @@ def diag_cal():
             data_bms = df_data['bms']
             data_bms['sn']=sn
             if len(data_bms)>0:
+                logger.info("SN: {} data preprocessing started".format(sn))
                 data_stand=data_groups(data_bms,sn,start_time,end_time)
                 df_stand=split(data_stand)   
                 res=pd.DataFrame()
                 if len(df_stand)>0:
                     #Load the scaling parameters produced by training: mean & variance
-                    scaler = pickle.load(open('D:/Develop/User/Zhuxi/data_analyze_platform/LIB/MIDDLE/FaultDetection/V1_0_2/train_out/scalerV_'+group+'_10.pkl', 'rb'))
-                    scaler2 = pickle.load(open('D:/Develop/User/Zhuxi/data_analyze_platform/LIB/MIDDLE/FaultDetection/V1_0_2/train_out/scalerT_'+group+'_10.pkl', 'rb'))
+                    logger.info("SN: {} model prediction started".format(sn))
+                    scaler = scaler_dict[group]
+                    scaler2 = scaler2_dict[group]
                     #Load the trained model state produced by training: voltage model & temperature model
-                    model = load_model('D:/Develop/User/Zhuxi/data_analyze_platform/LIB/MIDDLE/FaultDetection/V1_0_2/train_out/modelV_'+group+'_10.h5')
-                    model2 = load_model('D:/Develop/User/Zhuxi/data_analyze_platform/LIB/MIDDLE/FaultDetection/V1_0_2/train_out/modelT_'+group+'_10.h5')
+                    model = model_dict[group]
+                    model2 = model2_dict[group]
                     res=prediction(df_stand,scaler,scaler2,model,model2)
                     if len(res)>0:
                         df_res2,diff=threshold(res,group,end_time)
-                        df_diag_ram_sn=pd.Series()
-                        if not df_diag_ram.empty:   #results not empty
-                            df_diag_ram_sn=df_diag_ram[df_diag_ram['sn']==sn]
-                            if not df_diag_ram_sn.empty:   #results for this sn not empty
-                                new_res,update_res=arrange(df_res2,df_diag_ram_sn,start_time,diff)
-                                if len(update_res)>0:
-                                    cursor.execute("DELETE FROM fault_detection WHERE time_end = '0000-00-00 00:00:00' and sn='{}'".format(sn))
-                                    mysql.commit()
-                                    update_res.to_sql("fault_detection",con=db_res_engine, if_exists="append",index=False)
-                                #Store new results in the result database................................................................
-                                if len(new_res)>0:
-                                    new_res.to_sql("fault_detection",con=db_res_engine, if_exists="append",index=False)
+                        df_diag_ram_sn=df_diag_ram[df_diag_ram['product_id']==sn]
+                        if not df_diag_ram_sn.empty:   #results for this sn not empty
+                            new_res,update_res=arrange(df_res2,df_diag_ram_sn,start_time,diff)
+                            if len(update_res)>0:
+                                cursor.execute("DELETE FROM fault_detection WHERE end_time = '0000-00-00 00:00:00' and product_id='{}'".format(sn))
+                                mysql.commit()
+                                update_res.to_sql("fault_detection",con=db_res_engine, if_exists="append",index=False)
+                            #Store new results in the result database................................................................
+                            if len(new_res)>0:
+                                new_res.to_sql("fault_detection",con=db_res_engine, if_exists="append",index=False)
+                        else:
+                            df_res2.to_sql("fault_detection",con=db_res_engine, if_exists="append",index=False)
 
-            end=time.time()
-            print(end-start)  
+            # end=time.time()
+            # print(end-start)  
                 
         except Exception as e:
-            print(repr(e))
-            mylog.logopt(e)
+            logger.error(str(e))
+            logger.error(traceback.format_exc())
 
     cursor.close()
     mysql.close()
@@ -99,13 +118,50 @@ def diag_cal():
 #...............................................Main entry point: run once, then schedule periodic runs.......................................................................................................................
 if __name__ == "__main__":
     
-    #excelpath=r'D:\Platform\platform_python\data_analyze_platform\USER\spf\01qixiang\sn-20210903.xlsx'
-    excelpath='sn-20210903.xlsx'
-    dataSOH = pd.read_excel('sn-20210903.xlsx',sheet_name='sn-20210903')
-    SNnums = list(dataSOH['sn'])
+    # Logging setup
+    log_path = 'log/'
+    if not os.path.exists(log_path):
+        os.makedirs(log_path)
+    logger = logging.getLogger("main")
+    logger.setLevel(logging.DEBUG)
+    
+    # Roll over by date (one new file per day)
+    fh = logging.handlers.TimedRotatingFileHandler(filename='{}/main_info.log'.format(log_path), when="D", interval=1, backupCount=30,
+                                                    encoding="utf-8")
+    formatter = logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s")
+    fh.suffix = "%Y-%m-%d_%H-%M-%S"
+    fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
+    fh.setFormatter(formatter)
+    fh.setLevel(logging.INFO)
+    logger.addHandler(fh)
+
+    fh = logging.handlers.TimedRotatingFileHandler(filename='{}/main_error.log'.format(log_path), when="D", interval=1, backupCount=30,
+                                                    encoding="utf-8")
+    formatter = logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s")
+    fh.suffix = "%Y-%m-%d_%H-%M-%S"
+    fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
+    fh.setFormatter(formatter)
+    fh.setLevel(logging.ERROR)
+    logger.addHandler(fh)
+
+    logger.info("pid is {}".format(os.getpid()))
+    
+    # Update the SN list
+    host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
+    port=3306
+    db='qixiang_oss'
+    user='qixiang_oss'
+    password='Qixiang2021'
+    conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
+    cursor = conn.cursor()
+    cursor.execute("select sn, imei, add_time from app_device where status in (1,2,3)")
+    res = cursor.fetchall()
+    df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])
+    df_sn = df_sn.reset_index(drop=True)
+    conn.close()
+    
+    SNnums = list(df_sn['sn'])
     
-    mylog=log.Mylog('log_info_charge.txt','error')
-    mylog.logcfg()
 
     diag_cal()
     #定时任务.......................................................................................................................................................................
@@ -116,5 +172,5 @@ if __name__ == "__main__":
         scheduler.start()
     except Exception as e:
         scheduler.shutdown()
-        print(repr(e))
-        mylog.logopt(e)
+        logger.error(str(e))
+        logger.error(traceback.format_exc())
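
Notes on the change:

The rewritten queries still splice values into the SQL text with str.format, including the per-SN DELETE. pymysql's cursor.execute() accepts bound parameters through %s placeholders, which avoids quoting problems and SQL injection. A minimal sketch against the same table and columns as the diff:

    # Parameterized form of the DELETE in diag_cal(); pymysql binds the
    # values instead of pasting them into the SQL string.
    cursor.execute(
        "DELETE FROM fault_detection WHERE end_time = %s AND product_id = %s",
        ('0000-00-00 00:00:00', sn),
    )
    mysql.commit()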
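The new preloading block fills scaler_list/model_list in a loop and then rebuilds per-group dicts by index. The same result can be had directly with dict comprehensions keyed by group; a behavior-identical sketch, assuming the same train_out paths as the commit:

    groups = ['MGMLX', 'PK504', 'PK502', 'PK500', 'MGMCL']
    base = 'LIB/MIDDLE/FaultDetection/V1_0_2/train_out'
    # One scaler/model per battery group, loaded once before the SN loop.
    scaler_dict  = {g: pickle.load(open('{}/scalerV_{}_10.pkl'.format(base, g), 'rb')) for g in groups}
    scaler2_dict = {g: pickle.load(open('{}/scalerT_{}_10.pkl'.format(base, g), 'rb')) for g in groups}
    model_dict   = {g: load_model('{}/modelV_{}_10.h5'.format(base, g)) for g in groups}
    model2_dict  = {g: load_model('{}/modelT_{}_10.h5'.format(base, g)) for g in groups}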
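The two TimedRotatingFileHandler blocks in __main__ differ only in target file and level. The duplication could be folded into a small helper; a sketch with the same rotation policy as the commit (the helper name is illustrative, not part of the change):

    def add_rotating_handler(logger, filename, level):
        # Roll over daily, keep 30 files, same suffix/extMatch as above.
        fh = logging.handlers.TimedRotatingFileHandler(
            filename=filename, when="D", interval=1, backupCount=30, encoding="utf-8")
        fh.suffix = "%Y-%m-%d_%H-%M-%S"
        fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
        fh.setFormatter(logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s"))
        fh.setLevel(level)
        logger.addHandler(fh)

    add_rotating_handler(logger, 'log/main_info.log', logging.INFO)
    add_rotating_handler(logger, 'log/main_error.log', logging.ERROR)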