qingfeng committed 2 years ago
Parent commit: 89b4d2eb79
2 files changed, 0 additions and 181 deletions
  1. 0 additions, 178 deletions
      LIB/FRONTEND/FaultDetection/main_pred.py
  2. 0 additions, 3 deletions
      LIB/FRONTEND/FaultDetection/run.bat

+ 0 - 178
LIB/FRONTEND/FaultDetection/main_pred.py

@@ -1,178 +0,0 @@
-
-from LIB.MIDDLE.FaultDetection.V1_0_2.aelstm import *
-import pymysql
-import datetime
-import pandas as pd
-from LIB.BACKEND import DBManager
-dbManager = DBManager.DBManager()
-from sqlalchemy import create_engine
-from urllib import parse
-import datetime, time
-from apscheduler.schedulers.blocking import BlockingScheduler
-import traceback
-import pickle
-from keras.models import load_model
-import logging
-import logging.handlers
-import os
-import re
-
-
# .................................. Fault-detection pass ..................................
def diag_cal():
    """Run one fault-detection pass over every SN in the global SN list.

    Pulls the last 3 hours of BMS data per SN, scores it with the
    pre-loaded per-group scalers and Keras models, merges the scores with
    any still-open rows of the ``fault_detection`` result table (rows whose
    ``end_time`` is the sentinel ``'0000-00-00 00:00:00'``), and writes
    new/updated rows back.

    Relies on module-level globals prepared in ``__main__``:
    ``SNnums``, ``scaler_dict``, ``scaler2_dict``, ``model_dict``,
    ``model2_dict``, ``dbManager`` and ``logger``.
    """
    global SNnums
    global scaler_dict, scaler2_dict, model_dict, model2_dict

    # 3-hour sliding window ending now.
    now_time = datetime.datetime.now()
    start_time = (now_time - datetime.timedelta(hours=3)).strftime('%Y-%m-%d %H:%M:%S')
    end_time = now_time.strftime('%Y-%m-%d %H:%M:%S')

    # Result-database connection settings.
    # NOTE(review): credentials are hard-coded; move to config/env secrets.
    host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port = 3306
    db = 'safety_platform'
    user = 'qx_read'
    password = 'Qx@123456'

    # Columns of the open-fault snapshot we keep in memory.
    param = 'product_id,start_time,end_time,diff_min,SOC,AnoScoreV_sum_max,AnoScoreV_max_max,AnoScoreT_sum_max,AnoScoreT_max_max'
    tablename = 'fault_detection'

    mysql = pymysql.connect(host=host, user=user, password=password, port=port, database=db)
    cursor = mysql.cursor()
    try:
        # Fetch all still-open fault rows (constants only in the format args).
        sql = "select {} from {} where end_time='0000-00-00 00:00:00'".format(param, tablename)
        cursor.execute(sql)
        rows = cursor.fetchall()
        df_diag_ram = pd.DataFrame(rows, columns=param.split(','))

        # SQLAlchemy engine used by DataFrame.to_sql for the result writes.
        db_res_engine = create_engine(
            "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
                user, parse.quote_plus(password), host, port, db
            ))

        # Score each SN independently; a failure is logged and the batch continues.
        for sn in SNnums:
            try:
                group = sn[:5]  # first 5 chars of the SN select the model group
                df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms'])
                data_bms = df_data['bms']
                data_bms['sn'] = sn
                if len(data_bms) > 0:
                    logger.info("SN: {} 数据开始预处理".format(sn))
                    data_stand = data_groups(data_bms, sn, start_time, end_time)
                    df_stand = split(data_stand)
                    if len(df_stand) > 0:
                        logger.info("SN: {} 数据开始模型预测".format(sn))
                        # Per-group scaling stats (mean/variance) produced by training.
                        scaler = scaler_dict[group]
                        scaler2 = scaler2_dict[group]
                        # Per-group trained models: voltage and temperature.
                        model = model_dict[group]
                        model2 = model2_dict[group]
                        res_pred = prediction(df_stand, scaler, scaler2, model, model2)
                        if len(res_pred) > 0:
                            df_res2, diff = threshold(res_pred, group, end_time)
                            df_diag_ram_sn = df_diag_ram[df_diag_ram['product_id'] == sn]
                            if not df_diag_ram_sn.empty:
                                # SN already has open rows: merge new scores with them.
                                new_res, update_res = arrange(df_res2, df_diag_ram_sn, start_time, diff)
                                if len(update_res) > 0:
                                    # Replace the SN's open rows with the updated ones.
                                    # Parameterized query (the original interpolated sn
                                    # into the SQL string).
                                    cursor.execute(
                                        "DELETE FROM fault_detection WHERE end_time = '0000-00-00 00:00:00' and product_id=%s",
                                        (sn,))
                                    mysql.commit()
                                    update_res.to_sql("fault_detection", con=db_res_engine, if_exists="append", index=False)
                                # Append genuinely new fault rows.
                                if len(new_res) > 0:
                                    new_res.to_sql("fault_detection", con=db_res_engine, if_exists="append", index=False)
                            else:
                                # No open rows for this SN: insert everything as new.
                                df_res2.to_sql("fault_detection", con=db_res_engine, if_exists="append", index=False)
            except Exception as e:
                logger.error(str(e))
                logger.error(traceback.format_exc())
    finally:
        # Release the result-DB connection even if setup or the query fails
        # (the original leaked it on any pre-loop exception).
        cursor.close()
        mysql.close()
-
# .................... Script entry point: one immediate run, then every 3 hours ....................
if __name__ == "__main__":

    # --- Logging: daily-rotated info and error files under log/ ---
    log_path = 'log/'
    if not os.path.exists(log_path):
        os.makedirs(log_path)
    logger = logging.getLogger("main")
    logger.setLevel(logging.DEBUG)

    def _add_timed_handler(filename, level):
        # One log file per day, keep 30 days; suffix/extMatch control the
        # rollover file naming.  Factored out of two duplicated setup blocks.
        handler = logging.handlers.TimedRotatingFileHandler(
            filename=filename, when="D", interval=1, backupCount=30, encoding="utf-8")
        handler.suffix = "%Y-%m-%d_%H-%M-%S"
        handler.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
        handler.setFormatter(logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s"))
        handler.setLevel(level)
        logger.addHandler(handler)

    _add_timed_handler('{}/main_info.log'.format(log_path), logging.INFO)
    _add_timed_handler('{}/main_error.log'.format(log_path), logging.ERROR)

    logger.info("pid is {}".format(os.getpid()))

    # --- Refresh the SN list from the device registry ---
    # NOTE(review): this host ('...zy0q7') differs from the one in diag_cal
    # ('...zy0q77o') — confirm both are intentional.  Credentials are
    # hard-coded; move to config/env secrets.
    host = 'rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
    port = 3306
    db = 'qixiang_oss'
    user = 'qixiang_oss'
    password = 'Qixiang2021'
    conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
    try:
        cursor = conn.cursor()
        cursor.execute("select sn, imei, add_time from app_device where status in (1,2,3)")
        res = cursor.fetchall()
    finally:
        # Close even if the query fails (the original leaked the connection).
        conn.close()
    df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time']).reset_index(drop=True)
    SNnums = list(df_sn['sn'])

    # --- Load per-group scalers (pickle) and Keras models (V = voltage, T = temperature) ---
    # Build the dicts directly instead of via parallel lists + positional
    # indexing, which could silently misalign group and artifact.
    scaler_dict = {}
    scaler2_dict = {}
    model_dict = {}
    model2_dict = {}
    train_out = 'LIB/MIDDLE/FaultDetection/V1_0_2/train_out/'
    for group in ['MGMLX', 'PK504', 'PK502', 'PK500', 'MGMCL']:
        # 'with' closes the pickle files (the original leaked the handles).
        with open(train_out + 'scalerV_' + group + '_10.pkl', 'rb') as f:
            scaler_dict[group] = pickle.load(f)
        with open(train_out + 'scalerT_' + group + '_10.pkl', 'rb') as f:
            scaler2_dict[group] = pickle.load(f)
        model_dict[group] = load_model(train_out + 'modelV_' + group + '_10.h5')
        model2_dict[group] = load_model(train_out + 'modelT_' + group + '_10.h5')
    logger.info("模型加载完成")

    # Run one pass immediately, then every 3 hours via a blocking scheduler.
    diag_cal()
    scheduler = BlockingScheduler()
    scheduler.add_job(diag_cal, 'interval', hours=3)

    try:
        scheduler.start()
    except Exception as e:
        scheduler.shutdown()
        logger.error(str(e))
        logger.error(traceback.format_exc())

+ 0 - 3
LIB/FRONTEND/FaultDetection/run.bat

@@ -1,3 +0,0 @@
rem Launcher for the fault-detection scheduler (main_pred.py).
rem Switch to the deployed frontend directory so the script's relative
rem paths (log/, LIB/MIDDLE/...) resolve correctly.
cd /d D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\FaultDetection
rem Name the console window so the process is easy to identify.
title cal_FaultDetection
rem Run the scheduler with the deployed Python interpreter; main_pred.py
rem blocks (APScheduler), so this window stays open while the job runs.
D:\env\py_pro\python.exe D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\FaultDetection\main_pred.py