# main_pred.py — hourly fault-class prediction service (B-board sampling failure / cell NTC drift)
  1. from LIB.MIDDLE.FaultClass.V1_0_0.faultclass import *
  2. import pymysql
  3. import datetime
  4. import pandas as pd
  5. from LIB.BACKEND import DBManager
  6. dbManager = DBManager.DBManager()
  7. from sqlalchemy import create_engine
  8. from urllib import parse
  9. import datetime, time
  10. from apscheduler.schedulers.blocking import BlockingScheduler
  11. import traceback
  12. import pickle
  13. from keras.models import load_model
  14. import logging
  15. import logging.handlers
  16. import os
  17. import re
  18. #...................................故障检测函数......................................................................................................................
  19. def diag_cal():
  20. global SNnums
  21. global scaler1,scaler2,model1,model2,col1,col2,time_steps1,time_steps2
  22. start=time.time()
  23. now_time=datetime.datetime.now()
  24. start_time=now_time-datetime.timedelta(hours=1)
  25. start_time=start_time.strftime('%Y-%m-%d %H:%M:%S')
  26. end_time=now_time.strftime('%Y-%m-%d %H:%M:%S')
  27. #数据库配置
  28. host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
  29. port=3306
  30. db='safety_platform'
  31. user='qx_read'
  32. password='Qx@123456'
  33. #读取结果库数据......................................................
  34. param='product_id,start_time,end_time,diff_min,SOC,AnoScoreV_sum_max,AnoScoreV_max_max,AnoScoreT_sum_max,AnoScoreT_max_max'
  35. tablename='fault_detection'
  36. mysql = pymysql.connect (host=host, user=user, password=password, port=port, database=db)
  37. cursor = mysql.cursor()
  38. sql = "select {} from {} where end_time='0000-00-00 00:00:00'".format(param,tablename)
  39. cursor.execute(sql)
  40. res = cursor.fetchall()
  41. df_diag_ram= pd.DataFrame(res,columns=param.split(','))
  42. db_res_engine = create_engine(
  43. "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
  44. user, parse.quote_plus(password), host, port, db
  45. ))
  46. #调用主函数................................................................................................................................................................
  47. for sn in SNnums:
  48. try:
  49. df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms'])
  50. data_bms = df_data['bms']
  51. data_bms['sn']=sn
  52. if len(data_bms)>0:
  53. logger.info("SN: {} 数据开始预处理".format(sn))
  54. data_fea1=features1(data_bms)
  55. data_fea2=features2(data_bms)
  56. logger.info("SN: {} 数据开始模型预测".format(sn))
  57. df_res1=pred(data_fea1,model1,scaler1,col1,end_time,time_steps1)
  58. df_res2=pred(data_fea2,model2,scaler2,col2,end_time,time_steps2)
  59. df_diag_ram_sn=df_diag_ram[df_diag_ram['product_id']==sn]
  60. res_new1,res_update1=arrange2(df_diag_ram_sn,df_res1,start_time,'B板采样失效')
  61. res_new2,res_update2=arrange2(df_diag_ram_sn,df_res2,start_time,'传感器_电芯NTC漂移')
  62. if len(res_update1)>0:
  63. cursor.execute("DELETE FROM fault_class WHERE end_time = '0000-00-00 00:00:00' and product_id='{}' and fault_class='{}'".format(sn,'B板采样失效'))
  64. mysql.commit()
  65. res_update1.to_sql("fault_class",con=db_res_engine, if_exists="append",index=False)
  66. res_new1.to_sql("fault_class",con=db_res_engine, if_exists="append",index=False)
  67. if len(res_update2)>0:
  68. cursor.execute("DELETE FROM fault_class WHERE end_time = '0000-00-00 00:00:00' and product_id='{}' and fault_class='{}'".format(sn,'传感器_电芯NTC漂移'))
  69. mysql.commit()
  70. res_update2.to_sql("fault_class",con=db_res_engine, if_exists="append",index=False)
  71. res_new2.to_sql("fault_class",con=db_res_engine, if_exists="append",index=False)
  72. #新增结果存入结果库................................................................
  73. # end=time.time()
  74. # print(end-start)
  75. except Exception as e:
  76. logger.error(str(e))
  77. logger.error(traceback.format_exc())
  78. cursor.close()
  79. mysql.close()
  80. #...............................................主函数起定时作用.......................................................................................................................
  81. if __name__ == "__main__":
  82. # 日志
  83. log_path = 'log/'
  84. if not os.path.exists(log_path):
  85. os.makedirs(log_path)
  86. logger = logging.getLogger("main")
  87. logger.setLevel(logging.DEBUG)
  88. # 根据日期滚动(每天产生1个文件)
  89. fh = logging.handlers.TimedRotatingFileHandler(filename='{}/main_info.log'.format(log_path), when="D", interval=1, backupCount=30,
  90. encoding="utf-8")
  91. formatter = logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s")
  92. fh.suffix = "%Y-%m-%d_%H-%M-%S"
  93. fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
  94. fh.setFormatter(formatter)
  95. fh.setLevel(logging.INFO)
  96. logger.addHandler(fh)
  97. fh = logging.handlers.TimedRotatingFileHandler(filename='{}/main_error.log'.format(log_path), when="D", interval=1, backupCount=30,
  98. encoding="utf-8")
  99. formatter = logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s")
  100. fh.suffix = "%Y-%m-%d_%H-%M-%S"
  101. fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
  102. fh.setFormatter(formatter)
  103. fh.setLevel(logging.ERROR)
  104. logger.addHandler(fh)
  105. logger.info("pid is {}".format(os.getpid()))
  106. # # 更新sn列表
  107. host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
  108. port=3306
  109. db='qixiang_oss'
  110. user='qixiang_oss'
  111. password='Qixiang2021'
  112. conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
  113. cursor = conn.cursor()
  114. cursor.execute("select sn, imei, add_time from app_device where status in (1,2,3)")
  115. res = cursor.fetchall()
  116. df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])
  117. df_sn = df_sn.reset_index(drop=True)
  118. conn.close();
  119. SNnums = list(df_sn['sn'])
  120. scaler1=pickle.load(open('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/FaultClass/V1_0_0/models/scaler_B板采样失效.pkl','rb'))
  121. scaler2=pickle.load(open('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/FaultClass/V1_0_0/models/scaler_传感器_电芯NTC漂移.pkl','rb'))
  122. model1=load_model('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/FaultClass/V1_0_0/models/model_B板采样失效.h5')
  123. model2=load_model('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/FaultClass/V1_0_0/models/model_传感器_电芯NTC漂移.h5')
  124. col1=['B板采样失效','正常']
  125. col2=['传感器_电芯NTC漂移','正常']
  126. time_steps1=60
  127. time_steps2=60
  128. logger.info("模型加载完成")
  129. diag_cal()
  130. #定时任务.......................................................................................................................................................................
  131. scheduler = BlockingScheduler()
  132. scheduler.add_job(diag_cal, 'interval', hours=1)
  133. try:
  134. scheduler.start()
  135. except Exception as e:
  136. scheduler.shutdown()
  137. logger.error(str(e))
  138. logger.error(traceback.format_exc())