# main.py
  1. from datetime import datetime
  2. from multiprocessing import Pool
  3. import json
  4. import os
  5. import time
  6. import traceback
  7. import warnings
  8. from sqlalchemy import text, delete, and_, or_, update
  9. import pandas as pd
  10. from ZlwlAlgosCommon.utils.ProUtils import *
  11. from ZlwlAlgosCommon.service.iotp.IotpAlgoService import IotpAlgoService
  12. from ZlwlAlgosCommon.service.iotp.Beans import DataField
  13. from ZlwlAlgosCommon.orm.models import *
  14. from healthscore.V_1_0_0.BatHealthScore import HealthScore
  15. # from sohdiag.V_1_0_0.SOHBatDiag import sohdiag
  16. from OffLineAlarm.V1_0_0 import off_line_warning
  17. def get_battery_info(mysql_iotp_conn):
  18. sql = "select * from ff_battery_status "
  19. t_battery = pd.read_sql(sql, mysql_iotp_conn)
  20. return t_battery
  21. def get_bat_health_info(mysql_algo_conn,sn_list):
  22. if len(sn_list) == 1:
  23. sn_tuple = f"('{sn_list[0]}')"
  24. else:
  25. sn_tuple = tuple(sn_list)
  26. sql = "select * from algo_all_fault_info_ing where sn in {}".format(sn_tuple)
  27. df_diag_ram = pd.read_sql(sql, mysql_algo_conn)
  28. sql = "SELECT * FROM algo_soh where sn in {}".format(sn_tuple)
  29. df_soh = pd.read_sql(sql, mysql_algo_conn)
  30. sql = "SELECT * FROM algo_mid_uniform_result where sn in {}".format(sn_tuple)
  31. df_uniform = pd.read_sql(sql, mysql_algo_conn)
  32. sql = "SELECT * FROM algo_mid_sorout where sn in {}".format(sn_tuple)
  33. df_sor = pd.read_sql(sql, mysql_algo_conn)
  34. return df_diag_ram, df_soh, df_uniform, df_sor
def main(process_num):
    """Worker-process entry point: consume Kafka schedule messages and run the
    day-level battery algorithms (health score, offline diagnosis), persisting
    results to the algorithm MySQL database.

    Relies on module-level globals created in ``__main__`` (``sysUtils``,
    ``logger_main``, ``log_base_path``) being inherited by the child process.

    :param process_num: index of this worker process; used only for logging.
    """
    # Outer loop: the worker must never exit. On any unexpected error the
    # middleware is re-initialised and consumption restarts.
    while(True):
        try:
            warnings.filterwarnings("ignore")
            try:
                # Preparation before invoking the algorithms.
                kafka_topic_key = 'topic_task_day_1_1'
                kafka_groupid_key = 'group_id_task_day_1_1'
                algo_list = ['healthscore', 'sohdiag','offline_diag'] # algorithms covered by this scheduler
                loggers = sysUtils.get_loggers(algo_list, log_base_path, process_num) # one logger per algorithm
                logger_main.info(f"process-{process_num}: 配置中间件")
                # MySQL: algorithm-results database
                mysql_algo_params = sysUtils.get_cf_param('mysql-algo')
                mysqlUtils = MysqlUtils()
                mysql_algo_engine, mysql_algo_Session= mysqlUtils.get_mysql_engine(mysql_algo_params)
                mysql_algo_conn = mysql_algo_engine.connect()
                # MySQL: IoT-platform database
                mysql_iotp_data = sysUtils.get_cf_param('mysql-iotp')
                mysqlUtils = MysqlUtils()
                mysql_iotp_engine, mysql_iopt_Session= mysqlUtils.get_mysql_engine(mysql_iotp_data)
                mysql_iotp_conn = mysql_iotp_engine.connect()
                # Kafka consumer delivering schedule messages
                kafka_params = sysUtils.get_cf_param('kafka')
                kafkaUtils = KafkaUtils()
                kafka_consumer = kafkaUtils.get_kafka_consumer(kafka_params, kafka_topic_key, kafka_groupid_key, client_id=kafka_topic_key)
                # HBase service (created here; not referenced later in this function)
                hbase_params = sysUtils.get_cf_param('hbase')
                iotp_service = IotpAlgoService(hbase_params=hbase_params)
                # Redis connection (created here; not referenced later in this function)
                redis_params = sysUtils.get_cf_param('redis')
                reidsUtils = RedisUtils()
                rc = reidsUtils.get_redis_conncect(redis_params)
            except Exception as e:
                # NOTE(review): a setup failure is only logged; the code below
                # then raises NameError on kafka_consumer, which is caught by
                # the outer handler and restarts the loop.
                logger_main.error(f'process-{process_num}: {e}')
                logger_main.error(f'process-{process_num}: {traceback.format_exc()}')
            # Start listening for schedule messages.
            logger_main.info(f"process-{process_num}: 监听topic {kafka_params[kafka_topic_key]}等待kafka 调度")
            for message in kafka_consumer:
                try:
                    logger_main.info(f'收到调度')
                    if mysql_algo_conn.closed:
                        mysql_algo_conn = mysql_algo_engine.connect() # re-acquire a MySQL connection from the pool
                    schedule_params = json.loads(message.value)
                    if (schedule_params is None) or (schedule_params ==''):
                        logger_main.info('{} kafka数据异常,跳过本次运算'.format(str(message.value)))
                        continue
                    # Parse the Kafka schedule parameters.
                    df_snlist = pd.DataFrame(schedule_params['snlist'])
                    df_algo_adjustable_param = pd.DataFrame([(d['algo_id'], d['param'],d['param_ai']) for d in schedule_params['adjustable_param']], columns=['algo_id', 'param','param_ai'])
                    df_algo_pack_param = json.loads(schedule_params['pack_param'][0]['param'])
                    # HACK: eval() on strings from the schedule payload — acceptable
                    # only while the Kafka topic is fully trusted/internal.
                    df_algo_pack_param = {k: eval(v) if isinstance(v, str) else v for k, v in df_algo_pack_param.items()}
                    df_algo_param = pd.DataFrame(schedule_params['algo_list'])
                    start_time = schedule_params['start_time']
                    end_time = schedule_params['end_time']
                    pack_code = schedule_params['pack_code']
                    cell_type = schedule_params['cell_type']
                    sn_list=df_snlist['sn'].tolist()
                    # Result accumulators: new / updated / finished fault rows.
                    df_res_new=pd.DataFrame()
                    df_res_update=pd.DataFrame()
                    df_res_end=pd.DataFrame()
                except Exception as e:
                    logger_main.error(f"process-{process_num}:获取mysql数据库数据出错")
                    logger_main.error(f"process-{process_num}:{e}")
                    logger_main.error(f"process-{process_num}:{traceback.format_exc()}")
                    continue
                # Read fault / health data from MySQL.
                try:
                    time_st = time.time()
                    logger_main.info(f'process-{process_num}开始读取mysql故障数据')
                    df_diag_ram,df_soh,df_uniform,df_sor=get_bat_health_info(mysql_algo_conn,sn_list)
                    logger_main.info(f'process-{process_num}读取mysql耗时{time.time()-time_st}')
                except Exception as e:
                    # NOTE(review): the message text says "redis" but this block
                    # reads MySQL — likely a copy-paste slip in the log string.
                    logger_main.error(f"process-{process_num}:读取redis出错")
                    logger_main.error(f"process-{process_num}:{e}")
                    logger_main.error(f"process-{process_num}:{traceback.format_exc()}")
                    continue
                # Algorithm invocation.
                # Health-score algorithm: one score row per SN, concatenated.
                try:
                    time_st = time.time()
                    loggers['healthscore'].info('开始执行算法')
                    healthscole = HealthScore(df_soh, df_uniform, df_sor)
                    df_res_healthscore = df_snlist['sn'].apply(lambda x : healthscole.health_score(x))
                    df_res_healthscore = pd.concat(df_res_healthscore.tolist(), axis=0)
                    loggers['healthscore'].info(f'算法运行完成,算法耗时{time.time()-time_st}')
                except Exception as e:
                    loggers['healthscore'].error('算法运行出错')
                    loggers['healthscore'].error(str(e))
                    loggers['healthscore'].error(traceback.format_exc())
                    # Fall back to an empty frame so the write stage can proceed.
                    df_res_healthscore=pd.DataFrame()
                # SOH diagnosis algorithm (currently disabled).
                # try:
                # time_st = time.time()
                # loggers['sohdiag'].info('开始执行算法')
                # df_res_new_soh, df_res_end_soh = sohdiag(df_soh, df_diag_ram, df_sn_process, df_adjustable_param)
                # loggers['sohdiag'].info(f'算法运行完成,算法耗时{time.time()-time_st}')
                # except Exception as e:
                # loggers['sohdiag'].error('算法运行出错')
                # loggers['sohdiag'].error(str(e))
                # loggers['sohdiag'].error(traceback.format_exc())
                # Offline (device off-line) diagnosis algorithm.
                try:
                    time_st = time.time()
                    loggers['offline_diag'].info('开始执行算法')
                    t_battery=get_battery_info(mysql_iotp_conn)
                    offline_diag=off_line_warning.Off_Line_Warning()
                    df_res_new_ofl,df_res_update_ofl,df_res_end_ofl=offline_diag.diag(t_battery,df_diag_ram,df_algo_adjustable_param,df_snlist,df_algo_param)
                    loggers['offline_diag'].info(f'算法运行完成,算法耗时{time.time()-time_st}')
                except Exception as e:
                    loggers['offline_diag'].error('算法运行出错')
                    loggers['offline_diag'].error(str(e))
                    loggers['offline_diag'].error(traceback.format_exc())
                    # Empty fallbacks so the write stage below stays runnable.
                    df_res_new_ofl=pd.DataFrame()
                    df_res_update_ofl=pd.DataFrame()
                    df_res_end_ofl=pd.DataFrame()
                # Write results to MySQL.
                try:
                    # Only the offline-diag results are live; the concat forms
                    # are kept commented for when other algorithms return.
                    df_res_new =df_res_new_ofl #pd.concat([df_res_new_ofl,df_res_new_soh]) #, res1
                    df_res_update=df_res_update_ofl#df_res_update_lw_soc#pd.concat([df_res_update_lw_soc,df_res_update_crnt, df_res_update_temp]) #, res1
                    df_res_end = df_res_end_ofl#pd.concat([df_res_end_ofl,df_res_end_soh]) #, res2
                    df_res_new.reset_index(drop=True, inplace=True)
                    df_res_update.reset_index(drop=True, inplace=True)
                    df_res_end.reset_index(drop=True, inplace=True)
                    time_st = time.time()
                    # NOTE(review): raw-string session.execute() below relies on
                    # SQLAlchemy 1.x implicit text(); 2.0 requires text() — confirm
                    # the pinned SQLAlchemy version before upgrading.
                    session = mysql_algo_Session()
                    if not df_res_healthscore.empty:
                        df_res_healthscore.to_sql("algo_health_score",con=mysql_algo_conn, if_exists="append",index=False)
                    if not df_res_new.empty:
                        # Stamp audit columns before appending new open faults.
                        df_res_new['date_info'] = df_res_new['start_time']
                        df_res_new['create_time'] = datetime.now()
                        df_res_new['create_by'] = 'algo'
                        df_res_new['is_delete'] = 0
                        df_res_new.to_sql("algo_all_fault_info_ing", con=mysql_algo_conn, if_exists="append", index=False)
                        logger_main.info(f'process-{process_num}新增未结束故障入库{pack_code}完成')
                    if not df_res_end.empty:
                        # NOTE(review): the where(...; None) result is immediately
                        # overwritten by fillna(0), so NaNs end up as 0, not None.
                        df_res_end=df_res_end.where(pd.notnull(df_res_end),None)
                        df_res_end=df_res_end.fillna(0)
                        for index in df_res_end.index:
                            df_t = df_res_end.loc[index:index]
                            # Move each finished fault: delete from the "ing"
                            # table, insert into the "done" table.
                            sql = 'delete from algo_all_fault_info_ing where start_time=:start_time and fault_code=:fault_code and sn=:sn'
                            params = {'start_time': df_t['start_time'].values[0],
                                      'fault_code': df_t['fault_code'].values[0], 'sn': df_t['sn'].values[0]}
                            session.execute(sql, params=params)
                            sql = 'insert into algo_all_fault_info_done (date_info, start_time, end_time, sn, imei, model, fault_level, fault_code, fault_info,\
                                   fault_reason, fault_advice, fault_location, device_status,odo, create_time, create_by,update_time, update_by, is_delete,comment) values \
                                   (:date_info, :start_time, :end_time, :sn, :imei, :model,:fault_level, :fault_code, :fault_info,\
                                   :fault_reason, :fault_advice, :fault_location, :device_status, :odo, :create_time, :create_by, :update_time,:update_by, :is_delete , :comment)'
                            params = {'date_info': datetime.now(),
                                      'start_time': df_t['start_time'].values[0],
                                      'end_time': df_t['end_time'].values[0],
                                      'sn': df_t['sn'].values[0],
                                      'imei': df_t['imei'].values[0],
                                      'model' :pack_code,
                                      'fault_level': df_t['fault_level'].values[0],
                                      'fault_code': df_t['fault_code'].values[0],
                                      'fault_info': df_t['fault_info'].values[0],
                                      'fault_reason': df_t['fault_reason'].values[0],
                                      'fault_advice': df_t['fault_advice'].values[0],
                                      'fault_location': df_t['fault_location'].values[0],
                                      'device_status': df_t['device_status'].values[0],
                                      'odo': df_t['odo'].values[0],
                                      'create_time': datetime.now(),
                                      'create_by': 'algo',
                                      'update_time': datetime.now(),
                                      'update_by': None,
                                      'is_delete': 0,
                                      'comment': None}
                            session.execute(sql, params=params)
                        session.commit()
                        logger_main.info(f'process-{process_num}结束故障入库{pack_code}完成')
                    if not df_res_update.empty:
                        df_res_update=df_res_update.where(pd.notnull(df_res_update),None)
                        df_res_update=df_res_update.fillna(0)
                        for index in df_res_update.index:
                            df_t = df_res_update.loc[index:index]
                            try:
                                # Update level/comment of the still-open fault row.
                                with mysql_algo_Session() as session:
                                    session.execute(update(AlgoAllFaultInfoIng).where(
                                        and_((AlgoAllFaultInfoIng.start_time == df_t['start_time'].values[0]),
                                             (AlgoAllFaultInfoIng.fault_code == df_t['fault_code'].values[0]),
                                             (AlgoAllFaultInfoIng.sn == df_t['sn'].values[0]))).
                                        values(fault_level=df_t['fault_level'].values[0],
                                               comment=df_t['comment'].values[0]))
                                    session.commit()
                            except Exception as e:
                                logger_main.error(f"process-{process_num}:{pack_code}结果入库出错")
                                logger_main.error(f"process-{process_num}:{e}")
                                logger_main.error(f"process-{process_num}:{traceback.format_exc()}")
                            finally:
                                session.close()
                        logger_main.info(f"process-{process_num}: 更新入库完成")
                    else:
                        logger_main.info(f"process-{process_num}: 无更新故障")
                    logger_main.info(f"process-{process_num}: 结果入库耗时:{time.time()-time_st}")
                except Exception as e:
                    logger_main.error(f"process-{process_num}:结果入库出错")
                    logger_main.error(f"process-{process_num}:{e}")
                    logger_main.error(f"process-{process_num}:{traceback.format_exc()}")
                finally:
                    pass
        except Exception as e:
            # Last-resort handler: log and loop back to re-initialise everything.
            logger_main.error(f'process-{process_num}: {e}')
            logger_main.error(f'process-{process_num}: {traceback.format_exc()}')
  239. if __name__ == '__main__':
  240. while(True):
  241. try:
  242. # 配置量
  243. cur_env = 'dev' # 设置运行环境
  244. app_path = "/home/shouxueqi/projects/zlwl-algos/" # 设置app绝对路径
  245. log_base_path = f"{os.path.dirname(os.path.abspath(__file__))}/log" # 设置日志路径
  246. app_name = "task_day_1_1" # 应用名
  247. sysUtils = SysUtils(cur_env, app_path)
  248. logger_main = sysUtils.get_logger(app_name, log_base_path)
  249. logger_main.info(f"本次主进程号: {os.getpid()}")
  250. # 读取配置文件 (该部分请不要修改)
  251. processes = int(sysUtils.env_params.get("PROCESS_NUM_PER_NODE", '1')) # 默认为1个进程
  252. pool = Pool(processes = int(processes))
  253. logger_main.info("开始分配子进程")
  254. for i in range(int(processes)):
  255. pool.apply_async(main, (i, ))
  256. pool.close()
  257. logger_main.info("进程分配结束,堵塞主进程")
  258. pool.join()
  259. except Exception as e:
  260. print(str(e))
  261. print(traceback.format_exc())
  262. logger_main.error(str(e))
  263. logger_main.error(traceback.format_exc())
  264. finally:
  265. handlers = logger_main.handlers.copy()
  266. for h in handlers:
  267. logger_main.removeHandler(h)
  268. pool.terminate()