from datetime import datetime from multiprocessing import Pool import json import os import time import traceback import warnings from sqlalchemy import text, delete, and_, or_, update import pandas as pd from ZlwlAlgosCommon.utils.ProUtils import * from ZlwlAlgosCommon.service.iotp.IotpAlgoService import IotpAlgoService from ZlwlAlgosCommon.service.iotp.Beans import DataField from ZlwlAlgosCommon.orm.models import * from safetyalarm.V_1_0_0.CBMSSafetyAlarm import SafetyAlarm from ChargeRemainder.V_1_0_0.AlgoChargeRemainder import ChargeRemainder def main(process_num): # 程序不能停止 while(True): try: warnings.filterwarnings("ignore") try: # 调用算法前的准备工作 cleanUtils = CleanUtils() mysql_algo_conn = None mysql_algo_engine = None mysql_iotp_conn = None mysql_iotp_engine= None kafka_consumer = None rc= None kafka_topic_key = 'topic_test_sxq' kafka_groupid_key = 'group_id_test_sxq' algo_list = ['safety_alarm', 'charge_remainder'] # 本调度所包含的算法名列表。 loggers = sysUtils.get_loggers(algo_list, log_base_path, process_num) # 为每个算法分配一个logger logger_main.info(f"process-{process_num}: 配置中间件") # mysql mysql_algo_params = sysUtils.get_cf_param('mysql-algo') mysqlUtils = MysqlUtils() mysql_algo_engine, mysql_algo_Session= mysqlUtils.get_mysql_engine(mysql_algo_params) mysql_algo_conn = mysql_algo_engine.connect() # kafka kafka_params = sysUtils.get_cf_param('kafka') kafkaUtils = KafkaUtils() kafka_consumer = kafkaUtils.get_kafka_consumer(kafka_params, kafka_topic_key, kafka_groupid_key, client_id=kafka_topic_key) #Hbase hbase_params = sysUtils.get_cf_param('hbase') iotp_service = IotpAlgoService(hbase_params=hbase_params) #redis redis_params = sysUtils.get_cf_param('redis') reidsUtils = RedisUtils() rc = reidsUtils.get_redis_conncect(redis_params) except Exception as e: logger_main.error(str(e)) logger_main.error(traceback.format_exc()) cleanUtils.clean(mysql_algo_conn, mysql_algo_engine, mysql_iotp_conn, mysql_iotp_engine, kafka_consumer, rc) # 开始准备调度 logger_main.info(f"process-{process_num}: 监听topic {kafka_params[kafka_topic_key]}等待kafka 调度") for message in kafka_consumer: try: logger_main.info(f'收到调度') if not mysql_algo_conn.closed: mysql_algo_conn.close() mysql_algo_conn = mysql_algo_engine.connect() # 从连接池中获取一个myslq连接 schedule_params = json.loads(message.value) if (schedule_params is None) or (schedule_params ==''): logger_main.info('{} kafka数据异常,跳过本次运算'.format(str(message.value))) continue # kafka 调度参数解析 df_snlist = pd.DataFrame(schedule_params['snlist']) sn_list=df_snlist['sn'].tolist() pack_code = schedule_params['pack_code'] # df_algo_adjustable_param = pd.DataFrame([(d['algo_id'], d['param'],d['param_ai']) for d in schedule_params['adjustable_param']], columns=['algo_id', 'param','param_ai']) df_algo_pack_param = json.loads(schedule_params['pack_param'][0]['param']) df_algo_list = pd.DataFrame(schedule_params['algo_list']) start_time = schedule_params['start_time'] end_time = schedule_params['end_time'] # cell_type = schedule_params['cell_type'] # 取数 time_st = time.time() logger_main.info(f"process-{process_num}: 开始取数,{start_time} ~ {end_time}\n{str(sn_list)}") columns = [ DataField.time, DataField.sn, DataField.pack_crnt, DataField.pack_volt, DataField.pack_soc, DataField.cell_voltage_count, DataField.cell_temp_count, DataField.cell_voltage, DataField.cell_temp, DataField.other_temp_value, DataField.bms_sta] df_data = iotp_service.get_data(sn_list=sn_list, columns=columns, start_time=start_time, end_time=end_time) logger_main.info(f'process-{process_num},获取到{len(df_data)}条数据,取数耗时:{time.time()-time_st}') except Exception as e: logger_main.error(f"process-{process_num}-{pack_code}:获取原始数据出错{sn_list}") logger_main.error(f"process-{process_num}:{e}") logger_main.error(f"process-{process_num}:{traceback.format_exc()}") continue # 数据清洗 try: time_st = time.time() logger_main.info(f'process-{process_num}数据清洗') if len(df_data) == 0: logger_main.info(f"process-{process_num}: 无有效数据,跳过本次运算") continue df_data, df_table, cellvolt_name, celltemp_name = iotp_service.datacleaning(df_algo_pack_param,df_data)#进行数据清洗 if len(df_data) == 0: logger_main.info(f"process-{process_num}: 数据清洗耗时{time.time()-time_st}, 无有效数据,跳过本次运算") continue else: logger_main.info(f"process-{process_num}: {pack_code}, time_type:{df_data.loc[0, 'time']} ~ {df_data.iloc[-1]['time']}, 数据清洗完成耗时{time.time()-time_st}") except Exception as e: logger_main.error(f"process-{process_num}:数据清洗出错{sn_list}") logger_main.error(f"process-{process_num}:{e}") logger_main.error(f"process-{process_num}:{traceback.format_exc()}") continue # 算法缓存数据获取 try: time_st = time.time() logger_main.info(f'process-{process_num}开始读取redis') df_bms_ram = pd.DataFrame(columns=['time', 'sn', 'packvolt', 'cellvolt', 'celltemp', 'packsoc','packcrnt']) df_alarm_ram = pd.DataFrame(columns=['time','sn','safetywarning1','safetywarning2']) for sn in sn_list: redis_ram_data = rc.get("Algo:FaultDiag:SafetyAlarm:df_bms_ram:{}".format(sn)) if pd.isnull(redis_ram_data): pass else: df_bms_ram1 = pd.read_json(redis_ram_data) df_bms_ram = pd.concat([df_bms_ram, df_bms_ram1]) redis_ram_data = rc.get("Algo:FaultDiag:SafetyAlarm:df_alarm_ram:{}".format(sn)) if pd.isnull(redis_ram_data): pass else: df_alarm_ram1 = pd.read_json(redis_ram_data) df_alarm_ram = pd.concat([df_alarm_ram, df_alarm_ram1]) logger_main.info(f'process-{process_num}读取redis耗时{time.time()-time_st}') except Exception as e: logger_main.error(f"process-{process_num}:读取redis出错{sn_list}") logger_main.error(f"process-{process_num}:{e}") logger_main.error(f"process-{process_num}:{traceback.format_exc()}") continue # mysql数据读取 try: time_st = time.time() logger_main.info(f'process-{process_num}开始读取mysql故障数据') sql = "select start_time,end_time,sn,imei,model,fault_level,fault_code,fault_info,fault_reason,fault_advice,fault_location,device_status,odo \ from algo_all_fault_info_ing where fault_code='{}' or fault_code='{}'".format('C599','C590') df_diag_ram = pd.read_sql(sql, mysql_algo_conn) logger_main.info(f'process-{process_num}读取mysql耗时{time.time()-time_st}') except Exception as e: logger_main.error(f"process-{process_num}:读取redis出错{sn_list}") logger_main.error(f"process-{process_num}:{e}") logger_main.error(f"process-{process_num}:{traceback.format_exc()}") continue # 算法调用 try: time_st = time.time() loggers['safety_alarm'].info(f'开始执行算法{pack_code}') safety_alarm = SafetyAlarm(df_data, df_diag_ram, df_bms_ram, df_alarm_ram, df_algo_list, df_table, cellvolt_name, celltemp_name, pack_code, df_snlist) df_res_new, df_res_end, df_bms_ram, df_alarm_ram = safety_alarm.safety_alarm_diag() loggers['safety_alarm'].info(f'算法运行完成{pack_code},算法耗时{time.time()-time_st}') except Exception as e: loggers['safety_alarm'].error(f'算法运行出错{sn_list}') loggers['safety_alarm'].error(str(e)) loggers['safety_alarm'].error(traceback.format_exc()) df_res_new=pd.DataFrame() df_res_end=pd.DataFrame() df_bms_ram=pd.DataFrame() df_alarm_ram=pd.DataFrame() try: #充电剩余时间预测,只计算重卡 if pack_code in ('JX18020', 'JX19220'): time_st = time.time() df_chargeremainder = df_data[['sn', 'pack_soc', 'pack_crnt', 'cell_temp_max', 'cell_temp_min', 'cell_volt_max', 'cell_volt_min', 'time']] chargeremainder = ChargeRemainder(df_chargeremainder, pack_code) df_results = chargeremainder.process() df_results.insert(loc=0, column='pack', value=pack_code) df_results.to_sql("algo_charge_remainder", con=mysql_algo_conn, if_exists="append", index=False) loggers['charge_remainder'].info(f'算法运行完成{pack_code},算法耗时{time.time()-time_st}') except Exception as e: loggers['charge_remainder'].error(f'算法运行出错{sn_list}') loggers['charge_remainder'].error(str(e)) loggers['charge_remainder'].error(traceback.format_exc()) #更新redis try: time_st = time.time() logger_main.info(f'process-{process_num}开始更新redis数据') df_bms_ram.groupby('sn').apply(lambda x : rc.set("Algo:FaultDiag:SafetyAlarm:df_bms_ram:{}".format(x['sn'].values[0]), json.dumps(x.to_dict()), ex=24*3600)) df_alarm_ram.groupby('sn').apply(lambda x : rc.set("Algo:FaultDiag:SafetyAlarm:df_alarm_ram:{}".format(x['sn'].values[0]), json.dumps(x.to_dict()), ex=24*3600)) logger_main.info(f'process-{process_num}更新redis数据耗时{time.time()-time_st}') except Exception as e: logger_main.error(f"process-{process_num}:更新redis出错{sn_list}") logger_main.error(f"process-{process_num}:{e}") logger_main.error(f"process-{process_num}:{traceback.format_exc()}") #结果写入mysql try: session = mysql_algo_Session() if not df_res_end.empty: df_res_end=df_res_end.where(pd.notnull(df_res_end),None) df_res_end=df_res_end.fillna(0) for index in df_res_end.index: df_t = df_res_end.loc[index:index] sql = 'delete from algo_all_fault_info_ing where start_time=:start_time and fault_code=:fault_code and sn=:sn' params = {'start_time': df_t['start_time'].values[0], 'fault_code': df_t['fault_code'].values[0], 'sn': df_t['sn'].values[0]} session.execute(sql, params=params) sql = 'insert into algo_all_fault_info_done (date_info, start_time, end_time, sn, imei, model, fault_level, fault_code, fault_info,\ fault_reason, fault_advice, fault_location, device_status,odo, create_time, create_by,update_time, update_by, is_delete,comment) values \ (:date_info, :start_time, :end_time, :sn, :imei, :model,:fault_level, :fault_code, :fault_info,\ :fault_reason, :fault_advice, :fault_location, :device_status, :odo, :create_time, :create_by, :update_time,:update_by, :is_delete , :comment)' params = {'date_info': datetime.now(), 'start_time': df_t['start_time'].values[0], 'end_time': df_t['end_time'].values[0], 'sn': df_t['sn'].values[0], 'imei': df_t['imei'].values[0], 'model' :pack_code, 'fault_level': df_t['fault_level'].values[0], 'fault_code': df_t['fault_code'].values[0], 'fault_info': df_t['fault_info'].values[0], 'fault_reason': df_t['fault_reason'].values[0], 'fault_advice': df_t['fault_advice'].values[0], 'fault_location': df_t['fault_location'].values[0], 'device_status': df_t['device_status'].values[0], 'odo': df_t['odo'].values[0], 'create_time': datetime.now(), 'create_by': 'algo', 'update_time': datetime.now(), 'update_by': None, 'is_delete': 0, 'comment': None} session.execute(sql, params=params) session.commit() logger_main.info(f'process-{process_num}结束故障入库{pack_code}完成') else: pass if not df_res_new.empty: # 如果该故障未结束且不存在于ing,则写入ing df_res_new['date_info'] = datetime.now() df_res_new['create_time'] = datetime.now() df_res_new['create_by'] = 'algo' df_res_new['update_by'] = None df_res_new['is_delete'] = 0 df_res_new.to_sql("algo_all_fault_info_ing", con=mysql_algo_conn, if_exists="append", index=False) logger_main.info(f'process-{process_num}新增故障入库{pack_code}完成') else: pass except Exception as e: logger_main.error(f"process-{process_num}:结果入库出错{sn_list}") logger_main.error(f"process-{process_num}:{e}") logger_main.error(f"process-{process_num}:{traceback.format_exc()}") finally: df_data = None session.close() except Exception as e: logger_main.error(f'process-{process_num}: {e}') logger_main.error(f'process-{process_num}: {traceback.format_exc()}') cleanUtils.clean(mysql_algo_conn, mysql_algo_engine, mysql_iotp_conn, mysql_iotp_engine, kafka_consumer, rc) if __name__ == '__main__': while(True): try: # 配置量 cur_env = 'dev' # 设置运行环境 app_path = "/home/shouxueqi/projects/zlwl-algos/zlwl-algos/" # 设置app绝对路径 log_base_path = f"{os.path.dirname(os.path.abspath(__file__))}/log" # 设置日志路径 app_name = "task_min_5" # 应用名 sysUtils = SysUtils(cur_env, app_path) logger_main = sysUtils.get_logger(app_name, log_base_path) logger_main.info(f"本次主进程号: {os.getpid()}") # 读取配置文件 (该部分请不要修改) processes = int(sysUtils.env_params.get("PROCESS_NUM_PER_NODE", '1')) # 默认为1个进程 pool = Pool(processes = int(processes)) logger_main.info("开始分配子进程") for i in range(int(processes)): pool.apply_async(main, (i, )) pool.close() logger_main.info("进程分配结束,堵塞主进程") pool.join() except Exception as e: print(str(e)) print(traceback.format_exc()) logger_main.error(str(e)) logger_main.error(traceback.format_exc()) finally: handlers = logger_main.handlers.copy() for h in handlers: logger_main.removeHandler(h) pool.terminate()