@@ -1,92 +1,58 @@
-
-__author__ = 'lmstack'
-#coding=utf-8
-import os
+from LIB.MIDDLE.InfoChrgDrive.Charge.V1_0_0.coreV0 import *
+import pymysql
 import datetime
 import pandas as pd
 from LIB.BACKEND import DBManager
+dbManager = DBManager.DBManager()
 from sqlalchemy import create_engine
-import time, datetime
-import traceback
-from LIB.MIDDLE.InfoChrgDrive.Charge.V1_0_0.coreV0 import *
-
 from urllib import parse
-import pymysql
+import datetime, time
 from apscheduler.schedulers.blocking import BlockingScheduler
+import os
+import traceback
 import logging
 import logging.handlers
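+# NOTE: `re` and `joblib` are referenced further down (log rotation pattern,
+# kmeans model loading) without an explicit import here; they are assumed to be
+# re-exported by the wildcard import from coreV0 above.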
-#...................................battery pack cell safety diagnosis function......................................................................................................................
-def info_charge():
-
-    # logging
-    log_path = 'log/'
-    if not os.path.exists(log_path):
-        os.makedirs(log_path)
-    logger = logging.getLogger("main")
-    logger.setLevel(logging.DEBUG)
-
-    # rotate by date (one new file per day)
-    fh = logging.handlers.TimedRotatingFileHandler(filename='{}/main_info.log'.format(log_path), when="D", interval=1, backupCount=30,
-                                                   encoding="utf-8")
-    formatter = logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s")
-    fh.suffix = "%Y-%m-%d_%H-%M-%S"
-    fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
-    fh.setFormatter(formatter)
-    fh.setLevel(logging.INFO)
-    logger.addHandler(fh)
+#...................................charging metrics statistics function......................................................................................................................
+def diag_cal():
+    global SNnums
+    global kmeans1,kmeans2,kmeans3,kmeans4,kmeans5
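+    # SNnums and the kmeans1..kmeans5 models are module-level globals that are
+    # initialised in the __main__ block below before diag_cal() is first called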
-    fh = logging.handlers.TimedRotatingFileHandler(filename='{}/main_error.log'.format(log_path), when="D", interval=1, backupCount=30,
-                                                   encoding="utf-8")
-    formatter = logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s")
-    fh.suffix = "%Y-%m-%d_%H-%M-%S"
-    fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
-    fh.setFormatter(formatter)
-    fh.setLevel(logging.ERROR)
-    logger.addHandler(fh)
-
-    logger.info(os.getpid())
-
-    # refresh the SN list
-    host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
+    start=time.time()
+    now_time=datetime.datetime.now()
+    start_time=now_time-datetime.timedelta(hours=24)
+    start_time=start_time.strftime('%Y-%m-%d %H:%M:%S')
+    end_time=now_time.strftime('%Y-%m-%d %H:%M:%S')
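+    # each run looks back over the previous 24 hours of data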
+
+    #database configuration
+    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
     port=3306
-    db='qixiang_oss'
-    user='qixiang_oss'
-    password='Qixiang2021'
-    conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
-    cursor = conn.cursor()
-    cursor.execute("select sn, imei, add_time from app_device where status in (1,2,3)")
+    db='safety_platform'
+    user='qx_algo_rw'
+    password='qx@123456'
+
+    #read data from the result database......................................................
+    param='sn,time_st,time_end,status,delta_time,soc_st,soc_end,volt_st,volt_end,diffvolt_st,diffvolt_end, \
+           temp_max,temp_min,temp_incr,temp_mean,temp_st_mean,temp_end_mean,difftem_max,meancrnt,max_meancrnt, \
+           sts_flg,full_chrg_flg,ovchrg_flg,ovchrg_prop,gps_lon,gps_lat,standtime_f,standtime_b,city,airtemp_st,airtemp_end,charge_env'
+    tablename='algo_charge_info'
+    mysql = pymysql.connect(host=host, user=user, password=password, port=port, database=db)
+    cursor = mysql.cursor()
+    sql = "select %s from %s where time_end='0000-00-00 00:00:00'" % (param, tablename)
+    cursor.execute(sql)
     res = cursor.fetchall()
-    df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])
-    df_sn = df_sn.reset_index(drop=True)
-    cursor.close()
-    conn.close()
-    SNnums = list(df_sn['sn'])
-
-    host2='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
-    port2=3306
-    db2='safety_platform'
-    user2='qx_algo_rw'
-    password2='qx@123456'
+    df_diag_ram= pd.DataFrame(res,columns=param.split(','))
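+    # time_end = '0000-00-00 00:00:00' is the placeholder for charge sessions that
+    # had not yet ended when results were last written; these open sessions are
+    # presumably the ones re-examined on this run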
+
     db_res_engine = create_engine(
         "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
-            user2, parse.quote_plus(password2), host2, port2, db2
+            user, parse.quote_plus(password), host, port, db
         ))
-
-    conn = pymysql.connect(host=host2, port=port2, user=user2, password=password2, database=db2)
-    cursor = conn.cursor()
-
-    df_diag_ram=pd.read_sql("select * from algo_charge_info", db_res_engine)
-
-    now_time=datetime.datetime.now()
-    start_time=now_time-datetime.timedelta(hours=24)
-    start_time=start_time.strftime('%Y-%m-%d %H:%M:%S')
-    end_time=now_time.strftime('%Y-%m-%d %H:%M:%S')
-
+
+
     for sn in SNnums:
         try:
-            logger.info("pid-{} SN: {} START!".format(os.getpid(), sn))
+
             #read raw data from the source database........................................................................................................................................................
             dbManager = DBManager.DBManager()
             df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms','gps'])
@@ -94,8 +60,9 @@ def info_charge():
             df_gps = df_data['gps']
             #read city weather data........................................................................................................................................................
-            gpscity=pd.read_csv('gps.csv')
+            gpscity=pd.read_csv('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/InfoChrgDrive/Charge/gps.csv')
+
            #call the main processing function................................................................................................................................................................
            if not df_bms.empty: #BMS data is not empty
@@ -112,39 +79,88 @@ def info_charge():
                 time_end=chrg_last['time_end']
                 df_diag_new,df_diag_change=pro_output(df_merge,sn,gpscity,chrg_last)
-                kmeans1 = joblib.load('kmeans1.pkl')
-                kmeans2 = joblib.load('kmeans2.pkl')
-                kmeans3 = joblib.load('kmeans3.pkl')
-                df_diag_new=prediction(df_diag_new,kmeans1,kmeans2,kmeans3)
-                df_diag_change=prediction(df_diag_change,kmeans1,kmeans2,kmeans3)
+
+                df_diag_new=prediction(df_diag_new,kmeans1,kmeans2,kmeans3,kmeans4,kmeans5)
+                df_diag_change=prediction(df_diag_change,kmeans1,kmeans2,kmeans3,kmeans4,kmeans5)
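+                # changed records are handled as delete-then-append below: the old rows
+                # for this sn and time_end are removed and the recomputed rows re-inserted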
                 if not df_diag_change.empty: #results to be updated are not empty
                     cursor.execute("DELETE FROM algo_charge_info WHERE time_end = '{}' and sn='{}'".format(time_end,sn))
+                    mysql.commit()
                     df_diag_change.to_sql("algo_charge_info",con=db_res_engine, if_exists="append",index=False)
-                    logger.info(u"{} 更新成功!!!\n".format(sn), exc_info=True)
                 #store new results in the result database.....................................................................
                 if not df_diag_new.empty: #results to be added are not empty
-                    df_diag_new.to_sql("algo_charge_info",con=db_res_engine, if_exists="append",index=False)
-                    logger.info(u"{} 写入成功!!!\n".format(sn), exc_info=True)
+                    df_diag_new.to_sql("algo_charge_info",con=db_res_engine, if_exists="append",index=False)
+
+            end=time.time()
+            print(end-start)
         except Exception as e:
-            logger.error(u"{} :{},{} 任务运行错误\n".format(sn,start_time,end_time), exc_info=True)
-            logger.error(traceback.format_exc)
-
-    cursor.close()
-    conn.close()
+            logger.error(str(e))
+            logger.error(traceback.format_exc())
+    cursor.close()
+    mysql.close()
+#...............................................main block: runs the job on a schedule.......................................................................................................................
 if __name__ == "__main__":
+    # logging
+    log_path = 'log/'
+    if not os.path.exists(log_path):
+        os.makedirs(log_path)
+    logger = logging.getLogger("main")
+    logger.setLevel(logging.DEBUG)
+
+    # rotate by date (one new file per day)
+    fh = logging.handlers.TimedRotatingFileHandler(filename='{}/main_info.log'.format(log_path), when="D", interval=1, backupCount=30,
+                                                   encoding="utf-8")
+    formatter = logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s")
+    fh.suffix = "%Y-%m-%d_%H-%M-%S"
+    fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
+    fh.setFormatter(formatter)
+    fh.setLevel(logging.INFO)
+    logger.addHandler(fh)
-    info_charge()
+    fh = logging.handlers.TimedRotatingFileHandler(filename='{}/main_error.log'.format(log_path), when="D", interval=1, backupCount=30,
+                                                   encoding="utf-8")
+    formatter = logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s")
+    fh.suffix = "%Y-%m-%d_%H-%M-%S"
+    fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}")
+    fh.setFormatter(formatter)
+    fh.setLevel(logging.ERROR)
+    logger.addHandler(fh)
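+    # the two handlers write INFO-and-above and ERROR-and-above records to separate
+    # daily-rotated files (main_info.log / main_error.log), keeping 30 rotated files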
+
+    logger.info("pid is {}".format(os.getpid()))
+
+    # refresh the SN list
+    host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
+    port=3306
+    db='qixiang_oss'
+    user='qixiang_oss'
+    password='Qixiang2021'
+    conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
+    cursor = conn.cursor()
+    cursor.execute("select sn, imei, add_time from app_device where status in (1,2,3)")
+    res = cursor.fetchall()
+    df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])
+    df_sn = df_sn.reset_index(drop=True)
+    conn.close()
+
+    SNnums = list(df_sn['sn'])
+
+    kmeans1 = joblib.load('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/InfoChrgDrive/Charge/kmeans1.pkl')
+    kmeans2 = joblib.load('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/InfoChrgDrive/Charge/kmeans2.pkl')
+    kmeans3 = joblib.load('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/InfoChrgDrive/Charge/kmeans3.pkl')
+    kmeans4 = joblib.load('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/InfoChrgDrive/Charge/kmeans4.pkl')
+    kmeans5 = joblib.load('D:/deploy/python_platform/data_analyze_platform/LIB/MIDDLE/InfoChrgDrive/Charge/kmeans5.pkl')
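+    # the five kmeans models are loaded once at startup and shared with every
+    # scheduled diag_cal() run through the globals declared at the top of the function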
+
+    diag_cal()
     #scheduled task.......................................................................................................................................................................
     scheduler = BlockingScheduler()
-    scheduler.add_job(info_charge, 'interval', hours=24)
+    scheduler.add_job(diag_cal, 'interval', hours=24)
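+    # diag_cal() runs once immediately at startup; the scheduler then repeats it every 24 hours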
     try:
         scheduler.start()
     except Exception as e:
         scheduler.shutdown()
-        print(e)
-
+        logger.error(str(e))
+        logger.error(traceback.format_exc())