lmstack 3 years ago
parent
commit
bba814f7b5

+ 3 - 0
.gitignore

@@ -20,5 +20,8 @@
 *.log.*
 !数据分析平台手册.doc
 *.pyc
+*.csv
+*.xlsx
+*.xls
 
 

+ 34 - 28
LIB/BACKEND/DBManager.py

@@ -124,21 +124,25 @@ class DBManager():
         for i in range(OtherT_Num):
             CellU.append(data['ffBatteryStatus']['otherTempList'][i])
         if mode == 0:
-            data_len = 15
+            data_len = 16
             
-            data_block = np.array([data['info']['obdTime'],data['ffBatteryStatus']['rssi'],data['ffBatteryStatus']['errorLevel'],data['ffBatteryStatus']['errorCode']
-                    ,data['ffBatteryStatus']['current'],data['ffBatteryStatus']['voltageInner'],data['ffBatteryStatus']['voltageOutter'],
-                    data['ffBatteryStatus']['totalOutputState'],data['ffBatteryStatus']['lockedState'],
-                    data['ffBatteryStatus']['chargeState'],data['ffBatteryStatus']['heatState'],data['ffBatteryStatus']['cellVoltageDiff']
-                    ,data['ffBatteryStatus']['soc'],data['ffBatteryStatus']['soh'],data['ffBatteryStatus']['cellVolBalance']]).reshape(1,data_len)
+            # data_block = np.array([data['info']['obdTime'],data['ffBatteryStatus']['rssi'],data['ffBatteryStatus']['errorLevel'],data['ffBatteryStatus']['errorCode']
+            #         ,data['ffBatteryStatus']['current'],data['ffBatteryStatus']['voltageInner'],data['ffBatteryStatus']['voltageOutter'],
+            #         data['ffBatteryStatus']['totalOutputState'],data['ffBatteryStatus']['lockedState'],
+            #         data['ffBatteryStatus']['chargeState'],data['ffBatteryStatus']['heatState'],data['ffBatteryStatus']['cellVoltageDiff']
+            #         ,data['ffBatteryStatus']['soc'],data['ffBatteryStatus']['soh'],data['ffBatteryStatus']['cellVolBalance']]).reshape(1,data_len)
+            data_block = np.array([data['info']['obdTime'],data['ffBatteryStatus'].get('rssi',None),data['ffBatteryStatus'].get('errorLevel', None),data['ffBatteryStatus'].get('errorCode', None), 
+            data['ffBatteryStatus'].get('current',None),data['ffBatteryStatus'].get('voltageInner', None),data['ffBatteryStatus'].get('voltageOutter', None),
+                    data['ffBatteryStatus'].get('totalOutputState', None),data['ffBatteryStatus'].get('lockedState', None),
+                    data['ffBatteryStatus'].get('chargeState', None),data['ffBatteryStatus'].get('heatState', None),data['ffBatteryStatus'].get('cellVoltageDiff', None)
+                    ,data['ffBatteryStatus'].get('soc', None),data['ffBatteryStatus'].get('soh', None),data['ffBatteryStatus'].get('cellVolBalance', None),data['ffBatteryStatus'].get('insResis', None)]).reshape(1,data_len)
         elif mode == 1:
-            data_len = 11
-
-            data_block = np.array([data['info']['obdTime'],data['ffBatteryStatus']['rssi']
-            ,data['ffBatteryStatus'].get('errorLevel'),data['ffBatteryStatus'].get('errorCode'),data['ffBatteryStatus']['switchState']
-            ,data['ffBatteryStatus']['current'],data['ffBatteryStatus']['voltageInner'],data['ffBatteryStatus']['chargeState'],
-            data['ffBatteryStatus']['cellVoltageDiff'],data['ffBatteryStatus']['soc'],data['ffBatteryStatus']['soh']]).reshape(1,data_len)
-
+            data_len = 12
+            
+            data_block = np.array([data['info']['obdTime'],data['ffBatteryStatus'].get('rssi',None)
+            ,data['ffBatteryStatus'].get('errorLevel', None),data['ffBatteryStatus'].get('errorCode', None),data['ffBatteryStatus'].get('switchState', None)
+            ,data['ffBatteryStatus'].get('current',None),data['ffBatteryStatus'].get('voltageInner', None),data['ffBatteryStatus'].get('chargeState', None),
+            data['ffBatteryStatus'].get('cellVoltageDiff', None),data['ffBatteryStatus'].get('soc', None),data['ffBatteryStatus'].get('soh', None),data['ffBatteryStatus'].get('insResis', None)]).reshape(1,data_len)
         data_block = np.append(data_block,CellU)
         data_block = np.append(data_block,CellT)
         data_block = np.append(data_block,OtherT)
@@ -169,10 +173,10 @@ class DBManager():
     def _convert_to_dataframe_system(data, mode=0):
         if mode == 0:
             
-            data_block = np.array([data['info']['obdTime'],data['ffSystemInfo']['heatTargetTemp'], data['ffSystemInfo']['heatTimeout'],
-                                    time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(int(data['ffSystemInfo']['rentalStartTime'])/1000)),
-                                    data['ffSystemInfo']['rentalPeriodDays'],data['ffSystemInfo']['bmsInterval'], 
-                                    data['ffSystemInfo']['gpsInterval']]).reshape(1,7)
+            data_block = np.array([data['info'].get('obdTime', None),data['ffSystemInfo'].get('heatTargetTemp', None), data['ffSystemInfo'].get('heatTimeout',None),
+                                    time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(int(data['ffSystemInfo'].get('rentalStartTime'))/1000)),
+                                    data['ffSystemInfo'].get('rentalPeriodDays',None),data['ffSystemInfo'].get('bmsInterval',None), 
+                                    data['ffSystemInfo'].get('gpsInterval', None)]).reshape(1,7)
             df = pd.DataFrame(
                 columns=['时间戳','加热目标温度', '加热超时','租赁开始时间','租赁天数','bms上传周期','gps上传周期'],data=data_block)
         if mode == 1:
@@ -184,18 +188,18 @@ class DBManager():
     def _convert_to_dataframe_accum(data, mode=0):
         if mode == 0:
             
-            data_block = np.array([data['info']['obdTime'],data['ffBatteryAccum']['SOH_AlgUnexTime'], data['ffBatteryAccum']['CHG_AHaccum'],
-                                    data['ffBatteryAccum']['CHG_PHaccum'], data['ffBatteryAccum']['DSG_AHaccum'],
-                                    data['ffBatteryAccum']['DSG_PHaccum'],data['ffBatteryAccum']['OverTemp_CHG_AHaccum'], 
-                                    data['ffBatteryAccum']['OverTemp_CHG_PHaccum']]).reshape(1,8)
+            data_block = np.array([data['info'].get('obdTime',None),data['ffBatteryAccum'].get('SOH_AlgUnexTime',None), data['ffBatteryAccum'].get('CHG_AHaccum',None),
+                                    data['ffBatteryAccum'].get('CHG_PHaccum',None), data['ffBatteryAccum'].get('DSG_AHaccum',None),
+                                    data['ffBatteryAccum'].get('DSG_PHaccum',None),data['ffBatteryAccum'].get('OverTemp_CHG_AHaccum',None), 
+                                    data['ffBatteryAccum'].get('OverTemp_CHG_PHaccum',None)]).reshape(1,8)
             df = pd.DataFrame(
                 columns=['时间戳','SOH未标定时间', '累计充电电量','累计充电能量','累计放电电量','累计放电能量',
                                                '累计高温充电电量', '累计高温充电能量'],data=data_block)
 
         if mode == 1:
-                data_block = np.array([data['info']['obdTime'], data['ffBatteryAccum']['CHG_AHaccum'],
-                                    data['ffBatteryAccum']['CHG_PHaccum'], data['ffBatteryAccum']['DSG_AHaccum'],
-                                    data['ffBatteryAccum']['DSG_PHaccum'],data['ffBatteryAccum']['totalMileage']]).reshape(1,6)
+                data_block = np.array([data['info'].get('obdTime',None), data['ffBatteryAccum'].get('CHG_AHaccum',None),
+                                    data['ffBatteryAccum'].get('CHG_PHaccum',None), data['ffBatteryAccum'].get('DSG_AHaccum',None),
+                                    data['ffBatteryAccum'].get('DSG_PHaccum',None),data['ffBatteryAccum'].get('totalMileage',None)]).reshape(1,6)
                 df = pd.DataFrame(
                     columns=['时间戳','累计充电电量','累计充电能量','累计放电电量','累计放电能量', '累积里程'],data=data_block)
         return df
@@ -206,9 +210,9 @@ class DBManager():
         if type_name == 'bms':
             if mode == 0:
                 name_const = ['时间戳','GSM信号','故障等级','故障代码','总电流[A]','总电压[V]', '外电压', '总输出状态', '上锁状态', '充电状态','加热状态',
-                              '单体压差', 'SOC[%]','SOH[%]','单体均衡状态']
+                              '单体压差', 'SOC[%]','SOH[%]','单体均衡状态', '绝缘电阻']
             elif mode == 1:
-                name_const = ['时间戳','GSM信号','故障等级', '故障代码','开关状态', '总电流[A]','总电压[V]','充电状态', '单体压差', 'SOC[%]','SOH[%]']
+                name_const = ['时间戳','GSM信号','故障等级', '故障代码','开关状态', '总电流[A]','总电压[V]','充电状态', '单体压差', 'SOC[%]','SOH[%]', '绝缘电阻']
             i=0
             CellUNum = 0
             CellTNum = 0
@@ -222,11 +226,13 @@ class DBManager():
                         data_blocks,CellUNum,CellTNum,OtherTNumm = DBManager._convert_to_dataframe_bms(line, mode)
                         i+=1
                         continue
-                except:
+                except Exception as e:
+                    print(str(e))
                     i = 0
                 try:
                     data_block,CellUNum,CellTNum,OtherTNumm = DBManager._convert_to_dataframe_bms(line, mode)
-                except:
+                except Exception as e:
+                    print(str(e))
                     continue
                 try:
                     data_blocks = np.concatenate((data_blocks,data_block),axis=0)

+ 243 - 0
LIB/FRONTEND/CellStateEstimation/BatSafetyAlarm/deploy.py

@@ -0,0 +1,243 @@
+
+__author__ = 'lmstack'
+#coding=utf-8
+import os
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time, datetime
+import traceback
+from LIB.MIDDLE.CellStateEstimation.Common import log
+from LIB.MIDDLE.CellStateEstimation.BatSafetyAlarm.V1_0_1 import CBMSSafetyAlarm
+
+from LIB.MIDDLE.CellStateEstimation.Common import DBDownload
+from urllib import parse
+import pymysql
+import pdb
+from apscheduler.schedulers.blocking import BlockingScheduler
+import datacompy
+import logging
+import multiprocessing
#................................... battery-cell safety diagnosis worker ......................................................................................................................
def diag_cal(df_sn, df_diag_ram, df_bms_ram, log_name):
    """Worker process: run the thermal-runaway safety diagnosis for one SN slice.

    Parameters
    ----------
    df_sn : pd.DataFrame
        Slice of the device table with 'sn' and 'imei' columns.
    df_diag_ram : pd.DataFrame
        In-memory table of currently open faults (keyed by 'product_id').
    df_bms_ram : pd.DataFrame
        In-memory BMS state per SN (keyed by 'sn').
    log_name : str
        Path prefix for this worker's log file ('.log' is appended).

    For each SN, reads the last 70 seconds of BMS data and either records a
    new thermal-runaway fault into safety_platform.all_fault_info or closes
    one that has been open for more than three days.
    """
    # Per-process logger writing to this worker's own file.
    logger = logging.getLogger()
    fh = logging.FileHandler(log_name + ".log", encoding="utf-8", mode="a")
    fh.setFormatter(logging.Formatter("%(asctime)s - %(name)s-%(levelname)s %(message)s"))
    logger.addHandler(fh)
    logger.setLevel(logging.INFO)
    logger.info("pid is {}".format(os.getpid()))

    # Result (fault) database.
    host2 = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port2 = 3306
    db2 = 'safety_platform'
    user2 = 'qx_read'
    password2 = 'Qx@123456'
    db_res_engine = create_engine(
        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
            user2, parse.quote_plus(password2), host2, port2, db2
        ))

    # Diagnosis window: the last 70 seconds.
    now_time = datetime.datetime.now()
    start_time = (now_time - datetime.timedelta(seconds=70)).strftime('%Y-%m-%d %H:%M:%S')
    end_time = now_time.strftime('%Y-%m-%d %H:%M:%S')

    for i in range(0, len(df_sn)):
        # BUG FIX: bind `sn` before any use -- the original logged `sn` in the
        # unknown-celltype branch before it was assigned (NameError).
        sn = df_sn.loc[i, 'sn']
        try:
            imei = df_sn.loc[i, 'imei']
            # Map the IMEI model code to a cell type understood by the algorithm.
            if imei[5:9] == 'N640':
                celltype = 1    # 6040 ternary cell
            elif imei[5:9] == 'N440':
                celltype = 2    # 4840 ternary cell
            elif imei[5:9] == 'L660':
                celltype = 99   # 6060 lithium cell
            elif imei[3:5] == 'LX' and imei[5:9] == 'N750':
                celltype = 3    # Lishen 50Ah ternary cell
            elif imei[3:5] == 'CL' and imei[5:9] == 'N750':
                celltype = 4    # CATL 50Ah ternary cell
            else:
                logger.info("pid-{} celltype-{} SN: {} SKIP!".format(os.getpid(), "未知", sn))
                continue

            logger.info("pid-{} celltype-{} SN: {} START!".format(os.getpid(), celltype, sn))

            # Fetch raw BMS data for the window.
            dbManager = DBManager.DBManager()
            df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms'])
            df_bms = df_data['bms']

            # Diagnosis.
            if not df_bms.empty:
                df_diag_ram_sn = df_diag_ram[df_diag_ram['product_id'] == sn]
                df_bms_ram_sn = df_bms_ram[df_bms_ram['sn'] == sn]
                if df_diag_ram_sn.empty:
                    SafetyAlarm = CBMSSafetyAlarm.SafetyAlarm(sn, celltype, df_bms, df_bms_ram_sn)
                    df_diag_res, df_bms_res = SafetyAlarm.diag()

                    # Refresh the in-memory BMS and diagnosis state for this SN.
                    df_bms_ram = df_bms_ram.drop(index=df_bms_ram.loc[df_bms_ram['sn'] == sn].index)
                    df_bms_ram = df_bms_ram.append(df_bms_res)

                    df_diag_ram = df_diag_ram.drop(index=df_diag_ram.loc[df_diag_ram['product_id'] == sn].index)
                    df_diag_ram = df_diag_ram.append(df_diag_res)
                    df_diag_ram.reset_index(inplace=True, drop=True)

                    # Persist any new thermal-runaway fault.
                    if not df_diag_res.empty:
                        df_diag_res.columns = ['start_time', 'end_time', 'product_id', 'code', 'level', 'info', 'advice']
                        df_diag_res['factory'] = '骑享'
                        df_diag_res.to_sql("all_fault_info", con=db_res_engine, if_exists="append", index=False)

                # A fault open for more than three days becomes history: close
                # it in the database and drop it from the in-memory table.
                elif (now_time - df_bms_ram_sn.iloc[-1]['time']).total_seconds() > 3 * 24 * 3600:
                    # BUG FIX: the original masked a nonexistent 'sn' column
                    # (the frame is keyed by 'product_id') -- drop by index.
                    df_diag_ram = df_diag_ram.drop(index=df_diag_ram.loc[df_diag_ram['product_id'] == sn].index)
                    # NOTE(review): the original also did
                    # df_bms_ram_sn.iloc[-1]['end_time'] = now_time, a chained
                    # assignment on a slice copy (a no-op; df_bms_ram has no
                    # 'end_time' column) -- confirm the intent before restoring.
                    try:
                        conn = pymysql.connect(host=host2, port=port2, user=user2, password=password2, database=db2)
                        cursor = conn.cursor()
                        # BUG FIX: parameterized update. The original
                        # interpolated an unquoted datetime into the SQL
                        # string, which is invalid SQL and injection-prone.
                        cursor.execute(
                            "update all_fault_info set end_time=%s where product_id=%s and code=%s and factory=%s",
                            (now_time, sn, 119, '骑享'))
                        conn.commit()
                        conn.close()
                    except Exception:
                        # BUG FIX: call format_exc(); the original logged the
                        # function object itself.
                        logger.error(traceback.format_exc())
                        logger.error(u"{} :{},{} 任务运行错误\n".format(sn, start_time, end_time), exc_info=True)
                        conn.close()
        except Exception:
            logger.error(traceback.format_exc())
            logger.error(u"{} :{},{} 任务运行错误\n".format(sn, start_time, end_time), exc_info=True)
    logger.info("pid-{} Done!".format(os.getpid()))
+
#................................................... dispatcher ...........................................................................................................
def mainprocess():
    """Refresh the SN list from qixiang_oss and fan it out to worker processes.

    Splits the device table into one contiguous slice per worker and hands
    each slice, together with the shared in-memory fault/BMS state and a
    per-worker log path, to diag_cal().
    """
    global SNnums
    global df_diag_ram
    global df_bms_ram
    global log_path

    # Pull the current device list (sn, imei, add_time) from the OSS database.
    conn = pymysql.connect(host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com',
                           port=3306, user='qixiang_oss',
                           password='Qixiang2021', database='qixiang_oss')
    cursor = conn.cursor()
    cursor.execute("select sn, imei, add_time from app_device")
    rows = cursor.fetchall()
    conn.close()
    df_sn = pd.DataFrame(rows, columns=['sn', 'imei', 'add_time']).reset_index(drop=True)

    worker_count = 2
    pool = multiprocessing.Pool(processes=worker_count)
    total = len(df_sn)
    for idx in range(worker_count):
        lo = int(total * idx / worker_count)
        hi = int(total * (idx + 1) / worker_count)
        chunk = df_sn[lo:hi].reset_index(drop=True)
        pool.apply_async(diag_cal,
                         (chunk, df_diag_ram, df_bms_ram,
                          log_path + '/log_' + str(idx)))
    pool.close()
    pool.join()
if __name__ == "__main__":
    
    # Time window (reference only -- each worker computes its own 70 s window)
    # now_time = datetime.datetime.now()
    # pre_time = now_time + dateutil.relativedelta.relativedelta(days=-1)# previous day
    # end_time=datetime.datetime.strftime(now_time,"%Y-%m-%d 00:00:00")
    # start_time=datetime.datetime.strftime(pre_time,"%Y-%m-%d 00:00:00")
    
    history_run_flag = False # flag: run over historical data
    

    # SN list refresh (reference only -- mainprocess() re-reads it every run)
    # host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
    # port=3306
    # db='qixiang_oss'
    # user='qixiang_oss'
    # password='Qixiang2021'
    # conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
    # cursor = conn.cursor()
    # cursor.execute("select sn, imei, add_time from app_device")
    # res = cursor.fetchall()
    # df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])
    # df_sn = df_sn.reset_index(drop=True)
    # conn.close();
    
    # qx_cas database configuration (first-data-time table)
    host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port = 3306
    user = 'qx_cas'
    password = parse.quote_plus('Qx@123456')
    database = 'qx_cas'

    db_engine = create_engine(
        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
            user, password, host, port, database
        ))
    DbSession = sessionmaker(bind=db_engine)
    
    # Historical-run configuration
    
    df_first_data_time = pd.read_sql("select * from bat_first_data_time", db_engine)

    
    # Logging configuration: one folder per process start time
    now_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()).replace(":","_")
    log_path = 'log/' + now_str
    if not os.path.exists(log_path):
        os.makedirs(log_path)
    log = Log.Mylog(log_name='batsafetyAlarm', log_level = 'info')
    log.set_file_hl(file_name='{}/info.log'.format(log_path), log_level='info', size=1024* 1024 * 100)
    log.set_file_hl(file_name='{}/error.log'.format(log_path), log_level='error', size=1024* 1024 * 100)
    logger = log.get_logger()

    logger.info("pid is {}".format(os.getpid()))
    
    # Algorithm parameters: result (fault) database
    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port=3306
    db='safety_platform'
    user='qx_read'
    password=parse.quote_plus('Qx@123456')
    tablename='all_fault_info'
    db_res_engine = create_engine(
        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
            user, password, host, port, db
        ))

        
    # Before the scheduler starts, load every fault whose end time is still
    # zero (i.e. currently open) from the result database.
    # print("select start_time, end_time, product_id, code, level, info, advice, factory from {}".format(tablename))

    result=pd.read_sql("select start_time, end_time, product_id, code, level, info, advice from all_fault_info where factory = '{}'".format('骑享'), db_res_engine)
    result = result[['start_time', 'end_time', 'product_id', 'code', 'level', 'info', 'advice']]
    # Keep only open thermal-runaway faults (code 119, zero end_time).
    # NOTE(review): comparing against the string '0000-00-00 00:00:00' assumes
    # MySQL zero-dates arrive as strings -- confirm column type / driver behavior.
    df_diag_ram=result[(result['end_time']=='0000-00-00 00:00:00') & (result['code']==119)]
    df_bms_ram=pd.DataFrame(columns=['time', 'sn', 'packvolt', 'cellvolt', 'celltemp'])

    # Scheduled task: run mainprocess once a minute.
    # NOTE(review): df_diag_ram / df_bms_ram are handed to worker processes;
    # mutations made inside the workers do not propagate back to these
    # globals -- confirm that is intended.
    scheduler = BlockingScheduler()
    scheduler.add_job(mainprocess, 'interval', seconds=60, id='diag_job')

    try:  
        scheduler.start()
    except Exception as e:
        scheduler.shutdown()
        logger.error(str(e))
    

+ 132 - 0
LIB/FRONTEND/CellStateEstimation/BatSafetyAlarm/main.py

@@ -0,0 +1,132 @@
+import CBMSSafetyAlarm
+import datetime
+import pandas as pd
+import multiprocessing
+from LIB.BACKEND import DBManager, Log
+import time, datetime
+from apscheduler.schedulers.blocking import BlockingScheduler
+from LIB.MIDDLE.CellStateEstimation.Common.V1_0_1 import log
+
+
#................................... battery-cell safety diagnosis ......................................................................................................................
def diag_cal(sn_list, df_diag_ram, df_bms_ram):
    """Run the thermal-runaway safety diagnosis for a list of SNs.

    Parameters
    ----------
    sn_list : iterable of str
        Battery serial numbers to diagnose.
    df_diag_ram : pd.DataFrame
        In-memory table of currently open faults (keyed by 'product_id').
    df_bms_ram : pd.DataFrame
        In-memory BMS state per SN (keyed by 'sn').

    Results are appended to a local text file (this is the non-deployed,
    file-based variant of the pipeline).
    """
    start = time.time()
    now_time = datetime.datetime.now()
    start_time = (now_time - datetime.timedelta(seconds=70)).strftime('%Y-%m-%d %H:%M:%S')
    end_time = now_time.strftime('%Y-%m-%d %H:%M:%S')

    for sn in sn_list:
        # Infer cell type from the SN model fragment.
        if 'PK500' in sn:
            celltype = 1    # 6040 ternary cell
        elif 'PK502' in sn:
            celltype = 2    # 4840 ternary cell
        elif 'K504B' in sn:
            celltype = 99   # 60Ah LFP cell
        elif 'MGMLXN750' in sn:
            celltype = 3    # Lishen 50Ah ternary cell
        elif 'MGMCLN750' in sn or 'UD' in sn:
            # BUG FIX: the original `'MGMCLN750' or 'UD' in sn` is always
            # truthy, so every otherwise-unmatched SN fell into celltype 4.
            celltype = 4    # CATL 50Ah ternary cell
        else:
            print('SN:{},未找到对应电池类型!!!'.format(sn))
            continue
            # sys.exit()

        # Raw BMS data for the last 70 seconds.
        dbManager = DBManager.DBManager()
        df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms'])
        df_bms = df_data['bms']

        # Diagnosis.
        if not df_bms.empty:
            df_diag_ram_sn = df_diag_ram[df_diag_ram['product_id'] == sn]
            df_bms_ram_sn = df_bms_ram[df_bms_ram['sn'] == sn]
            if df_diag_ram_sn.empty:
                SafetyAlarm = CBMSSafetyAlarm.SafetyAlarm(sn, celltype, df_bms, df_bms_ram_sn)
                df_diag_res, df_bms_res = SafetyAlarm.diag()

                # Refresh the in-memory BMS and diagnosis state for this SN.
                df_bms_ram = df_bms_ram.drop(index=df_bms_ram.loc[df_bms_ram['sn'] == sn].index)
                df_bms_ram = df_bms_ram.append(df_bms_res)

                df_diag_ram = df_diag_ram.drop(index=df_diag_ram.loc[df_diag_ram['product_id'] == sn].index)
                df_diag_ram = df_diag_ram.append(df_diag_res)
                df_diag_ram.reset_index(inplace=True, drop=True)

                # Record a new thermal-runaway fault.
                if not df_diag_res.empty:
                    with open(r'D:\Platform\platform_python\data_analyze_platform\USER\spf\01qixiang\06BatSafetyAlarm\热失控.txt', 'a') as file:
                        file.write(str(tuple(df_diag_res.iloc[-1])) + '\n')

            # A fault open for more than three days becomes history: retire it.
            elif (now_time - df_bms_ram_sn.iloc[-1]['time']).total_seconds() > 3 * 24 * 3600:
                # BUG FIX: the original masked a nonexistent 'sn' column (the
                # frame is keyed by 'product_id') -- drop by index instead.
                df_diag_ram = df_diag_ram.drop(index=df_diag_ram.loc[df_diag_ram['product_id'] == sn].index)
                # NOTE(review): the original's
                # df_bms_ram_sn.iloc[-1]['end_time'] = now_time was a chained
                # assignment on a slice copy (a no-op) -- confirm the intent.
                # BUG FIX: the original wrote df_diag_res here, which is never
                # assigned on this branch (NameError); log the retired fault
                # row from df_diag_ram_sn instead.
                with open(r'D:\Platform\platform_python\data_analyze_platform\USER\spf\01qixiang\06BatSafetyAlarm\热失控.txt', 'a') as file:
                    file.write(str(tuple(df_diag_ram_sn.iloc[-1])) + '\n')

        # Fault handling / timing .............................................
        end = time.time()
        print(end - start)
+
#................................................... dispatcher ...........................................................................................................
def mainprocess():
    """Hand one pre-built SN group to each worker process.

    SNnums holds exactly one SN list per worker; each worker gets that list
    plus the shared in-memory fault and BMS state.
    """
    global SNnums
    global df_diag_ram
    global df_bms_ram

    worker_count = 2
    pool = multiprocessing.Pool(processes=worker_count)
    for group in SNnums[:worker_count]:
        pool.apply_async(diag_cal, (group, df_diag_ram, df_bms_ram))
    pool.close()
    pool.join()
+
+
#............................................... main entry: wires up the scheduler .......................................................................................................................
if __name__ == "__main__":
    
    # SN lists per battery family, loaded from a local Excel workbook.
    excelpath=r'D:\Platform\platform_python\data_analyze_platform\USER\spf\01qixiang\sn-20210903.xlsx'
    SNdata_6060 = pd.read_excel(excelpath, sheet_name='科易6060')
    SNdata_6040 = pd.read_excel(excelpath, sheet_name='科易6040')
    SNdata_4840 = pd.read_excel(excelpath, sheet_name='科易4840')
    SNdata_L7255 = pd.read_excel(excelpath, sheet_name='格林美-力信7255')
    SNdata_C7255 = pd.read_excel(excelpath, sheet_name='格林美-CATL7255')
    SNdata_U7255 = pd.read_excel(excelpath, sheet_name='优旦7255')
    SNnums_6060=SNdata_6060['SN号'].tolist()
    SNnums_6040=SNdata_6040['SN号'].tolist()
    SNnums_4840=SNdata_4840['SN号'].tolist()
    SNnums_L7255=SNdata_L7255['SN号'].tolist()
    SNnums_C7255=SNdata_C7255['SN号'].tolist()
    SNnums_U7255=SNdata_U7255['SN号'].tolist()
    # Two groups -- one per worker process in mainprocess().
    SNnums=[SNnums_L7255 + SNnums_C7255 + SNnums_U7255, SNnums_6040 + SNnums_4840 + SNnums_6060]
    # SNnums=['PK50201A000002201']
    
    mylog=log.Mylog('log_diag.txt','error')
    mylog.logcfg()

    # Before the scheduler starts, load all still-open faults (zero end time,
    # code 119) from a local Excel export of the result table.
    result=pd.read_excel(r'D:\Platform\platform_python\data_analyze_platform\USER\spf\01qixiang\06BatSafetyAlarm\result.xlsx')
    df_diag_ram=result[(result['end_time']=='0000-00-00 00:00:00') & (result['code']==119)]
    df_bms_ram=pd.DataFrame(columns=['time', 'sn', 'packvolt', 'cellvolt', 'celltemp'])

    # Scheduled task: run mainprocess once a minute.
    scheduler = BlockingScheduler()
    scheduler.add_job(mainprocess, 'interval', seconds=60, id='diag_job')

    try:  
        scheduler.start()
    except Exception as e:
        scheduler.shutdown()
        print(repr(e))
        mylog.logopt(e)

+ 3 - 0
LIB/FRONTEND/CellStateEstimation/BatSafetyAlarm/run.bat

@@ -0,0 +1,3 @@
REM Change to the deployment folder so relative paths (e.g. log/) resolve.
cd /d D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\CellStateEstimation\BatSafetyAlarm
REM Name the console window so the job is easy to identify.
title cal_batsafetyalarm
REM Launch the safety-alarm scheduler with the bundled Python interpreter.
D:\env\py_pro\python.exe D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\CellStateEstimation\BatSafetyAlarm\deploy.py

+ 1 - 1
LIB/FRONTEND/OutlierDetection/VoltOutlier/deploy.py

@@ -19,7 +19,7 @@ if __name__ == "__main__":
     
     # 时间设置
     now_time = datetime.datetime.now()
-    pre_time = now_time + dateutil.relativedelta.relativedelta(days=-8)# 最近一周
+    pre_time = now_time + dateutil.relativedelta.relativedelta(days=-1)# past 1 day (comment updated: was stale "past week")
     end_time=datetime.datetime.strftime(now_time,"%Y-%m-%d 00:00:00")
     start_time=datetime.datetime.strftime(pre_time,"%Y-%m-%d 00:00:00")
     

+ 89 - 0
LIB/FRONTEND/RemainChargeTime/MakeDataSet_History.py

@@ -0,0 +1,89 @@
+#历史剩余充电时间统计 2021-10
+
+from LIB.BACKEND.Tools import Tools
+from LIB.BACKEND.DataPreProcess import DataPreProcess
+import pandas as pd
+import numpy as np
+import os
+import math
+import datetime
+from time import strftime,gmtime
+from LIB.BACKEND import DBManager
+dbManager = DBManager.DBManager()
+
# Convert the "%Y-%m-%d %H:%M:%S" timestamps to seconds relative to the first row
def timeconvert(df):
    """Add a '相对时间' column: seconds elapsed since the first row's timestamp.

    Parameters
    ----------
    df : pd.DataFrame
        Must contain a '时间戳' column formatted '%Y-%m-%d %H:%M:%S'.

    Returns
    -------
    The same DataFrame (re-indexed 0..n-1) with the added '相对时间' column.
    An empty frame gets an empty column (the original raised on empty input).
    """
    df.index = range(len(df))
    if len(df) == 0:
        df['相对时间'] = pd.Series(dtype=float)
        return df
    stamps = [datetime.datetime.strptime(t, "%Y-%m-%d %H:%M:%S") for t in df['时间戳']]
    origin = stamps[0]
    rel = []
    for t in stamps:
        delta = t - origin
        # days*86400 + seconds reproduces the original arithmetic exactly
        rel.append(delta.days * 86400 + delta.seconds)
    df.loc[:, '相对时间'] = pd.DataFrame(rel, columns=['相对时间'])
    return df
+
# Build a new table with remaining-charge-time statistics per full-charge segment
def data_handle(df_in):
    # Preprocessing: add the relative-time column, then split the stream into
    # drive/charge/stand segments.
    # NOTE(review): the class object DataPreProcess is passed as `self` to an
    # unbound method call -- confirm this matches how the method is defined.
    df_in=timeconvert(df_in)  
    dfOut_temp=DataPreProcess.data_split_by_status(DataPreProcess, df_in, drive_interval_threshold=120, charge_interval_threshold=300,drive_stand_threshold=120, charge_stand_threshold=300)
    dfOut=dfOut_temp.copy()
    df_charge=dfOut[dfOut['data_status']=='charge']
    unique_status_idx=np.unique(df_charge.data_split_by_status.values)
    # Compute remaining charge time for each charge segment.
    df_statisticsInfo=pd.DataFrame()
    for n in unique_status_idx:
        df_charge_buff=df_charge[df_charge.data_split_by_status==n]
        if(np.max(df_charge_buff['SOC[%]'].values[-1])==100):    # keep only segments that end fully charged (SOC == 100)
            times=df_charge_buff['相对时间'].values
            times1=times[:-1]
            times2=times[1:]
            delta=times2-times1
            # Reject segments with any sampling gap of 50 s or more.
            if(np.max(delta)<50):
                # Remaining time = segment end time minus each sample's time.
                times_inv=[times[-1]-t for t in times ]
                df_charge_buff.loc[:,'剩余充电时间[sec]']=times_inv
                times_hms=[]
                for k in list(range(len(times_inv))):
                    # Render the countdown as H:M:S (valid only below 24 h).
                    time_hms=strftime("%H:%M:%S", gmtime(df_charge_buff.loc[:,'剩余充电时间[sec]'].values[k]))
                    times_hms.append(time_hms)
                df_charge_buff.loc[:,'剩余充电时间[时分秒]']=times_hms
                # Drop bookkeeping/status columns not needed in the dataset.
                df_dataset_temp=df_charge_buff.drop(['GSM信号','故障等级','故障代码','外电压','绝缘电阻','总输出状态','上锁状态','充电状态','加热状态','单体压差','单体均衡状态','相对时间','data_split_by_crnt','data_split_by_status','data_status'],axis=1)
                df_statisticsInfo=df_statisticsInfo.append(df_dataset_temp)
                # Feature reduction: collapse per-cell voltage/temperature
                # columns into max/min aggregates.
                # NOTE(review): this whole reduction block runs INSIDE the
                # segment loop on the accumulated frame, so after the first
                # segment the per-cell columns have already been dropped and
                # are re-introduced (partially NaN) by the next append --
                # it looks like it was meant to run once after the loop;
                # confirm before relying on multi-segment output.
                list_col=list(df_statisticsInfo)
                list1=[s for s in list_col if '单体电压' in s]
                list2=[s for s in list_col if '单体温度' in s]
                list3=[s for s in list_col if '其他温度' in s]
                list_v=['单体电压'+str(i) for i in range(1,len(list1)+1)] 
                list_T=['单体温度'+str(i) for i in range(1,len(list2)+1)] 
                A_cellVolt=df_statisticsInfo[list_v].values
                celVolt_max=np.max(A_cellVolt,axis=1)
                celVolt_min=np.min(A_cellVolt,axis=1)
                A_cellTemp=df_statisticsInfo[list_T].values
                celTemp_max=np.max(A_cellTemp,axis=1)
                celTemp_min=np.min(A_cellTemp,axis=1) 
                df_statisticsInfo.loc[:,'最高单体电压[V]']=celVolt_max
                df_statisticsInfo.loc[:,'最高单体温度[℃]']=celTemp_max
                df_statisticsInfo.loc[:,'最低单体温度[℃]']=celTemp_min
                # NOTE(review): celVolt_min is computed but never stored --
                # confirm whether a '最低单体电压[V]' column was intended.
                df_statisticsInfo=df_statisticsInfo.drop(list_v,axis=1)
                df_statisticsInfo=df_statisticsInfo.drop(list_T,axis=1) 
                df_statisticsInfo=df_statisticsInfo.drop(['其他温度'+str(i) for i in range(1,len(list3)+1)],axis=1)
    return(df_statisticsInfo)
+
# Dataset construction: October 2021 BMS history for the first two SNs in the
# workbook, converted to remaining-charge-time rows and dumped to CSV.
data_sn = pd.read_excel('sn-20210903.xlsx', sheet_name='sn-20210903')
fileNames = data_sn['sn']
df_dataset = pd.DataFrame()
for k in range(2):
    df_data = dbManager.get_data(
        sn=fileNames[k],
        start_time='2021-10-01 00:00:00',
        end_time='2021-11-01 00:00:00',
        data_groups=['bms'],
    )
    if len(df_data) == 0:
        continue
    dataIn = data_handle(df_data['bms'])
    dataIn['sn'] = fileNames[k]
    df_dataset = df_dataset.append(dataIn)

df_dataset.to_csv('df_datatest.csv')
+

+ 52 - 0
LIB/FRONTEND/RemainChargeTime/create_table.py

@@ -0,0 +1,52 @@
+'''
+定义表的结构,并在数据库中创建对应的数据表
+'''
+__author__ = 'lmstack'
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, String, create_engine, Integer, DateTime, BigInteger, FLOAT, TIMESTAMP, func, Text
+from urllib import parse
+Base = declarative_base()
+
+
class ConsistencyDeltaSoc(Base):
    """ORM model mapped to the ``remainchargetime`` table (per-SN remaining
    charge-time estimates written by the RemainChargeTime deploy job).

    NOTE(review): the class name ``ConsistencyDeltaSoc`` does not match the
    table it maps -- it looks like a copy-paste leftover from another module.
    Renaming is safe only after checking for external references.
    """
    __tablename__ = "remainchargetime"
    __table_args__ = ({'comment': '充电剩余时间'})  # table-level options: table comment (add indexes here if needed)

    id = Column(Integer, primary_key=True, autoincrement=True, comment="主键")
    # Bookkeeping timestamps maintained server-side (insert / update time).
    add_time = Column(TIMESTAMP(True), server_default=func.now(), comment='记录创建时间') # row creation time
    update_time = Column(TIMESTAMP(True), nullable=False, server_default=func.now(), onupdate=func.now(), comment='记录更新时间') # row update time
    time = Column(TIMESTAMP(True), comment="时间")
   
    # Measured/estimated battery quantities for one sample.
    current = Column(FLOAT, comment="总电流")
    voltage = Column(FLOAT, comment="总电压")
    soc = Column(FLOAT, comment="soc")
    soh = Column(FLOAT, comment="soh")
    remain_time = Column(FLOAT, comment="剩余充电时间/s")
    remain_time_str = Column(String(64), comment="剩余充电时间/时分秒")
    max_volt = Column(FLOAT, comment="最高单体电压")
    max_temp = Column(FLOAT, comment="最高单体温度")
    min_temp = Column(FLOAT, comment="最低单体温度")
    sn = Column(String(64), comment="sn")


    # def __init__(self, sn, current, time_stamp, pack_state, line_state):
    #     self.sn = sn
    #     self.current = current
    #     self.time_stamp = time_stamp
    #     self.pack_state = pack_state
    #     self.line_state = line_state
+
# Run this file directly to create the table(s) declared above in the target database.
if __name__ == "__main__":
    # Target MySQL connection settings; the password is URL-quoted so it is
    # safe to embed in the SQLAlchemy DSN.
    db_host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    db_port = 3306
    db_user = 'qx_cas'
    db_password = parse.quote_plus('Qx@123456')
    db_name = 'qx_cas'

    engine = create_engine(
        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
            db_user, db_password, db_host, db_port, db_name
        ))
    # Emits CREATE TABLE for every model registered on Base; no-op for
    # tables that already exist.
    Base.metadata.create_all(engine)

+ 118 - 0
LIB/FRONTEND/RemainChargeTime/deploy.py

@@ -0,0 +1,118 @@
+
+__author__ = 'lmstack'
+#coding=utf-8
+import os
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time, datetime
+import dateutil.relativedelta
+import traceback
+from LIB.MIDDLE.RemainChargeTime.V1_0_0 import MakeDataSet_History
+from urllib import parse
+import pymysql
+import pdb
# Shared DB-access helper, created once at import time.
dbManager = DBManager.DBManager()

if __name__ == "__main__":

    # ---------------- time window ----------------
    # Default window is the previous full calendar day.
    now_time = datetime.datetime.now()
    pre_time = now_time + dateutil.relativedelta.relativedelta(days=-1)  # previous day
    end_time = datetime.datetime.strftime(now_time, "%Y-%m-%d 00:00:00")
    start_time = datetime.datetime.strftime(pre_time, "%Y-%m-%d 00:00:00")
    # NOTE(review): the hard-coded backfill window below overrides the rolling
    # one-day window computed above -- looks like a debug/backfill leftover.
    # Remove these two lines to restore the daily behaviour.
    start_time = '2021-10-01 00:00:00'
    end_time = '2021-11-01 00:00:00'
    history_run_flag = False  # flag: re-run over historical data

    # ---------------- refresh SN list ----------------
    # Pull the current device list (sn / imei / add_time) from qixiang_oss.
    host = 'rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
    port = 3306
    db = 'qixiang_oss'
    user = 'qixiang_oss'
    password = 'Qixiang2021'
    conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
    try:
        cursor = conn.cursor()
        cursor.execute("select sn, imei, add_time from app_device")
        res = cursor.fetchall()
    finally:
        # Close the connection even if the query fails (was leaked on error).
        conn.close()
    df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])
    df_sn = df_sn.reset_index(drop=True)

    # ---------------- result database (qx_cas) ----------------
    host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port = 3306
    user = 'qx_cas'
    password = parse.quote_plus('Qx@123456')
    database = 'qx_cas'

    db_engine = create_engine(
        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
            user, password, host, port, database
        ))
    DbSession = sessionmaker(bind=db_engine)

    # First-data timestamps per SN; used when history_run_flag is enabled.
    df_first_data_time = pd.read_sql("select * from bat_first_data_time", db_engine)

    # ---------------- logging ----------------
    now_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()).replace(":", "_")
    log_path = 'log/' + now_str
    if not os.path.exists(log_path):
        os.makedirs(log_path)
    log = Log.Mylog(log_name='remainchargetime', log_level='info')
    log.set_file_hl(file_name='{}/info.log'.format(log_path), log_level='info', size=1024 * 1024 * 100)
    log.set_file_hl(file_name='{}/error.log'.format(log_path), log_level='error', size=1024 * 1024 * 100)
    logger = log.get_logger()

    logger.info("pid is {}".format(os.getpid()))

    # ---------------- algorithm source DB (read-only) ----------------
    host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port = 3306
    db = 'qx_cas'
    user = 'qx_read'
    password = 'Qx@123456'
    tablename = 'cellStateEstimation_soh'

    for i in range(0, len(df_sn)):
        try:
            sn = df_sn.loc[i, 'sn']
            logger.info("pid-{}  SN: {} START!".format(os.getpid(), sn))
            dbManager = DBManager.DBManager()
            df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms'])
            data_bms = df_data['bms']

            # BUG FIX: reset per SN. Previously df_res leaked across loop
            # iterations, so an SN with no BMS data re-uploaded the previous
            # SN's rows (duplicates), and the very first SN without data
            # raised NameError (silently swallowed below).
            df_res = pd.DataFrame()
            if len(data_bms['时间戳']) > 0:
                # Remaining-charge-time estimation for this SN's data window.
                df_res = MakeDataSet_History.data_handle(data_bms)
                df_res['sn'] = [sn] * len(df_res)

            if not df_res.empty:
                df_res.columns = ['time', 'current', 'voltage', 'soc', 'soh', 'remain_time', 'remain_time_str', 'max_volt', 'max_temp', 'min_temp', 'sn']
                df_res.to_sql("remainchargetime", con=db_engine, if_exists="append", index=False)
            logger.info("pid-{} SN: {} DONE!".format(os.getpid(), sn))
        except Exception:
            # BUG FIX: format_exc must be *called*; logging the bare function
            # object produced a useless message.
            logger.error(traceback.format_exc())
            logger.error(u"{} :{},{} 任务运行错误\n".format(sn, start_time, end_time), exc_info=True)

+ 4 - 0
LIB/FRONTEND/RemainChargeTime/run.bat

@@ -0,0 +1,4 @@
REM Launch the RemainChargeTime deploy job with the project's Python env.
REM Keeps the console open (pause) so errors stay visible after exit.
cd /d D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\RemainChargeTime
title cal_remainChargeTime
D:\env\py_pro\python.exe D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\RemainChargeTime\deploy.py
pause

+ 258 - 0
LIB/FRONTEND/SaftyCenter/DataSta/deploy.py

@@ -0,0 +1,258 @@
+
+__author__ = 'lmstack'
+#coding=utf-8
+import os
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time, datetime
+import dateutil.relativedelta
+import traceback
+from LIB.MIDDLE.CellStateEstimation.Common import log
+from LIB.MIDDLE.SaftyCenter.Common import FeiShuData
+from LIB.MIDDLE.SaftyCenter.Common import DBDownload
+import time, datetime
+from pandas.core.frame import DataFrame
+from apscheduler.schedulers.blocking import BlockingScheduler
+from LIB.MIDDLE.SaftyCenter.DataSta.DataStatistics import DataSta
+from urllib import parse
+import pymysql
+
+from LIB.BACKEND import DBManager
+
+#............................主程序................................... 
+   
+
+
def Week_Task():
    """Weekly job: count last-7-day faults per severity level and append one
    summary row (factory, week, per-level counts, solve rate) to the
    ``all_period_fault_info`` table.

    Side effects: reads the FeiShu customer-service sheet and the
    safety_platform DB, writes one row via ``to_sql`` and prints it.
    """
    all_period_fault_info=DataFrame(columns=['factory','week','level1_count','level2_count','level3_count','level4_count','level5_count','solve_rate'])

    # ---------------- fetch data ----------------
    toweek='Week'+time.strftime('%W')  # week-of-year label, e.g. 'Week45'
    CS_Data=FeiShuData.getFeiShuDATA()  # customer-service ticket sheet from FeiShu
    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port=3306
    db='safety_platform'
    user='qx_read'
    password='Qx@123456'
    mode=2
    tablename1='all_fault_info'
    
    db_engine = create_engine(
    "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
        user, parse.quote_plus(password), host, port, db
    ))
    
    DBRead=DBDownload.DBDownload(host, port, db, user, password,mode)
    with DBRead as DBRead:
        df_fltinfo=DBRead.getdata('product_id','level','code','start_time',tablename=tablename1,factory='骑享',sn='',timename='',st='',sp='')# DBDownload has been adapted for this project
    # ---------------- time window: last 7 days ----------------
    end_time=datetime.datetime.now()
    # end_time=datetime.datetime.strptime(end_time,'%Y-%m-%d')
    start_time=end_time-datetime.timedelta(days=7)
    start_time=start_time.strftime('%Y-%m-%d')
    end_time=end_time.strftime('%Y-%m-%d')
    FltAlarmInfo,Celltype=DataSta.SaftyWarningSta(CS_Data,df_fltinfo,start_time,end_time)
    FaultLvlCount=DataSta.WeekInfoSta(df_fltinfo,start_time,end_time)
    # NOTE(review): each lvlN below is a numpy *array* (``.values`` of the
    # filtered selection), not a scalar count -- presumably WeekInfoSta
    # returns one row per level so the array holds a single count; empty
    # arrays will end up in the DB row if a level is absent. Confirm.
    lvl1=FaultLvlCount[FaultLvlCount['level']==1]['product_id'].values
    lvl2=FaultLvlCount[FaultLvlCount['level']==2]['product_id'].values
    lvl3=FaultLvlCount[FaultLvlCount['level']==3]['product_id'].values
    lvl4=FaultLvlCount[FaultLvlCount['level']==4]['product_id'].values
    lvl5=FaultLvlCount[FaultLvlCount['level']==5]['product_id'].values
    all_period_fault_info.loc[0,'factory']='骑享'
    all_period_fault_info.loc[0,'week']=toweek
    all_period_fault_info.loc[0,'level1_count']=lvl1
    all_period_fault_info.loc[0,'level2_count']=lvl2
    all_period_fault_info.loc[0,'level3_count']=lvl3
    all_period_fault_info.loc[0,'level4_count']=lvl4
    all_period_fault_info.loc[0,'level5_count']=lvl5
    all_period_fault_info.loc[0,'solve_rate']=FltAlarmInfo.loc[0,'OprationManageRate']
    all_period_fault_info.to_sql('all_period_fault_info', db_engine, if_exists='append', index=False)
    print(all_period_fault_info)
+    
def Minutes_Task():
    """Periodic job: aggregate platform-wide alarm / usage statistics for the
    last day and append one snapshot row to the ``all_statistic_info`` table.

    Side effects: reads three MySQL sources and the FeiShu sheet, writes one
    row via ``to_sql`` and prints it.
    """
    # ---------------- fetch data ----------------
    # Accumulated discharge data per device from the fastfun DB.
    host='172.16.121.236'
    port=3306
    db='fastfun'
    user='readonly'
    password='Fast1234'
    mode=3
    tablename1='ff_battery_accum'
    

    DBRead=DBDownload.DBDownload(host, port, db, user, password,mode)
    with DBRead as DBRead:
        df_last_accum=DBRead.getdata('devcode','dsg_phaccum','dsg_ahaccum',tablename=tablename1,factory='',sn='',timename='',st='',sp='')# DBDownload has been adapted for this project

    # First-data timestamps per SN from qx_cas (used for running-hour stats).
    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port=3306
    db='qx_cas'
    user='qx_read'
    password='Qx@123456'
    mode=3
    tablename2='bat_first_data_time'
    DBRead=DBDownload.DBDownload(host, port, db, user, password,mode)
    with DBRead as DBRead:
        df_FirstDataTime=DBRead.getdata('sn','first_data_time',tablename=tablename2,factory='',sn='',timename='',st='',sp='')# DBDownload has been adapted for this project
    CS_Data=FeiShuData.getFeiShuDATA()  # customer-service ticket sheet from FeiShu
    # Fault records from the safety platform; db_engine below is also the
    # destination for the snapshot row.
    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port=3306
    db='safety_platform'
    user='qx_read'
    password='Qx@123456'
    mode=2
    tablename1='all_fault_info'
    
    db_engine = create_engine(
    "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
        user, parse.quote_plus(password), host, port, db
    ))
    DBRead=DBDownload.DBDownload(host, port, db, user, password,mode)
    with DBRead as DBRead:
        df_fltinfo=DBRead.getdata('product_id','level','code','start_time',tablename=tablename1,factory='骑享',sn='',timename='',st='',sp='')# DBDownload has been adapted for this project
    # ---------------- time window: last day ----------------
    end_time=datetime.datetime.now()
    # end_time=datetime.datetime.strptime(end_time,'%Y-%m-%d')
    start_time=end_time-datetime.timedelta(days=1)
    start_time=start_time.strftime('%Y-%m-%d')
    end_time=end_time.strftime('%Y-%m-%d')
    # ---------------- compute statistics ----------------
    FltAlarmInfo,Celltype=DataSta.SaftyWarningSta(CS_Data,df_fltinfo,start_time,end_time)
    SatftyCount=DataSta.SftyWrngClsfy(df_fltinfo)
    MaxAccumAh,TotalAccumAh,MaxCycle,MaxRunningHour,TotalRunHour=DataSta.AccumInfo(df_last_accum,df_FirstDataTime,end_time)

    # Assemble the one-row snapshot cell by cell (kept as .loc assignments so
    # dtypes/behaviour stay exactly as before).
    all_statistic_info=DataFrame(columns=['factory','total_alarm','alarm_total_today','alarm_not_close_today','alarm_close_today','alarm_uregent_total_today','alarm_uregent_close_today','alarm_uregent_not_close_today','alarm_close_total','run_time_total','dischrg_total','odo_total','max_dischrg_one','max_runtime_one','max_cycle_one','max_odo_one','alarm_close_total','alarm_total','cell_type','cell_type_count','cell_safety_risk_count','data_safety_risk_count','status_safety_risk_count','hv_safety_risk_count','system_safety_risk_count','sample_safety_risk_count','controller_safety_risk_count','design_safety_risk_count'])
    all_statistic_info.loc[0,'factory']='骑享'
    all_statistic_info.loc[0,'total_alarm']=FltAlarmInfo.loc[0,'SftyPlt_Data_Total']
    all_statistic_info.loc[0,'alarm_total_today']=FltAlarmInfo.loc[0,'SftyPlt_Data_day']
    all_statistic_info.loc[0,'alarm_close_today']=FltAlarmInfo.loc[0,'CS_Warning_day_Finish_Count']
    all_statistic_info.loc[0,'alarm_not_close_today']=FltAlarmInfo.loc[0,'SftyPlt_Data_day']-FltAlarmInfo.loc[0,'CS_Warning_day_Finish_Count']
    all_statistic_info.loc[0,'alarm_uregent_total_today']=FltAlarmInfo.loc[0,'SftyPlt_EmgcyData_day']
    all_statistic_info.loc[0,'alarm_uregent_close_today']=FltAlarmInfo.loc[0,'SftyPlt_EmgcyData_day_Finish_Count']
    all_statistic_info.loc[0,'alarm_uregent_not_close_today']=FltAlarmInfo.loc[0,'SftyPlt_EmgcyData_day']-FltAlarmInfo.loc[0,'SftyPlt_EmgcyData_day_Finish_Count']
    all_statistic_info.loc[0,'run_time_total']=TotalRunHour
    all_statistic_info.loc[0,'dischrg_total']=TotalAccumAh
    all_statistic_info.loc[0,'odo_total']=0
    all_statistic_info.loc[0,'max_dischrg_one']=MaxAccumAh
    all_statistic_info.loc[0,'max_runtime_one']=MaxRunningHour
    all_statistic_info.loc[0,'max_cycle_one']=MaxCycle
    all_statistic_info.loc[0,'max_odo_one']=0
    all_statistic_info.loc[0,'alarm_close_total']=FltAlarmInfo.loc[0,'CS_Warning_Total_Finish_Count']
    all_statistic_info.loc[0,'alarm_total']=FltAlarmInfo.loc[0,'SftyPlt_Data_Total']
    # Cell chemistry breakdown, flattened to comma-joined strings for storage.
    CellType=Celltype.columns.tolist()
    CellType=','.join(CellType) 
    all_statistic_info.loc[0,'cell_type']=str(CellType)
    CellTypeCount=str(Celltype.loc[0].values)
    CellTypeCount=''.join(CellTypeCount) 
    all_statistic_info.loc[0,'cell_type_count']=str(CellTypeCount)
    all_statistic_info.loc[0,'cell_safety_risk_count']=SatftyCount.loc[0,'CellSaftyCount']
    all_statistic_info.loc[0,'data_safety_risk_count']=SatftyCount.loc[0,'DataSaftyCodeCount']
    all_statistic_info.loc[0,'status_safety_risk_count']=SatftyCount.loc[0,'StateSaftyCodeCount']
    all_statistic_info.loc[0,'hv_safety_risk_count']=SatftyCount.loc[0,'HvSaftyCodeCount']
    all_statistic_info.loc[0,'system_safety_risk_count']=SatftyCount.loc[0,'SysSaftyCodeCount']
    all_statistic_info.loc[0,'sample_safety_risk_count']=SatftyCount.loc[0,'SamplingSatyCount']
    all_statistic_info.loc[0,'controller_safety_risk_count']=SatftyCount.loc[0,'CtrlSaftyCodeCount']
    all_statistic_info.loc[0,'design_safety_risk_count']=SatftyCount.loc[0,'DsnSaftyCodeCount']
    all_statistic_info.to_sql('all_statistic_info', db_engine, if_exists='append', index=False)

    print(all_statistic_info)
+
+if __name__ == "__main__":
+    
+    # # 时间设置
+
+    # now_time = datetime.datetime.now()
+    # pre_time = now_time + dateutil.relativedelta.relativedelta(hours=-1)#上小时时间
+    # end_time=datetime.datetime.strftime(now_time,"%Y-%m-%d %H:00:00")
+    # start_time=datetime.datetime.strftime(pre_time,"%Y-%m-%d %H:00:00")
+    
+    # history_run_flag = False # 历史数据运行标志
+    
+
+    # # 更新sn列表
+    # host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
+    # port=3306
+    # db='qixiang_oss'
+    # user='qixiang_oss'
+    # password='Qixiang2021'
+    # conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
+    # cursor = conn.cursor()
+    # cursor.execute("select sn, imei, add_time from app_device")
+    # res = cursor.fetchall()
+    # df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])
+    # df_sn = df_sn.reset_index(drop=True)
+    # conn.close();
+    
+    # # 数据库配置
+    # host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
+    # port = 3306
+    # user = 'qx_cas'
+    # password = parse.quote_plus('Qx@123456')
+    # database = 'qx_cas'
+
+    # db_engine = create_engine(
+    #     "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
+    #         user, password, host, port, database
+    #     ))
+    # DbSession = sessionmaker(bind=db_engine)
+    
+    # # 运行历史数据配置
+    
+    # df_first_data_time = pd.read_sql("select * from bat_first_data_time", db_engine)
+
+    
+    # 日志配置
+    now_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()).replace(":","_")
+    log_path = 'log/' + now_str
+    if not os.path.exists(log_path):
+        os.makedirs(log_path)
+    log = Log.Mylog(log_name='saftyCenter_DataSta', log_level = 'info')
+    log.set_file_hl(file_name='{}/info.log'.format(log_path), log_level='info', size=1024* 1024 * 100)
+    log.set_file_hl(file_name='{}/error.log'.format(log_path), log_level='error', size=1024* 1024 * 100)
+    logger = log.get_logger()
+
+    logger.info("pid is {}".format(os.getpid()))
+    
+    
+    # # 算法参数
+    # host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
+    # port=3306
+    # db='safety_platform'
+    # user='qx_read'
+    # password=parse.quote_plus('Qx@123456')
+    # db_res_engine = create_engine(
+    #     "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
+    #         user, password, host, port, db
+    #     ))
+    
+    # dbManager = DBManager.DBManager()
+    # charge_count = 0
+    # drive_count = 0
+    # stand_count = 0
+    # for sn in df_sn['sn'].tolist():
+    Minutes_Task()
+    scheduler = BlockingScheduler()
+    scheduler.add_job(Week_Task, 'interval', seconds=10, id='Week_Task')
+    scheduler.add_job(Minutes_Task, 'interval', seconds=10, id='Hour_Task')
+    
+    try:
+        logger.info("周期任务 START!")
+        scheduler.start()
+        # df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms', 'gps'])
+        
+        # df_bms = df_data['bms']
+        # df_gps = df_data['gps']
+        # charge_count, drive_count, stand_count = status_sta.status_sta(df_bms, df_gps, start_time, charge_count, drive_count, stand_count)
+        # df_result = pd.DataFrame({'factory':['骑享'], 'time':[start_time], 'charge_count':[charge_count], 'drive_count':[drive_count], 'stand_count':stand_count})
+
+        # logger.info("pid-{}  SN: {} DONE!".format(os.getpid(), sn))          
+    except:
+        logger.info("周期任务 ERROR!")
+        scheduler.shutdown()
+        logger.error(traceback.format_exc)
+
+    

+ 140 - 0
LIB/FRONTEND/SaftyCenter/DataSta/main.py

@@ -0,0 +1,140 @@
+import numpy as np
+import pandas as pd
+from MIDDLE.SaftyCenter.Common import FeiShuData
+from MIDDLE.SaftyCenter.Common import DBDownload
+import time, datetime
+from pandas.core.frame import DataFrame
+from apscheduler.schedulers.blocking import BlockingScheduler
+from DataStatistics import DataSta
+
+#............................主程序................................... 
+   
+
+
def Week_Task():
    """Weekly job (notebook/dev variant): count last-7-day faults per severity
    level and assemble a one-row summary DataFrame.

    NOTE(review): unlike the deploy.py variant, this builds
    ``all_period_fault_info`` but never persists or returns it -- the result
    is discarded when the function exits. Presumably incomplete; confirm.
    """
    all_period_fault_info=DataFrame(columns=['factory','week','level1_count','level2_count','level3_count','level4_count','level5_count','solve_rate'])

    # ---------------- fetch data ----------------
    toweek='Week'+time.strftime('%W')  # week-of-year label, e.g. 'Week45'
    CS_Data=FeiShuData.getFeiShuDATA()  # customer-service ticket sheet from FeiShu
    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port=3306
    db='safety_platform'
    user='qx_read'
    password='Qx@123456'
    mode=2
    tablename1='all_fault_info'
    DBRead=DBDownload.DBDownload(host, port, db, user, password,mode)
    with DBRead as DBRead:
        df_fltinfo=DBRead.getdata('product_id','level','code','start_time',tablename=tablename1,factory='骑享',sn='',timename='',st='',sp='')# DBDownload has been adapted for this project
    # ---------------- time window: last 7 days ----------------
    end_time=datetime.datetime.now()
    # end_time=datetime.datetime.strptime(end_time,'%Y-%m-%d')
    start_time=end_time-datetime.timedelta(days=7)
    start_time=start_time.strftime('%Y-%m-%d')
    end_time=end_time.strftime('%Y-%m-%d')
    FltAlarmInfo,Celltype=DataSta.SaftyWarningSta(CS_Data,df_fltinfo,start_time,end_time)
    FaultLvlCount=DataSta.WeekInfoSta(df_fltinfo,start_time,end_time)
    # NOTE(review): each lvlN is a numpy array (``.values``), not a scalar
    # count; empty selections yield empty arrays. Confirm intended.
    lvl1=FaultLvlCount[FaultLvlCount['level']==1]['product_id'].values
    lvl2=FaultLvlCount[FaultLvlCount['level']==2]['product_id'].values
    lvl3=FaultLvlCount[FaultLvlCount['level']==3]['product_id'].values
    lvl4=FaultLvlCount[FaultLvlCount['level']==4]['product_id'].values
    lvl5=FaultLvlCount[FaultLvlCount['level']==5]['product_id'].values
    all_period_fault_info.loc[0,'factory']='骑享'
    all_period_fault_info.loc[0,'week']=toweek
    all_period_fault_info.loc[0,'level1_count']=lvl1
    all_period_fault_info.loc[0,'level2_count']=lvl2
    all_period_fault_info.loc[0,'level3_count']=lvl3
    all_period_fault_info.loc[0,'level4_count']=lvl4
    all_period_fault_info.loc[0,'level5_count']=lvl5
    all_period_fault_info.loc[0,'solve_rate']=FltAlarmInfo.loc[0,'OprationManageRate']
+    
def Minutes_Task():
    """Periodic job (notebook/dev variant): aggregate platform-wide alarm /
    usage statistics for the last day into a one-row DataFrame.

    NOTE(review): unlike the deploy.py variant, this builds
    ``all_statistic_info`` but never persists or returns it -- the result is
    discarded when the function exits. Presumably incomplete; confirm.
    """
    # ---------------- fetch data ----------------
    # Accumulated discharge data per device from the fastfun DB.
    host='172.16.121.236'
    port=3306
    db='fastfun'
    user='readonly'
    password='Fast1234'
    mode=3
    tablename1='ff_battery_accum'
    DBRead=DBDownload.DBDownload(host, port, db, user, password,mode)
    with DBRead as DBRead:
        df_last_accum=DBRead.getdata('devcode','dsg_phaccum','dsg_ahaccum',tablename=tablename1,factory='',sn='',timename='',st='',sp='')# DBDownload has been adapted for this project

    # First-data timestamps per SN from qx_cas (used for running-hour stats).
    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port=3306
    db='qx_cas'
    user='qx_read'
    password='Qx@123456'
    mode=3
    tablename2='bat_first_data_time'
    DBRead=DBDownload.DBDownload(host, port, db, user, password,mode)
    with DBRead as DBRead:
        df_FirstDataTime=DBRead.getdata('sn','first_data_time',tablename=tablename2,factory='',sn='',timename='',st='',sp='')# DBDownload has been adapted for this project
    CS_Data=FeiShuData.getFeiShuDATA()  # customer-service ticket sheet from FeiShu
    # Fault records from the safety platform.
    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
    port=3306
    db='safety_platform'
    user='qx_read'
    password='Qx@123456'
    mode=2
    tablename1='all_fault_info'
    DBRead=DBDownload.DBDownload(host, port, db, user, password,mode)
    with DBRead as DBRead:
        df_fltinfo=DBRead.getdata('product_id','level','code','start_time',tablename=tablename1,factory='骑享',sn='',timename='',st='',sp='')# DBDownload has been adapted for this project
    # ---------------- time window: last day ----------------
    end_time=datetime.datetime.now()
    # end_time=datetime.datetime.strptime(end_time,'%Y-%m-%d')
    start_time=end_time-datetime.timedelta(days=1)
    start_time=start_time.strftime('%Y-%m-%d')
    end_time=end_time.strftime('%Y-%m-%d')
    # ---------------- compute statistics ----------------
    FltAlarmInfo,Celltype=DataSta.SaftyWarningSta(CS_Data,df_fltinfo,start_time,end_time)
    SatftyCount=DataSta.SftyWrngClsfy(df_fltinfo)
    MaxAccumAh,TotalAccumAh,MaxCycle,MaxRunningHour,TotalRunHour=DataSta.AccumInfo(df_last_accum,df_FirstDataTime,end_time)

    # Assemble the one-row snapshot cell by cell.
    all_statistic_info=DataFrame(columns=['factory','total_alarm','alarm_total_today','alarm_not_close_today','alarm_close_today','alarm_uregent_total_today','alarm_uregent_close_today','alarm_uregent_not_close_today','alarm_close_total','run_time_total','dischrg_total','odo_total','max_dischrg_one','max_runtime_one','max_cycle_one','max_odo_one','alarm_close_total','alarm_total','cell_type','cell_type_count','cell_safety_risk_count','data_safety_risk_count','status_safety_risk_count','hv_safety_risk_count','system_safety_risk_count','sample_safety_risk_count','controller_safety_risk_count','design_safety_risk_count'])
    all_statistic_info.loc[0,'factory']='骑享'
    all_statistic_info.loc[0,'total_alarm']=FltAlarmInfo.loc[0,'SftyPlt_Data_Total']
    all_statistic_info.loc[0,'alarm_total_today']=FltAlarmInfo.loc[0,'SftyPlt_Data_day']
    all_statistic_info.loc[0,'alarm_close_today']=FltAlarmInfo.loc[0,'CS_Warning_day_Finish_Count']
    all_statistic_info.loc[0,'alarm_not_close_today']=FltAlarmInfo.loc[0,'SftyPlt_Data_day']-FltAlarmInfo.loc[0,'CS_Warning_day_Finish_Count']
    all_statistic_info.loc[0,'alarm_uregent_total_today']=FltAlarmInfo.loc[0,'SftyPlt_EmgcyData_day']
    all_statistic_info.loc[0,'alarm_uregent_close_today']=FltAlarmInfo.loc[0,'SftyPlt_EmgcyData_day_Finish_Count']
    all_statistic_info.loc[0,'alarm_uregent_not_close_today']=FltAlarmInfo.loc[0,'SftyPlt_EmgcyData_day']-FltAlarmInfo.loc[0,'SftyPlt_EmgcyData_day_Finish_Count']
    all_statistic_info.loc[0,'run_time_total']=TotalRunHour
    all_statistic_info.loc[0,'dischrg_total']=TotalAccumAh
    all_statistic_info.loc[0,'odo_total']=0
    all_statistic_info.loc[0,'max_dischrg_one']=MaxAccumAh
    all_statistic_info.loc[0,'max_runtime_one']=MaxRunningHour
    all_statistic_info.loc[0,'max_cycle_one']=MaxCycle
    all_statistic_info.loc[0,'max_odo_one']=0
    all_statistic_info.loc[0,'alarm_close_total']=FltAlarmInfo.loc[0,'CS_Warning_Total_Finish_Count']
    all_statistic_info.loc[0,'alarm_total']=FltAlarmInfo.loc[0,'SftyPlt_Data_Total']
    # Cell chemistry breakdown, flattened to strings for storage.
    CellType=Celltype.columns.tolist()
    CellType=','.join(CellType) 
    all_statistic_info.loc[0,'cell_type']=str(CellType)
    CellTypeCount=str(Celltype.loc[0].values)
    CellTypeCount=''.join(CellTypeCount) 
    all_statistic_info.loc[0,'cell_type_count']=str(CellTypeCount)
    all_statistic_info.loc[0,'cell_safety_risk_count']=SatftyCount.loc[0,'CellSaftyCount']
    all_statistic_info.loc[0,'data_safety_risk_count']=SatftyCount.loc[0,'DataSaftyCodeCount']
    all_statistic_info.loc[0,'status_safety_risk_count']=SatftyCount.loc[0,'StateSaftyCodeCount']
    all_statistic_info.loc[0,'hv_safety_risk_count']=SatftyCount.loc[0,'HvSaftyCodeCount']
    all_statistic_info.loc[0,'system_safety_risk_count']=SatftyCount.loc[0,'SysSaftyCodeCount']
    all_statistic_info.loc[0,'sample_safety_risk_count']=SatftyCount.loc[0,'SamplingSatyCount']
    all_statistic_info.loc[0,'controller_safety_risk_count']=SatftyCount.loc[0,'CtrlSaftyCodeCount']
    all_statistic_info.loc[0,'design_safety_risk_count']=SatftyCount.loc[0,'DsnSaftyCodeCount']
# ---- scheduled tasks -------------------------------------------------------
# Run the minute statistics once at startup, then keep both jobs scheduled.
#Week_Task()
Minutes_Task()
scheduler = BlockingScheduler()
scheduler.add_job(Week_Task, 'interval', days=7, id='Week_Task')
scheduler.add_job(Minutes_Task, 'interval', seconds=300, id='Hour_Task')  # NOTE(review): id says 'Hour' but interval is 300 s
try:  
    scheduler.start()
except Exception as e:
    # Any failure (or interrupt) tears the scheduler down and reports it.
    scheduler.shutdown()
    print(repr(e))

+ 4 - 0
LIB/FRONTEND/SaftyCenter/DataSta/run.bat

@@ -0,0 +1,4 @@
REM Launch the SaftyCenter DataSta deploy job with the project's Python env.
REM Keeps the console open (pause) so errors stay visible after exit.
cd /d D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\SaftyCenter\DataSta
title cal_saftyCenter_DataSta
D:\env\py_pro\python.exe D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\SaftyCenter\DataSta\deploy.py
pause

+ 114 - 0
LIB/FRONTEND/SaftyCenter/status_sta/deploy.py

@@ -0,0 +1,114 @@
+
+__author__ = 'lmstack'
+#coding=utf-8
+import os
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time, datetime
+import dateutil.relativedelta
+import traceback
+from LIB.MIDDLE.CellStateEstimation.Common import log
+from LIB.MIDDLE.SaftyCenter.StatusSta.V1_0_0 import status_sta
+
+from LIB.MIDDLE.CellStateEstimation.Common import DBDownload
+from urllib import parse
+import pymysql
+
+import datacompy
+from LIB.BACKEND import DBManager
+
+if __name__ == "__main__":
+    
+    # 时间设置
+
+    now_time = datetime.datetime.now()
+    pre_time = now_time + dateutil.relativedelta.relativedelta(hours=-1)#上小时时间
+    end_time=datetime.datetime.strftime(now_time,"%Y-%m-%d %H:00:00")
+    start_time=datetime.datetime.strftime(pre_time,"%Y-%m-%d %H:00:00")
+    
+    history_run_flag = False # 历史数据运行标志
+    
+
+    # 更新sn列表
+    host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
+    port=3306
+    db='qixiang_oss'
+    user='qixiang_oss'
+    password='Qixiang2021'
+    conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
+    cursor = conn.cursor()
+    cursor.execute("select sn, imei, add_time from app_device")
+    res = cursor.fetchall()
+    df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])
+    df_sn = df_sn.reset_index(drop=True)
+    conn.close();
+    
+    # 数据库配置
+    host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
+    port = 3306
+    user = 'qx_cas'
+    password = parse.quote_plus('Qx@123456')
+    database = 'qx_cas'
+
+    db_engine = create_engine(
+        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
+            user, password, host, port, database
+        ))
+    DbSession = sessionmaker(bind=db_engine)
+    
+    # 运行历史数据配置
+    
+    df_first_data_time = pd.read_sql("select * from bat_first_data_time", db_engine)
+
+    
+    # 日志配置
+    now_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()).replace(":","_")
+    log_path = 'log/' + now_str
+    if not os.path.exists(log_path):
+        os.makedirs(log_path)
+    log = Log.Mylog(log_name='saftyCenter_status_sta', log_level = 'info')
+    log.set_file_hl(file_name='{}/info.log'.format(log_path), log_level='info', size=1024* 1024 * 100)
+    log.set_file_hl(file_name='{}/error.log'.format(log_path), log_level='error', size=1024* 1024 * 100)
+    logger = log.get_logger()
+
+    logger.info("pid is {}".format(os.getpid()))
+    
+    
+    # 算法参数
+    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
+    port=3306
+    db='safety_platform'
+    user='qx_read'
+    password=parse.quote_plus('Qx@123456')
+    db_res_engine = create_engine(
+        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
+            user, password, host, port, db
+        ))
+    
+    dbManager = DBManager.DBManager()
+    charge_count = 0
+    drive_count = 0
+    stand_count = 0
+    for sn in df_sn['sn'].tolist():
+        try:
+            logger.info("pid-{}  SN: {} START!".format(os.getpid(), sn))
+            df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms', 'gps'])
+            
+            df_bms = df_data['bms']
+            df_gps = df_data['gps']
+            charge_count, drive_count, stand_count = status_sta.status_sta(df_bms, df_gps, start_time, charge_count, drive_count, stand_count)
+            df_result = pd.DataFrame({'factory':['骑享'], 'time':[start_time], 'charge_count':[charge_count], 'drive_count':[drive_count], 'stand_count':stand_count})
+
+            logger.info("pid-{}  SN: {} DONE!".format(os.getpid(), sn))
+                        
+        except:
+            logger.error(traceback.format_exc)
+            logger.error(u"{} :{},{} 任务运行错误\n".format(sn,start_time,end_time), exc_info=True)
+    if not df_result.empty:
+        df_result.columns = ['factory', 'time', 'charge_count', 'drive_count', 'stand_count']
+        df_result.to_sql("period_status_statistic",con=db_res_engine, if_exists="append",index=False)
+        logger.info("数据库写入成功!")
+    

+ 950 - 0
LIB/FRONTEND/SaftyCenter/status_sta/main.ipynb

@@ -0,0 +1,950 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "### start to get data PK504B10100004341 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 8, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004342 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004344 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004345 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 204, gps_count is 88, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004346 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004347 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 4, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004349 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 7, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004350 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 2, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004351 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004352 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 328, gps_count is 163, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004353 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004354 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004355 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 188, gps_count is 93, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004356 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004357 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 60, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004358 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004359 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 45, gps_count is 12, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004360 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 2, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004361 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 182, gps_count is 89, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004362 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004363 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 339, gps_count is 166, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004364 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 130, gps_count is 62, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004365 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004366 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 323, gps_count is 158, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004367 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004368 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004369 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004370 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 60, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004371 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004372 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004373 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004374 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004376 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 75, gps_count is 34, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004377 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 77, gps_count is 17, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004378 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004379 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 114, gps_count is 54, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004380 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004381 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 60, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004383 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004384 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004385 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004386 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004387 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 9, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004390 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 4, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004391 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004392 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004393 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 268, gps_count is 134, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004394 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004395 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 166, gps_count is 83, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004396 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 173, gps_count is 85, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004397 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 4, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004398 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004399 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004400 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004401 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004402 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004403 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004404 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 150, gps_count is 70, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004405 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 60, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMLXN750N2189031 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004406 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004407 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004408 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 9, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004409 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 84, gps_count is 40, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004410 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 3, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004411 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004412 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004413 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 7, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004414 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004415 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 14, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004416 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 2, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004417 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 80, gps_count is 40, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004418 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004419 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004420 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 147, gps_count is 61, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMLXN750N218B019 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 120, gps_count is 122, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004421 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 66, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004422 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004423 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 55, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004424 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004425 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 353, gps_count is 77, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004426 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 9, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004427 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004428 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004429 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 139, gps_count is 68, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004430 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004431 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004432 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004433 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 341, gps_count is 167, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004434 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 239, gps_count is 109, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004435 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 2, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004436 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 134, gps_count is 62, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004437 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data K504B10100004438 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004439 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004441 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004442 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004443 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004444 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004445 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004446 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004447 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004448 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004449 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 8, gps_count is 3, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004450 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004451 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 9, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004452 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004453 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 5, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004454 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 8, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004455 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004456 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004457 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 339, gps_count is 169, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004458 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 60, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004459 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 60, gps_count is 19, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004460 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 60, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004461 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 319, gps_count is 40, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004462 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 262, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004463 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 3, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004464 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004465 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 352, gps_count is 176, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004468 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 58, gps_count is 27, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004469 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 52, gps_count is 14, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004470 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004471 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 3, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004472 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004473 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004474 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 63, gps_count is 29, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004475 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 59, gps_count is 29, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004476 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004477 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 4, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004478 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 4, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004479 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004480 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 85, gps_count is 41, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004481 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 53, gps_count is 25, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004482 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004483 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 229, gps_count is 73, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004484 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004485 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004486 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 58, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004487 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004488 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 214, gps_count is 29, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004489 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004490 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 304, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004491 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 4, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004492 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 227, gps_count is 4, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004493 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 4, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004494 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004495 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004496 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 93, gps_count is 47, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004497 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 4, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004498 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 88, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004499 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 60, gps_count is 7, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK504B10100004500 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 323, gps_count is 158, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK505B00100004166 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK50301A000001018 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data PK50301A000001029 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMLXN750N218C011 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 36, gps_count is 32, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMLXN750N2189029 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 361, gps_count is 121, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMLXN750N21AC017 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 111, gps_count is 101, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data UD02030118B4C0001 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 178, gps_count is 178, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data UD02030118B4C0012 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I034 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 75, gps_count is 75, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I035 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 12, gps_count is 12, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I036 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 7, gps_count is 7, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I037 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I038 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMLXN750N21AC029 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 119, gps_count is 119, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I039 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 190, gps_count is 190, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I040 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I041 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 315, gps_count is 315, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I042 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I043 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 7, gps_count is 7, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data UD02030118B4C0013 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 346, gps_count is 346, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I044 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 12, gps_count is 12, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I045 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 173, gps_count is 173, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I046 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I047 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 10, gps_count is 10, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I048 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 360, gps_count is 360, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I049 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 360, gps_count is 360, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I050 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I051 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I052 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 8, gps_count is 8, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I053 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 8, gps_count is 8, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data UD02030118B4C0014 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 239, gps_count is 239, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I054 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 131, gps_count is 131, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I055 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 12, gps_count is 12, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I056 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 360, gps_count is 360, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I057 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 316, gps_count is 316, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I058 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 8, gps_count is 8, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I059 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 361, gps_count is 361, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I060 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I061 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 9, gps_count is 9, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I062 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 12, gps_count is 12, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I063 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data UD02030118B4C0015 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I064 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 171, gps_count is 171, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I065 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 12, gps_count is 12, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I066 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I067 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 6, gps_count is 6, system_count is 0, accum_count is 0 \n",
+      "\n",
+      "### start to get data MGMCLN750N215I068 from 2021-11-07 22:00:00 to 2021-11-07 23:00:00\n",
+      "# get data from 2021-11-07 22:00:00 to 2021-11-07 23:00:00......... \n",
+      "all data-getting done, bms_count is 0, gps_count is 0, system_count is 0, accum_count is 0 \n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "# 获取数据\n",
+    "import sys\n",
+    "from LIB.BACKEND import DBManager\n",
+    "import pymysql\n",
+    "import time, datetime\n",
+    "import dateutil.relativedelta\n",
+    "import pandas as pd\n",
+    "from LIB.BACKEND import DataPreProcess\n",
+    "import V1_0_0.status_sta as status_sta\n",
+    "dataPrePro = DataPreProcess.DataPreProcess()\n",
+    "\n",
+    "host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'\n",
+    "port=3306\n",
+    "db='qixiang_oss'\n",
+    "user='qixiang_oss'\n",
+    "password='Qixiang2021'\n",
+    "conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)\n",
+    "cursor = conn.cursor()\n",
+    "cursor.execute(\"select sn, imei, add_time from app_device\")\n",
+    "res = cursor.fetchall()\n",
+    "df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])\n",
+    "df_sn = df_sn.reset_index(drop=True)\n",
+    "\n",
+    "now_time = datetime.datetime.now()\n",
+    "pre_time = now_time + dateutil.relativedelta.relativedelta(hours=-1)#上个月时间\n",
+    "end_time=datetime.datetime.strftime(now_time,\"%Y-%m-%d %H:00:00\")\n",
+    "start_time=datetime.datetime.strftime(pre_time,\"%Y-%m-%d %H:00:00\")\n",
+    "\n",
+    "dbManager = DBManager.DBManager()\n",
+    "charge_count = 0\n",
+    "drive_count = 0\n",
+    "stand_count = 0\n",
+    "for sn in df_sn['sn'].tolist()[0:200]:\n",
+    "\n",
+    "    df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms', 'gps'])\n",
+    "    # \n",
+    "    df_bms = df_data['bms']\n",
+    "    df_gps = df_data['gps']\n",
+    "    df_result = status_sta.status_sta(df_bms, df_gps, start_time)\n",
+    "    "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/html": [
+       "<div>\n",
+       "<style scoped>\n",
+       "    .dataframe tbody tr th:only-of-type {\n",
+       "        vertical-align: middle;\n",
+       "    }\n",
+       "\n",
+       "    .dataframe tbody tr th {\n",
+       "        vertical-align: top;\n",
+       "    }\n",
+       "\n",
+       "    .dataframe thead th {\n",
+       "        text-align: right;\n",
+       "    }\n",
+       "</style>\n",
+       "<table border=\"1\" class=\"dataframe\">\n",
+       "  <thead>\n",
+       "    <tr style=\"text-align: right;\">\n",
+       "      <th></th>\n",
+       "      <th>factory</th>\n",
+       "      <th>time</th>\n",
+       "      <th>charge_count</th>\n",
+       "      <th>drive_count</th>\n",
+       "      <th>stand_count</th>\n",
+       "    </tr>\n",
+       "  </thead>\n",
+       "  <tbody>\n",
+       "    <tr>\n",
+       "      <th>0</th>\n",
+       "      <td>骑享</td>\n",
+       "      <td>2021-11-07 22:00:00</td>\n",
+       "      <td>19</td>\n",
+       "      <td>18</td>\n",
+       "      <td>stand_count</td>\n",
+       "    </tr>\n",
+       "  </tbody>\n",
+       "</table>\n",
+       "</div>"
+      ],
+      "text/plain": [
+       "  factory                 time  charge_count  drive_count  stand_count\n",
+       "0      骑享  2021-11-07 22:00:00            19           18  stand_count"
+      ]
+     },
+     "execution_count": 8,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "df_result"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "interpreter": {
+   "hash": "b3ba2566441a7c06988d0923437866b63cedc61552a5af99d1f4fb67d367b25f"
+  },
+  "kernelspec": {
+   "display_name": "Python 3.8.8 64-bit ('base': conda)",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.8"
+  },
+  "orig_nbformat": 4
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}

+ 4 - 0
LIB/FRONTEND/SaftyCenter/status_sta/run.bat

@@ -0,0 +1,4 @@
rem Launch the SaftyCenter status_sta deployment script with the dedicated interpreter.
cd /d D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\SaftyCenter\status_sta
rem Name the console window so the scheduled task is identifiable in the task bar.
title cal_saftyCenter_status_sta
D:\env\py_pro\python.exe D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\SaftyCenter\status_sta\deploy.py
rem Keep the window open after the script exits so errors stay visible.
pause

+ 127 - 0
LIB/FRONTEND/odo/old/CalDist.py

@@ -0,0 +1,127 @@
+from math import radians, cos, sin, asin, sqrt
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+from GpsRank import *
+from ProcessDfBms import *
+from ProcessDfGps import *
+
+from LIB.BACKEND import DBManager
+
+import DBManager
+#####################################配置环境分割线#################################################
+
def GetDistInfo(input_sn,input_starttime,input_endtime):
    """Compute the driving-distance summary for one battery SN in a time window.

    Parameters
    ----------
    input_sn : str
        Battery serial number; 'UD'/'MG' prefixes select the 7255 current
        sign convention (mode 1).
    input_starttime, input_endtime : str
        Query window bounds, e.g. '2021-07-31 00:00:00'.

    Returns
    -------
    dict with keys: SN, range (km), accum_soc, day_start_soc, day_end_soc,
    day_start_time, day_end_time, day_min_soc.
    """
    # ---------------- data loading & preprocessing ----------------
    dbManager = DBManager.DBManager()
    data_raw = dbManager.get_data(sn=input_sn, start_time=input_starttime,
        end_time=input_endtime)
    df_bms_raw = data_raw['bms']
    df_gps_raw = data_raw['gps']
    df_bms = preprocess_Df_Bms(df_bms_raw)
    df_gps = preprocess_Df_Gps(df_gps_raw)

    # mode: 0 = normal convention; 1 = 7255 packs (discharge current negative).
    if input_sn[0:2] == 'UD' or input_sn[0:2] == 'MG':
        mode = 1
    else:
        mode = 0

    # Per-drive-cycle time table (start/end time, start/end soc).
    df_bms_drive_timetable = get_bms_drive_timetable(df_bms, mode)
    if len(df_bms_drive_timetable) > 0:
        # NOTE: iterate the actual index labels -- get_bms_drive_timetable()
        # drops incomplete rows, so labels are not guaranteed to be 0..n-1.
        for index in df_bms_drive_timetable.index:
            drive_start_time = df_bms_drive_timetable.loc[index, 'drive_start_time']
            drive_end_time = df_bms_drive_timetable.loc[index, 'drive_end_time']

            time_condition = (df_gps['time'] > drive_start_time) & (df_gps['time'] < drive_end_time)
            df_gps_drive_cycle = df_gps.loc[time_condition, :].copy()
            df_gps_drive_cycle = df_gps_drive_cycle.reset_index(drop=True)
            # Accumulate GPS distance, dropping rows that look like signal
            # loss: gaps > 3 min, or gaps > 90 s combined with a > 1 km jump.
            condition_a = df_gps_drive_cycle['deltatime'] > 60*3
            condition_b = (df_gps_drive_cycle['deltatime'] > 90*1) & (df_gps_drive_cycle['distance'] > 1000)
            drive_cycle_dist_array = df_gps_drive_cycle.loc[~(condition_a | condition_b), 'distance'].values
            drive_cycle_dist_array = drive_cycle_dist_array[np.where((drive_cycle_dist_array >= 1) & (drive_cycle_dist_array < 1000))[0]]
            gps_dist = drive_cycle_dist_array.sum()
            df_bms_drive_timetable.loc[index, 'gps_dist'] = gps_dist
            # Estimate the head/tail windows not covered by GPS from the SOC
            # drop over those windows.
            if len(df_gps_drive_cycle) > 2:
                gps_starttime = df_gps_drive_cycle.loc[1, 'time']
                gps_endtime = df_gps_drive_cycle.loc[df_gps_drive_cycle.index[-1], 'time']
                unrecorded_odo_head = cal_deltasoc(df_bms, drive_start_time, gps_starttime)
                unrecorded_odo_tail = cal_deltasoc(df_bms, gps_endtime, drive_end_time)
            else:
                # No usable GPS rows: estimate the whole cycle from SOC.
                unrecorded_odo_head = cal_deltasoc(df_bms, drive_start_time, drive_end_time)
                unrecorded_odo_tail = 0
            # Estimated distance for GPS drop-outs inside the cycle.
            predict_dist = cal_unrecorded_gps(df_gps_drive_cycle, df_bms)
            totaldist = predict_dist + unrecorded_odo_head + unrecorded_odo_tail
            df_bms_drive_timetable.loc[index, 'predict_dist'] = totaldist

    # ---------------- daily aggregation ----------------
    dist_gps = 0
    dist_predict = 0
    day_start_time = ''   # first drive start of the day
    day_end_time = ''     # last drive end of the day
    day_start_soc = 0
    day_end_soc = 0
    day_min_soc = 101     # lowest soc seen in any drive cycle
    drive_accum_soc = 0   # total soc consumed while driving

    if len(df_bms_drive_timetable) > 0:
        # BUGFIX: use positional iloc[0]/iloc[-1] instead of label-based
        # loc[1]/loc[len-1] -- after dropna the labels may be non-contiguous,
        # and the first row carries label 0 (loc[1] KeyError'd on a
        # single-cycle day and skipped the first cycle otherwise).
        day_start_soc = df_bms_drive_timetable['drive_start_soc'].iloc[0]
        day_start_time = df_bms_drive_timetable['drive_start_time'].iloc[0]
        day_end_time = df_bms_drive_timetable['drive_end_time'].iloc[-1]
        day_end_soc = df_bms_drive_timetable['drive_end_soc'].iloc[-1]

    for index in df_bms_drive_timetable.index:
        # Sum distances and soc usage over all drive cycles.
        dist_gps += df_bms_drive_timetable.loc[index, 'gps_dist']/1000      # m -> km
        dist_predict += df_bms_drive_timetable.loc[index, 'predict_dist']
        drive_start_soc = df_bms_drive_timetable.loc[index, 'drive_start_soc']
        drive_end_soc = df_bms_drive_timetable.loc[index, 'drive_end_soc']
        day_min_soc = min(day_min_soc, drive_start_soc, drive_end_soc)

        delta_soc = drive_start_soc - drive_end_soc   # soc used in this cycle
        drive_accum_soc += abs(delta_soc)

    dist_gps = round(dist_gps, 3)
    dist_predict = round(dist_predict, 3)
    dist_all = round(dist_gps + dist_predict, 3)

    return {'SN':input_sn,'range':dist_all,'accum_soc':drive_accum_soc,'day_start_soc':day_start_soc,
    'day_end_soc':day_end_soc,'day_start_time':day_start_time,'day_end_time':day_end_time,
    'day_min_soc':day_min_soc}
+
+

+ 68 - 0
LIB/FRONTEND/odo/old/CalDist_Batch.py

@@ -0,0 +1,68 @@
from math import radians, cos, sin, asin, sqrt
import pandas as pd
import numpy as np
from datetime import datetime
from datetime import timedelta

from GpsRank import *
from ProcessDfBms import *
from ProcessDfGps import *
from CalDist import *
from LIB.BACKEND import DBManager
import pdb

# Batch driver: for every SN listed in the asset workbook, compute the daily
# distance summary via GetDistInfo() and write one Excel workbook per day.

asset_table_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\asset_table.xlsx'
drive_info_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\drive_info.xlsx'
asset_sheet_num=1
usecols_list=[4,5]

# Read the SN / state columns of the asset sheet (row 0 is a header band).
asset_table=pd.read_excel(asset_table_path,sheet_name=asset_sheet_num,skiprows=1,usecols=usecols_list)
SN_list=asset_table['SN号'].values.tolist()
print('从6060sheet读取到:'+str(len(SN_list))+'行')
asset_table=asset_table.rename(columns={'SN号':'SN','状态':'state'})

asset_table.set_index(["SN"],inplace=True)
col_name=asset_table.columns.tolist()
col_name.extend(['range','accum_soc','day_start_soc','day_end_soc','day_start_time','day_end_time'])
asset_table=asset_table.reindex(columns=col_name)

start_hour='00:00:00'  # earliest query time of each day
end_hour='23:59:00'    # latest query time of each day


date_index=pd.date_range('2021-07-31','2021-07-31')
for date in date_index:
    # One output workbook per day.
    str_date=str(date)[:10]
    input_starttime=str_date+' '+start_hour
    input_endtime=str_date+' '+end_hour
    test_day=str_date[5:10]  # 'MM-DD', used as the sheet name
    drive_info_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\6060\\drive_info'+test_day+'_50_end_'+'.xlsx'

    print(input_starttime)

    drive_info_aday=pd.DataFrame()
    SN_list_short=SN_list  # optionally slice, e.g. [0:50] / [50:]

    for SN in SN_list_short:
        # Normalize the SN string (sheet cells may carry stray whitespace).
        SN=SN.strip('\t')
        SN=SN.strip('\n')

        try:
            # BUGFIX: the result variable used to be named 'range', shadowing
            # the builtin; renamed to dist_info.
            dist_info=GetDistInfo(SN,input_starttime,input_endtime)
            dist_info_df=pd.DataFrame([dist_info])
            drive_info_aday=pd.concat([drive_info_aday,dist_info_df],axis=0)
        # BUGFIX: was a bare 'except:', which also swallowed
        # KeyboardInterrupt/SystemExit; keep the batch running only on
        # ordinary per-SN errors.
        except Exception:
            print(SN+' '+test_day+'fail')

    drive_info_aday.to_excel(drive_info_path,sheet_name=test_day)  # sheet named after the day
+    
+    
+
+

+ 77 - 0
LIB/FRONTEND/odo/old/GpsRank.py

@@ -0,0 +1,77 @@
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
def cal_gps_score(df):
    """Fold the per-grade time shares into a single GPS score (0-100).

    `df` must carry the columns 'GPS质量' (quality label) and '累计时间占比'
    (share of total time).  Higher penalty weight means worse signal;
    100 means perfect signal.
    """
    # Penalty weight per quality label; other labels contribute nothing.
    penalty = {'优': 0, '良': 0.3, '合格': 0.5, '掉线': 1}
    score = 0
    for row in range(len(df)):
        quality = df.loc[row, 'GPS质量']
        if quality in penalty:
            score += df.loc[row, '累计时间占比'] * penalty[quality]
    return (1 - score) * 100
+
def gps_rank(df_gps_signal_table, df_gps, signal_rank, dist_factor):
    """Fill the summary row of one signal grade.

    Sums the distance (m) and elapsed time (s) of the `df_gps` rows whose
    'gps_signal' equals `signal_rank`, then writes the accumulated distance
    (km), accumulated time, and the factor-weighted distance into the
    matching row of `df_gps_signal_table` (modified in place and returned).
    """
    rows = df_gps['gps_signal'] == signal_rank
    total_dist_m = df_gps.loc[rows, 'distance'].values.sum()
    total_time_s = df_gps.loc[rows, 'deltatime'].values.sum()

    target = df_gps_signal_table['gps_signal'] == signal_rank
    df_gps_signal_table.loc[target, 'accum_distance'] = total_dist_m / 1000
    df_gps_signal_table.loc[target, 'accum_deltatime'] = total_time_s
    df_gps_signal_table.loc[target, 'accum_distance_factor'] = total_dist_m / 1000 * dist_factor
    return df_gps_signal_table
+
def get_df_gps_score(starttime,endtime,df_gps):
    '''Score the GPS signal quality of df_gps inside [starttime, endtime].

    Rows are graded by their gap to the previous fix ('deltatime', seconds):
    优 (<=30), 良 (<=60), 合格 (<=120), 掉线 (>120).  The grades' time shares
    are then folded into a single 0-100 score by cal_gps_score().
    Returns that score (100 = perfect signal).
    '''
    test_start_time=starttime#e.g. '2021-05-29 17:16:39'
    test_end_time=endtime#e.g. '2021-05-29 20:08:08'

    test_time_condition=(df_gps['time']>test_start_time)&(df_gps['time']<test_end_time)
    df_gps_test=df_gps.loc[test_time_condition,:].copy()
    df_gps_test=df_gps_test.reset_index(drop=True)#reset the index
    #label every row by its time gap
    gps_deltatime_bins=[0,30,60,120,10000]#优-良-合格-掉线 (good ... offline)
    name=['优','良','合格','掉线']
    df_gps_test['gps_signal']=pd.cut(df_gps_test['deltatime'], gps_deltatime_bins,labels=name)
    df_gps_test['gps_signal'].value_counts()
    #summary table: one row per signal grade
    df_gps_signal_table=pd.DataFrame()
    df_gps_signal_table['gps_signal']=df_gps_test['gps_signal'].value_counts().index.tolist()
    df_gps_signal_table['num']=df_gps_test['gps_signal'].value_counts().values.tolist()

    #accumulate distance/time per grade (second argument = distance correction weight)
    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'优',1.00)
    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'良',1.05)
    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'合格',1.2)
    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'掉线',1)

    #share of row count and of accumulated time per grade
    all_num=df_gps_signal_table['num'].sum()
    df_gps_signal_table['num_percent']=df_gps_signal_table['num']/all_num
    all_accum_deltatime=df_gps_signal_table['accum_deltatime'].sum()
    df_gps_signal_table['accum_deltatime_percent']=df_gps_signal_table['accum_deltatime']/all_accum_deltatime

    #keep the presentation columns and rename them to the Chinese headers
    #that cal_gps_score() expects
    df_gps_signal_table=df_gps_signal_table[['gps_signal','num','num_percent','accum_distance',
                                            'accum_distance_factor','accum_deltatime','accum_deltatime_percent']]
    df_gps_signal_table=df_gps_signal_table.rename(columns={'gps_signal':'GPS质量','num':'数量','num_percent':'数量占比',
                                                        'accum_distance':'累计里程','accum_distance_factor':'累计里程修正值',
                                                        'accum_deltatime':'累计时间','accum_deltatime_percent':'累计时间占比'})

    # NOTE(review): the next line has no effect (result discarded).
    df_gps_signal_table.loc[:,['GPS质量','累计时间','累计时间占比']]
    gps_score=cal_gps_score(df_gps_signal_table)#fold the table into one score

    return gps_score
+

+ 159 - 0
LIB/FRONTEND/odo/old/ProcessDfBms.py

@@ -0,0 +1,159 @@
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
def get_bms_drive_timetable(df_bms,battery_mode):
    '''Build a table of driving periods from the BMS frame.

    battery_mode selects the current sign convention: 0 means positive
    current is discharge, 1 means negative current is discharge.  Returns a
    DataFrame with one row per drive cycle: drive_start_time, drive_end_time,
    drive_start_soc, drive_end_soc (gps_dist / predict_dist are filled in
    later by the caller).
    '''

    ##################### step 1: classify every row as chrg/drive/idle by current sign
    if battery_mode==0:#mode=0: positive current means discharge
        condition_chrg=df_bms['bmspackcrnt']<0##charging rows
        df_bms.loc[condition_chrg,'bscsta']='chrg'
        condition_drive=df_bms['bmspackcrnt']>0.01##driving rows
        df_bms.loc[condition_drive,'bscsta']='drive'
        df_bms.loc[~(condition_drive|condition_chrg),'bscsta']='idle'#idle rows
    else :#mode=1: negative current means discharge
        condition_chrg=df_bms['bmspackcrnt']>0##charging rows
        df_bms.loc[condition_chrg,'bscsta']='chrg'
        condition_drive=df_bms['bmspackcrnt']<-0.01##driving rows
        df_bms.loc[condition_drive,'bscsta']='drive'
        df_bms.loc[~(condition_drive|condition_chrg),'bscsta']='idle'#idle rows

    ##################### step 2: debounce -- enter 'drive' immediately, leave only after ~5 min
    index=0
    debounce_row=10#look ahead at most 10 rows
    debounce_time=300#and at most 300 seconds
    # second pass over the rough classification above
    while index<(len(df_bms)-debounce_row):
        mode_0=df_bms.loc[index,'bscsta']
        mode_1=df_bms.loc[index+1,'bscsta']
        # at a drive -> non-drive boundary, hold 'drive' until 300 s have passed
        if (mode_0=='drive')&(mode_1!='drive'):#leaving the drive state
            accum_subtime=0#accumulated hold time

            for sub_index in range(debounce_row):#inspect the next 10 rows
                sub_time=df_bms.loc[index+sub_index,'deltatime']
                accum_subtime+=sub_time
                # within the 300 s window, keep the row classified as drive
                if accum_subtime<debounce_time:
                    df_bms.loc[index+sub_index,'bscsta']='drive'
            index=index+debounce_row#skip past the window just handled
        # at a non-drive -> drive boundary, pull the previous row into the cycle
        elif (mode_0!='drive')&(mode_1=='drive'): 
            df_bms.loc[index,'bscsta']='drive'
            index=index+1
        else:
            index=index+1


    ####################### step 3: split the drive rows into start/end periods
    not_drive_flg=0#0 = currently outside a drive cycle
    # Output table: one row per drive cycle with its start/end time and soc.
    df_bms_drive_timetable_index=0
    # NOTE(review): columns= is given a set literal here, so the initial
    # column order is arbitrary; it works because cells are assigned by label.
    df_bms_drive_timetable=pd.DataFrame([],columns={'drive_start_time','drive_end_time',
                                                    'gps_dist','predict_dist','drive_start_soc','drive_end_soc'})
    for index in range(len(df_bms)):
        temp_bscsta=df_bms.loc[index,'bscsta']
        
        if (temp_bscsta=='drive')&(not_drive_flg==0):
            # drive cycle starts: record start time and soc
            drive_start_time=df_bms.loc[index,'time']
            not_drive_flg=1
            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_start_time']=drive_start_time
            drive_start_soc=df_bms.loc[index,'bmspacksoc']
            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_start_soc']=drive_start_soc

        elif (temp_bscsta!='drive')&(not_drive_flg==1):
            # drive cycle ends: record end time and soc, advance to next slot
            drive_end_time=df_bms.loc[index,'time']
            not_drive_flg=0
            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_end_time']=drive_end_time
            drive_end_soc=df_bms.loc[index,'bmspacksoc']
            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_end_soc']=drive_end_soc
            df_bms_drive_timetable_index+=1

    # drop cycles that never closed (missing start or end time)
    df_bms_drive_timetable=df_bms_drive_timetable.dropna(subset=['drive_end_time','drive_start_time'])
    
    return df_bms_drive_timetable
+
+
def read_df_bms(path):
    """Load a raw BMS csv (gbk encoded) and return the cleaned frame.

    Keeps only timestamp / current / voltage / soc, renames the Chinese
    headers to canonical names, parses the timestamps and appends the
    per-row 'deltatime' column.
    """
    raw = pd.read_csv(path, encoding='gbk')  # vendor exports are gbk encoded
    # keep only the columns the pipeline uses, with canonical names
    keep = ['时间戳', '总电流[A]', '总电压[V]', 'SOC[%]']
    frame = raw.loc[:, keep].copy()
    frame = frame.rename(columns={"时间戳": "time", "总电流[A]": "bmspackcrnt",
                                  "总电压[V]": "bmspackvol", "SOC[%]": "bmspacksoc"})
    # parse '%Y-%m-%d %H:%M:%S' strings into datetimes
    frame['time'] = frame['time'].apply(lambda s: datetime.strptime(s, '%Y-%m-%d %H:%M:%S'))
    # add the seconds-between-rows column used by downstream gap detection
    return df_add_deltatime(frame)
+
def preprocess_Df_Bms(df_bms):
    """Clean an already-loaded BMS frame.

    Keeps only timestamp / current / voltage / soc, renames the Chinese
    headers, drops rows without a timestamp and duplicate timestamps, parses
    the timestamps, and appends the per-row 'deltatime' column.
    """
    # keep only the columns the pipeline uses, with canonical names
    keep = ['时间戳', '总电流[A]', '总电压[V]', 'SOC[%]']
    frame = df_bms.loc[:, keep].copy()
    frame = frame.rename(columns={"时间戳": "time", "总电流[A]": "bmspackcrnt",
                                  "总电压[V]": "bmspackvol", "SOC[%]": "bmspacksoc"})
    # drop rows without a timestamp
    frame = frame.dropna(subset=['time'])
    # for duplicated timestamps keep the first occurrence
    frame = frame.drop_duplicates(subset=['time'], keep='first')
    # parse '%Y-%m-%d %H:%M:%S' strings into datetimes
    frame['time'] = frame['time'].apply(lambda s: datetime.strptime(s, '%Y-%m-%d %H:%M:%S'))
    # add the seconds-between-rows column used by downstream gap detection
    return df_add_deltatime(frame)
+
+
def df_add_deltatime(df_in):
    """Append a 'deltatime' column: seconds elapsed since the previous row.

    Requires a 'time' column of datetimes and a 0..n-1 integer index; the
    first row gets 0.  Modifies df_in in place and returns it.
    """
    for row in range(len(df_in)):
        if row == 0:
            df_in.loc[row, 'deltatime'] = 0  # no predecessor for the first row
        else:
            previous = df_in.loc[row - 1, 'time']
            current = df_in.loc[row, 'time']
            # time_interval() returns the gap in seconds
            df_in.loc[row, 'deltatime'] = time_interval(previous, current)
    return df_in
+
+
def time_interval(time1, time2):
    """Return the number of seconds between two datetimes.

    BUGFIX: the original returned ``(time2 - time1).seconds``, which is only
    the seconds *component* of the timedelta -- it silently wraps for gaps of
    a day or more (and misbehaves for negative gaps), so multi-day data
    drop-outs looked like short ones downstream.  ``total_seconds()`` returns
    the true length of the gap (as a float).
    """
    return (time2 - time1).total_seconds()
+
+
def cal_deltasoc(df_bms, start_time, end_time):
    """Estimate unrecorded distance from the SOC drop over a time window.

    One percent of SOC drop is equated to one km of driving.  Returns 0 when
    the window holds fewer than two samples, or when SOC did not decrease
    (charging or idle).
    """
    in_window = (df_bms['time'] > start_time) & (df_bms['time'] < end_time)
    window = df_bms.loc[in_window, :].copy()
    if len(window) < 2:
        return 0  # not enough samples to form a difference

    first_soc = window['bmspacksoc'].iloc[0]   # soc at window start
    last_soc = window['bmspacksoc'].iloc[-1]   # soc at window end
    soc_drop = first_soc - last_soc
    if soc_drop > 0:
        # 1 % soc == 1 km of estimated distance
        return soc_drop * 1
    return 0

+ 139 - 0
LIB/FRONTEND/odo/old/ProcessDfGps.py

@@ -0,0 +1,139 @@
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+from ProcessDfBms import *
+from math import radians, cos, sin, asin, sqrt
+
def cal_unrecorded_gps(df_in, df_bms):
    """Estimate the distance lost during GPS signal drop-outs.

    `df_in` is the GPS frame of one drive cycle (needs 'time', 'deltatime'
    and 'distance' columns with a 0..n-1 index).  A row marks a drop-out
    when its gap to the previous fix is > 3 min, or > 90 s combined with a
    > 1 km jump.  For every drop-out the missing distance is estimated from
    the SOC drop (cal_deltasoc) between the last good fix and the
    reappearing fix; the estimates are summed and returned.
    """
    accum_unrecorded_odo = 0

    # Rows whose gap to the previous fix indicates a signal loss.
    long_gap = df_in['deltatime'] > 60*3
    jump_gap = (df_in['deltatime'] > 90*1) & (df_in['distance'] > 1000)
    loss_rows = df_in.loc[long_gap | jump_gap, :].index.to_list()
    # Row 0 has no predecessor to bridge from, so it cannot start an interval.
    if 0 in loss_rows:
        loss_rows.remove(0)

    # NOTE: the original also assembled a combined 'pick_gps_list' of
    # first/last/boundary rows that was never read; that dead code is gone.
    for start_index in loss_rows:
        last_good_time = df_in.loc[start_index - 1, 'time']  # last fix before the gap
        reappear_time = df_in.loc[start_index, 'time']       # first fix after the gap
        # Bridge the gap with the SOC-based distance estimate.
        accum_unrecorded_odo += cal_deltasoc(df_bms, last_good_time, reappear_time)

    return accum_unrecorded_odo
+
+
def df_add_avgspeed(df_in):
    """Append an 'avgspeed' column (km/h) derived from 'distance' (m) and
    'deltatime' (s).

    The first row gets 0; the index must be 0..n-1.  Modifies df_in in place
    and returns it.
    """
    for row in range(len(df_in)):
        if row == 0:
            df_in.loc[row, 'avgspeed'] = 0  # no interval before the first fix
        else:
            seconds = df_in.loc[row, 'deltatime']
            meters = df_in.loc[row, 'distance']
            # (m -> km) / (s -> h)
            df_in.loc[row, 'avgspeed'] = (meters / 1000) / (seconds / 3600)
    return df_in
+
+
def read_df_gps(path):
    '''Load a raw GPS csv (gbk encoded) and return the cleaned frame with
    distance / deltatime / avgspeed columns appended.'''
    df_gps=pd.read_csv(path, encoding='gbk')#vendor exports are gbk encoded
    #canonical column names
    df_gps.rename(columns = {"时间戳": "time", "纬度":"lat", "经度":"lng", 
                             "卫星数":"sat_num", "海拔m":"height","速度[km/h]":"speed"},  inplace=True)
    #parse timestamps
    df_gps['time']=pd.to_datetime(df_gps['time'])
    #clean up obvious GPS drift
    df_gps=df_add_distance(df_gps)#distance (m) from the previous fix
    condition=df_gps['distance']<20000#drop fixes that jumped > 20 km -- likely GPS drift
    df_gps=df_gps.loc[condition,:].copy()
    df_gps=df_gps.reset_index(drop=True)#re-index after the drop
    #recompute on the cleaned rows
    df_gps=df_add_distance(df_gps)#distance again, now without the dropped fixes
    df_gps=df_add_deltatime(df_gps)#seconds between consecutive fixes
    df_gps=df_add_avgspeed(df_gps)#km/h between consecutive fixes

    return df_gps
+
def preprocess_Df_Gps(df_gps):
    '''Clean an already-loaded GPS frame: rename columns, drop bad rows,
    parse timestamps, and append distance / deltatime / avgspeed columns.'''
    #canonical column names
    df_gps.rename(columns = {"时间戳": "time", "纬度":"lat", "经度":"lng", 
                             "卫星数":"sat_num", "海拔m":"height","速度[km/h]":"speed"},  inplace=True)
    #drop rows missing a timestamp or a coordinate
    df_gps=df_gps.dropna(subset=['time','lat','lng'])
    #for duplicated timestamps keep the first occurrence
    df_gps=df_gps.drop_duplicates(subset=['time'],keep='first')
    #parse timestamps
    df_gps['time']=pd.to_datetime(df_gps['time'])
    
    #clean up obvious GPS drift
    df_gps=df_add_distance(df_gps)#distance (m) from the previous fix
    condition=df_gps['distance']<20000#drop fixes that jumped > 20 km -- likely GPS drift
    df_gps=df_gps.loc[condition,:].copy()
    df_gps=df_gps.reset_index(drop=True)#re-index after the drop
    #recompute on the cleaned rows
    df_gps=df_add_distance(df_gps)#distance again, now without the dropped fixes
    df_gps=df_add_deltatime(df_gps)#seconds between consecutive fixes
    df_gps=df_gps.loc[df_gps['deltatime']>0.01,:].copy()#drop zero-gap rows: speed would divide by zero
    df_gps=df_add_avgspeed(df_gps)#km/h between consecutive fixes

    return df_gps
+
+
def df_add_distance(df_in):
    """Append a 'distance' column: haversine metres from the previous fix.

    Needs 'lng'/'lat' columns (decimal degrees) and a 0..n-1 index; the
    first row gets 0.  Modifies df_in in place and returns it.
    """
    for row in range(len(df_in)):
        if row == 0:
            df_in.loc[row, 'distance'] = 0  # no predecessor for the first fix
        else:
            prev_lon = df_in.loc[row - 1, 'lng']
            prev_lat = df_in.loc[row - 1, 'lat']
            cur_lon = df_in.loc[row, 'lng']
            cur_lat = df_in.loc[row, 'lat']
            # great-circle distance in metres
            df_in.loc[row, 'distance'] = haversine(prev_lon, prev_lat, cur_lon, cur_lat)
    return df_in
+
+
def haversine(lon1, lat1, lon2, lat2):
    """Great-circle distance in metres between two points given as decimal
    degrees (lon/lat)."""
    # degrees -> radians
    lon1, lat1, lon2, lat2 = (radians(v) for v in (lon1, lat1, lon2, lat2))
    delta_lon = lon2 - lon1
    delta_lat = lat2 - lat1
    # haversine formula
    h = sin(delta_lat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(delta_lon / 2) ** 2
    central_angle = 2 * asin(sqrt(h))
    earth_radius_km = 6371  # mean earth radius
    return central_angle * earth_radius_km * 1000

+ 293 - 0
LIB/FRONTEND/odo/old/UpdtFct.py

@@ -0,0 +1,293 @@
+import pandas as pd
+import pymysql
+from sqlalchemy import create_engine
+import datetime
+
# --- database connections ----------------------------------------------------
# SECURITY NOTE(review): credentials are hard-coded below (local MySQL and the
# Aliyun RDS instance); they should be moved to a config file or environment
# variables before this is shared further.

# SQLAlchemy engine for the local result database (used by to_sql writes).
engine = create_engine(str(r"mysql+mysqldb://%s:" + '%s' + "@%s/%s") % ('root', 'pengmin', 'localhost', 'qixiangdb'))

# Read-only source database (qx_cas) on Aliyun RDS.
conn_qx = pymysql.connect(
        host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com',
        user='qx_cas',
        password='Qx@123456',#Qx@123456
        database='qx_cas',
        charset='utf8'
    )

# Local database holding the tb_sn_factor table.
conn_local = pymysql.connect(
        host='localhost',
        user='root',
        password='pengmin',
        database='qixiangdb',
        charset='utf8'
    )
+
def getNextSoc(start_soc):
    """Return the next SOC checkpoint strictly below start_soc.

    Checkpoints are 80 / 60 / 40 / 20; anything at or below 20 maps to 1.
    """
    # try the checkpoints from highest to lowest
    for threshold in (80, 60, 40, 20):
        if start_soc > threshold:
            return threshold
    return 1
+
def updtSnFct(sn_factor_df, end_soc, delta_range, range_soc):
    """Route a measured km-per-soc sample to the factor column of its band.

    Bands: a0 for end_soc 80, a1 for 60, a2 for 40, a3 for 20, a4 below 20.
    `delta_range` is km, `range_soc` is km per soc percent; `sn_factor_df`
    is updated in place (row 1 receives the blended factor) and returned.
    """
    band_column = {80: 'a0', 60: 'a1', 40: 'a2', 20: 'a3'}.get(end_soc)
    if band_column is None and end_soc < 20:
        band_column = 'a4'
    if band_column is not None:
        updtFctByCol(sn_factor_df, band_column, delta_range, range_soc)
    return sn_factor_df
+
def updtFctByCol(sn_factor_df, colmun_name, delta_range, range_soc):
    """Blend one new km-per-soc sample into a single factor column.

    Row 0 of `sn_factor_df` holds the previous factor; the blend (weighted
    by delta_range / 200 km, i.e. 200 km of driving fully replaces the old
    value) is written into row 1.  `delta_range` is km, `range_soc` is km
    per soc percent.  Returns the frame.
    """
    previous = sn_factor_df.loc[0, colmun_name]  # most recent stored factor
    debounce_range = 200  # km of driving that would fully replace the factor
    weight = delta_range / debounce_range
    blended = range_soc * weight + previous * (1 - weight)
    # row 1 carries today's updated factor
    sn_factor_df.loc[1, colmun_name] = blended
    return sn_factor_df
+
def updtTodayFct(factor_input,sn_day_df):
    '''Blend today's measured km-per-soc samples into the factor row.

    `factor_input` is a two-row factor frame (row 0 = yesterday's values,
    row 1 = today's, to be filled); `sn_day_df` holds one SN's driving rows
    for the day ('soc', 'vehodo', 'name'), already prepared by
    snDayDfPreProcess().  Returns the updated factor frame.
    '''
    sn_factor_df_last=factor_input
    start_soc=sn_day_df.loc[0,'soc']
    next_soc=getNextSoc(start_soc)#next soc checkpoint below the start
    start_range=sn_day_df.loc[0,'vehodo']
    sn=sn_day_df.loc[0,'name']

    for index in range(len(sn_day_df)-1):
    #walk the rows looking for a checkpoint crossing
        index_soc=sn_day_df.loc[index,'soc']#this row's soc
        next_index_soc=sn_day_df.loc[index+1,'soc']#next row's soc

        if (index_soc>=next_soc)&(next_index_soc<next_soc):#crossing: this row above, next row below
            delta_soc_tonext=start_soc-next_soc#soc spent in this band (%)
            delta_range_tonext=sn_day_df.loc[index,'vehodo']-start_range#distance covered in this band (m)
            delta_range_tonext_km=delta_range_tonext/1000#same distance in km
            range_soc_tonext=(delta_range_tonext/1000)/delta_soc_tonext#km per soc percent
            print(sn+'start_soc: '+str(start_soc),'next_soc: '+str(next_soc),'delta_vehodo; '+str(round(delta_range_tonext_km,3))
            +'km'+' range_soc:'+str(round(range_soc_tonext,3)))

            #ignore bands driven for less than 1 km -- too noisy to learn from
            if (delta_range_tonext_km)>1:
                sn_factor_df_last=updtSnFct(sn_factor_df_last,next_soc,delta_range_tonext_km,range_soc_tonext)
            
            start_soc=next_index_soc#start a new band here
            next_soc=getNextSoc(start_soc)#and aim for the next checkpoint
            start_range=sn_day_df.loc[index+1,'vehodo']#the new band's starting odometer    

    return sn_factor_df_last
+
def snDayDfPreProcess(sn_day_df):
    """Derive drive-state columns for one SN's daily rows and return only
    the rows that are in the driving state.

    Adds: 'delta_soc' (soc change vs previous row), 'drive_flg' (True while
    the pack is discharging / moving) and 'vehodo' (accumulated distance in
    metres, reset whenever charging is detected).

    BUGFIX: row 0 used to be written into a separate 'drive_status' column
    (a naming slip), leaving 'drive_flg' as NaN there; it is now written
    into 'drive_flg' like every other row.  The filtered result is
    unchanged, since row 0 starts in the non-driving state either way.
    """
    sn_day_df = sn_day_df.reset_index(drop=True)
    # soc change relative to the previous row (first row: 0)
    for index in range(len(sn_day_df)):
        if index == 0:
            sn_day_df.loc[index, 'delta_soc'] = 0
        else:
            sn_day_df.loc[index, 'delta_soc'] = sn_day_df.loc[index, 'soc'] - sn_day_df.loc[index-1, 'soc']
    # classify driving vs not, accumulating distance while driving
    drive_flg = False
    accum_distance = 0
    for index in range(len(sn_day_df)):
        if index == 0:
            sn_day_df.loc[index, 'drive_flg'] = drive_flg
            sn_day_df.loc[index, 'vehodo'] = 0
        else:
            if (sn_day_df.loc[index, 'delta_soc'] < -0.1) | \
                ((sn_day_df.loc[index, 'delta_soc'] <= 0) & (sn_day_df.loc[index, 'distance'] > 500)):
                drive_flg = True    # soc dropping (or moving fast) -> driving
            elif sn_day_df.loc[index, 'delta_soc'] > 0.1:
                drive_flg = False   # soc rising -> charging
                accum_distance = 0  # restart the distance accumulator
            sn_day_df.loc[index, 'drive_flg'] = drive_flg
            accum_distance += sn_day_df.loc[index, 'distance']
            sn_day_df.loc[index, 'vehodo'] = accum_distance
    # keep only the driving rows
    sn_day_drive_df = sn_day_df.loc[sn_day_df['drive_flg'] == True, :]
    sn_day_drive_df = sn_day_drive_df.reset_index(drop=True)

    return sn_day_drive_df
+
def updtAllSnFct(start_date, end_date):
    """Update factors for every SN, one day at a time, from start_date to
    end_date (both '%Y-%m-%d' strings)."""
    window_start = datetime.datetime.strptime(start_date, '%Y-%m-%d')
    window_end = datetime.datetime.strptime(end_date, '%Y-%m-%d')
    total_days = (window_end - window_start).days
    day_from = start_date
    for offset in range(1, total_days + 1):
        day_to = (window_start + datetime.timedelta(days=offset)).strftime("%Y-%m-%d")
        updtAllSnTodayFct(day_from, day_to)  # process one one-day window
        print('update all sn factor from '+day_from+" to "+day_to)
        day_from = day_to
+
def updtAllSnTodayFct(start_date,end_date):
    '''Update every SN's factor row for one day (start_date..end_date is a
    one-day window; both '%Y-%m-%d' strings).

    NOTE(review): the SQL below is assembled by string concatenation; the
    values come from internal callers, but parameterized queries would still
    be safer.  DataFrame.append is deprecated in modern pandas -- switch to
    pd.concat when upgrading.
    '''
    start_date_str="'"+start_date+"'"
    end_date_str="'"+end_date+"'"
    sql_cmd="select * from drive_info where time between "+start_date_str+" and "+end_date_str+" and distance!=0;"
    range_soc_df = pd.read_sql(sql_cmd, conn_qx)#query the qx source database

    #the SNs that produced data today
    today_sn_list=range_soc_df['name'].unique().tolist()#[:100]#optionally cap the batch size
    #collects every SN's updated factor row; written back in one shot below
    today_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])

    for sn in today_sn_list:
        #look up this SN's factor history before today
        sn_str="'"+sn+"'"
        sql_cmd2="select sn,date,a0,a1,a2,a3,a4 from tb_sn_factor where date<"+start_date_str+" and sn="+sn_str
        #the row count per query could be capped here, e.g. at 5 rows
        factor_df=pd.read_sql(sql_cmd2, conn_local)#query the local database

        #dedupe on (sn, date) in case the job ran twice; keep the first row
        factor_df=factor_df.drop_duplicates(subset=['sn','date'],keep='first')

        if len(factor_df)==0:
            #no history: seed a fresh factor row dated yesterday with all 1s
            start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')
            yesterday=(start_date_datetime+datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
            factor_df=pd.DataFrame({'sn':sn,'date':yesterday,'a0':[1],'a1':[1],'a2':[1],'a3':[1],'a4':[1]})
        sn_factor_df=factor_df.loc[factor_df['sn']==sn,:]#this SN's factor rows
        sn_factor_df=sn_factor_df.sort_values(by='date',ascending='True')#oldest first

        sn_factor_df_last=sn_factor_df.tail(1).copy()#latest factor row
        sn_factor_df_last=sn_factor_df_last.append(sn_factor_df_last)#duplicate it to hold today's values
        sn_factor_df_last=sn_factor_df_last.reset_index(drop=True)#row 0 = yesterday, row 1 = today
        sn_factor_df_last.loc[1,'date']=start_date#stamp today's row
        #this SN's driving rows for the day
        condition_sn=(range_soc_df['name']==sn)
        sn_day_df=range_soc_df.loc[condition_sn,:].copy()
        sn_day_df=sn_day_df.reset_index(drop=True)
        #blend today's measurements into the factors
        if len(sn_day_df)>=2:
            #derive the drive-state columns first
            sn_day_df=snDayDfPreProcess(sn_day_df)#preprocessing
            if len(sn_day_df)>=2:
                sn_factor_df_new=updtTodayFct(sn_factor_df_last,sn_day_df)#
                today_sn_fct_df=today_sn_fct_df.append(sn_factor_df_new.loc[1,:])#keep row 1 (today) for the bulk insert
    
    #bulk-write all of today's updated factors in one statement
    if len(today_sn_fct_df)>=1:
        today_sn_fct_df.to_sql('tb_sn_factor',con=engine,chunksize=10000,if_exists='append',index=False)
+
def updtOneSnFct(sn, start_date, end_date):
    """Update factors for a single SN, one day at a time, from start_date to
    end_date (both '%Y-%m-%d' strings)."""
    window_start = datetime.datetime.strptime(start_date, '%Y-%m-%d')
    window_end = datetime.datetime.strptime(end_date, '%Y-%m-%d')
    total_days = (window_end - window_start).days
    day_from = start_date
    for offset in range(1, total_days + 1):
        day_to = (window_start + datetime.timedelta(days=offset)).strftime("%Y-%m-%d")
        updtOneSnTodayFct(sn, day_from, day_to)  # process one one-day window
        print('update one sn factor from '+day_from+" to "+day_to)
        day_from = day_to
+
def updtOneSnTodayFct(sn, start_date, end_date):
    """Compute and persist the daily factor rows for one SN in [start_date, end_date].

    Reads the SN's drive_info rows from the qx database, loads (or seeds) its
    factor history from the local database, computes new factors with
    updtTodayFct, and appends everything produced to table tb_sn_factor in a
    single bulk write.

    Parameters
    ----------
    sn : str          serial number (re-bound inside the loop to each SN found)
    start_date : str  'YYYY-MM-DD', inclusive lower bound / "today's" factor date
    end_date : str    'YYYY-MM-DD', inclusive upper bound

    Returns
    -------
    pandas.DataFrame
        The factor rows written in this call (empty frame when there is no
        drive data — the original implicitly returned None in that case).
    """
    # BUGFIX: use parameterized queries instead of concatenating sn/date
    # strings into SQL (injection-safe and quoting-safe).
    sql_cmd = ("select * from drive_info where time between %s and %s"
               " and distance!=0 and name=%s")
    range_soc_df = pd.read_sql(sql_cmd, conn_qx, params=[start_date, end_date, sn])

    # Accumulates every factor row produced in this call; written once at the end.
    today_sn_fct_df = pd.DataFrame([], columns=['sn', 'date', 'a0', 'a1', 'a2', 'a3', 'a4'])

    if len(range_soc_df) > 0:
        # SNs that actually have drive data in the window.
        today_sn_list = range_soc_df['name'].unique().tolist()

        for sn in today_sn_list:
            sql_cmd2 = ("select sn,date,a0,a1,a2,a3,a4 from tb_sn_factor"
                        " where date<%s and sn=%s")
            factor_df = pd.read_sql(sql_cmd2, conn_local, params=[start_date, sn])

            # De-duplicate by (sn, date) so reruns do not double-count history.
            factor_df = factor_df.drop_duplicates(subset=['sn', 'date'], keep='first')

            if len(factor_df) == 0:
                # No history for this SN: seed a factor row dated yesterday,
                # all coefficients initialized to 1.
                start_date_datetime = datetime.datetime.strptime(start_date, '%Y-%m-%d')
                yesterday = (start_date_datetime + datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
                factor_df = pd.DataFrame({'sn': sn, 'date': yesterday,
                                          'a0': [1], 'a1': [1], 'a2': [1], 'a3': [1], 'a4': [1]})
                # Record the seed row so it is persisted too.
                # BUGFIX: DataFrame.append was removed in pandas 2.0 -> pd.concat.
                today_sn_fct_df = pd.concat([today_sn_fct_df, factor_df.loc[[0]]])

            sn_factor_df = factor_df.loc[factor_df['sn'] == sn, :]  # this SN's history
            # BUGFIX: ascending takes a bool; the string 'True' only worked by
            # accident (any non-empty string is truthy).
            sn_factor_df = sn_factor_df.sort_values(by='date', ascending=True)

            sn_factor_df_last = sn_factor_df.tail(1).copy()  # most recent factor row
            # Duplicate the last row; index 1 will hold today's new factors.
            sn_factor_df_last = pd.concat([sn_factor_df_last, sn_factor_df_last])
            sn_factor_df_last = sn_factor_df_last.reset_index(drop=True)
            sn_factor_df_last.loc[1, 'date'] = start_date  # second row dated today

            # This SN's drive rows for the day.
            condition_sn = (range_soc_df['name'] == sn)
            sn_day_df = range_soc_df.loc[condition_sn, :].copy()
            sn_day_df = sn_day_df.reset_index(drop=True)

            # Need at least two samples before and after pre-processing.
            if len(sn_day_df) >= 2:
                sn_day_df = snDayDfPreProcess(sn_day_df)  # pre-processing / cleaning
                if len(sn_day_df) >= 2:
                    sn_factor_df_new = updtTodayFct(sn_factor_df_last, sn_day_df)
                    # Keep only the updated (second) row for persistence.
                    today_sn_fct_df = pd.concat([today_sn_fct_df,
                                                 sn_factor_df_new.loc[[1]]])

    # Single bulk write of everything produced in this call.
    if len(today_sn_fct_df) >= 1:
        today_sn_fct_df.to_sql('tb_sn_factor', con=engine, chunksize=10000,
                               if_exists='append', index=False)
    return today_sn_fct_df
+
+
+
+
+
+# def updtASnTodayFct(start_date,end_date,today_sn_list):
+
+#     sql_cmd="select * from qixiang_test where time>='"+start_date+"' and time<='"+end_date+"'"
+#     range_soc_df = pd.read_sql(sql_cmd, conn)#使用read_sql方法查询数据库
+
+#     sql_cmd2="select sn,date,a0,a1,a2,a3,a4 from tb_sn_factor where date<'"+start_date+"'"
+#     factor_df=pd.read_sql(sql_cmd2, conn)#使用read_sql方法查询数据库
+
+#     #筛选出所有当日数据之后,筛选当日有更新的sn
+#     # today_sn_list=range_soc_df['sn'].unique().tolist()
+#     # today_sn_list=today_sn_list[:10]#更新若干个
+#     #建立空的dataframe,用于承接所有更新的factor信息
+#     today_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])
+
+#     for sn in today_sn_list:
+#         sn_factor_df=factor_df.loc[factor_df['sn']==sn,:]#筛选sn对应的factor
+#         sn_factor_df=sn_factor_df.sort_values(by='date',ascending='True')#按照日期排序
+#         sn_factor_df_last=sn_factor_df.tail(1).copy()#寻找最后一行,代表最近日期
+#         sn_factor_df_last=sn_factor_df_last.append(sn_factor_df_last)#新增加一行,用于存储新的factor
+#         sn_factor_df_last=sn_factor_df_last.reset_index(drop=True)#重置index
+#         sn_factor_df_last.loc[1,'date']=start_date#更改后一行的date为当前日期
+#         #筛选对应车辆的信息
+#         condition_sn=(range_soc_df['sn']==sn)
+#         sn_day_df=range_soc_df.loc[condition_sn,:].copy()
+#         sn_day_df=sn_day_df.reset_index(drop=True)
+#         #使用updtTodayFct函数更新今天的factor
+#         sn_factor_df_new=updtTodayFct(sn_factor_df_last,sn_day_df)
+#         today_sn_fct_df=today_sn_fct_df.append(sn_factor_df_new.loc[1,:])#筛选第一行,进行拼接,最后写入到数据库中
+    
+#     #将today_sn_fct_df写入到数据库中
+#     today_sn_fct_df.to_sql('tb_sn_factor',con=engine,chunksize=10000,if_exists='append',index=False)

+ 28 - 0
LIB/FRONTEND/odo/old/UpdtFct_Main.py

@@ -0,0 +1,28 @@
import pandas as pd
import pymysql
from sqlalchemy import create_engine
import datetime
from UpdtFct import *


# Connection to the remote qx_cas database (source of drive_info data).
# NOTE(review): credentials are hard-coded in source — move them to a config
# file or environment variables before wider deployment.
conn_qx = pymysql.connect(
        host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com',
        user='qx_cas',
        password='Qx@123456',#Qx@123456
        database='qx_cas',
        charset='utf8'
    )

# Connection to the local database that holds the tb_sn_factor table.
conn_local = pymysql.connect(
        host='localhost',
        user='root',
        password='pengmin',
        database='qixiangdb',
        charset='utf8'
    )

# Given a start date and an end date, update the factors of all SNs in range.
start_date="2021-07-18"
end_date="2021-08-01"

updtAllSnFct(start_date,end_date)

+ 0 - 0
LIB/FRONTEND/odo/old/create_table.py


+ 103 - 0
LIB/FRONTEND/odo/old/deploy.py

@@ -0,0 +1,103 @@
+#coding=utf-8
+# 计算里程
+from math import radians, cos, sin, asin, sqrt
+import pandas as pd
+import numpy as np
+from datetime import timedelta
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from GpsRank import *
+from ProcessDfBms import *
+from ProcessDfGps import *
+from LIB.MIDDLE.odo.CalDist import *
+from LIB.BACKEND import DBManager, Log
+import pdb
+from urllib import parse
+import traceback
+import os
+import time, datetime
+import pymysql
+import dateutil.relativedelta
+
+if __name__ == "__main__":
+    
+    # 时间设置
+    now_time = datetime.datetime.now()
+    pre_time = now_time + dateutil.relativedelta.relativedelta(days=-1)
+    end_time=datetime.datetime.strftime(now_time,"%Y-%m-%d 00:00:00")
+    start_time=datetime.datetime.strftime(pre_time,"%Y-%m-%d 00:00:00")
+    
+    # end_time = "2021-08-13 00:00:00"
+    # start_time = "2019-01-01 00:00:00"
+    
+    # start_hour='00:00:00'#每日查询最早时间
+    # end_hour='23:59:00'#每日查询最晚时间
+    
+    # 更新sn列表
+    host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
+    port=3306
+    db='qixiang_oss'
+    user='qixiang_oss'
+    password='Qixiang2021'
+    conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
+    cursor = conn.cursor()
+    cursor.execute("select sn, imei from app_device")
+    res = cursor.fetchall()
+    df_sn = pd.DataFrame(res, columns=['sn', 'imei'])
+    df_sn = df_sn.sort_values(['sn'])
+    df_sn = df_sn.reset_index(drop=True)
+    conn.close();
+    
+    # 结果数据库配置
+    host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
+    port = 3306
+    user = 'qx_cas'
+    password = parse.quote_plus('Qx@123456')
+    database = 'qx_cas'
+
+    db_engine = create_engine(
+        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
+            user, password, host, port, database
+        ))
+    DbSession = sessionmaker(bind=db_engine)
+    
+    # 日志配置
+    now_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()).replace(":","_")
+    log_path = 'log/' + now_str
+    if not os.path.exists(log_path):
+        os.makedirs(log_path)
+    log = Log.Mylog(log_name='odo', log_level = 'info')
+    log.set_file_hl(file_name='{}/info.log'.format(log_path), log_level='info', size=1024* 1024 * 100)
+    log.set_file_hl(file_name='{}/error.log'.format(log_path), log_level='error', size=1024* 1024 * 100)
+    logger = log.get_logger()
+
+    logger.info("pid is + {}".format(os.getpid()))
+
+    # date_index=pd.date_range(start_time[:10], end_time[:10])
+    # for date in date_index:
+    #     '''遍历日期'''
+    input_starttime = start_time
+    input_endtime = end_time
+
+    drive_info_aday=pd.DataFrame()
+
+    for SN in df_sn['sn'].tolist()[1200:]:
+        '''遍历SN号'''
+        SN=SN.strip('\t')
+        SN=SN.strip('\n')
+        try:
+            logger.info("pid-{} {}---{} START!".format(os.getpid(), SN, str(input_starttime)))
+            range=GetDistInfo(SN,input_starttime,input_endtime)
+            range_df=pd.DataFrame([range])
+            drive_info_aday=pd.concat([drive_info_aday,range_df],axis=0)
+            logger.info("{}---{} DONE!".format(SN, str(input_starttime)))
+            if not drive_info_aday.empty:
+                drive_info_aday['time'] = input_starttime[0:10]
+                drive_info_aday.columns = ['sn', 'odo', 'accum_soc', 'start_soc', 'end_soc', 'start_time','end_time', 'min_soc', 'time']
+                drive_info_aday.to_sql("odo_result",con=db_engine, if_exists="append",index=False)
+        except Exception as e:
+            logger.error(traceback.format_exc)
+            logger.error(u"{} :{},{} 任务运行错误".format(SN,input_starttime,input_endtime), exc_info=True)
+
+
+    

+ 66 - 0
LIB/FRONTEND/odo/old/main_1.py

@@ -0,0 +1,66 @@
+#coding=utf-8
+# 计算里程
+from math import radians, cos, sin, asin, sqrt
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+from GpsRank import *
+from ProcessDfBms import *
+from ProcessDfGps import *
+from LIB.MIDDLE.odo.CalDist import *
+from LIB.BACKEND import DBManager
+import pdb
+
# Asset table input: sheet index 1, columns 4 and 5 (SN号, 状态).
asset_table_path = 'asset_table.xlsx'
# drive_info_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\drive_info.xlsx'
asset_sheet_num = 1
usecols_list = [4, 5]

asset_table = pd.read_excel(asset_table_path, sheet_name=asset_sheet_num, skiprows=1, usecols=usecols_list)
SN_list = asset_table['SN号'].values.tolist()
print('从6060sheet读取到:' + str(len(SN_list)) + '行')
asset_table = asset_table.rename(columns={'SN号': 'SN', '状态': 'state'})

# Index by SN and pre-create the output columns.
asset_table.set_index(["SN"], inplace=True)
col_name = asset_table.columns.tolist()
col_name.extend(['range', 'accum_soc', 'day_start_soc', 'day_end_soc', 'day_start_time', 'day_end_time'])
asset_table = asset_table.reindex(columns=col_name)

start_hour = '00:00:00'  # earliest query time of each day
end_hour = '23:59:00'    # latest query time of each day


date_index = pd.date_range('2021-07-31', '2021-07-31')
for date in date_index:
    # Iterate over dates.
    str_date = str(date)[:10]
    input_starttime = str_date + ' ' + start_hour
    input_endtime = str_date + ' ' + end_hour
    test_day = str_date[5:10]  # "MM-DD", used as the output sheet name
    drive_info_path = '6060\\drive_info' + test_day + '_50_end_' + '.xlsx'

    print(input_starttime)

    drive_info_aday = pd.DataFrame()
    SN_list_short = SN_list  # previously sliced into 0:50 / 50:end batches

    for SN in SN_list_short:
        # Iterate over SNs.
        SN = SN.strip('\t')
        SN = SN.strip('\n')

        try:
            # Renamed from `range` to avoid shadowing the builtin.
            dist_info = GetDistInfo(SN, input_starttime, input_endtime)
            range_df = pd.DataFrame([dist_info])
            drive_info_aday = pd.concat([drive_info_aday, range_df], axis=0)
        except Exception:
            # BUGFIX: a bare `except:` also swallowed KeyboardInterrupt and
            # SystemExit, making the batch impossible to stop cleanly.
            print(SN + ' ' + test_day + 'fail')
            #print(SN+' '+test_day+'success')

    drive_info_aday.to_excel(drive_info_path, sheet_name=test_day)  # sheet named after test_day

+ 2 - 0
LIB/FRONTEND/odo/old/run.bat

@@ -0,0 +1,2 @@
REM Daily odo (mileage) job: switch to the odo frontend directory and run
REM deploy.py with the platform's dedicated Python interpreter.
cd /d D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\odo
D:\env\py_pro\python.exe D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\odo\deploy.py