
Merge branch 'dev' of http://git.fast-fun.cn:92/lmstack/data_analyze_platform into dev

su-lmstack · 3 years ago
commit 02d2b3603e
75 changed files with 5452 additions and 262 deletions
  1. .gitignore (+7, -1)
  2. LIB/BACKEND/DBManager.py (+11, -7)
  3. LIB/BACKEND/DataPreProcess.py (+6, -5)
  4. LIB/BACKEND/Log.py (+20, -5)
  5. LIB/BACKEND/Tools.py (+6, -5)
  6. LIB/FRONTEND/DrivingRange/main.py (+60, -0)
  7. LIB/FRONTEND/LeakCurrent/main.py (+27, -0)
  8. LIB/FRONTEND/SignalMonitor/create_table.py (+0, -102)
  9. LIB/FRONTEND/SignalMonitor/main.py (+45, -76)
  10. LIB/FRONTEND/day_sta.py (+15, -10)
  11. LIB/FRONTEND/deltsoc/LFPDeltSoc20210804.py (+157, -0)
  12. LIB/FRONTEND/deltsoc/detaSOC表头及数据类型.xlsx (+4, -0)
  13. LIB/FRONTEND/deltsoc/main.py (+27, -0)
  14. LIB/FRONTEND/deltsoc/骑享资产梳理-20210621.xlsx (BIN)
  15. LIB/FRONTEND/odo/CalDist.py (+127, -0)
  16. LIB/FRONTEND/odo/CalDist_Batch.py (+68, -0)
  17. LIB/FRONTEND/odo/GpsRank.py (+77, -0)
  18. LIB/FRONTEND/odo/ProcessDfBms.py (+159, -0)
  19. LIB/FRONTEND/odo/ProcessDfGps.py (+139, -0)
  20. LIB/FRONTEND/odo/UpdtFct.py (+293, -0)
  21. LIB/FRONTEND/odo/UpdtFct_Main.py (+28, -0)
  22. LIB/FRONTEND/odo/asset_table.xlsx (BIN)
  23. LIB/FRONTEND/odo/main_1.py (+66, -0)
  24. LIB/FRONTEND/other/bat_user_relation/main.py (+106, -0)
  25. LIB/FRONTEND/soh/LFPSoh 20210711.py (+309, -0)
  26. LIB/FRONTEND/soh/NCMSoh 20210716.py (+173, -0)
  27. LIB/FRONTEND/soh/main.py (+34, -0)
  28. LIB/FRONTEND/soh/soh表头及数据类型.xlsx (+6, -0)
  29. LIB/FRONTEND/soh/骑享资产梳理-20210621.xlsx (BIN)
  30. LIB/MIDDLE/CellStateEstimation/SOH/V1_0_0/BatParam.py (+86, -0)
  31. LIB/MIDDLE/CellStateEstimation/SOH/V1_0_0/CBMSBatSoh.py (+733, -0)
  32. LIB/MIDDLE/CellStateEstimation/SOH/V1_0_0/DBDownload.py (+61, -0)
  33. LIB/MIDDLE/CellStateEstimation/SOH/V1_0_0/SOH表单.xlsx (BIN)
  34. LIB/MIDDLE/CellStateEstimation/SOH/V1_0_0/log.py (+24, -0)
  35. LIB/MIDDLE/CellStateEstimation/SOH/main.py (+73, -0)
  36. LIB/MIDDLE/CellStateEstimation/Uniform/V1_0_0/BatParam.py (+86, -0)
  37. LIB/MIDDLE/CellStateEstimation/Uniform/V1_0_0/CBMSBatUniform.py (+362, -0)
  38. LIB/MIDDLE/CellStateEstimation/Uniform/V1_0_0/Uniform表单.xlsx (BIN)
  39. LIB/MIDDLE/CellStateEstimation/Uniform/V1_0_0/log.py (+24, -0)
  40. LIB/MIDDLE/CellStateEstimation/Uniform/V1_0_0/main.py (+73, -0)
  41. LIB/MIDDLE/CellStateEstimation/Uniform/main.py (+71, -0)
  42. LIB/MIDDLE/DrivingRange/UpdtFct.py (+422, -0)
  43. LIB/MIDDLE/DrivingRange/UpdtFctTable.py (+12, -0)
  44. LIB/MIDDLE/DrivingRange/UpdtFctTableNewest.py (+11, -0)
  45. LIB/MIDDLE/DrivingRange/UpdtVehElecRng.py (+16, -0)
  46. LIB/MIDDLE/DrivingRange/计算续驶里程程序介绍.docx (BIN)
  47. LIB/MIDDLE/IndexStaByOneCycle.py (+6, -6)
  48. LIB/MIDDLE/IndexStaByPeriod.py (+6, -6)
  49. LIB/MIDDLE/LeakCurrent/LFPLeakCurrent20210812.py (+160, -0)
  50. LIB/MIDDLE/LeakCurrent/LeakCurrent表头及数据类型.xlsx (+23, -0)
  51. LIB/MIDDLE/LeakCurrent/main.py (+27, -0)
  52. LIB/MIDDLE/SignalMonitor.py (+56, -31)
  53. LIB/MIDDLE/odo/CalDist.py (+125, -0)
  54. LIB/MIDDLE/odo/CalDist_Batch.py (+68, -0)
  55. LIB/MIDDLE/odo/GpsRank.py (+77, -0)
  56. LIB/MIDDLE/odo/ProcessDfBms.py (+159, -0)
  57. LIB/MIDDLE/odo/ProcessDfGps.py (+139, -0)
  58. LIB/MIDDLE/odo/UpdtFct.py (+293, -0)
  59. LIB/MIDDLE/odo/UpdtFct_Main.py (+28, -0)
  60. LIB/MIDDLE/算法类别(模板)/算法名/V_1_0_0/core_algorithm.py (+28, -0)
  61. LIB/MIDDLE/算法类别(模板)/算法名/V_1_0_1/core_algorithm.py (+28, -0)
  62. LIB/MIDDLE/算法类别(模板)/算法名/main.py (+38, -0)
  63. LIB/MIDDLE/算法类别(模板)/算法名2/V_1_0_0/core_algorithm.py (+28, -0)
  64. LIB/MIDDLE/算法类别(模板)/算法名2/V_1_0_1/core_algorithm.py (+28, -0)
  65. LIB/MIDDLE/算法类别(模板)/算法名2/main.py (+38, -0)
  66. demo.ipynb (+39, -2)
  67. demo.py (+28, -0)
  68. 函数说明/DBManager.html (+1, -1)
  69. 函数说明/DataPreProcess.html (+1, -1)
  70. 函数说明/IndexStaByOneCycle.html (+1, -1)
  71. 函数说明/IndexStaByPeriod.html (+1, -1)
  72. 函数说明/Log.html (+1, -1)
  73. 函数说明/Tools.html (+1, -1)
  74. 数据分析平台手册.doc (BIN)
  75. 数据分析平台手册.pdf (BIN)

+ 7 - 1
.gitignore

@@ -5,14 +5,20 @@
 /.vscode/
 /*/__pycache__/*.*
 /*/*/__pycache__/*.pyc
+/*/*/*/__pycache__/*.pyc
 /*.ipynb
 /*.csv
 *.doc
 *.ppt
 *.tmp
 !/demo.ipynb
-/手册
 !.gitignore
 # *.*
 /CONFIGURE/PathSetting.py
 *.log
+/demo.ipynb
+/*/*/deploy.py
+/*/*/*/deploy.py
+!数据分析平台手册.doc
+*.pyc
+

+ 11 - 7
LIB/BACKEND/DBManager.py

@@ -353,7 +353,7 @@ class DBManager():
 
 预留:后期若改用通过访问数据库的形式进行数据的获取,则本文件负责数据库的连接,sql指令的执行,数据获取等功能。
 '''
-__author__ = 'Wang Liming'
+__author__ = 'lmstack'
 
 import time
 import datetime
@@ -381,7 +381,7 @@ class DBManager():
         self.connect()
         return self
 
-    def __exit__(self):
+    def __exit__(self, exc_type, exc_val, exc_tb):
         self.close()
 
     def connect(self):
@@ -560,10 +560,13 @@
             st = time.time()
             for line in DBManager._download_json_data(urls):
                 et = time.time()
-                if i==0:
-                    data_blocks,CellUNum,CellTNum,OtherTNumm = DBManager._convert_to_dataframe_bms(line, mode)
-                    i+=1
-                    continue
+                try:
+                    if i==0:
+                        data_blocks,CellUNum,CellTNum,OtherTNumm = DBManager._convert_to_dataframe_bms(line, mode)
+                        i+=1
+                        continue
+                except:
+                    i = 0
                 try:
                     data_block,CellUNum,CellTNum,OtherTNumm = DBManager._convert_to_dataframe_bms(line, mode)
                 except:
@@ -571,7 +574,8 @@
                 try:
                     data_blocks = np.concatenate((data_blocks,data_block),axis=0)
                 except Exception as e: 
-                    if 'all the input array dimensions except for the concatenation axis must match exactly' in str(e):
+                    if 'all the input array dimensions for the concatenation axis must match exactly' in str(e) or \
+                    'all the input array dimensions except for the concatenation axis must match exactly'  in str(e):
                         pass
                     else:
                         raise e
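
The __exit__ change above matters because Python's context-manager protocol always calls __exit__ with three exception arguments; the earlier zero-argument form would raise a TypeError whenever a with-block using DBManager exited. A minimal sketch of that protocol, using an illustrative ExampleManager rather than the repository's DBManager:

class ExampleManager:
    def __enter__(self):
        print("connect")          # stands in for DBManager.connect()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # receives (None, None, None) on a normal exit, or the exception triple
        print("close")            # stands in for DBManager.close()
        return False              # do not suppress exceptions

with ExampleManager() as mgr:
    print("working with", mgr)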

+ 6 - 5
LIB/BACKEND/DataPreProcess.py

@@ -492,18 +492,19 @@ class DataPreProcess:
 数据预处理类
 
 '''
-__author__ = 'Wang Liming'
+__author__ = 'lmstack'
 
 
-import CONFIGURE.PathSetting as PathSetting
-import sys
-sys.path.append(PathSetting.backend_path)
+# import CONFIGURE.PathSetting as PathSetting
+# import sys
+# sys.path.append(PathSetting.backend_path)
+# from LIB.BACKEND
 from os import defpath
 import pandas as pd
 import numpy as np
 import pdb
 from numba import jit
-import Tools
+from LIB.BACKEND import Tools
 
 class DataPreProcess:
     def __init__(self):

+ 20 - 5
LIB/BACKEND/Log.py

@@ -51,10 +51,11 @@ class Mylog:
 Log类
 
 '''
-__author__ = 'Wang Liming'
+__author__ = 'lmstack'
 
 
 import logging
+from logging import handlers
 import os
 
 class Mylog:
@@ -68,12 +69,19 @@ class Mylog:
     def get_logger(self):
         return self.logger
     
-    def set_file_hl(self, file_name='all.log', log_level='info'):
-        fh = logging.FileHandler(filename=file_name)
+    def set_file_hl(self, file_name='all.log', log_level='info', size=1):
+        fh = handlers.RotatingFileHandler(file_name, maxBytes=size, backupCount=10)
         fh_formatter = logging.Formatter('%(asctime)s:%(created)f:%(name)s:%(module)s:%(funcName)s:%(levelname)s:%(message)s')
         fh.setFormatter(fh_formatter)
         if len(log_level) > 0:
-            self._set_log_level(log_level)
+            if log_level == 'debug':
+                fh.setLevel(logging.DEBUG)
+            if log_level == 'info':
+                fh.setLevel(logging.INFO)
+            if log_level == 'warning':
+                fh.setLevel(logging.WARNING)
+            if log_level == 'error':
+                fh.setLevel(logging.ERROR)
         self.logger.addHandler(fh)
 
     def set_stream_hl(self, log_level='info'):
@@ -81,7 +89,14 @@ class Mylog:
         sh_formatter = logging.Formatter('%(asctime)s:%(created)f:%(name)s:%(module)s:%(funcName)s:%(levelname)s:%(message)s')
         sh.setFormatter(sh_formatter)
         if len(log_level) > 0:
-            self._set_log_level(log_level)
+            if log_level == 'debug':
+                sh.setLevel(logging.DEBUG)
+            if log_level == 'info':
+                sh.setLevel(logging.INFO)
+            if log_level == 'warning':
+                sh.setLevel(logging.WARNING)
+            if log_level == 'error':
+                sh.setLevel(logging.ERROR)
         self.logger.addHandler(sh)  
 
     def _set_log_level(self, log_level):
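
set_file_hl now attaches a handlers.RotatingFileHandler and maps the level string through repeated if statements. A compact equivalent of that mapping, offered only as an illustrative sketch and not the repository's code (the 1 MiB default for size is an assumption; the commit's default is 1 byte):

import logging
from logging import handlers

_LEVELS = {"debug": logging.DEBUG, "info": logging.INFO,
           "warning": logging.WARNING, "error": logging.ERROR}

def make_rotating_handler(file_name="all.log", log_level="info", size=1024 * 1024):
    # size is assumed to be bytes; the commit passes it straight to maxBytes
    fh = handlers.RotatingFileHandler(file_name, maxBytes=size, backupCount=10)
    fh.setFormatter(logging.Formatter(
        "%(asctime)s:%(created)f:%(name)s:%(module)s:%(funcName)s:%(levelname)s:%(message)s"))
    fh.setLevel(_LEVELS.get(log_level, logging.INFO))
    return fh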

+ 6 - 5
LIB/BACKEND/Tools.py

@@ -77,13 +77,14 @@ class Tools():
 工具类
 
 '''
-__author__ = 'Wang Liming'
+__author__ = 'lmstack'
 
 
-import CONFIGURE.PathSetting as PathSetting
-import sys
-sys.path.append(PathSetting.backend_path)
-import DBManager
+# import CONFIGURE.PathSetting as PathSetting
+# import sys
+# sys.path.append(PathSetting.backend_path)
+from LIB.BACKEND import DBManager
+# import DBManager
 import pandas as pd
 import numpy as np
 import math

+ 60 - 0
LIB/FRONTEND/DrivingRange/main.py

@@ -0,0 +1,60 @@
+#coding=utf-8
+import os
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+from LIB.MIDDLE import SignalMonitor
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time, datetime
+import traceback
+from LIB.MIDDLE.DrivingRange import UpdtFct
+import pymysql
+
+from urllib import parse
+
+dbManager = DBManager.DBManager()
+if __name__ == "__main__":
+    #建立引擎
+    db_engine = create_engine(str(r"mysql+pymysql://%s:" + '%s' + "@%s/%s") % ('root', 'pengmin', 'localhost', 'qixiangdb'))
+    #连接到qx数据库
+    db_qx = pymysql.connect(
+            host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com',
+            user='qx_read',
+            password='Qx@123456',#Qx@123456
+            database='qx_cas',
+            charset='utf8'
+        )
+    #连接到本地数据库,输出物
+    db_local = pymysql.connect(
+            host='localhost',
+            user='root',
+            password='pengmin',
+            database='qixiangdb',
+            charset='utf8'
+        )
+
+    #调度周期:每天运行一次。
+
+    #更新所有sn,连读多日的factor,如果start_date和end_date相隔一天,代表更新start_date的factor。
+    start_date="2021-07-23"
+    end_date="2021-07-28"
+    UpdtFct.updtAllSnFct(start_date,end_date, db_engine, db_local, db_qx)
+    df_newest = UpdtFct.updtNewestFctTb(db_local, db_engine)
+    #按照日期排序,只保留最近的一天,输出factor_unique_df,方法为replace。
+    #本函数,每天需要运行一次,用于更新factor。
+    df_newest.to_sql("tb_sn_factor_newest",con=db_engine,chunksize=10000,\
+        if_exists='replace',index=False)
+
+    #调度周期:程序每5分钟运行一次
+
+    #更新剩余里程,每5min一次,几秒钟运行结束。
+    test_time=datetime.datetime.now()#当前系统时间
+    input_time=datetime.datetime.strftime(test_time,'%Y-%m-%d %H:%M:%S')
+
+    # input_time='2021-07-29 11:59:00'#手动设定一个时间
+
+    #函数每5min调度一次,input_time为当前时间,更新tb_sn_factor_soc_range表格
+    df_range = UpdtFct.updtVehElecRng(db_qx, db_local, db_engine, input_time)
+    df_range.to_sql("tb_sn_factor_soc_range",con=db_engine,chunksize=10000,\
+         if_exists='replace',index=False)    
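
This script and the other new main.py entry points import `from urllib import parse` without using it. A plausible use, shown only as a hedged sketch with placeholder values, is quoting a password that contains '@' before embedding it in a SQLAlchemy URL:

from urllib import parse
from sqlalchemy import create_engine

user, password = "demo_user", "p@ss:word"      # placeholder credentials
host, database = "localhost", "qixiangdb"      # placeholder host and database
url = "mysql+pymysql://{}:{}@{}/{}?charset=utf8".format(
    user, parse.quote_plus(password), host, database)
engine = create_engine(url)                    # built lazily; no connection is opened here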

+ 27 - 0
LIB/FRONTEND/LeakCurrent/main.py

@@ -0,0 +1,27 @@
+#coding=utf-8
+import os
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+from LIB.MIDDLE import SignalMonitor
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time, datetime
+import traceback
+from LIB.MIDDLE.LeakCurrent import LFPLeakCurrent20210812 as LFPLeakCurrent
+
+from urllib import parse
+
+dbManager = DBManager.DBManager()
+if __name__ == "__main__":
+    SNdata_6060 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6060')
+    SNnums_6060=SNdata_6060['SN号']
+    now_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    now_time=datetime.datetime.strptime(now_time,'%Y-%m-%d %H:%M:%S')
+    start_time=now_time-datetime.timedelta(days=31)
+    end_time=str(now_time)
+    start_time=str(start_time)
+
+    for sn in SNnums_6060.tolist():
+        res = LFPLeakCurrent.cal_LFPLeakCurrent(sn, end_time, start_time)
+        res.to_csv('BMS_LeakCurrent_'+sn+'.csv',encoding='GB18030')

+ 0 - 102
LIB/FRONTEND/SignalMonitor/create_table.py

@@ -1,102 +0,0 @@
-'''
-定义表的结构,并在数据库中创建对应的数据表
-'''
-__author__ = 'Wang Liming'
-
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy import Column, String, create_engine, Integer, DateTime, BigInteger, FLOAT
-
-Base = declarative_base()
-
-
-class BmsLastDataDay(Base):
-    __tablename__ = "bms_last_data_day"
-    __table_args__ = ({'comment': '电池信号监控---每天最后一条bms数据'})  # 添加索引和表注释
-
-    id = Column(Integer, primary_key=True, autoincrement=True, comment="主键")
-    sn = Column(String(64), comment="sn")
-    current = Column(FLOAT, comment="电流")
-    time_stamp = Column(Integer, comment="时间戳")
-    pack_state = Column(Integer, comment="电池状态")
-    line_state = Column(Integer, comment="信号状态")
-
-
-    def __init__(self, sn, current, time_stamp, pack_state, line_state):
-        self.sn = sn
-        self.current = current
-        self.time_stamp = time_stamp
-        self.pack_state = pack_state
-        self.line_state = line_state
-
-class GpsLastDataDay(Base):
-    __tablename__ = "gps_last_data_day"
-    __table_args__ = ({'comment': '电池信号监控---每天最后一条gps数据'})  # 添加索引和表注释
-
-    id = Column(Integer, primary_key=True, autoincrement=True, comment="主键")
-    sn = Column(String(64), comment="sn")
-    time_stamp = Column(Integer, comment="时间戳")
-    pack_state = Column(Integer, comment="电池状态")
-    line_state = Column(Integer, comment="信号状态")
-
-
-    def __init__(self, sn, time_stamp, pack_state, line_state):
-        self.sn = sn
-        self.time_stamp = time_stamp
-        self.pack_state = pack_state
-        self.line_state = line_state
-
-class GpsSignalMonitor(Base):
-    __tablename__ = "gps_signal_monitor"
-    __table_args__ = ({'comment': 'gps信号监控'})  # 添加索引和表注释
-
-    id = Column(Integer, primary_key=True, autoincrement=True, comment="主键")
-    sn = Column(String(64), comment="sn")
-    start_time = Column(DateTime, comment="开始时间")
-    end_time = Column(DateTime, comment="结束时间")
-    offline_time = Column(DateTime, comment="离线时间")
-    pack_state = Column(Integer, comment="电池状态")
-    line_state = Column(Integer, comment="信号状态")
-
-
-    def __init__(self, sn, start_time, end_time, off_line_time, pack_state, line_state):
-        self.sn = sn
-        self.start_time = start_time
-        self.end_time = end_time
-        self.off_line_time = off_line_time
-        self.pack_state = pack_state
-        self.line_state = line_state
-
-class BmsSignalMonitor(Base):
-    __tablename__ = "bms_signal_monitor"
-    __table_args__ = ({'comment': 'bms信号监控'})  # 添加索引和表注释
-
-    id = Column(Integer, primary_key=True, autoincrement=True, comment="主键")
-    sn = Column(String(64), comment="sn")
-    start_time = Column(DateTime, comment="开始时间")
-    end_time = Column(DateTime, comment="结束时间")
-    offline_time = Column(DateTime, comment="离线时间")
-    pack_state = Column(Integer, comment="电池状态")
-    line_state = Column(Integer, comment="信号状态")
-
-
-    def __init__(self, sn, start_time, end_time, off_line_time, pack_state, line_state):
-        self.sn = sn
-        self.start_time = start_time
-        self.end_time = end_time
-        self.off_line_time = off_line_time
-        self.pack_state = pack_state
-        self.line_state = line_state
-
-# 执行该文件,创建表格到对应的数据库中
-if __name__ == "__main__":
-    host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
-    port = 3306
-    user = 'qx_cas'
-    password = 'Qx@123456'
-    database = 'qx_cas'
-
-    db_engine = create_engine(
-        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
-            user, password, host, port, database
-        ))
-    Base.metadata.create_all(db_engine)

+ 45 - 76
LIB/FRONTEND/SignalMonitor/main.py

@@ -9,87 +9,56 @@ from sqlalchemy.orm import sessionmaker
 import time, datetime
 import traceback
 
+from urllib import parse
+
 dbManager = DBManager.DBManager()
 if __name__ == "__main__":
-    try:
-        # 数据库配置
-        host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
-        port = 3306
-        user = 'qx_cas'
-        password = 'Qx@123456'
-        database = 'qx_cas'
-
-        db_engine = create_engine(
-            "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
-                user, password, host, port, database
-            ))
-
-        db_engine = create_engine("mysql+pymysql://qx_cas:Qx@123456@rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com/qx_cas?charset=utf8")
-        DbSession = sessionmaker(bind=db_engine)
-        
-        # 日志配置
-        log = Log.Mylog(log_name='signal_monitor', log_level = 'info')
-        log.set_file_hl(file_name='info.log', log_level='info')
-        log.set_file_hl(file_name='error.log', log_level='error')
-        logger = log.get_logger()
-
-        logger.info("pid is + {}".format(os.getpid))
-
-        # 读取sn列表
-        df_sn = pd.read_csv('sn_list.csv')
-        df_sn = df_sn.reset_index(drop=True)
-        df_sn['StartTime'] = pd.to_datetime(df_sn['StartTime'])
-        df_sn['EndTime'] = pd.to_datetime(df_sn['EndTime'])
-        df_sn['ExitTime'] = pd.to_datetime(df_sn['ExitTime'])
-        signalMonitor = SignalMonitor.SignalMonitor()
-        
-        cal_period = 24    # 计算间隔,单位h
-        for i in range(0, len(df_sn['sn'])):    # 遍历SN号
-            sn = [df_sn.loc[i,'sn']]
-            if not (sn[0][0:2] == 'PK' or sn[0][0:2] == 'MG' or sn[0][0:2] == 'UD'):
+    # 读取sn列表
+    df_sn = pd.read_csv('sn_list.csv')
+    df_sn = df_sn.reset_index(drop=True)
+    df_sn['StartTime'] = pd.to_datetime(df_sn['StartTime'])
+    df_sn['EndTime'] = pd.to_datetime(df_sn['EndTime'])
+    df_sn['ExitTime'] = pd.to_datetime(df_sn['ExitTime'])
+    signalMonitor = SignalMonitor.SignalMonitor()
+    
+    cal_period = 24    # 计算间隔,单位h
+    for i in range(0, len(df_sn['sn'])):    # 遍历SN号
+        sn = [df_sn.loc[i,'sn']]
+        if not (sn[0][0:2] == 'PK' or sn[0][0:2] == 'MG' or sn[0][0:2] == 'UD'):
+            continue
+        st = df_sn.loc[i, 'StartTime']
+        if df_sn.loc[i, 'Service'] == 0:
+            if pd.isnull(df_sn.loc[i, 'EndTime']) and pd.isnull(df_sn.loc[i, 'ExitTime']):
                 continue
-            st = df_sn.loc[i, 'StartTime']
-            if df_sn.loc[i, 'Service'] == 0:
-                et = df_sn.loc[i, 'ExitTime']
-            elif pd.isnull(df_sn.loc[i, 'EndTime']):
-                otherStyleTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
-                et = pd.to_datetime(otherStyleTime)
-            else:
+            elif pd.isnull(df_sn.loc[i, 'ExitTime']):
                 et = df_sn.loc[i, 'EndTime']
-            df_last_state = pd.DataFrame(
-                columns=['sn', 'current', 'Timestamp', 'PackState', 'LineState'])    # 每日最后BMS数据
-            df_last_state_gps = pd.DataFrame(
-                columns=['sn', 'Timestamp', 'PackState', 'LineState'])    # 每日最后GPS数据
-            while st < et:
-                df_res = pd.DataFrame(columns=[
-                                    'sn', 'PackState', 'LineState', 'StartTime', 'EndTime', 'OfflineTime'])    # 初始化BMS信号统计数据
-                df_res_gps = pd.DataFrame(columns=[
-                                        'sn', 'PackState', 'LineState', 'StartTime', 'EndTime', 'OfflineTime'])    # 初始化GPS信号统计数据
-                df_res, df_state, df_last_state = signalMonitor.get_bms_offline_stat(
-                    sn, st, et, df_res, df_last_state, cal_period)    # 计算每日BMS信号统计数据
-                df_res_gps, df_last_state_gps = signalMonitor.get_gps_offline_stat(
-                    sn, st, et, df_state, df_res_gps, df_last_state_gps, cal_period)    # 计算每日GPS信号统计数据
-            
-                # 数据入库
-                df_tosql = df_res_gps.copy()
-                df_tosql.columns = ['sn', 'pack_state', 'line_state', 'start_time', 'end_time', 'offline_time']
-                df_tosql.loc[df_tosql.index[-1:]].to_sql("gps_signal_monitor",con=db_engine, if_exists="append",index=False)
-
-                df_tosql = df_res.copy()
-                df_tosql.columns = ['sn', 'pack_state', 'line_state', 'start_time', 'end_time', 'offline_time']
-                df_tosql.loc[df_res.index[-1:]].to_sql("bms_signal_monitor",con=db_engine, if_exists="append",index=False)
-                st = st + datetime.timedelta(hours=cal_period)
+            else:
+                et = df_sn.loc[i, 'ExitTime']
+        elif pd.isnull(df_sn.loc[i, 'EndTime']):
+            otherStyleTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
+            et = pd.to_datetime(otherStyleTime)
+        else:
+            et = df_sn.loc[i, 'EndTime']
+        df_last_state = pd.DataFrame(
+            columns=['sn', 'current', 'Timestamp', 'PackState', 'LineState'])    # 每日最后BMS数据
+        df_last_state_gps = pd.DataFrame(
+            columns=['sn', 'Timestamp', 'PackState', 'LineState', 'latitude', 'longitude'])    # 每日最后GPS数据
+        df_res = pd.DataFrame(columns=[
+                                'sn', 'PackState', 'LineState', 'StartTime', 'EndTime', 'OfflineTime'])    # 初始化BMS信号统计数据
+        df_res_gps = pd.DataFrame(columns=[
+                                    'sn', 'PackState', 'LineState', 'StartTime', 'EndTime', 'OfflineTime', 'latitude', 'longitude'])    # 初始化GPS信号统计数据
+        while st < et:
 
+            df_res, df_state, df_last_state = signalMonitor.get_bms_offline_stat(
+                sn, st, et, df_res, df_last_state, cal_period)    # 计算每日BMS信号统计数据
+            df_res_gps, df_last_state_gps = signalMonitor.get_gps_offline_stat(
+                sn, st, et, df_state, df_res_gps, df_last_state_gps, cal_period)    # 计算每日GPS信号统计数据
             # 数据入库
-            df_tosql = df_last_state.copy()
-            df_tosql.columns = ['sn', 'current', 'time_stamp', 'pack_state', 'line_state']
-            df_tosql.to_sql("bms_last_data_day",con=db_engine, if_exists="append",index=False)
+            st = st + datetime.timedelta(hours=cal_period)
+        SignalMonitor._file_write(r'D:\result_04.xls', df_res)    # BMS信号统计数据入库
+        SignalMonitor._file_write(r'D:\result_05.xls', df_res_gps)    # GPS信号统计数据入库
 
-            df_tosql = df_last_state.copy()
-            df_tosql.columns = ['sn', 'time_stamp', 'pack_state', 'line_state']
-            df_tosql.to_sql("gps_last_data_day",con=db_engine, if_exists="append",index=False)
+        SignalMonitor._file_write(r'D:\result_06.xls', df_last_state)
+        SignalMonitor._file_write(r'D:\result_07.xls', df_last_state_gps)
 
-            logger.info("{} {} Success!".format(sn, str(st)))
-    except Exception as e:
-        logger.error(traceback.format_exc)
-        logger.error(u"任务运行错误", exc_info=True)
+    # 数据入库

+ 15 - 10
LIB/FRONTEND/day_sta.py

@@ -1,4 +1,5 @@
 <<<<<<< HEAD
+<<<<<<< HEAD
 __author__ = 'lmstack'
 
 # 每日指标统计函数
@@ -144,18 +145,22 @@ for sn in sn_list[:]:
         
 =======
 __author__ = 'Wang Liming'
+=======
+__author__ = 'lmstack'
+>>>>>>> 0fdacae7667a378900d95748e2f53901ada95b8c
 
 # 每日指标统计函数
-import CONFIGURE.PathSetting as PathSetting
-import sys
-sys.path.append(PathSetting.backend_path)
-sys.path.append(PathSetting.middle_path)
-import DBManager
-import Tools
-import DataPreProcess
-import IndexStaByPeriod
-import Log
-import IndexStaByPeriod
+# import CONFIGURE.PathSetting as PathSetting
+# import sys
+# sys.path.append(PathSetting.backend_path)
+# sys.path.append(PathSetting.middle_path)
+from LIB.BACKEND import DBManager, Log, DataPreProcess
+from LIB.MIDDLE import IndexStaByPeriod, IndexStaBOneCycle
+from LIB.BACKEND import DBManager
+from LIB.BACKEND import DBManager
+from LIB.BACKEND import DBManager
+from LIB.BACKEND import DBManager
+
 import importlib
 import datetime
 import os

+ 157 - 0
LIB/FRONTEND/deltsoc/LFPDeltSoc20210804.py

@@ -0,0 +1,157 @@
+# 获取数据
+from LIB.BACKEND import DBManager
+
+import os
+import pandas as pd
+import numpy as np
+import datetime
+# import matplotlib.pyplot as plt
+#参数初始化
+Capacity = 53.6
+PackFullChrgVolt=69.99
+CellFullChrgVolt=3.6
+CellVoltNums=20
+CellTempNums=4
+FullChrgSoc=98
+PeakSoc=57
+
+#获取数据时间段
+def cal_deltsoc(sn, end_time, start_time):
+    end_time = end_time
+    strat_time = start_time
+    SNnum=str(sn)
+
+    sn = sn
+    st = strat_time
+    et = end_time
+
+
+    dbManager = DBManager.DBManager()
+    df_data = dbManager.get_data(sn=sn, start_time=st, end_time=et, data_groups=['bms'])
+    df_bms = df_data['bms']
+    # 计算电芯Soc差
+    packcrnt = df_bms['总电流[A]']
+    packvolt = df_bms['总电压[V]']
+    SOC = df_bms['SOC[%]']
+    SOH = df_bms['SOH[%]']
+    bmsstat = (df_bms['充电状态']).astype(int)
+    time = pd.to_datetime(df_bms['时间戳'], format='%Y-%m-%d %H:%M:%S')
+
+    # 筛选充电数据
+    ChgStart = []
+    ChgEnd = []
+
+    for i in range(3, len(time) - 3):
+        if i==3 and bmsstat[i]==2 and bmsstat[i+1]==2 and bmsstat[i+2]==2:
+            ChgStart.append(i)
+        elif bmsstat[i-2]!=2 and bmsstat[i-1]!=2 and bmsstat[i]==2:
+            ChgStart.append(i)
+        elif bmsstat[i-1]==2 and bmsstat[i]!=2 and bmsstat[i+1]!=2:
+            ChgEnd.append(i)
+        elif i == (len(time) - 4) and bmsstat[len(bmsstat)-1] == 2 and bmsstat[len(bmsstat)-2] == 2:
+            ChgEnd.append(len(time)-1)
+
+    # 筛选充电起始Soc<46%,电芯温度>15℃的数据
+    if ChgStart:
+        ChgStartValid = []
+        ChgEndValid = []
+
+        for i in range(min(len(ChgStart),len(ChgEnd))):
+            # 获取最小温度值
+            celltemp = []
+            for j in range(1, CellTempNums + 1):
+                s = str(j)
+                temp = df_bms['单体温度' + s]
+                celltemp.append(temp[ChgEnd[i]])
+
+            if SOC[ChgStart[i]] < 46 and SOC[ChgEnd[i]]>80 and min(celltemp) > 10:
+                if ((time[ChgEnd[i]]-time[ChgStart[i]]).total_seconds())/(ChgEnd[i]-ChgStart[i])<30:
+                    ChgStartValid.append(ChgStart[i])
+                    ChgEndValid.append(ChgEnd[i])
+
+    # 计算充电每个单体到达DVDQ峰值的Ah差
+    # 定义滑动平均滤波函数
+    def np_move_avg(a, n, mode="same"):
+        return (np.convolve(a, np.ones((n,)) / n, mode=mode))
+
+
+    # 定义函数:切片Soc>50且Soc<80,并寻找峰值返回峰值点的时间
+    def data_search(t, soc, cellvolt1, packcrnt1):
+        cellvolt2 = np_move_avg(cellvolt1, 5, mode="same")
+        Soc = 0
+        Ah = 0
+        Volt = [cellvolt2[0]]
+        DV_Volt = []
+        DQ_Ah = []
+        DVDQ = []
+        time1 = []
+        soc1 = []
+        soc2 = []
+
+        for m in range(1, len(t)):
+            Step = (t[m] - t[m - 1]).total_seconds()
+            Soc = Soc - packcrnt1[m] * Step * 100 / (3600 * Capacity)
+            Ah = Ah - packcrnt1[m] * Step / 3600
+            if (cellvolt2[m] - Volt[-1]) > 0.9 and Ah>0:
+                DQ_Ah.append(Ah)
+                Volt.append(cellvolt2[m])
+                DV_Volt.append(Volt[-1] - Volt[-2])
+                DVDQ.append((DV_Volt[-1]) / DQ_Ah[-1])
+                Ah = 0
+                time1.append(t[m])
+                soc1.append(Soc)
+                soc2.append(soc[m])
+        df_Data1 = pd.DataFrame({'Time': time1,
+                                'SOC': soc2,
+                                'DVDQ': DVDQ,
+                                'AhSOC': soc1})
+
+        df_Data1 = df_Data1[(df_Data1['SOC'] > 50) & (df_Data1['SOC'] < 75)]
+        # 寻找峰值点,且峰值点个数>3
+        if len(df_Data1['DVDQ'])>2:
+            PeakIndex = df_Data1['DVDQ'].idxmax()
+            df_Data2 = df_Data1[
+                (df_Data1['SOC'] > (df_Data1['SOC'][PeakIndex] - 0.5)) & (df_Data1['SOC'] < (df_Data1['SOC'][PeakIndex] + 0.5))]
+            if len(df_Data2) > 3:
+                return df_Data1['AhSOC'][PeakIndex]
+            else:
+                df_Data1 = df_Data1.drop([PeakIndex])
+                PeakIndex = df_Data1['DVDQ'].idxmax()
+                return df_Data1['AhSOC'][PeakIndex]
+
+
+    # 计算最大最小Soc差
+    if ChgStartValid:
+        DetaSoc2 = []
+        DetaSoc=[]
+        DetaSoc_SN=[]
+        DetaSoc_time=[]
+        for i in range(len(ChgStartValid)):
+            DetaSoc1 = []
+            for j in range(1, CellVoltNums + 1):
+                s = str(j)
+                cellvolt = df_bms['单体电压' + s]
+                cellvolt = list(cellvolt[ChgStartValid[i]:ChgEndValid[i]])
+                Time = list(time[ChgStartValid[i]:ChgEndValid[i]])
+                Packcrnt = list(packcrnt[ChgStartValid[i]:ChgEndValid[i]])
+                SOC1 = list(SOC[ChgStartValid[i]:ChgEndValid[i]])
+                a = data_search(Time, SOC1, cellvolt, Packcrnt)
+                if a:
+                    DetaSoc1.append(a)  # 计算到达峰值点的累计Soc
+            if DetaSoc1:
+                DetaSoc2.append(max(DetaSoc1) - min(DetaSoc1))
+
+        DetaSocMean = np.mean(DetaSoc2)
+        DetaSoc.append(DetaSocMean)
+        DetaSoc_SN.append(SNnum)
+        DetaSoc_time.append(time[ChgStartValid[-1]])
+
+
+        result_DetaSoc={'time':DetaSoc_time,
+                        'SN号':DetaSoc_SN,
+                        'Soc差':DetaSoc}
+        Result_DetaSoc=pd.DataFrame(result_DetaSoc)
+        return Result_DetaSoc
+    return pd.DataFrame()
+
+
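
The DV/DQ peak search above smooths each cell voltage with np_move_avg before differencing. A small stand-alone check of that smoothing on synthetic data (not repository measurements):

import numpy as np

def np_move_avg(a, n, mode="same"):
    # centred n-point moving average, as used before the DV/DQ peak search
    return np.convolve(a, np.ones(n) / n, mode=mode)

volt = np.linspace(3.2, 3.6, 50) + np.random.normal(0, 0.002, 50)   # synthetic voltage ramp
smoothed = np_move_avg(volt, 5)
print(volt[:3], smoothed[:3])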

+ 4 - 0
LIB/FRONTEND/deltsoc/detaSOC表头及数据类型.xlsx

@@ -0,0 +1,4 @@
+表头	名称	数据类型
+time	time	timestamps
+SN号	sn	str
+Soc差	deta_soc	float64

+ 27 - 0
LIB/FRONTEND/deltsoc/main.py

@@ -0,0 +1,27 @@
+#coding=utf-8
+import os
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+from LIB.MIDDLE import SignalMonitor
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time, datetime
+import traceback
+from LIB.MIDDLE.DeltSoc import LFPDeltSoc20210804 as LFPDeltSoc
+
+from urllib import parse
+
+dbManager = DBManager.DBManager()
+if __name__ == "__main__":
+    SNdata_6060 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6060')
+    SNnums_6060=SNdata_6060['SN号']
+    now_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    now_time=datetime.datetime.strptime(now_time,'%Y-%m-%d %H:%M:%S')
+    start_time=now_time-datetime.timedelta(days=31)
+    end_time=str(now_time)
+    start_time=str(start_time)
+
+    for sn in SNnums_6060.tolist():
+        res = LFPDeltSoc.cal_deltsoc(sn, end_time, start_time)
+        res.to_csv('BMS_DetaSoc_'+sn+'.csv',encoding='GB18030')

BIN
LIB/FRONTEND/deltsoc/骑享资产梳理-20210621.xlsx


+ 127 - 0
LIB/FRONTEND/odo/CalDist.py

@@ -0,0 +1,127 @@
+from math import radians, cos, sin, asin, sqrt
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+from GpsRank import *
+from ProcessDfBms import *
+from ProcessDfGps import *
+
+from LIB.BACKEND import DBManager
+
+import DBManager
+#####################################配置环境分割线#################################################
+
+def GetDistInfo(input_sn,input_starttime,input_endtime):
+
+    #####################################配置参数分割线#################################################
+    dbManager = DBManager.DBManager()
+    data_raw = dbManager.get_data(sn=input_sn, start_time=input_starttime, 
+        end_time=input_endtime)
+    #拆包预处理
+    df_bms_raw=data_raw['bms']
+    df_gps_raw=data_raw['gps']
+    df_bms=preprocess_Df_Bms(df_bms_raw)
+    df_gps=preprocess_Df_Gps(df_gps_raw)
+    
+    #####################################数据预处理分割线#################################################
+
+    # mode: 0:正常取数; 1:7255 取数
+    if input_sn[0:2] == 'UD' or input_sn[0:2] == 'MG':
+        mode = 1
+    else:
+        mode = 0
+    #获取状态表,mode默认为0,mode=1放电时电流为负,mode=0充电时电流为正
+
+    df_bms_drive_timetable=get_bms_drive_timetable(df_bms,mode)
+    df_gps_drive_cycle_accum=pd.DataFrame()
+    if len(df_bms_drive_timetable)>0:
+        for index in range(len(df_bms_drive_timetable)):
+            #筛选drivecycle数据
+            drive_start_time=df_bms_drive_timetable.loc[index,'drive_start_time']#开始时间
+            drive_end_time=df_bms_drive_timetable.loc[index,'drive_end_time']#结束时间
+
+            time_condition=(df_gps['time']>drive_start_time)&(df_gps['time']<drive_end_time)#时间判断条件
+            df_gps_drive_cycle=df_gps.loc[time_condition,:].copy()
+            df_gps_drive_cycle=df_gps_drive_cycle.reset_index(drop=True)#重置index
+            #计算drivecycle GPS累计里程,存入表格
+            condition_a=df_gps_drive_cycle['deltatime']>60*3
+            condition_b=(df_gps_drive_cycle['deltatime']>90*1)&(df_gps_drive_cycle['distance']>1000)
+            drive_cycle_dist_array=df_gps_drive_cycle.loc[~(condition_a|condition_b),'distance'].values
+            drive_cycle_dist_array=drive_cycle_dist_array[np.where((drive_cycle_dist_array>=1)&(drive_cycle_dist_array<1000))[0]]
+            gps_dist=drive_cycle_dist_array.sum()
+            df_bms_drive_timetable.loc[index,'gps_dist']=gps_dist#得到GPS路径
+            #计算头-尾的空缺时间段对应的预估SOC
+            if len(df_gps_drive_cycle)>2:
+                gps_starttime=df_gps_drive_cycle.loc[1,'time']#gps开始时间
+                gps_endtime=df_gps_drive_cycle.loc[df_gps_drive_cycle.index[-1],'time']#gps结束时间
+                #从drive_start_time到gps开始时间,使用SOC计算的里程
+                #gps结束时间到drive_end_time,使用SOC计算的里程
+                unrecorded_odo_head=cal_deltasoc(df_bms,drive_start_time,gps_starttime)
+                unrecorded_odo_tail=cal_deltasoc(df_bms,gps_endtime,drive_end_time)
+            else:
+                #计算数据丢失行unrecordeodo
+                unrecorded_odo_head=cal_deltasoc(df_bms,drive_start_time,drive_end_time)
+                unrecorded_odo_tail=0
+            #计算中间的预估SOC
+            predict_dist=cal_unrecorded_gps(df_gps_drive_cycle,df_bms)
+            #计算总的预估SOC
+            totaldist=predict_dist+unrecorded_odo_head+ unrecorded_odo_tail#得到GPS路径
+            df_bms_drive_timetable.loc[index,'predict_dist']=totaldist
+    else :
+        pass
+
+    #####################################统计行驶里程End#################################################
+    #打印输出结果#
+    index_list=list(range(len(df_bms_drive_timetable)))
+
+    dist_gps=0
+    dist_predict=0
+    day_start_time=''#当日开始时间
+    day_end_time=''#当日结束时间
+    day_start_soc=0#当日开始soc
+    day_end_soc=0#当日结束soc
+    day_min_soc=101#当日最低soc
+    drive_accum_soc=0#累计使用SOC
+
+    if len(df_bms_drive_timetable)>0:
+        #开始行
+        day_start_soc=df_bms_drive_timetable.loc[1,'drive_start_soc']#开始soc
+        day_start_time=df_bms_drive_timetable.loc[1,'drive_start_time']#开始时间
+        #结束行
+        day_end_time=df_bms_drive_timetable.loc[len(df_bms_drive_timetable)-1,'drive_end_time']#结束时间
+        day_end_soc=df_bms_drive_timetable.loc[len(df_bms_drive_timetable)-1,'drive_end_soc']#结束soc
+
+    for index in index_list:
+        '''汇总里程'''
+        dist_gps+=df_bms_drive_timetable.loc[index,'gps_dist']/1000#计算GPS里程
+        dist_predict+=df_bms_drive_timetable.loc[index,'predict_dist']#计算预估里程
+        drive_start_soc=df_bms_drive_timetable.loc[index,'drive_start_soc']#驾驶周期开始的soc
+        drive_end_soc=df_bms_drive_timetable.loc[index,'drive_end_soc']#驾驶周期结束的soc
+        day_min_soc=min(day_min_soc,drive_start_soc,drive_end_soc)
+
+        delta_soc=drive_start_soc-drive_end_soc#驾驶周期SOC变化量
+        drive_accum_soc+=abs(delta_soc)#所有drive cycle累计消耗的soc
+
+    # gps_score=get_df_gps_score(input_starttime,input_endtime,df_gps)
+    # gps_score=round(gps_score,1)
+    #计算总里程
+    dist_gps=round(dist_gps,3)
+    dist_predict=round(dist_predict,3)
+    dist_all=round(dist_gps+dist_predict,3)
+    #输出统计结果
+    # print ('为您查询到,从'+input_starttime+'到'+input_endtime+'时间段内:')
+    # print('SOC变化量:'+str(df_bms['bmspacksoc'].max()-df_bms['bmspacksoc'].min())+' %')
+    # print('行驶总里程:'+str(dist_all)+' km')
+
+    return {'SN':input_sn,'range':dist_all,'accum_soc':drive_accum_soc,'day_start_soc':day_start_soc,
+    'day_end_soc':day_end_soc,'day_start_time':day_start_time,'day_end_time':day_end_time,
+    'day_min_soc':day_min_soc}
+    # print('其中GPS信号在线时里程:'+str(dist_gps)+' km')
+    # print('其中GPS信号掉线时预估里程:'+str(dist_predict)+' km')
+    # print('GPS信号质量评分为:'+str(gps_score),'分\n')
+
+    #####################################打印结果End#################################################
+
+

+ 68 - 0
LIB/FRONTEND/odo/CalDist_Batch.py

@@ -0,0 +1,68 @@
+from math import radians, cos, sin, asin, sqrt
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+from GpsRank import *
+from ProcessDfBms import *
+from ProcessDfGps import *
+from CalDist import *
+from LIB.BACKEND import DBManager
+import pdb
+
+asset_table_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\asset_table.xlsx'
+drive_info_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\drive_info.xlsx'
+asset_sheet_num=1
+usecols_list=[4,5]
+
+asset_table=pd.read_excel(asset_table_path,sheet_name=asset_sheet_num,skiprows=1,usecols=usecols_list)
+SN_list=asset_table['SN号'].values.tolist()
+print('从6060sheet读取到:'+str(len(SN_list))+'行')
+asset_table=asset_table.rename(columns={'SN号':'SN','状态':'state'})
+
+asset_table.set_index(["SN"],inplace=True)
+col_name=asset_table.columns.tolist()
+col_name.extend(['range','accum_soc','day_start_soc','day_end_soc','day_start_time','day_end_time'])
+asset_table=asset_table.reindex(columns=col_name)
+
+start_hour='00:00:00'#每日查询最早时间
+end_hour='23:59:00'#每日查询最晚时间
+
+
+date_index=pd.date_range('2021-07-31','2021-07-31')
+for date in date_index:
+    '''遍历日期'''
+
+    str_date=str(date)[:10]
+    input_starttime=str_date+' '+start_hour
+    input_endtime=str_date+' '+end_hour
+    test_day=str_date[5:10]#月-日,用于建立sheet
+    drive_info_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\6060\\drive_info'+test_day+'_50_end_'+'.xlsx'
+
+    print(input_starttime)
+
+    drive_info_aday=pd.DataFrame()
+    SN_list_short=SN_list#先选择了0:50,50:end
+
+    for SN in SN_list_short:
+        '''遍历SN号'''
+        SN=SN.strip('\t')
+        SN=SN.strip('\n')
+
+        try:
+            range=GetDistInfo(SN,input_starttime,input_endtime)
+            range_df=pd.DataFrame([range])
+            drive_info_aday=pd.concat([drive_info_aday,range_df],axis=0)
+
+        except:
+            print(SN+' '+test_day+'fail')
+        else:
+            pass
+            #print(SN+' '+test_day+'success')
+
+    drive_info_aday.to_excel(drive_info_path,sheet_name=test_day)#sheet名称为testday
+    
+    
+
+

+ 77 - 0
LIB/FRONTEND/odo/GpsRank.py

@@ -0,0 +1,77 @@
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+def cal_gps_score(df):
+    '''在获取信号,优、良、合格、掉线的比例之后,计算gps的总评分'''
+    score=0
+    for index in range(len(df)):
+        time_percent=df.loc[index,'累计时间占比']
+        if df.loc[index,'GPS质量']=='优':
+            score+=time_percent*0
+        elif df.loc[index,'GPS质量']=='良':
+            score+=time_percent*0.3
+        elif df.loc[index,'GPS质量']=='合格':
+            score+=time_percent*0.5
+        elif df.loc[index,'GPS质量']=='掉线':
+            score+=time_percent*1
+    return (1-score)*100
+
+def gps_rank(df_gps_signal_table,df_gps,signal_rank,dist_factor):
+    '''gps信号质量分析函数,需要输入表格,df_gps,信号等级,权重'''
+    gps_signal_condition=(df_gps['gps_signal']==signal_rank)
+    dist=df_gps.loc[gps_signal_condition,'distance'].values.sum()
+    deltatime=df_gps.loc[gps_signal_condition,'deltatime'].values.sum()
+    df_gps_signal_table_condition=(df_gps_signal_table['gps_signal']==signal_rank)
+    df_gps_signal_table.loc[df_gps_signal_table_condition,'accum_distance']=dist/1000
+    df_gps_signal_table.loc[df_gps_signal_table_condition,'accum_deltatime']=deltatime
+    df_gps_signal_table.loc[df_gps_signal_table_condition,'accum_distance_factor']=dist/1000*dist_factor
+    return df_gps_signal_table
+
+def get_df_gps_score(starttime,endtime,df_gps):
+    '''对df_gps中的gps质量进行评分,返回一个数值'''
+    test_start_time=starttime#'2021-05-29 17:16:39'
+    test_end_time=endtime#'2021-05-29 20:08:08'
+
+    test_time_condition=(df_gps['time']>test_start_time)&(df_gps['time']<test_end_time)
+    df_gps_test=df_gps.loc[test_time_condition,:].copy()
+    df_gps_test=df_gps_test.reset_index(drop=True)#重置index
+    #按照deltatime打标签
+    gps_deltatime_bins=[0,30,60,120,10000]#优-良-合格-掉线
+    name=['优','良','合格','掉线']
+    df_gps_test['gps_signal']=pd.cut(df_gps_test['deltatime'], gps_deltatime_bins,labels=name)
+    df_gps_test['gps_signal'].value_counts()
+    #声明一个gps信号按类别统计table
+    df_gps_signal_table=pd.DataFrame()
+    df_gps_signal_table['gps_signal']=df_gps_test['gps_signal'].value_counts().index.tolist()
+    df_gps_signal_table['num']=df_gps_test['gps_signal'].value_counts().values.tolist()
+
+    #分类进行统计
+    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'优',1.00)
+    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'良',1.05)
+    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'合格',1.2)
+    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'掉线',1)
+
+    #次数占比,时间占比
+    all_num=df_gps_signal_table['num'].sum()
+    df_gps_signal_table['num_percent']=df_gps_signal_table['num']/all_num
+    all_accum_deltatime=df_gps_signal_table['accum_deltatime'].sum()
+    df_gps_signal_table['accum_deltatime_percent']=df_gps_signal_table['accum_deltatime']/all_accum_deltatime
+
+    #选择参数
+    df_gps_signal_table=df_gps_signal_table[['gps_signal','num','num_percent','accum_distance',
+                                            'accum_distance_factor','accum_deltatime','accum_deltatime_percent']]
+    df_gps_signal_table=df_gps_signal_table.rename(columns={'gps_signal':'GPS质量','num':'数量','num_percent':'数量占比',
+                                                        'accum_distance':'累计里程','accum_distance_factor':'累计里程修正值',
+                                                        'accum_deltatime':'累计时间','accum_deltatime_percent':'累计时间占比'})
+
+    df_gps_signal_table.loc[:,['GPS质量','累计时间','累计时间占比']]
+    gps_score=cal_gps_score(df_gps_signal_table)#调用函数计算gps评分
+    
+    #输出结果,评分
+    #print('From '+test_start_time+'  to '+test_end_time)
+    #print('GPS信号质量评分:'+str(gps_score))
+
+    return gps_score
+
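
cal_gps_score above weights the time share of each signal grade (优 0, 良 0.3, 合格 0.5, 掉线 1.0) and returns (1 - penalty) * 100. A worked example with made-up time shares, not repository data:

shares  = {"优": 0.6, "良": 0.2, "合格": 0.1, "掉线": 0.1}   # made-up time shares
weights = {"优": 0.0, "良": 0.3, "合格": 0.5, "掉线": 1.0}   # weights used in cal_gps_score
penalty = sum(shares[k] * weights[k] for k in shares)
print((1 - penalty) * 100)   # 79.0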

+ 159 - 0
LIB/FRONTEND/odo/ProcessDfBms.py

@@ -0,0 +1,159 @@
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+def get_bms_drive_timetable(df_bms,battery_mode):
+    '''对df_bms进行处理,得到行车的时间表。'''
+
+    #####################step1 先使用电流做充电状态的判断#############################################
+    if battery_mode==0:#mode=0,电流为正代表放电
+        condition_chrg=df_bms['bmspackcrnt']<0##根据电流,挑选充电状态
+        df_bms.loc[condition_chrg,'bscsta']='chrg'
+        condition_drive=df_bms['bmspackcrnt']>0.01##根据电流,挑选行驶状态
+        df_bms.loc[condition_drive,'bscsta']='drive'
+        df_bms.loc[~(condition_drive|condition_chrg),'bscsta']='idle'#静置状态
+    else :#mode=1,电流为负代表放电
+        condition_chrg=df_bms['bmspackcrnt']>0##根据电流,挑选充电状态
+        df_bms.loc[condition_chrg,'bscsta']='chrg'
+        condition_drive=df_bms['bmspackcrnt']<-0.01##根据电流,挑选行驶状态
+        df_bms.loc[condition_drive,'bscsta']='drive'
+        df_bms.loc[~(condition_drive|condition_chrg),'bscsta']='idle'#静置状态
+
+    #####################step2 对drive进行debounce,进入时立即进入,退出时debounce,5分钟。##########
+    index=0
+    debounce_row=10#debounce判断持续10行
+    debounce_time=300#debounce判断持续300秒
+    #对上一步初步状态进行二次处理
+    while index<(len(df_bms)-debounce_row):
+        mode_0=df_bms.loc[index,'bscsta']
+        mode_1=df_bms.loc[index+1,'bscsta']
+        #如果发现了边界行,则进行debounce判断
+        if (mode_0=='drive')&(mode_1!='drive'):#如果从drive变为idle
+            accum_subtime=0#累计时间初始化
+
+            for sub_index in range(debounce_row):#往下处理10行
+                sub_time=df_bms.loc[index+sub_index,'deltatime']
+                accum_subtime+=sub_time
+                #如果累计时间不到300秒,则设置为drive
+                if accum_subtime<debounce_time:
+                    df_bms.loc[index+sub_index,'bscsta']='drive'
+            index=index+debounce_row#处理10行以后的数据
+        #如果从idle变为drivemode,则将idle变为drive,包容前一行
+        elif (mode_0!='drive')&(mode_1=='drive'): 
+            df_bms.loc[index,'bscsta']='drive'
+            index=index+1
+        else:
+            index=index+1
+
+
+    #######################step3 对drivemode的时间进行分段###########################################
+    not_drive_flg=0#初始化
+    #输出drivemode的时间段,包含开始时间、结束时间
+    df_bms_drive_timetable_index=0
+    df_bms_drive_timetable=pd.DataFrame([],columns={'drive_start_time','drive_end_time',
+                                                    'gps_dist','predict_dist','drive_start_soc','drive_end_soc'})
+    for index in range(len(df_bms)):
+        temp_bscsta=df_bms.loc[index,'bscsta']
+        
+        if (temp_bscsta=='drive')&(not_drive_flg==0):
+            drive_start_time=df_bms.loc[index,'time']
+            not_drive_flg=1
+            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_start_time']=drive_start_time
+            #startsoc
+            drive_start_soc=df_bms.loc[index,'bmspacksoc']
+            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_start_soc']=drive_start_soc
+
+        elif (temp_bscsta!='drive')&(not_drive_flg==1):
+            drive_end_time=df_bms.loc[index,'time']
+            not_drive_flg=0
+            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_end_time']=drive_end_time
+            #endsoc
+            drive_end_soc=df_bms.loc[index,'bmspacksoc']
+            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_end_soc']=drive_end_soc
+            df_bms_drive_timetable_index+=1#index++
+
+    #删除时间信息不齐全的行
+    df_bms_drive_timetable=df_bms_drive_timetable.dropna(subset=['drive_end_time','drive_start_time'])
+    
+    return df_bms_drive_timetable
+
+
+def read_df_bms(path):
+    '''从路径中读取df_bms,进行预处理'''
+    df_bms=pd.read_csv(path, encoding='gbk')#编码方式gbk
+    #筛选表头,重命名
+    bms_columns=['时间戳','总电流[A]','总电压[V]','SOC[%]']
+    df_bms=df_bms.loc[:,bms_columns].copy()
+    df_bms.rename(columns = {"时间戳": "time", "总电流[A]": "bmspackcrnt", 
+                             "总电压[V]": "bmspackvol", "SOC[%]": "bmspacksoc"},inplace=True)#表头替换
+    #时间格式调整
+    df_bms['time']=df_bms['time'].apply(lambda x:datetime.strptime(x,'%Y-%m-%d %H:%M:%S'))#时间格式调整
+    #进行预处理
+    df_bms=df_add_deltatime(df_bms)#增加deltatime列 
+    return df_bms
+
+def preprocess_Df_Bms(df_bms):
+    '''对获得的df_bms,进行预处理'''
+    #筛选表头,重命名
+    bms_columns=['时间戳','总电流[A]','总电压[V]','SOC[%]']
+    df_bms=df_bms.loc[:,bms_columns].copy()
+    df_bms.rename(columns = {"时间戳": "time", "总电流[A]": "bmspackcrnt", 
+                             "总电压[V]": "bmspackvol", "SOC[%]": "bmspacksoc"},inplace=True)#表头替换
+    #删除空行
+    df_bms=df_bms.dropna(subset=['time'])
+    #删除时间重复的行,保留第一次出现的行
+    df_bms=df_bms.drop_duplicates(subset=['time'],keep='first')
+    #时间格式调整
+    df_bms['time']=df_bms['time'].apply(lambda x:datetime.strptime(x,'%Y-%m-%d %H:%M:%S'))#时间格式调整
+    #进行预处理
+    df_bms=df_add_deltatime(df_bms)#增加deltatime列 
+    return df_bms
+
+
+def df_add_deltatime(df_in):
+    '''Add a columns:deltatime,input df must have time column.'''
+    for i in range(len(df_in)):
+        #首行默认为0
+        if i==0:
+            df_in.loc[i,'deltatime']=0
+        #从第二行开始,计算i行到i-1行,GPS距离之差
+        else:
+            time1=df_in.loc[i-1,'time']
+            time2=df_in.loc[i,'time']
+            deltatime=time_interval(time1,time2)#计算时间差,返回单位为秒。
+            df_in.loc[i,'deltatime']=deltatime
+    return df_in
+
+
+def time_interval(time1,time2):
+    """
+    Calculate the time interval between two times,
+    return the seconds
+    """
+    deltatime=time2-time1
+    return deltatime.seconds
+
+
+def cal_deltasoc(df_bms,start_time,end_time):
+    '''输入开始时间和结束时间,返回deltasoc,此处将deltasoc*1既等效为unrecorded_odo.'''
+    time_condition=(df_bms['time']>start_time)&(df_bms['time']<end_time)
+    df_bms_sub=df_bms.loc[time_condition,:].copy()
+    if len(df_bms_sub)>=2:
+        
+        df_bms_head=df_bms_sub.head(1).copy()#首行
+        df_bms_startsoc=df_bms_head['bmspacksoc'].values[0]
+        df_bms_tail=df_bms_sub.tail(1).copy()#尾行
+        df_bms_endsoc=df_bms_tail['bmspacksoc'].values[0]
+        delta_soc=df_bms_startsoc-df_bms_endsoc
+        
+        if delta_soc>0:
+            #如果df_bms出现时间不连续,则先计算deltasoc,deltasoc每变化1,续驶里程增加1,
+            unrecorded_odo=delta_soc*1
+            #print('From '+str(start_time)+' to  '+str(end_time)+' soc decrease:  '+str(delta_soc))
+        else:
+            unrecorded_odo=0#如果deltasoc不大于0,说明在充电,或者静置不动    
+    #如果行数少于2,无法计算
+    else:
+        unrecorded_odo=0
+    return unrecorded_odo
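
df_add_deltatime above fills the deltatime column row by row with time_interval. For gaps shorter than one day (where timedelta.seconds and total_seconds() agree) the same column can be built with a vectorised pandas expression; a hedged sketch on toy data, not a replacement for the committed code:

import pandas as pd
from datetime import datetime

df = pd.DataFrame({"time": [datetime(2021, 7, 31, 8, 0, 0),
                            datetime(2021, 7, 31, 8, 0, 20),
                            datetime(2021, 7, 31, 8, 5, 20)]})
df["deltatime"] = df["time"].diff().dt.total_seconds().fillna(0)
print(df["deltatime"].tolist())   # [0.0, 20.0, 300.0]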

+ 139 - 0
LIB/FRONTEND/odo/ProcessDfGps.py

@@ -0,0 +1,139 @@
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+from ProcessDfBms import *
+from math import radians, cos, sin, asin, sqrt
+
+def cal_unrecorded_gps(df_in,df_bms):
+    '''筛选出现gps时间断点的数据,用df_bms数据补齐,df_in为df_gps表格。'''
+    #未记录到的odo总和
+    accum_unrecorded_odo=0
+
+    #设置丢失的判断条件,获得信息丢失行的index
+    condition1=df_in['deltatime']>60*3#时间间隔大于3分钟。说明数据掉线了。
+    condition2=(df_in['deltatime']>90*1)&(df_in['distance']>1000)#时间间隔大于*分钟,且Distance间隔大于*,代表掉线了。
+    signal_start_list=df_in.loc[condition1|condition2,:].index.to_list()#信息丢失行
+    #如果第0行属于信息丢失行,则删除,因为需要index-1行
+    try:
+        signal_start_list.remove(0)
+    except:
+        pass
+    else:
+        pass
+    #筛选出所有GPS信号丢失,对应的开始时间-结束时间对。
+    if len(signal_start_list)>0:
+        signal_end_list=[num-1 for num in signal_start_list]#信息丢失行的前一行,此处可能如果是首行,可能会有bug。
+        pick_gps_list=[0]+signal_start_list+signal_end_list+[len(df_in)-1]#首行+尾行+信号开始行+信号结束行
+        pick_gps_list=sorted(pick_gps_list)#重新排序
+
+    #有出现信号断点的行,则进行以下计算。
+    if len(signal_start_list)>0:
+        #针对每个时间对,计算unrecorded odo
+        for start_time_index,end_time_index in zip(signal_start_list,signal_end_list):
+            last_end_time=df_in.loc[end_time_index,'time']
+            this_start_time=df_in.loc[start_time_index,'time']
+            #print('gps signal loss from: '+str(last_end_time)+'-to-'+str(this_start_time))
+            #使用cal_delatasoc计算预估里程
+            unrecorded_odo=cal_deltasoc(df_bms,last_end_time,this_start_time)
+            accum_unrecorded_odo+=unrecorded_odo
+        #print('accum_unrecorded_odo:'+str(accum_unrecorded_odo))
+    else:
+        pass
+    
+    return accum_unrecorded_odo
+
+
+def df_add_avgspeed(df_in):
+    '''Add a columns:avgspeed ,input df must have deltatime,distance column.'''
+    for i in range(len(df_in)):
+        #首行默认为0
+        if i==0:
+            df_in.loc[i,'avgspeed']=0
+        #从第二行开始,计算平均速度
+        else:
+            deltatime=df_in.loc[i,'deltatime']
+            distance=df_in.loc[i,'distance']
+            avgspeed=(distance/1000)/(deltatime/3600)
+            df_in.loc[i,'avgspeed']=avgspeed
+    return df_in
+
+
+def read_df_gps(path):
+    df_gps=pd.read_csv(path, encoding='gbk')#编码方式gbk
+    #重置表头
+    df_gps.rename(columns = {"时间戳": "time", "纬度":"lat", "经度":"lng", 
+                             "卫星数":"sat_num", "海拔m":"height","速度[km/h]":"speed"},  inplace=True)
+    #时间格式调整
+    df_gps['time']=pd.to_datetime(df_gps['time'])
+    #对gps进行清洗
+    df_gps=df_add_distance(df_gps)#增加distance列
+    condition=df_gps['distance']<20000#删除GPS漂移过远的点,可能为GPS错误值
+    df_gps=df_gps.loc[condition,:].copy()#删除condition中,avgspd过大的部分,很可能伴随着GPS的漂移。
+    df_gps=df_gps.reset_index(drop=True)#重置index
+    #进行预处理
+    df_gps=df_add_distance(df_gps)#增加distance列,再算一次distance
+    df_gps=df_add_deltatime(df_gps)#增加deltatime列
+    df_gps=df_add_avgspeed(df_gps)#增加avgspeed列
+
+    #df_gps.to_excel('df_gps.xlsx',sheet_name='Sheet1')
+    return df_gps
+
+def preprocess_Df_Gps(df_gps):
+    '''对Df_Gps进行预处理'''
+    #重置表头
+    df_gps.rename(columns = {"时间戳": "time", "纬度":"lat", "经度":"lng", 
+                             "卫星数":"sat_num", "海拔m":"height","速度[km/h]":"speed"},  inplace=True)
+    #删除含有空数据的行
+    df_gps=df_gps.dropna(subset=['time','lat','lng'])
+    #删除时间重复的行,保留第一次出现的行
+    df_gps=df_gps.drop_duplicates(subset=['time'],keep='first')
+    #时间格式调整
+    df_gps['time']=pd.to_datetime(df_gps['time'])
+    
+    #对gps进行清洗
+    df_gps=df_add_distance(df_gps)#增加distance列
+    condition=df_gps['distance']<20000#删除GPS漂移过远的点,可能为GPS错误值
+    df_gps=df_gps.loc[condition,:].copy()#删除condition中,avgspd过大的部分,很可能伴随着GPS的漂移。
+    df_gps=df_gps.reset_index(drop=True)#重置index
+    #进行预处理
+    df_gps=df_add_distance(df_gps)#增加distance列,再算一次distance
+    df_gps=df_add_deltatime(df_gps)#增加deltatime列
+    df_gps=df_gps.loc[df_gps['deltatime']>0.01,:].copy()#删除deltatime=0的列,两个时间戳相同,无法求速度。
+    df_gps=df_add_avgspeed(df_gps)#增加avgspeed列
+
+    #df_gps.to_excel('df_gps.xlsx',sheet_name='Sheet1')
+    return df_gps
+
+
+def df_add_distance(df_in):
+    '''Add a columns:distance,input df must have lng,lat columns.'''
+    for i in range(len(df_in)):
+        #首行默认为0
+        if i==0:
+            df_in.loc[i,'distance']=0
+        #从第二行开始,计算i行到i-1行,GPS距离之差
+        else:
+            lon1=df_in.loc[i-1,'lng']
+            lat1=df_in.loc[i-1,'lat']
+            lon2=df_in.loc[i,'lng']
+            lat2=df_in.loc[i,'lat']
+            distance=haversine(lon1,lat1,lon2,lat2)#haversine公式计算距离差
+            df_in.loc[i,'distance']=distance    
+    return df_in
+
+
+def haversine(lon1, lat1, lon2, lat2):
+    """
+    Calculate the great circle distance between two points 
+    on the earth (specified in decimal degrees)
+    """
+    # 将十进制度数转化为弧度
+    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
+    # haversine公式
+    dlon = lon2 - lon1 
+    dlat = lat2 - lat1 
+    a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
+    c = 2 * asin(sqrt(a)) 
+    r = 6371 # 地球平均半径,单位为公里
+    return c * r * 1000
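
A quick sanity check of the haversine helper defined above, on two made-up points roughly 0.01° apart near Shanghai (illustrative only):

from math import radians, cos, sin, asin, sqrt

def haversine(lon1, lat1, lon2, lat2):
    # great-circle distance in metres, same formula as above
    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
    dlon, dlat = lon2 - lon1, lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    return 2 * asin(sqrt(a)) * 6371 * 1000

print(round(haversine(121.47, 31.23, 121.48, 31.24)))   # about 1463 m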

+ 293 - 0
LIB/FRONTEND/odo/UpdtFct.py

@@ -0,0 +1,293 @@
+import pandas as pd
+import pymysql
+from sqlalchemy import create_engine
+import datetime
+
+#建立引擎
+engine = create_engine(str(r"mysql+mysqldb://%s:" + '%s' + "@%s/%s") % ('root', 'pengmin', 'localhost', 'qixiangdb'))
+
+conn_qx = pymysql.connect(
+        host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com',
+        user='qx_cas',
+        password='Qx@123456',#Qx@123456
+        database='qx_cas',
+        charset='utf8'
+    )
+
+conn_local = pymysql.connect(
+        host='localhost',
+        user='root',
+        password='pengmin',
+        database='qixiangdb',
+        charset='utf8'
+    )
+
+def getNextSoc(start_soc):
+    '''Given the current SOC, return the next (lower) target SOC breakpoint'''
+    if start_soc>80:
+        next_soc=80
+    elif start_soc>60:
+        next_soc=60
+    elif start_soc>40:
+        next_soc=40
+    elif start_soc>20:
+        next_soc=20
+    else:
+        next_soc=1
+    return next_soc
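For example, getNextSoc maps the current SOC to the lower edge of its bucket:

print(getNextSoc(87), getNextSoc(63), getNextSoc(15))   # -> 80 60 1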
+
+def updtSnFct(sn_factor_df,end_soc,delta_range,range_soc):
+    '''Given the SOC bucket just finished, the distance driven and the km-per-SOC observed in it, return the updated factor df.
+    sn_factor_df is a dataframe; delta_range is in km; range_soc is in km per 1% SOC'''
+    if end_soc==80:
+        updtFctByCol(sn_factor_df,'a0',delta_range,range_soc)
+    elif end_soc==60:
+        updtFctByCol(sn_factor_df,'a1',delta_range,range_soc)
+    elif end_soc==40:
+        updtFctByCol(sn_factor_df,'a2',delta_range,range_soc)
+    elif end_soc==20:
+        updtFctByCol(sn_factor_df,'a3',delta_range,range_soc)
+    elif end_soc<20:
+        updtFctByCol(sn_factor_df,'a4',delta_range,range_soc)
+    return sn_factor_df
+
+def updtFctByCol(sn_factor_df,column_name,delta_range,range_soc):
+    '''Update the factor in the given column; the new coefficient is written to row 1 of sn_factor_df.
+    delta_range is in km, range_soc in km per 1% SOC; the weight of the new observation is scaled against a 200 km window'''
+    range_soc_old=sn_factor_df.loc[0,column_name]#read the old factor from row 0
+    debounce_range=200#smoothing window in km
+    new_factor=range_soc*((delta_range)/debounce_range)+range_soc_old*(1-(delta_range)/debounce_range)
+    #store the new factor in row 1
+    sn_factor_df.loc[1,column_name]=new_factor
+    return sn_factor_df
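To make the smoothing concrete, a small sketch of the update updtFctByCol performs (all numbers invented): with debounce_range = 200 km, a 50 km segment only moves the stored km-per-SOC factor a quarter of the way towards the newly observed value.

range_soc_old  = 2.0    # stored km per 1 % SOC (hypothetical)
range_soc_new  = 1.6    # km per 1 % SOC observed on this segment (hypothetical)
delta_range    = 50     # km driven in the segment
debounce_range = 200    # same smoothing window as above
w = delta_range / debounce_range
print(range_soc_new * w + range_soc_old * (1 - w))   # 1.9 -> the factor drifts slowly towards the new observation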
+
+def updtTodayFct(factor_input,sn_day_df):
+    '''Update today's factors for one battery'''
+    sn_factor_df_last=factor_input
+    start_soc=sn_day_df.loc[0,'soc']
+    next_soc=getNextSoc(start_soc)
+    start_range=sn_day_df.loc[0,'vehodo']
+    sn=sn_day_df.loc[0,'name']
+
+    for index in range(len(sn_day_df)-1):
+    #look for the rows where the SOC crosses a bucket boundary
+        index_soc=sn_day_df.loc[index,'soc']#SOC of the current row
+        next_index_soc=sn_day_df.loc[index+1,'soc']#SOC of the next row
+
+        if (index_soc>=next_soc)&(next_index_soc<next_soc):#current row above the boundary, next row below it
+            delta_soc_tonext=start_soc-next_soc#SOC consumed between the two points, in %
+            delta_range_tonext=sn_day_df.loc[index,'vehodo']-start_range#distance driven between the two points, in m
+            delta_range_tonext_km=delta_range_tonext/1000#the same distance in km
+            range_soc_tonext=(delta_range_tonext/1000)/delta_soc_tonext#km that can be driven per 1% SOC
+            print(sn+'start_soc: '+str(start_soc),'next_soc: '+str(next_soc),'delta_vehodo: '+str(round(delta_range_tonext_km,3))
+            +'km'+' range_soc:'+str(round(range_soc_tonext,3)))
+
+            if (delta_range_tonext_km)>1:
+                sn_factor_df_last=updtSnFct(sn_factor_df_last,next_soc,delta_range_tonext_km,range_soc_tonext)
+            
+            start_soc=next_index_soc#move the start SOC forward
+            next_soc=getNextSoc(start_soc)#recompute the target SOC
+            start_range=sn_day_df.loc[index+1,'vehodo']#move the start odometer forward    
+
+    return sn_factor_df_last
+
+def snDayDfPreProcess(sn_day_df):
+    '''Pre-processing: decide whether each row is in drive mode and accumulate the distance driven while driving.
+    Adds the delta_soc, drive_flg and vehodo columns'''
+    sn_day_df=sn_day_df.reset_index(drop=True)#reset the index
+    #add a column with the SOC change per row
+    for index in range(len(sn_day_df)):
+        if index==0:
+            sn_day_df.loc[index,'delta_soc']=0
+        else:
+            sn_day_df.loc[index,'delta_soc']=sn_day_df.loc[index,'soc']-sn_day_df.loc[index-1,'soc']
+    #add a column flagging whether the battery is in drive mode
+    drive_flg=False
+    accum_distance=0
+    for index in range(len(sn_day_df)):
+        if index==0:
+            sn_day_df.loc[index,'drive_flg']=drive_flg#use the same column name as below so the drive filter also sees row 0
+            sn_day_df.loc[index,'vehodo']=0
+        else:
+            if (sn_day_df.loc[index,'delta_soc']<-0.1)|\
+                ((sn_day_df.loc[index,'delta_soc']<=0)&(sn_day_df.loc[index,'distance']>500)):#SOC is falling, so the battery is driving
+                drive_flg=True#set the flag
+            elif sn_day_df.loc[index,'delta_soc']>0.1:#SOC is rising, so the battery is not driving
+                drive_flg=False#clear the flag
+                accum_distance=0#reset the accumulated distance
+            sn_day_df.loc[index,'drive_flg']=drive_flg
+            accum_distance+=sn_day_df.loc[index,'distance']#accumulate the driven distance
+            sn_day_df.loc[index,'vehodo']=accum_distance
+    #keep only the rows flagged as driving
+    sn_day_drive_df=sn_day_df.loc[sn_day_df['drive_flg']==True,:]
+    sn_day_drive_df=sn_day_drive_df.reset_index(drop=True)#reset the index
+    
+    return sn_day_drive_df 
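As an illustration of the columns this pre-processing adds, a toy frame (invented values, not real drive_info rows):

import pandas as pd

toy = pd.DataFrame({'name': ['SN_demo']*4,
                    'soc': [80, 79, 78, 85],          # SOC falls while driving, jumps up while charging
                    'distance': [0, 1200, 900, 0]})   # metres travelled per sample
print(snDayDfPreProcess(toy)[['soc', 'drive_flg', 'vehodo']])
# only the two driving rows survive; vehodo is the accumulated distance in metres (1200, then 2100)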
+
+def updtAllSnFct(start_date,end_date):
+    '''Update the factors of every SN from start_date to end_date'''
+    start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')#start date
+    end_date_datetime=datetime.datetime.strptime(end_date,'%Y-%m-%d')#end date
+    delta_day=(end_date_datetime-start_date_datetime).days#number of days in between
+    i=1
+    while i<=delta_day:
+        end_date=(start_date_datetime+datetime.timedelta(days=i)).strftime("%Y-%m-%d")
+        updtAllSnTodayFct(start_date,end_date)#update one day at a time
+        print('update all sn factor from '+start_date+" to "+end_date)
+        start_date=end_date
+        i+=1#increment
+
+def updtAllSnTodayFct(start_date,end_date):
+    '''Update today's factors for every SN; start_date and end_date are one day apart. Still room for optimisation here'''
+    start_date_str="'"+start_date+"'"
+    end_date_str="'"+end_date+"'"
+    sql_cmd="select * from drive_info where time between "+start_date_str+" and "+end_date_str+" and distance!=0;"
+    range_soc_df = pd.read_sql(sql_cmd, conn_qx)#query the qx database with read_sql
+
+    #after pulling the day's data, keep only the SNs that reported on that day
+    today_sn_list=range_soc_df['name'].unique().tolist()#a slice such as [:100] can be used to update a limited batch at a time
+    #empty dataframe that will collect every updated factor row
+    today_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])
+
+    for sn in today_sn_list:
+        #look up whether the factor table already has this SN; if not, initialise a new row
+        sn_str="'"+sn+"'"
+        sql_cmd2="select sn,date,a0,a1,a2,a3,a4 from tb_sn_factor where date<"+start_date_str+" and sn="+sn_str
+        #the number of rows returned here could be capped, e.g. to the 5 most recent
+        factor_df=pd.read_sql(sql_cmd2, conn_local)#query the local database with read_sql
+
+        #deduplicate by sn and date so reruns do not produce duplicate factor rows; keep the first occurrence
+        factor_df=factor_df.drop_duplicates(subset=['sn','date'],keep='first')
+
+        if len(factor_df)==0:
+            #no factor history found, so initialise a fresh one
+            start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')
+            yesterday=(start_date_datetime+datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
+            #give the SN a new factor row with every coefficient set to 1
+            factor_df=pd.DataFrame({'sn':sn,'date':yesterday,'a0':[1],'a1':[1],'a2':[1],'a3':[1],'a4':[1]})
+        sn_factor_df=factor_df.loc[factor_df['sn']==sn,:]#factors belonging to this SN
+        sn_factor_df=sn_factor_df.sort_values(by='date',ascending=True)#sort by date
+
+        sn_factor_df_last=sn_factor_df.tail(1).copy()#last row, i.e. the most recent date
+        sn_factor_df_last=sn_factor_df_last.append(sn_factor_df_last)#duplicate it; the copy will hold the new factors
+        sn_factor_df_last=sn_factor_df_last.reset_index(drop=True)#reset the index
+        sn_factor_df_last.loc[1,'date']=start_date#set the date of the second row to the current day
+        #select the rows belonging to this battery
+        condition_sn=(range_soc_df['name']==sn)
+        sn_day_df=range_soc_df.loc[condition_sn,:].copy()
+        sn_day_df=sn_day_df.reset_index(drop=True)
+        #update today's factors with updtTodayFct
+        if len(sn_day_df)>=2:
+            #pre-process the day's rows first
+            sn_day_df=snDayDfPreProcess(sn_day_df)#pre-processing
+            if len(sn_day_df)>=2:
+                sn_factor_df_new=updtTodayFct(sn_factor_df_last,sn_day_df)
+                today_sn_fct_df=today_sn_fct_df.append(sn_factor_df_new.loc[1,:])#collect row 1; everything is written to the database at the end
+    
+    #write today_sn_fct_df, i.e. all factors updated today, to the database in one go
+    if len(today_sn_fct_df)>=1:
+        today_sn_fct_df.to_sql('tb_sn_factor',con=engine,chunksize=10000,if_exists='append',index=False)
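The two-row frame built from sn_factor_df_last is easy to misread, so here is the pattern in isolation (values invented, using the same now-deprecated DataFrame.append call as the code above):

import pandas as pd

last = pd.DataFrame({'sn': ['SN_demo'], 'date': ['2021-07-17'],
                     'a0': [1.0], 'a1': [1.0], 'a2': [1.0], 'a3': [1.0], 'a4': [1.0]})
last = last.append(last).reset_index(drop=True)   # row 0 keeps yesterday's factors
last.loc[1, 'date'] = '2021-07-18'                # row 1 receives today's date and, later, the new factors
print(last)                                       # only row 1 is appended to tb_sn_factor afterwards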
+
+def updtOneSnFct(sn,start_date,end_date):
+    '''Update all factors of a single SN from start_date to end_date'''
+    start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')#start date
+    end_date_datetime=datetime.datetime.strptime(end_date,'%Y-%m-%d')#end date
+    delta_day=(end_date_datetime-start_date_datetime).days#number of days in between
+    i=1
+    while i<=delta_day:
+        end_date=(start_date_datetime+datetime.timedelta(days=i)).strftime("%Y-%m-%d")
+        updtOneSnTodayFct(sn,start_date,end_date)#update one day at a time
+        print('update one sn factor from '+start_date+" to "+end_date)
+        start_date=end_date
+        i+=1#increment
+
+def updtOneSnTodayFct(sn,start_date,end_date):
+    '''Update today's factors for a single SN; start_date and end_date are one day apart'''
+    start_date_str="'"+start_date+"'"
+    end_date_str="'"+end_date+"'"
+    sn_str="'"+sn+"'"
+    sql_cmd="select * from drive_info where time between "+start_date_str+" and "+end_date_str+\
+    " and distance!=0 and name="+sn_str
+    range_soc_df = pd.read_sql(sql_cmd, conn_qx)#query the qx database with read_sql
+
+    if len(range_soc_df)>0:
+        #after pulling the day's data, keep only the SNs that reported on that day
+        today_sn_list=range_soc_df['name'].unique().tolist()
+        #empty dataframe that will collect every updated factor row
+        today_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])
+
+        for sn in today_sn_list:
+            #look up whether the factor table already has this SN; if not, initialise a new row
+            sn_str="'"+sn+"'"
+            sql_cmd2="select sn,date,a0,a1,a2,a3,a4 from tb_sn_factor where date<"+start_date_str+" and sn="+sn_str
+            factor_df=pd.read_sql(sql_cmd2, conn_local)#query the local database with read_sql
+
+            #deduplicate by sn and date so reruns do not produce duplicate factor rows; keep the first occurrence
+            factor_df=factor_df.drop_duplicates(subset=['sn','date'],keep='first')
+
+            if len(factor_df)==0:
+                #no factor history found, so initialise a fresh one
+                start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')
+                yesterday=(start_date_datetime+datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
+                factor_df=pd.DataFrame({'sn':sn,'date':yesterday,'a0':[1],'a1':[1],'a2':[1],'a3':[1],'a4':[1]})
+                today_sn_fct_df=today_sn_fct_df.append(factor_df.loc[0,:])#record the initialised row so it also reaches the database
+
+            sn_factor_df=factor_df.loc[factor_df['sn']==sn,:]#factors belonging to this SN
+            
+            sn_factor_df=sn_factor_df.sort_values(by='date',ascending=True)#sort by date
+            sn_factor_df_last=sn_factor_df.tail(1).copy()#last row, i.e. the most recent date
+            sn_factor_df_last=sn_factor_df_last.append(sn_factor_df_last)#duplicate it; the copy will hold the new factors
+            sn_factor_df_last=sn_factor_df_last.reset_index(drop=True)#reset the index
+            sn_factor_df_last.loc[1,'date']=start_date#set the date of the second row to the current day
+            #select the rows belonging to this battery
+            condition_sn=(range_soc_df['name']==sn)
+            sn_day_df=range_soc_df.loc[condition_sn,:].copy()
+            sn_day_df=sn_day_df.reset_index(drop=True)
+            #update today's factors with updtTodayFct
+            if len(sn_day_df)>=2:
+                #pre-process the day's rows first
+                sn_day_df=snDayDfPreProcess(sn_day_df)#pre-processing
+                if len(sn_day_df)>=2:
+                    sn_factor_df_new=updtTodayFct(sn_factor_df_last,sn_day_df)
+                    today_sn_fct_df=today_sn_fct_df.append(sn_factor_df_new.loc[1,:])#collect row 1; everything is written to the database at the end
+        
+        #write today_sn_fct_df to the database
+        if len(today_sn_fct_df)>=1:
+            today_sn_fct_df.to_sql('tb_sn_factor',con=engine,chunksize=10000,if_exists='append',index=False)
+            # print(sn+' factor will be updated in table tb_sn_factor!')
+        return today_sn_fct_df
+
+
+
+
+
+# def updtASnTodayFct(start_date,end_date,today_sn_list):
+
+#     sql_cmd="select * from qixiang_test where time>='"+start_date+"' and time<='"+end_date+"'"
+#     range_soc_df = pd.read_sql(sql_cmd, conn)#使用read_sql方法查询数据库
+
+#     sql_cmd2="select sn,date,a0,a1,a2,a3,a4 from tb_sn_factor where date<'"+start_date+"'"
+#     factor_df=pd.read_sql(sql_cmd2, conn)#使用read_sql方法查询数据库
+
+#     #筛选出所有当日数据之后,筛选当日有更新的sn
+#     # today_sn_list=range_soc_df['sn'].unique().tolist()
+#     # today_sn_list=today_sn_list[:10]#更新若干个
+#     #建立空的dataframe,用于承接所有更新的factor信息
+#     today_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])
+
+#     for sn in today_sn_list:
+#         sn_factor_df=factor_df.loc[factor_df['sn']==sn,:]#筛选sn对应的factor
+#         sn_factor_df=sn_factor_df.sort_values(by='date',ascending='True')#按照日期排序
+#         sn_factor_df_last=sn_factor_df.tail(1).copy()#寻找最后一行,代表最近日期
+#         sn_factor_df_last=sn_factor_df_last.append(sn_factor_df_last)#新增加一行,用于存储新的factor
+#         sn_factor_df_last=sn_factor_df_last.reset_index(drop=True)#重置index
+#         sn_factor_df_last.loc[1,'date']=start_date#更改后一行的date为当前日期
+#         #筛选对应车辆的信息
+#         condition_sn=(range_soc_df['sn']==sn)
+#         sn_day_df=range_soc_df.loc[condition_sn,:].copy()
+#         sn_day_df=sn_day_df.reset_index(drop=True)
+#         #使用updtTodayFct函数更新今天的factor
+#         sn_factor_df_new=updtTodayFct(sn_factor_df_last,sn_day_df)
+#         today_sn_fct_df=today_sn_fct_df.append(sn_factor_df_new.loc[1,:])#筛选第一行,进行拼接,最后写入到数据库中
+    
+#     #将today_sn_fct_df写入到数据库中
+#     today_sn_fct_df.to_sql('tb_sn_factor',con=engine,chunksize=10000,if_exists='append',index=False)

+ 28 - 0
LIB/FRONTEND/odo/UpdtFct_Main.py

@@ -0,0 +1,28 @@
+import pandas as pd
+import pymysql
+from sqlalchemy import create_engine
+import datetime
+from UpdtFct import *
+
+
+conn_qx = pymysql.connect(
+        host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com',
+        user='qx_cas',
+        password='Qx@123456',#Qx@123456
+        database='qx_cas',
+        charset='utf8'
+    )
+
+conn_local = pymysql.connect(
+        host='localhost',
+        user='root',
+        password='pengmin',
+        database='qixiangdb',
+        charset='utf8'
+    )
+
+#set the start and end dates, then update the factors of every SN
+start_date="2021-07-18"
+end_date="2021-08-01"
+
+updtAllSnFct(start_date,end_date)
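To refresh a single battery instead of the whole fleet, updtOneSnFct from UpdtFct can be called over the same window; a sketch with an invented serial number:

# updtOneSnFct('PK504B00100004341', start_date, end_date)   # hypothetical SN, same date window as above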

BIN
LIB/FRONTEND/odo/asset_table.xlsx


+ 66 - 0
LIB/FRONTEND/odo/main_1.py

@@ -0,0 +1,66 @@
+#coding=utf-8
+# compute the daily driving distance for each battery
+from math import radians, cos, sin, asin, sqrt
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+from GpsRank import *
+from ProcessDfBms import *
+from ProcessDfGps import *
+from LIB.MIDDLE.odo.CalDist import *
+from LIB.BACKEND import DBManager
+import pdb
+
+asset_table_path='asset_table.xlsx'
+# drive_info_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\drive_info.xlsx'
+asset_sheet_num=1
+usecols_list=[4,5]
+
+asset_table=pd.read_excel(asset_table_path,sheet_name=asset_sheet_num,skiprows=1,usecols=usecols_list)
+SN_list=asset_table['SN号'].values.tolist()
+print('read '+str(len(SN_list))+' rows from the 6060 sheet')
+asset_table=asset_table.rename(columns={'SN号':'SN','状态':'state'})
+
+asset_table.set_index(["SN"],inplace=True)
+col_name=asset_table.columns.tolist()
+col_name.extend(['range','accum_soc','day_start_soc','day_end_soc','day_start_time','day_end_time'])
+asset_table=asset_table.reindex(columns=col_name)
+
+start_hour='00:00:00'#earliest query time of each day
+end_hour='23:59:00'#latest query time of each day
+
+
+date_index=pd.date_range('2021-07-31','2021-07-31')
+for date in date_index:
+    '''iterate over the dates'''
+
+    str_date=str(date)[:10]
+    input_starttime=str_date+' '+start_hour
+    input_endtime=str_date+' '+end_hour
+    test_day=str_date[5:10]#month-day, used as the sheet name
+    drive_info_path='6060\\drive_info'+test_day+'_50_end_'+'.xlsx'
+
+    print(input_starttime)
+
+    drive_info_aday=pd.DataFrame()
+    SN_list_short=SN_list#can be sliced (e.g. 0:50, then 50:end) to process in batches
+
+    for SN in SN_list_short:
+        '''iterate over the SNs'''
+        SN=SN.strip('\t')
+        SN=SN.strip('\n')
+
+        try:
+            dist_info=GetDistInfo(SN,input_starttime,input_endtime)#renamed so the built-in range is not shadowed
+            range_df=pd.DataFrame([dist_info])
+            drive_info_aday=pd.concat([drive_info_aday,range_df],axis=0)
+
+        except:
+            print(SN+' '+test_day+' fail')
+        else:
+            pass
+            #print(SN+' '+test_day+'success')
+
+    drive_info_aday.to_excel(drive_info_path,sheet_name=test_day)#the sheet is named after test_day

+ 106 - 0
LIB/FRONTEND/other/bat_user_relation/main.py

@@ -0,0 +1,106 @@
+from sqlalchemy import create_engine
+import pandas as pd
+
+# connect to the database
+host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
+port=3306
+db='qixiang_manage'
+user='qx_query'
+password='@Qx_query'
+engine = create_engine('mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8'.format(user,password, host, str(port),db))
+sql = "select * from py_battery_rent"
+df_rent = pd.read_sql_query(sql, engine)
+sql = "select * from py_battery_rent_change"
+df_rent_change = pd.read_sql_query(sql, engine)
+
+
+# statistics cut-off: 2021-07-25 14:00:00
+'''
+ Pre-processing:
+ df_rent:
+    1) drop test batteries (codes starting with GY or with the wrong number of digits)
+    2) drop rows with an empty user_id
+    3) drop rows with an empty qrcode
+    4) drop unpaid orders (pay_stat == 3)
+    5) set timestamps equal to 0 to None
+ df_rent_change:
+    1) drop test batteries (codes starting with GY or with the wrong number of digits)
+    2) drop rows where qrcode or new_qrcode is empty
+'''
+# df_rent = pd.read_csv("data_rent.csv",sep=',',encoding="ANSI")
+# print(len(df_rent))
+
+df_rent = df_rent.dropna(axis=0, how='any', subset=['user_id', 'qrcode'], inplace=False)
+df_rent = df_rent[~(df_rent['pay_stat']==3)]
+df_rent['id'] = df_rent['id'].apply(lambda x:str(int(x)) if not pd.isnull(x) else None)
+df_rent['return_time'] = df_rent['return_time'].apply(lambda x:x+3600*8 if x!=0 else None)
+df_rent['pay_time'] = df_rent['pay_time'].apply(lambda x:x+3600*8 if x!=0 else None)
+df_rent['get_time'] = df_rent['get_time'].apply(lambda x:x+3600*8 if x!=0 else None)
+df_rent['end_time'] = df_rent['end_time'].apply(lambda x:x+3600*8 if x!=0 else None)
+df_rent['addtime'] = pd.to_datetime(df_rent['addtime'].values,unit='s')
+df_rent['pay_time'] = pd.to_datetime(df_rent['pay_time'].values,unit='s')
+df_rent['get_time'] = pd.to_datetime(df_rent['get_time'].values,unit='s')
+df_rent['end_time'] = pd.to_datetime(df_rent['end_time'].values,unit='s')
+df_rent['return_time'] = pd.to_datetime(df_rent['return_time'].values,unit='s')
+df_rent['addtime'] = df_rent['addtime'].apply(lambda x:x.strftime("%Y-%m-%d %H:%M:%S"))
+df_rent['pay_time'] = df_rent['pay_time'].apply(lambda x:x.strftime("%Y-%m-%d %H:%M:%S") if not pd.isna(x) else x)
+df_rent['get_time'] = df_rent['get_time'].apply(lambda x:x.strftime("%Y-%m-%d %H:%M:%S") if not pd.isna(x) else x)
+df_rent['end_time'] = df_rent['end_time'].apply(lambda x:x.strftime("%Y-%m-%d %H:%M:%S") if not pd.isna(x) else x)
+df_rent['return_time'] = df_rent['return_time'].apply(lambda x:x.strftime("%Y-%m-%d %H:%M:%S") if not pd.isna(x) else x)
+df_rent = df_rent.reset_index(drop=True)
+print(len(df_rent))
+
+# df_rent_change = pd.read_csv("data_rent_change.csv",sep=',',encoding="ANSI")
+print(len(df_rent_change))
+df_rent_change = df_rent_change.dropna(axis=0, how='any', subset=['new_qrcode', 'qrcode'], inplace=False)
+print(len(df_rent_change))
+df_rent_change = df_rent_change.reset_index(drop=True)
+df_rent_change['create_time'] = df_rent_change['create_time'].apply(lambda x:x+3600*8 if x!=0 else None)
+df_rent_change['create_time'] = pd.to_datetime(df_rent_change['create_time'].values,unit='s')
+df_rent_change['create_time'] = df_rent_change['create_time'].apply(lambda x:x.strftime("%Y-%m-%d %H:%M:%S"))
+# merge the battery-swap records into df_rent: the old battery gets an extra rent and return record, and the order's pay_time is moved to the swap time
+df_groups = df_rent_change.groupby("rent_id")
+for name, df_group in df_groups:
+    df_group = df_group.sort_values("create_time")
+    df_group = df_group.reset_index(drop=True)
+    for i in range(0, len(df_group)):
+        df_rent = df_rent.append(pd.DataFrame({'addtime':[df_group.loc[i,'create_time']],'qrcode':[df_group.loc[i,'qrcode']], 'return_time':[df_group.loc[i,'create_time']],'user_id':[df_group.loc[i,'user_id']], 'f_id':[df_group.loc[i,'f_id']]}))
+
+        df_rent = df_rent.append(pd.DataFrame({'addtime':[df_rent.loc[df_rent[(df_rent['id']==str(int(df_group.loc[i,'rent_id'])))].index,'pay_time'].values[0]],
+                'qrcode':[df_group.loc[i,'qrcode']], 'pay_time':[df_rent.loc[df_rent[(df_rent['id']==str(int(df_group.loc[i,'rent_id'])))].index,'pay_time'].values[0]], 
+                'user_id':[df_group.loc[i,'user_id']], 'f_id':[df_group.loc[i,'f_id']]}))
+  
+        df_rent.loc[df_rent[(df_rent['id']==str(int(df_group.loc[i,'rent_id'])))].index,'pay_time'] = df_group.loc[i,'create_time']
+
+# build a time column used only for sorting
+df_rent = df_rent.reset_index(drop=True)
+df_rent['sort_time'] = [None] * len(df_rent)
+for i in range(0, len(df_rent)):
+   df_rent.loc[i, 'sort_time'] =  df_rent.loc[i, 'pay_time'] if not pd.isnull(df_rent.loc[i, 'pay_time']) else df_rent.loc[i, 'return_time']
+df_rent['sort_time'] = pd.to_datetime(df_rent['sort_time'])
+# df_rent.to_csv('ttt.csv')
+df = df_rent.copy()
+df_res = pd.DataFrame(columns=['sn', 'st', 'et', 'user_id', 'agent_id'])
+df_groups = df.groupby("qrcode")
+for name, df_group in df_groups:
+    
+    # for each battery (grouped by qrcode), sort the records by time, then detect changes of user id
+    df_group = df_group.sort_values("sort_time") # sort by the time this record was generated
+    df_group = df_group.reset_index(drop=True)
+    sn = name
+    user_id = df_group.loc[0, 'user_id']
+    st =  df_group.loc[0, 'pay_time']
+    et = None
+    for i in range(1,len(df_group)):
+        if df_group.loc[i, 'user_id'] == user_id:
+            continue
+        else:
+            et = df_group.loc[i-1, 'return_time'] if not pd.isnull(df_group.loc[i-1, 'return_time']) else None
+            df_res = df_res.append(pd.DataFrame({'sn':[sn], 'st':[st], 'et':[et], 'user_id':[user_id], 'agent_id':[df_group.loc[i-1, 'f_id']]}), ignore_index=True)
+            user_id = df_group.loc[i, 'user_id']
+            st =  df_group.loc[i, 'pay_time']
+            et = None
+    et = df_group.loc[len(df_group)-1, 'return_time'] if not pd.isnull(df_group.loc[len(df_group)-1, 'return_time']) else None
+    df_res = df_res.append(pd.DataFrame({'sn':[sn], 'st':[st], 'et':[et], 'user_id':[user_id], 'agent_id':[df_group.loc[len(df_group)-1, 'f_id']]}), ignore_index=True)
+df_res.to_csv('result.csv')
+    
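The grouping above reduces each battery's rent history to (sn, st, et, user_id, agent_id) intervals; a toy illustration with invented ids shows the intended result:

# toy history of one battery:
#   user 11 pays at 2021-07-01 10:00 and returns at 2021-07-02 18:00
#   user 22 pays at 2021-07-03 09:00 and has not returned yet
# expected rows in result.csv:
#   sn=BAT_demo, st=2021-07-01 10:00:00, et=2021-07-02 18:00:00, user_id=11
#   sn=BAT_demo, st=2021-07-03 09:00:00, et=None,                user_id=22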

+ 309 - 0
LIB/FRONTEND/soh/LFPSoh 20210711.py

@@ -0,0 +1,309 @@
+# fetch the data
+from LIB.BACKEND import DBManager
+
+import os
+import pandas as pd
+import numpy as np
+import datetime
+# import matplotlib.pyplot as plt
+
+#input parameters
+Capacity = 54
+PackFullChrgVolt=69.99
+CellFullChrgVolt=3.5
+CellVoltNums=20
+CellTempNums=4
+FullChrgSoc=98
+PeakSoc=57
+# #40Ah-OCV
+# LookTab_SOC = [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95, 100]
+# LookTab_OCV = [3.3159, 3.4502, 3.4904, 3.5277, 3.5590, 3.5888, 3.6146, 3.6312, 3.6467, 3.6642, 3.6865, 3.7171, 3.7617,
+#                3.8031, 3.8440, 3.8888, 3.9376, 3.9891, 4.0451, 4.1068, 4.1830]
+#55Ah-OCV
+LookTab_SOC = [0.00, 	2.40, 	6.38, 	10.37, 	14.35, 	18.33, 	22.32, 	26.30, 	30.28, 	35.26, 	40.24, 	45.22, 	50.20, 	54.19, 	58.17, 	60.16, 	65.14, 	70.12, 	75.10, 	80.08, 	84.06, 	88.05, 	92.03, 	96.02, 	100.00]
+LookTab_OCV = [2.7151,	3.0298,	3.1935,	3.2009,	3.2167,	3.2393,	3.2561,	3.2703,	3.2843,	3.2871,	3.2874,	3.2868,	3.2896,	3.2917,	3.2967,	3.3128,	3.3283,	3.3286,	3.3287,	3.3288,	3.3289,	3.3296,	3.3302,	3.3314,	3.3429]
+
+#moving-average filter
+def np_move_avg(a, n, mode="same"):
+    return (np.convolve(a, np.ones((n,)) / n, mode=mode))
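For example (illustrative numbers), a window of n = 3 averages each sample with its neighbours; the edges are affected by the implicit zero padding of np.convolve:

print(np_move_avg([3.30, 3.31, 3.35, 3.36, 3.37], 3))
# [2.2033..., 3.32, 3.34, 3.36, 2.2433...] -> the centre values are true 3-point means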
+
+#initialise the result lists
+dvdq_soh=[]
+dvdq_soh_err=[]
+bms_soh=[]
+dvdq_time=[]
+dvdq_sohcfd=[]
+sn_list=[]
+
+#time window of the data to fetch
+now_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+now_time=datetime.datetime.strptime(now_time,'%Y-%m-%d %H:%M:%S')
+start_time=now_time-datetime.timedelta(days=30)
+end_time=str(now_time)
+start_time=str(start_time)
+
+#input an xlsx that contains an 'SN号' column
+SNdata = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6060')
+SNnums=SNdata['SN号']
+for k in range(15):
+    
+    SNnum=str(SNnums[k])
+    sn = SNnum
+    st = '2021-07-06 00:00:00'
+    et = '2021-07-07 20:00:00'
+
+    dbManager = DBManager.DBManager()
+    df_data = dbManager.get_data(sn=sn, start_time=st, end_time=et, data_groups=['bms'])
+    data = df_data['bms']
+
+    packcrnt=data['总电流[A]']
+    packvolt=data['总电压[V]']
+    SOC=data['SOC[%]']
+    SOH=data['SOH[%]']
+    bmsstat=data['充电状态']
+    time= pd.to_datetime(data['时间戳'], format='%Y-%m-%d %H:%M:%S')
+
+    #step 1: extract the charging segments
+    ChgStart=[]
+    ChgEnd=[]
+    for i in range(3, len(time) - 3):
+        if i==3 and bmsstat[i]==2 and bmsstat[i+1]==2 and bmsstat[i+2]==2:
+            ChgStart.append(i)
+        elif bmsstat[i-2]!=2 and bmsstat[i-1]!=2 and bmsstat[i]==2:
+            ChgStart.append(i)
+        elif bmsstat[i-1]==2 and bmsstat[i]!=2 and bmsstat[i+1]!=2:
+            ChgEnd.append(i)
+        elif i == (len(time) - 4) and bmsstat[len(bmsstat)-1] == 2 and bmsstat[len(bmsstat)-2] == 2:
+            ChgEnd.append(len(time)-1)
+
+    #step 2: keep fully charged segments whose start SOC and minimum cell temperature satisfy the thresholds below
+    ChgStartValid1=[]
+    ChgEndValid1=[]
+    ChgStartValid2=[]
+    ChgEndValid2=[]
+
+    for i in range(min(len(ChgStart),len(ChgEnd))):
+
+        #获取最小温度值
+        celltemp = []
+        for j in range(1, CellTempNums+1):
+            s = str(j)
+            temp = data['单体温度' + s]
+            celltemp.append(temp[ChgEnd[i]])
+
+        #寻找最大电压值
+        cellvolt = []
+        for j in range(1, CellVoltNums+1):
+            s = str(j)
+            volt = max(data['单体电压' + s][ChgStart[i]:ChgEnd[i]]/1000)
+            cellvolt.append(volt)
+
+        #segments usable for the two-point method
+        StandingTime=0
+        if max(cellvolt)>CellFullChrgVolt and SOC[ChgStart[i]]<30 and min(celltemp)>5:
+            for k in reversed(range(ChgStart[i])):
+                if abs(packcrnt[k - 2]) < 0.01:
+                    StandingTime = StandingTime + (time[k] - time[k-1]).total_seconds()
+                    if StandingTime > 600:  # 筛选静置时间>10min
+                        ChgStartValid1.append(ChgStart[i])
+                        ChgEndValid1.append(ChgEnd[i])
+                        break
+                else:
+                    break
+
+        #segments usable for the dV/dQ method
+        if max(cellvolt)>CellFullChrgVolt and SOC[ChgStart[i]]<45 and min(celltemp)>5:
+            if ((time[ChgEnd[i]]-time[ChgStart[i]]).total_seconds())/(ChgEnd[i]-ChgStart[i])<60:
+                ChgStartValid2.append(ChgStart[i])
+                ChgEndValid2.append(ChgEnd[i])
+
+    #step 3: compute the charge SOC and SOH
+
+    # two-point SOH
+    Soc=[]
+    Time=[]
+    Soc_Err=[]
+    Bms_Soc=[]
+
+    Soh1=[]
+    Time1=[]
+    Bms_Soh1=[]
+    Soh_Err1=[]
+
+    for i in range(len(ChgStartValid1)):
+
+        #寻找最大电压值
+        cellvolt = []
+        for j in range(1, CellVoltNums+1):
+            s = str(j)
+            volt = max(data['单体电压' + s])
+            cellvolt.append(volt)
+        voltmax_index = cellvolt.index(max(cellvolt)) + 1
+        cellvolt = data['单体电压' + str(voltmax_index)] / 1000
+
+        #soc
+        Soc.append(np.interp(cellvolt[ChgStartValid1[i]-3],LookTab_OCV,LookTab_SOC))
+        Time.append(time[ChgStartValid1[i]-3])
+        Bms_Soc.append(SOC[ChgStartValid1[i]-3])
+        Soc_Err.append(Bms_Soc[-1]-Soc[-1])
+
+        #soh
+        Ocv_Soc=np.interp(cellvolt[ChgStartValid1[i]-3],LookTab_OCV,LookTab_SOC)
+        Ah=0
+
+        for j in range(ChgStartValid1[i],ChgEndValid1[i]):
+            #计算soc
+            Step=(time[j]-time[j-1]).total_seconds()
+            Time.append(time[j])
+            Bms_Soc.append(SOC[j])
+            if Soc[-1]-(packcrnt[j]*Step*100)/(3600*Capacity)<100:
+                Soc.append(Soc[-1]-(packcrnt[j]*Step*100)/(3600*Capacity))
+            else:
+                Soc.append(100)
+            Soc_Err.append(Bms_Soc[-1] - Soc[-1])
+
+            #两点法计算soh
+            Ah=Ah-packcrnt[j]*Step/3600
+        Soh1.append(Ah*100/((FullChrgSoc-Ocv_Soc)*0.01*Capacity))
+        Bms_Soh1.append(SOH[i])
+        Soh_Err1.append(Bms_Soh1[-1]-Soh1[-1])
+        Time1.append(time[ChgStartValid1[i]])
+
+    # dV/dQ-based SOH
+    Soh2=[]
+    Time2=[]
+    Bms_Soh2=[]
+    Soh_Err2=[]
+    SohCfd = []
+    sn_list=[]
+
+    for i in range(len(ChgStartValid2)):
+
+        #寻找最大电压值
+        cellvolt1 = []
+        cellvolt=[]
+        for j in range(1, CellVoltNums+1):
+            s = str(j)
+            volt = data['单体电压' + s]
+            cellvolt1.append(volt[ChgEndValid2[i]])
+        voltmax1_index = cellvolt1.index(max(cellvolt1)) + 1
+        cellvolt1 = data['单体电压' + str(voltmax1_index)] / 1000
+
+        #smooth the cell voltage with the moving-average filter
+        cellvolt=np_move_avg(cellvolt1, 3, mode="same")
+
+
+        #参数赋初始值
+        Ah = 0
+        Volt = cellvolt[ChgStartValid2[i]]
+        DV_Volt=[]
+        DQ_Ah = []
+        DVDQ = []
+        time2 = []
+        soc2 = []
+        Ah_tatal=[0]
+        xvolt=[]
+        #compute the dV and dQ values
+        for j in range(ChgStartValid2[i],ChgEndValid2[i]):
+            Step=(time[j+1]-time[j]).total_seconds()
+            Ah=Ah-packcrnt[j]*Step/3600
+            if (cellvolt[j]-Volt)>0.0009 and Ah>0:
+                Ah_tatal.append(Ah_tatal[-1]+Ah)
+                DQ_Ah.append(Ah)
+                DV_Volt.append(cellvolt[j]-Volt)
+                DVDQ.append((DV_Volt[-1])/DQ_Ah[-1])
+                xvolt.append(cellvolt[j])
+                Volt=cellvolt[j]
+                Ah = 0
+                time2.append(time[j])
+                soc2.append(SOC[j])
+
+        #keep the 50 % < SOC < 80 % slice
+        Data1 = pd.DataFrame({'SOC': soc2,
+                                'DVDQ': DVDQ,
+                                'Ah_tatal': Ah_tatal[:-1],
+                                'DQ_Ah':DQ_Ah,
+                                'DV_Volt':DV_Volt,
+                                'XVOLT':xvolt})
+
+        Data1=Data1[(Data1['SOC']>50) & (Data1['SOC']<80)]
+
+        #find the dV/dQ peak, then compute the SOH and its confidence
+        # minimum cell temperature
+        celltemp = []
+        for j in range(1, CellTempNums+1):
+            s = str(j)
+            temp = data['单体温度' + s]
+            celltemp.append(temp[ChgStartValid2[i]])
+        if len(Data1['DVDQ'])>1:
+            PeakIndex=Data1['DVDQ'].idxmax()
+            #keep the points within ±0.5 % SOC of the peak
+            Data2=Data1[(Data1['SOC']>(Data1['SOC'][PeakIndex]-0.5)) & (Data1['SOC']<(Data1['SOC'][PeakIndex]+0.5))]
+            if len(Data2)>2:
+                Ah_tatal1 = Data1['Ah_tatal']
+                DVDQ = Data1['DVDQ']
+                soc2 = Data1['SOC']
+                xvolt = Data1['XVOLT']
+                if soc2[PeakIndex]>50 and soc2[PeakIndex]<80:
+                    DVDQ_SOH=(Ah_tatal[-1]-Ah_tatal1[PeakIndex]) * 100 / ((FullChrgSoc - PeakSoc) * 0.01 * Capacity)
+                    if DVDQ_SOH<95:
+                        DVDQ_SOH=DVDQ_SOH*0.3926+58.14
+                    if DVDQ_SOH>70 and DVDQ_SOH<120:
+                        Soh2.append(DVDQ_SOH)
+                        Bms_Soh2.append(SOH[ChgStartValid2[i]])
+                        Soh_Err2.append(Bms_Soh2[-1] - Soh2[-1])
+                        Time2.append(time[ChgStartValid2[i]])
+                        sn_list.append(SNnum)
+
+                        #计算置信度
+                        if min(celltemp)<10:
+                            SohCfd.append(50)
+                        elif min(celltemp)<20:
+                            SohCfd.append(80)
+                        else:
+                            SohCfd.append(100)
+            else:
+                Data1=Data1.drop([PeakIndex])
+                PeakIndex = Data1['DVDQ'].idxmax()
+                Data2 = Data1[(Data1['SOC'] > (Data1['SOC'][PeakIndex] - 0.5)) & (Data1['SOC'] < (Data1['SOC'][PeakIndex] + 0.5))]
+                if len(Data2) > 3:
+                    Ah_tatal1 = Data1['Ah_tatal']
+                    DVDQ = Data1['DVDQ']
+                    soc2 = Data1['SOC']
+                    xvolt = Data1['XVOLT']
+                    if soc2[PeakIndex]>50 and soc2[PeakIndex]<80:
+                        DVDQ_SOH=(Ah_tatal[-1]-Ah_tatal1[PeakIndex]) * 100 / ((FullChrgSoc - PeakSoc) * 0.01 * Capacity)
+                        if DVDQ_SOH<95:
+                            DVDQ_SOH=DVDQ_SOH*0.3926+58.14
+                        if DVDQ_SOH>70 and DVDQ_SOH<120:
+                            Soh2.append(DVDQ_SOH)
+                            Bms_Soh2.append(SOH[ChgStartValid2[i]])
+                            Soh_Err2.append(Bms_Soh2[-1] - Soh2[-1])
+                            Time2.append(time[ChgStartValid2[i]])
+                            sn_list.append(SNnum)
+
+                            #计算置信度
+                            if min(celltemp)<10:
+                                SohCfd.append(50)
+                            elif min(celltemp)<20:
+                                SohCfd.append(80)
+                            else:
+                                SohCfd.append(100)
+
+    #post-process: smooth the SOH series
+    if len(Soh2)>5:
+        Soh2=np_move_avg(Soh2,5,mode="valid")
+        result_soh2={'时间': Time2[4::],
+            'SN号':sn_list[4::],
+            'BMS_SOH': Bms_Soh2[4::],
+            'SOH': Soh2,
+            'SOH误差': Soh_Err2[4::]}
+    else:
+        result_soh2={'时间': Time2,
+            'SN号':sn_list,
+            'BMS_SOH': Bms_Soh2,
+            'SOH': Soh2,
+            'SOH误差': Soh_Err2}
+    #step 4: write the results to csv
+    Result_Soh2=pd.DataFrame(result_soh2)
+    Result_Soh2.to_csv('BMS_SOH_'+SNnum+'.csv',encoding='GB18030')
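A worked version of the peak-based capacity estimate used above, with invented numbers: the Ah charged after the dV/dQ peak is scaled by the SOC window between PeakSoc and FullChrgSoc.

Capacity    = 54      # rated Ah of the LFP pack
FullChrgSoc = 98      # SOC reached at full charge, %
PeakSoc     = 57      # SOC where the dV/dQ peak is expected, %
ah_after_peak = 20.5  # Ah charged between the peak and the end of charge (hypothetical)
soh = ah_after_peak * 100 / ((FullChrgSoc - PeakSoc) * 0.01 * Capacity)
print(round(soh, 1))  # 92.6 -> estimated capacity retention in percent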

+ 173 - 0
LIB/FRONTEND/soh/NCMSoh 20210716.py

@@ -0,0 +1,173 @@
+# fetch the data
+from LIB.BACKEND import DBManager
+
+import os
+import pandas as pd
+import numpy as np
+import datetime
+# import matplotlib.pyplot as plt
+
+#input parameters
+Capacity = 41
+PackFullChrgVolt=69.99
+CellFullChrgVolt=3.5
+CellVoltNums=17
+CellTempNums=4
+FullChrgSoc=98
+PeakSoc=57
+# #40Ah-OCV
+LookTab_SOC = [0,	3.534883489,	8.358178409,	13.18141871,	18.00471528,	22.82796155,	27.65123833,	32.47444668,	37.29772717,	42.12099502,	46.94423182,	51.76744813,	56.59070685,	61.4139927,	66.23719857,	71.0604667,	75.88373853,	80.70702266,	85.5302705,	90.35352009,	95.17676458,	100]
+LookTab_OCV = [3.3159,	3.4384,	3.4774,	3.5156,	3.5478,	3.5748,	3.6058,	3.6238,	3.638,	3.6535,	3.6715,	3.6951,	3.7279,	3.7757,	3.8126,	3.8529,	3.8969,	3.9446,	3.9946,	4.0491,	4.109,	4.183]
+# #55Ah-OCV
+# LookTab_SOC = [0.00, 	2.40, 	6.38, 	10.37, 	14.35, 	18.33, 	22.32, 	26.30, 	30.28, 	35.26, 	40.24, 	45.22, 	50.20, 	54.19, 	58.17, 	60.16, 	65.14, 	70.12, 	75.10, 	80.08, 	84.06, 	88.05, 	92.03, 	96.02, 	100.00]
+# LookTab_OCV = [2.7151,	3.0298,	3.1935,	3.2009,	3.2167,	3.2393,	3.2561,	3.2703,	3.2843,	3.2871,	3.2874,	3.2868,	3.2896,	3.2917,	3.2967,	3.3128,	3.3283,	3.3286,	3.3287,	3.3288,	3.3289,	3.3296,	3.3302,	3.3314,	3.3429]
+
+#initialise the result lists
+Soh3=[]
+Time3=[]
+Bms_Soh3=[]
+Soh_Err3=[]
+sn_list=[]
+
+#time window of the data to fetch
+now_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+now_time=datetime.datetime.strptime(now_time,'%Y-%m-%d %H:%M:%S')
+start_time=now_time-datetime.timedelta(days=31)
+end_time=str(now_time)
+start_time=str(start_time)
+
+#input an xlsx that contains an 'SN号' column
+SNdata = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6040骑享')
+SNnums=SNdata['SN号']
+for k in range(len(SNnums)):
+    SNnum=str(SNnums[k])
+
+    sn = SNnum
+    st = start_time
+    et = end_time
+
+    dbManager = DBManager.DBManager()
+    df_data = dbManager.get_data(sn=sn, start_time=st, end_time=et, data_groups=['bms'])
+    data = df_data['bms']
+    # print(data)
+
+    packcrnt=data['总电流[A]']
+    packvolt=data['总电压[V]']
+    SOC=data['SOC[%]']
+    SOH=data['SOH[%]']
+    bmsstat=data['充电状态']
+    time= pd.to_datetime(data['时间戳'], format='%Y-%m-%d %H:%M:%S')
+
+    #step 1: extract the charging segments
+    if len(packcrnt)>100:
+        ChgStart=[]
+        ChgEnd=[]
+        for i in range(3, len(time) - 3):
+            if i==3 and bmsstat[i]==2 and bmsstat[i+1]==2 and bmsstat[i+2]==2:
+                ChgStart.append(i)
+            elif bmsstat[i-2]!=2 and bmsstat[i-1]!=2 and bmsstat[i]==2:
+                ChgStart.append(i)
+            elif bmsstat[i-1]==2 and bmsstat[i]!=2 and bmsstat[i+1]!=2:
+                ChgEnd.append(i-1)
+            elif i == (len(time) - 4) and bmsstat[len(bmsstat)-1] == 2 and bmsstat[len(bmsstat)-2] == 2:
+                ChgEnd.append(len(time)-2)
+
+        #step 2: keep charges with start SOC < 45 %, end SOC > 85 % and minimum cell temperature > 5 °C
+        ChgStartValid1=[]
+        ChgEndValid1=[]
+        ChgStartValid2=[]
+        ChgEndValid2=[]
+        StandingNum=[]
+
+        for i in range(min(len(ChgStart),len(ChgEnd))):
+
+            #获取最小温度值
+            celltemp = []
+            for j in range(1, CellTempNums+1):
+                s = str(j)
+                temp = data['单体温度' + s]
+                celltemp.append(temp[ChgEnd[i]])
+            
+            #trim trailing zero-current samples
+            for k in range(ChgStart[i],ChgEnd[i]):
+                if packcrnt[k]<-0.5 and packcrnt[k+1]>-0.5 and packcrnt[k+2]>-0.5 and packcrnt[k+3]>-0.5:
+                    ChgEnd[i]=k
+            
+            #maximum pack voltage during the charge
+            if len(packvolt[ChgStart[i]:ChgEnd[i]])>0:
+                packvoltMAX=max(packvolt[ChgStart[i]:ChgEnd[i]])
+
+                #segments usable for the two-point method
+                StandingTime=0
+                StandingTime1=0
+                StandingTime2=0
+                if SOC[ChgEnd[i]]>85 and SOC[ChgStart[i]]<45 and min(celltemp)>5:
+                    for m in range(min(len(packcrnt)-ChgEnd[i]-2,ChgStart[i]-2)):
+                        if abs(packcrnt[ChgStart[i] - m - 1]) < 0.1:
+                            StandingTime = StandingTime + (time[ChgStart[i] - m] - time[ChgStart[i] - m - 1]).total_seconds()
+                        if abs(packcrnt[ChgEnd[i] + m + 1]) < 0.1:
+                            StandingTime1 = StandingTime1 + (time[ChgEnd[i] + m + 1] - time[ChgEnd[i] + m]).total_seconds()
+                        if StandingTime > 900 and StandingTime1>900 and ((time[ChgEnd[i]]-time[ChgStart[i]]).total_seconds())/(ChgEnd[i]-ChgStart[i])<60:  #require >15 min at rest on both sides and few missing samples during the slow charge
+                            ChgStartValid1.append(ChgStart[i])
+                            ChgEndValid1.append(ChgEnd[i])
+                            StandingNum.append(m)
+                            break
+                        if abs(packcrnt[ChgStart[i] - m - 2])>0.5 and abs(packcrnt[ChgEnd[i] + m + 2])>0.5:
+                            break
+
+        # 计算soh
+        Soh1=[]
+        Soh2=[]
+        Time1=[]
+        Bms_Soh1=[]
+        Soh_Err1=[]
+        sn_list1=[]
+        #two-point SOH
+        if len(ChgStartValid1)>0:
+            for i in range(len(ChgStartValid1)):
+                #计算Ah
+                Ah=0
+                for j in range(ChgStartValid1[i],ChgEndValid1[i]):
+                    Step=(time[j+1]-time[j]).total_seconds()
+                    Ah=Ah-packcrnt[j+1]*Step/3600
+                #计算每个电芯的Soh
+                for j in range(1, CellVoltNums+1):
+                    s = str(j)
+                    cellvolt = data['单体电压' + s]/1000
+                    OCVStart=cellvolt[ChgStartValid1[i]-2]
+                    OCVEnd=cellvolt[ChgEndValid1[i]+StandingNum[i]]
+                    #soh
+                    Ocv_Soc1=np.interp(OCVStart,LookTab_OCV,LookTab_SOC)
+                    Ocv_Soc2=np.interp(OCVEnd,LookTab_OCV,LookTab_SOC)
+                    Soh2.append(Ah*100/((Ocv_Soc2-Ocv_Soc1)*0.01*Capacity))
+                Soh1.append(np.mean(Soh2))
+                Bms_Soh1.append(SOH[ChgStartValid1[i]])
+                Soh_Err1.append(Bms_Soh1[-1]-Soh1[-1])
+                Time1.append(time[ChgStartValid1[i]])
+                sn_list1.append(SNnum)
+       
+            # Soh3.append(np.mean(Soh1))
+            # Bms_Soh3.append(np.mean(Bms_Soh1))
+            # Soh_Err3.append(np.mean(Soh_Err1))
+            # Time3.append(time[ChgStartValid1[-1]])
+            # sn_list.append(SNnum)
+
+        #write the per-SN results to csv
+            result_soh2={'时间': Time1,
+                'SN号': sn_list1,
+                'BMS_SOH': Bms_Soh1,
+                'SOH': Soh1,
+                'SOH误差': Soh_Err1}
+
+            Result_Soh2=pd.DataFrame(result_soh2)
+            Result_Soh2.to_csv('BMS_SOH_'+SNnum+'.csv',encoding='GB18030')
+
+#     result_soh1={'时间': Time3,
+#         'SN号':sn_list,
+#         'BMS_SOH': Bms_Soh3,
+#         'SOH': Soh3,
+#         'SOH误差': Soh_Err3}
+
+# Result_Soh1=pd.DataFrame(result_soh1)
+# print(Result_Soh1)
+# Result_Soh1.to_csv('BMS_SOH_'+'6040'+'.csv',encoding='GB18030')
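A worked sketch of the two-point estimate above (rest voltages and Ah invented; LookTab_OCV, LookTab_SOC and Capacity are the 41 Ah NCM values defined at the top of this file):

import numpy as np

ocv_start, ocv_end = 3.60, 4.05   # per-cell rest voltages before and after the charge (hypothetical)
soc_start = np.interp(ocv_start, LookTab_OCV, LookTab_SOC)   # ≈ 26.8 %
soc_end   = np.interp(ocv_end,   LookTab_OCV, LookTab_SOC)   # ≈ 90.4 %
ah_charged = 25.0                 # Ah integrated over the charge (hypothetical)
print(ah_charged * 100 / ((soc_end - soc_start) * 0.01 * Capacity))   # ≈ 96 % SOH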

+ 34 - 0
LIB/FRONTEND/soh/main.py

@@ -0,0 +1,34 @@
+#coding=utf-8
+import os
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+from LIB.MIDDLE import SignalMonitor
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time, datetime
+import traceback
+from LIB.MIDDLE.soh import NCMSoh_20210716 as NCMSoh
+from LIB.MIDDLE.soh import LFPSoh_20210711 as LFPSoh
+
+from urllib import parse
+
+dbManager = DBManager.DBManager()
+if __name__ == "__main__":
+    SNdata_6040 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6040骑享')
+    SNdata_6060 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6060')
+    SNnums_6060=SNdata_6060['SN号']
+    SNnums_6040=SNdata_6040['SN号']
+    now_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    now_time=datetime.datetime.strptime(now_time,'%Y-%m-%d %H:%M:%S')
+    start_time=now_time-datetime.timedelta(days=31)
+    end_time=str(now_time)
+    start_time=str(start_time)
+
+    for sn in SNnums_6040.tolist():
+        res = NCMSoh.cal_soh(sn, end_time, start_time)
+        res.to_csv('BMS_SOH_'+sn+'.csv',encoding='GB18030')
+    
+    for sn in SNnums_6060.tolist():
+        res = LFPSoh.cal_soh(sn, end_time, start_time)
+        res.to_csv('BMS_SOH_'+sn+'.csv',encoding='GB18030')

+ 6 - 0
LIB/FRONTEND/soh/soh表头及数据类型.xlsx

@@ -0,0 +1,6 @@
+表头	名称	数据类型
+时间	time	timestamps
+SN号	sn	str
+BMS_SOH	bms_soh	float64
+SOH	soh	float64
+SOH误差	soh_err	float64

BIN
LIB/FRONTEND/soh/骑享资产梳理-20210621.xlsx


+ 86 - 0
LIB/MIDDLE/CellStateEstimation/SOH/V1_0_0/BatParam.py

@@ -0,0 +1,86 @@
+
+#battery parameter definitions
+from types import CellType
+import sys
+
+class BatParam:
+
+    def __init__(self,celltype):
+
+        # if 'PK500' in sn:
+        #     self.celltype=1 #6040 NCM cell
+        # elif 'PK502' in sn:
+        #     self.celltype=2 #4840 NCM cell
+        # elif 'PK504' in sn:
+        #     self.celltype=99    #60 Ah LFP cell
+        # elif 'MGMLXN750' in sn:
+        #     self.celltype=3 #Lixin 50 Ah NCM cell
+        # elif 'MGMCLN750' in sn: 
+        #     self.celltype=4 #CATL 50 Ah NCM cell
+        # else:
+        #     print('no matching battery type code!!!')
+        #     sys.exit()
+
+        if celltype==1: #6040
+            self.Capacity = 41
+            self.PackFullChrgVolt=69.99
+            self.CellFullChrgVolt=4.2
+            self.CellVoltNums=17
+            self.CellTempNums=4
+            self.FullChrgSoc=98
+            self.PeakSoc=57
+            self.LookTab_SOC = [0,	    3.5348,	8.3581,	13.181,	18.004,	22.827,	27.651,	32.474,	37.297,	42.120,	46.944,	51.767,	56.590,	61.413,	66.237,	71.060,	75.883,	80.707,	85.530,	90.353,	95.176,	100]
+            self.LookTab_OCV = [3.3159,	3.4384,	3.4774,	3.5156,	3.5478,	3.5748,	3.6058,	3.6238,	3.638,	3.6535,	3.6715,	3.6951,	3.7279,	3.7757,	3.8126,	3.8529,	3.8969,	3.9446,	3.9946,	4.0491,	4.109,	4.183]
+        
+        elif celltype==2: #4840
+            self.Capacity = 41
+            self.PackFullChrgVolt=69.99
+            self.CellFullChrgVolt=4.2
+            self.CellVoltNums=14
+            self.CellTempNums=4
+            self.FullChrgSoc=98
+            self.PeakSoc=57
+            self.LookTab_SOC = [0,	    3.5348,	8.3581,	13.181,	18.004,	22.827,	27.651,	32.474,	37.297,	42.120,	46.944,	51.767,	56.590,	61.413,	66.237,	71.060,	75.883,	80.707,	85.530,	90.353,	95.176,	100]
+            self.LookTab_OCV = [3.3159,	3.4384,	3.4774,	3.5156,	3.5478,	3.5748,	3.6058,	3.6238,	3.638,	3.6535,	3.6715,	3.6951,	3.7279,	3.7757,	3.8126,	3.8529,	3.8969,	3.9446,	3.9946,	4.0491,	4.109,	4.183]
+        
+        elif celltype==3:
+            self.Capacity = 51
+            self.PackFullChrgVolt=80
+            self.CellFullChrgVolt=4.2
+            self.CellVoltNums=20
+            self.CellTempNums=4
+            self.FullChrgSoc=98
+            self.PeakSoc=57
+            self.LookTab_SOC = [0,	    5,	    10,	    15,	    20,	    25,	    30,	    35,	    40,	    45,	    50,	    55,	    60,	    65,	    70,	    75,	    80,	    85,	    90,	    95,	    100]
+            self.LookTab_OCV = [3.357, 	3.455, 	3.493, 	3.540, 	3.577, 	3.605, 	3.622, 	3.638, 	3.655, 	3.677, 	3.707, 	3.757, 	3.815, 	3.866, 	3.920, 	3.976, 	4.036, 	4.099, 	4.166, 	4.237, 	4.325]
+        
+        elif celltype==4:
+            self.Capacity = 50
+            self.PackFullChrgVolt=80
+            self.CellFullChrgVolt=4.2
+            self.CellVoltNums=20
+            self.CellTempNums=4
+            self.FullChrgSoc=98
+            self.PeakSoc=57
+            self.LookTab_SOC = [0,	    5,	    10,	    15,	    20,	    25,	    30,	    35,	    40,	    45,	    50,	    55,	    60,	    65,	    70,	    75,	    80,	    85,	    90,	    95,	    100]
+            self.LookTab_OCV = [3.152, 	3.397, 	3.438, 	3.481, 	3.523, 	3.560, 	3.586, 	3.604, 	3.620, 	3.638, 	3.661, 	3.693, 	3.748, 	3.803, 	3.853, 	3.903, 	3.953, 	4.006, 	4.063, 	4.121, 	4.183]
+        
+        elif celltype==99:   #60 Ah LFP cell
+            self.Capacity = 54
+            self.PackFullChrgVolt=69.99
+            self.CellFullChrgVolt=3.5
+            self.OcvInflexionBelow=3.285
+            self.OcvInflexion2=3.296
+            self.OcvInflexion3=3.328
+            self.OcvInflexionAbove=3.4
+            self.CellVoltNums=20
+            self.CellTempNums=4
+            self.FullChrgSoc=98
+            self.PeakSoc=60.5
+            self.LookTab_SOC = [0.00, 	2.40, 	6.38, 	10.37, 	14.35, 	18.33, 	22.32, 	26.30, 	30.28, 	35.26, 	40.24, 	45.22, 	50.20, 	54.19, 	58.17, 	60.16, 	65.14, 	70.12, 	75.10, 	80.08, 	84.06, 	88.05, 	92.03, 	96.02, 	100.00]
+            self.LookTab_OCV = [2.7151,	3.0298,	3.1935,	3.2009,	3.2167,	3.2393,	3.2561,	3.2703,	3.2843,	3.2871,	3.2874,	3.2868,	3.2896,	3.2917,	3.2967,	3.3128,	3.3283,	3.3286,	3.3287,	3.3288,	3.3289,	3.3296,	3.3302,	3.3314,	3.3429]
+        
+        else:
+            print('no matching battery type code!!!')
+            # sys.exit()
+
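A minimal usage sketch of the parameter class (cell type 99 is the 60 Ah LFP pack handled above):

from LIB.MIDDLE.CellStateEstimation.SOH.V1_0_0 import BatParam

param = BatParam.BatParam(99)
print(param.Capacity, param.CellVoltNums, param.PeakSoc)   # 54 20 60.5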

+ 733 - 0
LIB/MIDDLE/CellStateEstimation/SOH/V1_0_0/CBMSBatSoh.py

@@ -0,0 +1,733 @@
+import pandas as pd
+import numpy as np
+import datetime
+import bisect
+import matplotlib.pyplot as plt
+from LIB.MIDDLE.CellStateEstimation.SOH.V1_0_0 import BatParam
+from LIB.MIDDLE.CellStateEstimation.SOH.V1_0_0 import DBDownload
+
+class BatSoh:
+    def __init__(self,sn,celltype,df_bms,df_accum, host, port, db, user, password, tablename):  #initialise parameters
+
+        self.sn=sn
+        self.celltype=celltype
+        self.param=BatParam.BatParam(celltype)
+        self.df_bms=df_bms
+        self.packcrnt=df_bms['总电流[A]']
+        self.packvolt=df_bms['总电压[V]']
+        self.bms_soc=df_bms['SOC[%]']
+        self.bms_soh=df_bms['SOH[%]']
+        self.bmsstat=df_bms['充电状态']
+        self.bmstime= pd.to_datetime(df_bms['时间戳'], format='%Y-%m-%d %H:%M:%S')
+
+        self.df_accum=df_accum
+        self.accumtime=pd.to_datetime(df_accum['时间戳'], format='%Y-%m-%d %H:%M:%S')
+
+        self.host=host
+        self.port=port
+        self.db=db
+        self.user=user
+        self.password=password
+        self.tablename=tablename
+
+    def batsoh(self):
+        if self.celltype==1 or self.celltype==2 or self.celltype==3 or self.celltype==4:
+            df_res=self._ncmsoh_twopoint()
+            return df_res
+            
+        elif self.celltype==99:
+            df_res=self._lfpsoh()
+            return df_res
+
+        else:
+            return pd.DataFrame()
+
+    def getdata(self):  #fetch previously computed SOH results from the result database
+        DBManager=DBDownload.DBDownload(self.host, self.port, self.db, self.user, self.password)
+        with DBManager as DBManager:
+            self.df_soh=DBManager.getdata('time_st','time_sp','sn','method','soh','cellsoh', tablename='soh_result', sn=self.sn)
+    
+    def _np_move_avg(self,a, n, mode="same"): #moving-average filter
+        return (np.convolve(a, np.ones((n,)) / n, mode=mode))
+
+    def _chrgdata(self):    #extract the charging segments
+        self.ChgStart=[]
+        self.ChgEnd=[]
+        if len(self.packvolt)>100:
+            for i in range(3, len(self.bmstime) - 3):
+                if i==3 and self.bmsstat[i]==2 and self.bmsstat[i+1]==2 and self.bmsstat[i+2]==2:
+                    self.ChgStart.append(i)
+                elif self.bmsstat[i-2]!=2 and self.bmsstat[i-1]!=2 and self.bmsstat[i]==2:
+                    self.ChgStart.append(i)
+                elif self.bmsstat[i-1]==2 and self.bmsstat[i]!=2 and self.bmsstat[i+1]!=2:
+                    self.ChgEnd.append(i-1)
+                elif i == (len(self.bmstime) - 4) and self.bmsstat[len(self.bmsstat)-1] == 2 and self.bmsstat[len(self.bmsstat)-2] == 2:
+                    self.ChgEnd.append(len(self.bmstime)-2)
+    
+    def _celltemp_weight(self,num):   #drop the lowest temperature reading of the row, then derive the weight and required rest time from the remaining minimum
+        celltemp = []
+        for j in range(1, self.param.CellTempNums+1):
+            s = str(j)
+            celltemp.append(self.df_bms.loc[num,'单体温度' + s])
+        celltemp.remove(min(celltemp))
+        if self.celltype==99:
+            if min(celltemp)>=20:
+                self.tempweight=1
+                self.StandardStandingTime=1800
+            elif min(celltemp)>=10:
+                self.tempweight=0.6
+                self.StandardStandingTime=3600
+            elif min(celltemp)>=5:
+                self.tempweight=0.1
+                self.StandardStandingTime=7200
+            else:
+                self.tempweight=0
+                self.StandardStandingTime=10800
+        else:
+            if min(celltemp)>=20:
+                self.tempweight=1
+                self.StandardStandingTime=900
+            elif min(celltemp)>=10:
+                self.tempweight=0.8
+                self.StandardStandingTime=1200
+            elif min(celltemp)>=5:
+                self.tempweight=0.3
+                self.StandardStandingTime=1800
+            else:
+                self.tempweight=0.1
+                self.StandardStandingTime=3600
+
+    def _deltsoc_weight(self,deltsoc):   #SOH weight corresponding to the SOC swing
+        if deltsoc>60:   
+            deltsoc_weight=1
+        elif deltsoc>50:
+            deltsoc_weight=0.9
+        elif deltsoc>40:
+            deltsoc_weight=0.7
+        elif deltsoc>30:
+            deltsoc_weight=0.5
+        elif deltsoc>20:
+            deltsoc_weight=0.3
+        else:
+            deltsoc_weight=0
+        return deltsoc_weight
+
+    def _cellvolt_get(self,num): #cell voltages of the given row, in V
+        cellvolt=[]
+        for j in range(1, self.param.CellVoltNums+1): 
+            s = str(j)
+            cellvolt.append(self.df_bms.loc[num,'单体电压' + s]/1000)
+        return cellvolt
+                
+    def _ncmsoh_chrg(self):     #SOH from NCM charging data
+        self._chrgdata()
+        self.getdata()
+        ChgStartValid=[]
+        ChgEndValid=[]
+        tempweightlist=[]
+        for i in range(min(len(self.ChgStart),len(self.ChgEnd))): 
+            self._celltemp_weight(self.ChgEnd[i])               #获取温度对应的静置时间及权重            
+            for k in range(self.ChgStart[i],self.ChgEnd[i]):   #去除电流0点
+                if self.packcrnt[k]<-0.5 and self.packcrnt[k+1]>-0.5 and self.packcrnt[k+2]>-0.5 and self.packcrnt[k+3]>-0.5:
+                    self.ChgEnd[i]=k
+   
+            #筛选满足2点法计算的数据
+            StandingTime=0
+            StandingTime1=0
+            if self.bms_soc[self.ChgEnd[i]]>70 and self.bms_soc[self.ChgStart[i]]<50:
+                for m in range(min(len(self.packcrnt)-self.ChgEnd[i]-2,self.ChgStart[i]-2)):
+                    if abs(self.packcrnt[self.ChgStart[i] - m - 1]) < 0.5:
+                        StandingTime = StandingTime + (self.bmstime[self.ChgStart[i] - m] - self.bmstime[self.ChgStart[i] - m - 1]).total_seconds()
+                    if abs(self.packcrnt[self.ChgEnd[i] + m + 1]) < 0.5:
+                        StandingTime1 = StandingTime1 + (self.bmstime[self.ChgEnd[i] + m + 1] - self.bmstime[self.ChgEnd[i] + m]).total_seconds()
+                    if StandingTime > self.StandardStandingTime and StandingTime1>self.StandardStandingTime and ((self.bmstime[self.ChgEnd[i]]-self.bmstime[self.ChgStart[i]]).total_seconds())/(self.ChgEnd[i]-self.ChgStart[i])<60:  #筛选静置时间>15min且慢充过程丢失数据少
+                        if abs(self.packcrnt[self.ChgEnd[i] + m + 2])>0.5 or m==len(self.packcrnt)-self.ChgEnd[i]-3: #如果电流<0.5,继续寻找充电后的静置电压,直到末尾
+                            ChgStartValid.append(self.ChgStart[i])
+                            ChgEndValid.append(self.ChgEnd[i]+m)
+                            tempweightlist.append(self.tempweight)
+                            break
+                    if abs(self.packcrnt[self.ChgStart[i] - m - 2])>0.5 and abs(self.packcrnt[self.ChgEnd[i] + m + 2])>0.5:
+                        break
+
+        if len(ChgStartValid)>0:   #两点法计算Soh
+            df_res=pd.DataFrame(columns=('time','sn','soh','soh1'))
+            soh2=[]
+            if not self.df_soh.empty:  #start from the SOH computed last time, if any
+                soh_init=list(self.df_soh['soh'])[-1]
+            else:
+                soh_init=list(self.bms_soh)[-1]
+
+            for i in range(len(ChgStartValid)):
+                Ah=0
+                for j in range(ChgStartValid[i],ChgEndValid[i]):  #计算Ah
+                    Step=(self.bmstime[j+1]-self.bmstime[j]).total_seconds()
+                    Ah=Ah-self.packcrnt[j+1]*Step/3600
+
+                for j in range(1, self.param.CellVoltNums+1):    #计算每个电芯的Soh
+                    s = str(j)
+                    OCVStart=self.df_bms.loc[ChgStartValid[i]-2,'单体电压' + s]/1000
+                    OCVEnd=self.df_bms.loc[ChgEndValid[i]-1,'单体电压' + s]/1000
+                    #soh
+                    ocv_Soc1=np.interp(OCVStart,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                    ocv_Soc2=np.interp(OCVEnd,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                
+                    soh2.append(Ah*100/((ocv_Soc2-ocv_Soc1)*0.01*self.param.Capacity))
+                soh1=np.mean(soh2)
+                delt_ocv_soc=ocv_Soc2-ocv_Soc1
+                deltsoc_weight=self._deltsoc_weight(delt_ocv_soc)#capture the returned weight; the method does not set an attribute
+                soh_res=soh_init*(1-deltsoc_weight*tempweightlist[i])+soh1*deltsoc_weight*tempweightlist[i]
+                soh_init=soh_res
+                df_res.loc[i]=[self.bmstime[ChgStartValid[i]],self.sn,soh_res,soh1]
+            
+            return df_res
+        return pd.DataFrame()
+
+    def _ncmsoh_twopoint(self):
+        standingpoint_st=[]
+        standingpoint_sp=[]
+        tempweightlist=[]
+        standingtime=0
+        for i in range(3,len(self.df_bms)-3):
+
+            if abs(self.packcrnt[i]) < 0.3:     #电流为0
+                delttime=(self.bmstime[i]-self.bmstime[i-1]).total_seconds()
+                standingtime=standingtime+delttime
+                self._celltemp_weight(i)     #获取不同温度对应的静置时间
+
+                if standingtime>self.StandardStandingTime:      #静置时间满足要求
+                    if standingpoint_st:                        
+                        if len(standingpoint_st)>len(standingpoint_sp):     #开始时刻已获取,结束时刻未获取
+                            cellvolt_now=self._cellvolt_get(i-1)     #获取当前行电压数据
+                            minocv_socnow=np.interp(min(cellvolt_now),self.param.LookTab_OCV,self.param.LookTab_SOC)
+                            cellvolt_st=self._cellvolt_get(standingpoint_st[-1])   #获取开始时刻静置后的电压数据
+                            minocv_socst=np.interp(min(cellvolt_st),self.param.LookTab_OCV,self.param.LookTab_SOC)
+
+                            if abs(minocv_socst-minocv_socnow)>=40:   #SOC swing between the two rest points is at least 40 %
+                                if abs(self.packcrnt[i+2])>=0.3:    #current resumes (>=0.3 A) right after, so store this index
+                                    standingpoint_sp.append(i)
+                                    standingpoint_st.append(i)
+                                    tempweightlist.append(self.tempweight)
+                                    standingtime=0
+                                    continue
+                                else:
+                                    if standingtime>3600 or i==len(self.df_bms)-2:   #仍处于静置,但静置时间>1h,则直接获取sp时刻,或者到了数据末尾
+                                        standingpoint_sp.append(i)
+                                        tempweightlist.append(self.tempweight)
+                                        continue
+                            else:
+                                if minocv_socst<50 and minocv_socnow<minocv_socst and abs(self.packcrnt[i+2])>=0.3:
+                                    standingpoint_st[-1]=i
+                                    standingtime=0
+                                    continue
+                                elif minocv_socst>=50 and minocv_socnow>minocv_socst and abs(self.packcrnt[i+2])>=0.3:
+                                    standingpoint_st[-1]=i
+                                    standingtime=0
+                                    continue
+                                else:
+                                    continue
+                        else:
+                            if abs(self.packcrnt[i+2])>=0.5:
+                                standingpoint_st.append(i)
+                                standingtime=0
+                                continue
+                            else:
+                                continue
+                    else:
+                        if abs(self.packcrnt[i+2])>0.5:
+                            standingpoint_st.append(i)
+                            standingtime=0
+                            continue
+                        else:
+                            continue
+                else:
+                    continue
+            else:
+                standingtime=0
+                continue
+
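+        # For each (start, end) standing-point pair collected above, the block below integrates the
+        # pack current into Ah, cross-checks it against the BMS charge/discharge accumulators, reads
+        # the OCV-based SOC of every cell at both rest points, and estimates per-cell SOH as
+        # accumulated Ah * 100 / (delta SOC% * 0.01 * rated capacity), blended with the previous
+        # result through a confidence weight (temperature, rest quality, elapsed days, Ah consistency).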
+        if standingpoint_sp:
+            self.getdata()  #fetch the SOH values already stored in the database
+            column_name=['time_st','time_sp','sn','method','soh','cellsoh']
+            df_res=pd.DataFrame(columns=column_name)
+
+            for i in range(len(standingpoint_sp)):
+                cellocv_st=self._cellvolt_get(standingpoint_st[i])    #cell OCVs at the standing points
+                cellocv_sp=self._cellvolt_get(standingpoint_sp[i])
+                accumtime=self.accumtime.to_list()  #timestamps of the accumulator data
+                timepoint_bms_st=self.bmstime[standingpoint_st[i]]   #timestamps of the standing points
+                timepoint_bms_sp=self.bmstime[standingpoint_sp[i]]
+                timepoint_accum_st=bisect.bisect(accumtime,timepoint_bms_st)   #accumulator samples closest to the standing points
+                timepoint_accum_sp=bisect.bisect(accumtime,timepoint_bms_sp)
+                if timepoint_accum_sp>=len(accumtime):  #keep the index inside the data range
+                    timepoint_accum_sp=len(accumtime)-1
+                
+                ah_packcrnt_dis=0
+                ah_packcrnt_chg=0
+                for j in range(standingpoint_st[i]+2,standingpoint_sp[i]): #integrate the pack current into Ah
+                    Step=(self.bmstime[j+1]-self.bmstime[j]).total_seconds()
+                    if self.packcrnt[j+1]>=0:
+                        ah_packcrnt_dis=ah_packcrnt_dis+self.packcrnt[j+1]*Step
+                    else:
+                        ah_packcrnt_chg=ah_packcrnt_chg-self.packcrnt[j+1]*Step
+                ah_packcrnt_chg=ah_packcrnt_chg/3600
+                ah_packcrnt_dis=ah_packcrnt_dis/3600
+                ah_packcrnt=ah_packcrnt_chg-ah_packcrnt_dis     #net Ah between the two standing points: negative = discharge, positive = charge
+
+                ah_accum_dis=self.df_accum.loc[timepoint_accum_sp,'累计放电电量']-self.df_accum.loc[timepoint_accum_st,'累计放电电量']  #discharged Ah between the two standing points (from the BMS accumulator)
+                ah_accum_chg=self.df_accum.loc[timepoint_accum_sp,'累计充电电量']-self.df_accum.loc[timepoint_accum_st,'累计充电电量']  #charged Ah between the two standing points (from the BMS accumulator)
+                ah_accum_tatol=ah_accum_chg-ah_accum_dis  #net accumulator Ah: negative = discharge, positive = charge
+                ah_accum=ah_packcrnt
+
+                delt_days=(self.bmstime[standingpoint_sp[i]]-self.bmstime[standingpoint_st[i]]).total_seconds()/(3600*24)
+                if delt_days<=1: #weight for the time span between the two standing points
+                    soh_weight1=1
+                elif delt_days<=2:
+                    soh_weight1=0.7
+                elif delt_days<=3:
+                    soh_weight1=0.4
+                else:
+                    soh_weight1=0
+                
+                if ah_packcrnt_dis<self.param.Capacity: #weight for the discharged Ah between the two points
+                    soh_weight1=(1-ah_packcrnt_dis/self.param.Capacity*1.5)*soh_weight1
+                else:
+                    soh_weight1=0.1
+
+                if self.param.Capacity*0.7*0.4 < abs(ah_accum_tatol) < self.param.Capacity:    #weight for the accumulated Ah
+                    if abs(ah_accum_tatol-ah_packcrnt)<self.param.Capacity/20:
+                        soh_weight1=soh_weight1*1
+                    elif abs(ah_accum_tatol-ah_packcrnt) < self.param.Capacity/10:
+                        soh_weight1=soh_weight1*0.8
+                    else:
+                        soh_weight1=soh_weight1*0.5
+                else:
+                    if self.param.Capacity*0.7*0.4< abs(ah_packcrnt) <self.param.Capacity:
+                        soh_weight1=soh_weight1*0.5
+                    else:
+                        soh_weight1=0
+
+                cellsoh=[]
+                for j in range(self.param.CellVoltNums):    #compute an SOH value for every cell
+                    ocv_soc1=np.interp(cellocv_st[j],self.param.LookTab_OCV,self.param.LookTab_SOC)
+                    ocv_soc2=np.interp(cellocv_sp[j],self.param.LookTab_OCV,self.param.LookTab_SOC)
+                    delt_ocv_soc=ocv_soc2-ocv_soc1
+                    delt_ocv_soc_weight=self._deltsoc_weight(abs(delt_ocv_soc))
+                    soh_weight=soh_weight1*tempweightlist[i]*delt_ocv_soc_weight
+                    cellsoh_init=ah_accum*100/((ocv_soc2-ocv_soc1)*0.01*self.param.Capacity)
+
+                    if cellsoh_init>55 and cellsoh_init<120:    #accept only SOH values in the plausible range
+                        if len(df_res)<1:
+                            if not self.df_soh.empty:
+                                cellsoh_last=eval(self.df_soh.loc[len(self.df_soh)-1,'cellsoh'])
+                                if soh_weight>1/abs(cellsoh_init-cellsoh_last[j]):
+                                    soh_weight=1/abs(cellsoh_init-cellsoh_last[j])
+                                    cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last[j]*(1-soh_weight)
+                                else:
+                                    cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last[j]*(1-soh_weight)
+                            else:
+                                cellsoh_cal=cellsoh_init*soh_weight+100*(1-soh_weight)
+                        else:
+                            cellsoh_last=eval(df_res.loc[len(df_res)-1,'cellsoh'])
+                            if soh_weight>1/abs(cellsoh_init-cellsoh_last[j]):
+                                soh_weight=1/abs(cellsoh_init-cellsoh_last[j])
+                                cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last[j]*(1-soh_weight)
+                            else:
+                                cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last[j]*(1-soh_weight)
+                        cellsoh_cal=eval(format(cellsoh_cal,'.1f'))
+                        cellsoh.append(cellsoh_cal)
+                    else:
+                        cellsoh=[]
+                        break
+                if cellsoh:
+                    soh=min(cellsoh)
+                    soh_list=[timepoint_bms_st, timepoint_bms_sp, self.sn, 1, soh, str(cellsoh)]
+                    df_res.loc[len(df_res)]=soh_list
+                else:
+                    continue
+            if df_res.empty:
+                return pd.DataFrame()
+            else:
+                return df_res
+        return pd.DataFrame()
+
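+    # LFP SOH combines two estimators: (1) a two-point Ah count between a rest below the OCV
+    # inflexion and a full-charge point, and (2) a dV/dQ peak detected during charging, where SOH is
+    # the Ah charged from the peak to full charge divided by (FullChrgSoc - PeakSoc)% of rated capacity.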
+    def _lfpsoh(self):
+        standingpoint_st=[]
+        standingpoint_sp=[]
+        tempweightlist1=[]
+        cellmaxvolt_number1=[]
+        standingtime=0
+        chrg_start=[]
+        chrg_end=[]
+        tempweightlist2=[]
+        cellmaxvolt_number2=[]
+        charging=0
+
+        for i in range(3,len(self.df_bms)-3):
+
+            #collect data for the two-point method - start
+            if abs(self.packcrnt[i]) < 0.2:     #resting outside the OCV plateau
+                delttime=(self.bmstime[i]-self.bmstime[i-1]).total_seconds()
+                standingtime=standingtime+delttime
+                self._celltemp_weight(i)     #set the required standing time for the current cell temperature
+
+                if standingtime>self.StandardStandingTime and abs(self.packcrnt[i+2])>0.1:  #standing time long enough and current about to exceed 0.1 A
+                    standingtime=0
+                    cellvolt_now=self._cellvolt_get(i)
+                    if max(cellvolt_now)<self.param.OcvInflexionBelow:      #highest cell voltage below the lower OCV inflexion point
+                        if standingpoint_st:
+                            if len(standingpoint_st)>len(standingpoint_sp):
+                                if self.packcrnt[standingpoint_st[-1]]<-1:     #was the previous start point a full charge?
+                                    standingpoint_sp.append(i)
+                                    standingpoint_st.append(i)
+                                    tempweightlist1.append(self.tempweight)
+                                else:
+                                    standingpoint_st[-1]=i
+                                    tempweightlist1[-1]=self.tempweight
+                            else:
+                                standingpoint_st.append(i)
+                                tempweightlist1.append(self.tempweight)
+                        else:
+                            standingpoint_st.append(i)
+                            tempweightlist1.append(self.tempweight)
+                    else:
+                        pass
+                else:
+                    pass
+            
+            elif self.packcrnt[i]<=-1 and self.packcrnt[i-1]<=-1 and self.packcrnt[i+1]<=-1 and self.packcrnt[i+2]>-1:    #end of a full charge
+                standingtime=0
+                self._celltemp_weight(i)
+                cellvolt_now=self._cellvolt_get(i)
+                if max(cellvolt_now)>self.param.CellFullChrgVolt:
+                    if standingpoint_st:
+                        if len(standingpoint_st)>len(standingpoint_sp):
+                            if abs(self.packcrnt[standingpoint_st[-1]])<0.5:     #was the previous start point a below-inflexion rest?
+                                standingpoint_sp.append(i)
+                                standingpoint_st.append(i)
+                                tempweightlist1.append(self.tempweight)
+                                cellmaxvolt_number1.append(cellvolt_now.index(max(cellvolt_now))) #highest-voltage cell index for the pair just closed
+                                cellmaxvolt_number1.append(cellvolt_now.index(max(cellvolt_now))) #and again for the newly opened pair, keeping the list aligned with standingpoint_st
+                            else:
+                                standingpoint_st[-1]=i
+                                tempweightlist1[-1]=self.tempweight
+                                cellmaxvolt_number1[-1]=cellvolt_now.index(max(cellvolt_now))
+                    else:
+                        standingpoint_st.append(i)
+                        tempweightlist1.append(self.tempweight)
+                        cellmaxvolt_number1.append(cellvolt_now.index(max(cellvolt_now)))
+                else:
+                    pass
+            
+            else:
+                standingtime=0
+                pass
+            #collect data for the two-point method - end
+
+            #collect data for the DV/DQ algorithm - start
+            if i==3 and self.packcrnt[1]<=-1 and self.packcrnt[2]<=-1 and self.packcrnt[3]<=-1:
+                self._celltemp_weight(i)
+                chrg_start.append(i)
+                tempweightlist2.append(self.tempweight)
+                charging=1
+            elif self.packcrnt[i-1]>-1 and self.packcrnt[i]<=-1 and self.packcrnt[i+1]<=-1 and self.packcrnt[i+2]<=-1:     #charging starts
+                if self.bms_soc[i]<45:
+                    self._celltemp_weight(i)
+                    charging=1
+                    if len(chrg_start)>len(chrg_end):
+                        chrg_start[-1]=i
+                        tempweightlist2[-1]=self.tempweight
+                    else:
+                        chrg_start.append(i)
+                        tempweightlist2.append(self.tempweight)
+                else:
+                    pass
+            else:
+                pass
+
+            if charging==1: #charging in progress
+                if (self.bmstime[i+1]-self.bmstime[i]).total_seconds()>180:  #discard this charge if the sampling gap exceeds 180 s
+                    chrg_start.remove(chrg_start[-1])
+                    tempweightlist2.remove(tempweightlist2[-1])
+                    charging=0
+                    continue
+                elif self.packcrnt[i]<=-1 and self.packcrnt[i+1]<=-1 and  self.packcrnt[i+2]>-1:  #current about to drop: possible end of charge
+                    cellvolt_now=self._cellvolt_get(i+1)
+                    if max(cellvolt_now)>self.param.CellFullChrgVolt:   #voltage above the full-charge threshold
+                        chrg_end.append(i+1)
+                        cellmaxvolt_number2.append(cellvolt_now.index(max(cellvolt_now)))   #index of the highest-voltage cell
+                        charging=0
+                        continue
+                    else:
+                        pass
+                elif self.packcrnt[i+1]>-0.1 and self.packcrnt[i+2]>-0.1 and self.packcrnt[i+3]>-0.1:   #charging has ended
+                    charging=0
+                    if len(chrg_start)>len(chrg_end):
+                        chrg_start.remove(chrg_start[-1])
+                        tempweightlist2.remove(tempweightlist2[-1])
+                        continue
+                    else:
+                        continue
+                elif i==len(self.packcrnt)-4 and self.packcrnt[i+1]<-1 and self.packcrnt[i+2]<-1:
+                    charging=0
+                    if len(chrg_start)>len(chrg_end):
+                        cellvolt_now=self._cellvolt_get(i)
+                        if max(cellvolt_now)>self.param.CellFullChrgVolt:   #voltage above the full-charge threshold
+                            chrg_end.append(i)
+                            cellmaxvolt_number2.append(cellvolt_now.index(max(cellvolt_now)))   #index of the highest-voltage cell
+                            continue
+                        else:
+                            chrg_start.remove(chrg_start[-1])
+                            tempweightlist2.remove(tempweightlist2[-1])
+                            continue
+                    else:
+                        continue   
+            else:
+                pass
+            #collect data for the DV/DQ algorithm - end
+        
+        if standingpoint_sp or chrg_end:       #start the SOH calculation
+            self.getdata()  #fetch the SOH values already stored in the database
+            column_name=['time_st','time_sp','sn','method','soh','cellsoh']
+            df_res=pd.DataFrame(columns=column_name)
+        
+            if standingpoint_sp:    #two-point SOH
+                for i in range(len(standingpoint_sp)):  #each pair ends either at a full charge or at a below-inflexion rest
+                    if self.packcrnt[standingpoint_sp[i]]<=-1:
+                        cellocv_st=self._cellvolt_get(standingpoint_st[i])    
+                        ocv_soc1=np.interp(cellocv_st[cellmaxvolt_number1[i]],self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        ocv_soc2=self.param.FullChrgSoc
+                    else:
+                        cellocv_sp=self._cellvolt_get(standingpoint_sp[i])
+                        ocv_soc1=self.param.FullChrgSoc
+                        ocv_soc2=np.interp(cellocv_sp[cellmaxvolt_number1[i]],self.param.LookTab_OCV,self.param.LookTab_SOC)
+
+                    cellocv_sp=self._cellvolt_get(standingpoint_sp[i])
+                    accumtime=self.accumtime.to_list()  #timestamps of the accumulator data
+                    timepoint_bms_st=self.bmstime[standingpoint_st[i]]   #timestamps of the standing points
+                    timepoint_bms_sp=self.bmstime[standingpoint_sp[i]]
+                    timepoint_accum_st=bisect.bisect(accumtime,timepoint_bms_st)   #accumulator samples closest to the standing points
+                    timepoint_accum_sp=bisect.bisect(accumtime,timepoint_bms_sp)
+                    if timepoint_accum_sp>=len(accumtime):  #keep the index inside the data range
+                        timepoint_accum_sp=len(accumtime)-1
+                    
+                    ah_packcrnt_dis=0
+                    ah_packcrnt_chg=0
+                    for j in range(standingpoint_st[i]+2,standingpoint_sp[i]+1): #integrate the pack current into Ah
+                        Step=(self.bmstime[j+1]-self.bmstime[j]).total_seconds()
+                        if self.packcrnt[j+1]>=0:
+                            ah_packcrnt_dis=ah_packcrnt_dis+self.packcrnt[j+1]*Step
+                        else:
+                            ah_packcrnt_chg=ah_packcrnt_chg-self.packcrnt[j+1]*Step
+                    ah_packcrnt_chg=ah_packcrnt_chg/3600
+                    ah_packcrnt_dis=ah_packcrnt_dis/3600          
+                    ah_packcrnt=ah_packcrnt_chg-ah_packcrnt_dis     #net Ah between the two standing points: negative = discharge, positive = charge
+
+                    ah_accum_dis=self.df_accum.loc[timepoint_accum_sp,'累计放电电量']-self.df_accum.loc[timepoint_accum_st,'累计放电电量']  #discharged Ah between the two standing points (from the BMS accumulator)
+                    ah_accum_chg=self.df_accum.loc[timepoint_accum_sp,'累计充电电量']-self.df_accum.loc[timepoint_accum_st,'累计充电电量']  #charged Ah between the two standing points (from the BMS accumulator)
+                    ah_accum_tatol=ah_accum_chg-ah_accum_dis  #net accumulator Ah: negative = discharge, positive = charge
+                    ah_accum=ah_accum_tatol
+
+                    delt_days=(self.bmstime[standingpoint_sp[i]]-self.bmstime[standingpoint_st[i]]).total_seconds()/(3600*24)
+                    if delt_days<=1: #weight for the time span between the two standing points
+                        soh_weight=1
+                    elif delt_days<=2:
+                        soh_weight=0.7
+                    elif delt_days<=3:
+                        soh_weight=0.4
+                    else:
+                        soh_weight=0
+                    
+                    if self.param.Capacity*0.65*0.7 < abs(ah_packcrnt) < self.param.Capacity:    #weight for the accumulated Ah
+                        if abs(ah_accum_tatol-ah_packcrnt)<self.param.Capacity/20:
+                            soh_weight=soh_weight*1
+                        elif abs(ah_accum_tatol-ah_packcrnt)<self.param.Capacity/10:
+                            soh_weight=soh_weight*0.8
+                        else:
+                            soh_weight=soh_weight*0.5
+                    else:
+                        if self.param.Capacity*0.65*0.7 < abs(ah_accum) < self.param.Capacity:
+                            soh_weight=soh_weight*0.5
+                        else:
+                            soh_weight=0
+
+                    delt_ocv_soc=ocv_soc2-ocv_soc1
+                    delt_ocv_soc_weight=self._deltsoc_weight(abs(delt_ocv_soc))
+                    soh_weight=soh_weight*tempweightlist1[i]*delt_ocv_soc_weight*0.5
+                    cellsoh_init=ah_accum*100/((ocv_soc2-ocv_soc1)*0.01*self.param.Capacity)
+
+                    if cellsoh_init>65 and cellsoh_init<115:    #accept only SOH values in the plausible range
+                        if len(df_res)<1:
+                            if not self.df_soh.empty:
+                                cellsoh_last=self.df_soh.loc[len(self.df_soh)-1,'soh']
+                                if soh_weight>1/abs(cellsoh_init-cellsoh_last):
+                                    soh_weight=1/abs(cellsoh_init-cellsoh_last)
+                                    cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last*(1-soh_weight)
+                                else:
+                                    cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last*(1-soh_weight)
+                            else:
+                                cellsoh_cal=cellsoh_init*soh_weight+100*(1-soh_weight)
+                        else:
+                            cellsoh_last=df_res.loc[len(df_res)-1,'soh']
+                            if soh_weight>1/abs(cellsoh_init-cellsoh_last):
+                                soh_weight=1/abs(cellsoh_init-cellsoh_last)
+                                cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last*(1-soh_weight)
+                            else:
+                                cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last*(1-soh_weight)
+                        
+                        cellsoh_cal=eval(format(cellsoh_cal,'.1f'))
+                        soh_list=[timepoint_bms_st, timepoint_bms_sp, self.sn, 1, cellsoh_cal, str(cellsoh_cal)]
+                        df_res.loc[len(df_res)]=soh_list
+                    else:
+                        continue
+            else:
+                pass
+
+            if chrg_end:
+                for i in range(len(chrg_end)):
+                    cellvolt_max = self.df_bms['单体电压' + str(cellmaxvolt_number2[i]+1)] / 1000     #voltage of the highest cell during this charge
+                    cellvolt=self._np_move_avg(cellvolt_max, 3, mode="same")     #moving-average filter on the voltage
+
+                    Ah = 0  #initialise accumulators
+                    Volt = cellvolt[chrg_start[i]]
+                    DV_Volt=[]
+                    DQ_Ah = []
+                    DVDQ = []
+                    time2 = []
+                    soc2 = []
+                    Ah_tatal=[0]
+                    xvolt=[]
+                    #compute the DV and DQ increments
+                    for j in range(chrg_start[i],chrg_end[i]):
+                        Step=(self.bmstime[j+1]-self.bmstime[j]).total_seconds()
+                        Ah=Ah-self.packcrnt[j]*Step/3600
+                        if (cellvolt[j]-Volt)>0.0009 and Ah>0:
+                            Ah_tatal.append(Ah_tatal[-1]+Ah)
+                            DQ_Ah.append(Ah)
+                            DV_Volt.append(cellvolt[j]-Volt)
+                            DVDQ.append((DV_Volt[-1])/DQ_Ah[-1])
+                            xvolt.append(cellvolt[j])
+                            Volt=cellvolt[j]
+                            Ah = 0
+                            time2.append(self.bmstime[j])
+                            soc2.append(self.bms_soc[j])
+
+                    #trim the samples near the start (15 min) and end (20 min) of the charge
+                    df_Data1 = pd.DataFrame({'time': time2,
+                                            'SOC': soc2,
+                                            'DVDQ': DVDQ,
+                                            'Ah_tatal': Ah_tatal[:-1],
+                                            'DQ_Ah':DQ_Ah,
+                                            'DV_Volt':DV_Volt,
+                                            'XVOLT':xvolt})
+                    start_time=df_Data1.loc[0,'time']
+                    start_time=start_time+datetime.timedelta(seconds=900)
+                    end_time=df_Data1.loc[len(time2)-1,'time']
+                    end_time=end_time-datetime.timedelta(seconds=1200)
+                    if soc2[0]<40:
+                        df_Data1=df_Data1[(df_Data1['SOC']>43) & (df_Data1['time']<end_time)]
+                    else:
+                        df_Data1=df_Data1[(df_Data1['time']>start_time) & (df_Data1['time']<end_time)]
+            
+                    # ax1 = plt.subplot(3, 1, 1)
+                    # plt.plot(df_Data1['XVOLT'],df_Data1['DVDQ'],'r*-')
+                    # plt.xlabel('Volt/V')
+                    # plt.ylabel('DV/DQ')
+                    # plt.legend()
+                    # ax1 = plt.subplot(3, 1, 2)
+                    # plt.plot(df_Data1['SOC'],df_Data1['XVOLT'],'y*-')
+                    # plt.xlabel('SOC/%')
+                    # plt.ylabel('Volt/V')
+                    # plt.legend()
+                    # ax1 = plt.subplot(3, 1, 3)
+                    # plt.plot(df_Data1['SOC'], df_Data1['DVDQ'], 'r*-')
+                    # plt.xlabel('SOC/%')
+                    # plt.ylabel('DV/DQ')
+                    # plt.legend()
+                    # plt.show()
+
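+                    # The dV/dQ peak is assumed to sit at self.param.PeakSoc, so the Ah charged from
+                    # the peak to full charge should equal (FullChrgSoc - PeakSoc)% of rated capacity;
+                    # the ratio of the two gives cellsoh_init below.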
+                    #find the dV/dQ peak and compute SOH and its confidence
+                    if len(df_Data1)>1:
+                        PeakIndex=df_Data1['DVDQ'].idxmax()
+                        #keep only samples within ±0.5% SOC of the peak
+                        df_Data2=df_Data1[(df_Data1['SOC']>(df_Data1['SOC'][PeakIndex]-0.5)) & (df_Data1['SOC']<(df_Data1['SOC'][PeakIndex]+0.5))]
+                        if len(df_Data2)>2:
+                            Ah_tatal1 = df_Data1['Ah_tatal']
+                            DVDQ = df_Data1['DVDQ']
+                            soc2 = df_Data1['SOC']
+                            xvolt = df_Data1['XVOLT']
+                            if soc2[PeakIndex]>43 and soc2[PeakIndex]<90:
+                                cellsoh_init=(Ah_tatal[-1]-Ah_tatal1[PeakIndex]) * 100 / ((self.param.FullChrgSoc - self.param.PeakSoc) * 0.01 * self.param.Capacity)
+                                if cellsoh_init<95:
+                                    cellsoh_init=cellsoh_init*0.3926+58.14
+                                else:
+                                    pass
+                            else:
+                                continue
+                        else:
+                            df_Data1=df_Data1.drop([PeakIndex])
+                            PeakIndex = df_Data1['DVDQ'].idxmax()
+                            df_Data2 = df_Data1[(df_Data1['SOC'] > (df_Data1['SOC'][PeakIndex] - 0.5)) & (df_Data1['SOC'] < (df_Data1['SOC'][PeakIndex] + 0.5))]
+                            if len(df_Data2) > 2:
+                                Ah_tatal1 = df_Data1['Ah_tatal']
+                                DVDQ = df_Data1['DVDQ']
+                                soc2 = df_Data1['SOC']
+                                xvolt = df_Data1['XVOLT']
+                                if soc2[PeakIndex]>40 and soc2[PeakIndex]<90:
+                                    cellsoh_init=(Ah_tatal[-1]-Ah_tatal1[PeakIndex]) * 100 / ((self.param.FullChrgSoc - self.param.PeakSoc) * 0.01 * self.param.Capacity)
+                                    if cellsoh_init<95:
+                                        cellsoh_init=cellsoh_init*0.3926+58.14
+                                    else:
+                                        pass
+                                else:
+                                    continue
+                            else:
+                                continue
+                        
+                        soh_weight=tempweightlist2[i]*0.25
+                        if cellsoh_init>65 and cellsoh_init<115:    #accept only SOH values in the plausible range
+                            if len(df_res)<1:
+                                if not self.df_soh.empty:
+                                    cellsoh_last=self.df_soh.loc[len(self.df_soh)-1,'soh']
+                                    if soh_weight>1/abs(cellsoh_init-cellsoh_last):
+                                        soh_weight=1/abs(cellsoh_init-cellsoh_last)
+                                        cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last*(1-soh_weight)
+                                    else:
+                                        cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last*(1-soh_weight)
+                                else:
+                                    cellsoh_cal=cellsoh_init*soh_weight+100*(1-soh_weight)
+                            else:
+                                cellsoh_last=df_res.loc[len(df_res)-1,'soh']
+                                if soh_weight>1/abs(cellsoh_init-cellsoh_last):
+                                    soh_weight=1/abs(cellsoh_init-cellsoh_last)
+                                    cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last*(1-soh_weight)
+                                else:
+                                    cellsoh_cal=cellsoh_init*soh_weight + cellsoh_last*(1-soh_weight)
+                            
+                            cellsoh_cal=eval(format(cellsoh_cal,'.1f'))
+                            soh_list=[self.bmstime[chrg_start[i]], self.bmstime[chrg_end[i]], self.sn, 2, cellsoh_cal, str(cellsoh_cal)]
+                            df_res.loc[len(df_res)]=soh_list
+                        else:
+                            continue
+                    else:
+                        continue
+            else:
+                pass
+
+            if df_res.empty:
+                return pd.DataFrame()
+            else:
+                return df_res
+        return pd.DataFrame()
+
+
+
+
+
+
+
+
+            
+                
+

+ 61 - 0
LIB/MIDDLE/CellStateEstimation/SOH/V1_0_0/DBDownload.py

@@ -0,0 +1,61 @@
+import pymysql
+import time
+import pandas as pd
+
+class DBDownload:
+
+    def __init__(self, host='', port='', db='', user='', password=''):
+        self.host = host
+        self.port = port
+        self.db = db
+        self.user = user
+        self.password = password
+        pass
+
+    def __enter__(self):
+        self.connect()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+    def connect(self):
+        conn_success_flag = 0
+        while not conn_success_flag:
+            try:
+                self.conn = pymysql.connect(host=self.host, port=self.port, user=self.user, password=self.password, database=self.db)
+            except Exception as e:
+                conn_success_flag = 0
+                print("数据库连接失败 :{}".format(e))
+                time.sleep(5)
+            else:
+                conn_success_flag = 1
+                print('数据库连接成功!')
+                self.cursor = self.conn.cursor()
+
+    def getdata(self,*param,tablename,sn):
+        print('数据获取中......')
+        param=list(param)
+        columns=''
+        for i in range(len(param)):
+            if i<1:
+                columns=columns+param[i]
+            else:
+                columns=columns+','+param[i]
+        # self.cursor.execute("select %s from %s where time between '%s' and '%s'" %(columns,tablename,st,sp))
+        self.cursor.execute("select %s from %s where sn='%s' order by add_time desc limit 1" %(columns,tablename,sn))
+        res = self.cursor.fetchall()
+        df_res = pd.DataFrame(res, columns=param)
+        df_res = df_res.reset_index(drop=True)
+        return(df_res)
+
+    def close(self):
+        try:
+            self.cursor.close()
+            self.conn.close()
+        except Exception as e:
+            print(e)
+        else:
+            print('数据库已断开连接!')
+    
+    
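DBDownload doubles as a context manager; a minimal usage sketch (the column list, table name, SN and the placeholder credentials are illustrative assumptions, mirroring main.py below):

    import DBDownload

    with DBDownload.DBDownload(host='...', port=3306, db='qx_cas', user='qx_read', password='...') as db:
        df_soh = db.getdata('time_st', 'time_sp', 'sn', 'method', 'soh', 'cellsoh',
                            tablename='soh_result', sn='PK50001A100000035')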

BIN
LIB/MIDDLE/CellStateEstimation/SOH/V1_0_0/SOH表单.xlsx


+ 24 - 0
LIB/MIDDLE/CellStateEstimation/SOH/V1_0_0/log.py

@@ -0,0 +1,24 @@
+import logging
+import traceback
+
+class Mylog:
+
+    def __init__(self,log_name,log_level):
+        self.name=log_name
+        self.level=log_level
+    
+    def logcfg(self):
+        Level=logging.ERROR
+        if len(self.level) > 0:
+            if self.level == 'debug':
+                Level=logging.DEBUG
+            elif self.level == 'info':
+                Level=logging.INFO
+            elif self.level == 'warning':
+                Level=logging.WARNING
+            else:
+                Level=logging.ERROR
+        logging.basicConfig(filename=self.name, level=Level,format='%(asctime)s - %(levelname)s - %(message)s')
+
+    def logopt(self,*info):
+        logging.error(info)
+        logging.error(traceback.format_exc())
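The entry scripts below use this logger in a fixed pattern; a condensed, runnable sketch (the SN string and the demo exception are illustrative):

    import log

    mylog = log.Mylog('log.txt', 'error')
    mylog.logcfg()
    try:
        raise IndexError('demo error')          # stands in for the per-SN processing in main.py
    except IndexError as e:
        print(repr(e))
        mylog.logopt('PK50001A100000035', e)    # records the arguments plus the traceback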

+ 73 - 0
LIB/MIDDLE/CellStateEstimation/SOH/main.py

@@ -0,0 +1,73 @@
+#coding=utf-8
+import os
+import sys
+import time
+import datetime
+import pandas as pd
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from urllib import parse
+
+import CBMSBatSoh
+import log
+from LIB.BACKEND import DBManager, Log
+# from LIB.MIDDLE import SignalMonitor
+
+host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
+port=3306
+db='qx_cas'
+user='qx_read'
+password='Qx@123456'
+tablename='soh_result'
+
+dbManager = DBManager.DBManager()
+if __name__ == "__main__":
+    SNdata_6040 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6040骑享')
+    SNdata_6060 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6060')
+    SNdata_4840 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='4840骑享')
+    SNdata_7250 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='7250')
+    SNnums_6060=SNdata_6060['SN号']
+    SNnums_6040=SNdata_6040['SN号']
+    SNnums_4840=SNdata_4840['SN号']
+    SNnums_7250=SNdata_7250['SN号']
+
+    SNnums=SNnums_6040.tolist()+SNnums_6060.tolist()+SNnums_4840.tolist()
+    now_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    now_time=datetime.datetime.strptime(now_time,'%Y-%m-%d %H:%M:%S')
+    start_time=now_time-datetime.timedelta(days=30)
+    end_time=str(now_time)
+    start_time=str(start_time)
+
+    #logging configuration
+    mylog=log.Mylog('log.txt','error')
+    mylog.logcfg()
+
+    for sn in SNnums:
+        try:
+            if 'PK500' in sn:
+                celltype=1 #6040 NCM cell
+            elif 'PK502' in sn:
+                celltype=2 #4840 NCM cell
+            elif 'PK504' in sn:
+                celltype=99    #60 Ah LFP cell
+            elif 'MGMLXN750' in sn:
+                celltype=3 #Lixin 50 Ah NCM cell
+            elif 'MGMCLN750' in sn:
+                celltype=4 #CATL 50 Ah NCM cell
+            else:
+                print('未找到对应电池编号!!!')
+                sys.exit()
+            dbManager = DBManager.DBManager()
+            df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms','accum'])
+            df_bms = df_data['bms']
+            df_accum=df_data['accum']
+            # df_bms.to_csv('BMS_'+sn+'.csv')
+            # df_accum.to_csv('BMS_accum_'+sn+'.csv')
+
+            BatSoh=CBMSBatSoh.BatSoh(sn,celltype,df_bms,df_accum,host, port, db, user, password, tablename)
+            df_res=BatSoh.batsoh()
+            df_res.to_csv('BMS_SOH_'+sn+'.csv',encoding='GB18030')
+             
+        except IndexError as e:
+            print(repr(e))
+            mylog.logopt(sn,e)
+            pass

+ 86 - 0
LIB/MIDDLE/CellStateEstimation/Uniform/V1_0_0/BatParam.py

@@ -0,0 +1,86 @@
+
+#battery pack parameters
+import sys
+
+class BatParam:
+
+    def __init__(self,celltype):
+
+        # if 'PK500' in sn:
+        #     self.celltype=1 #6040三元电芯
+        # elif 'PK502' in sn:
+        #     self.celltype=2 #4840三元电芯
+        # elif 'PK504' in sn:
+        #     self.celltype=99    #60ah林磷酸铁锂电芯
+        # elif 'MGMLXN750' in sn:
+        #     self.celltype=3 #力信50ah三元电芯
+        # elif 'MGMCLN750' in sn: 
+        #     self.celltype=4 #CATL 50ah三元电芯
+        # else:
+        #     print('未找到对应电池编号!!!')
+        #     sys.exit()
+
+        if celltype==1: #6040
+            self.Capacity = 41
+            self.PackFullChrgVolt=69.99
+            self.CellFullChrgVolt=4.2
+            self.CellVoltNums=17
+            self.CellTempNums=4
+            self.FullChrgSoc=98
+            self.PeakSoc=57
+            self.LookTab_SOC = [0,	    3.5348,	8.3581,	13.181,	18.004,	22.827,	27.651,	32.474,	37.297,	42.120,	46.944,	51.767,	56.590,	61.413,	66.237,	71.060,	75.883,	80.707,	85.530,	90.353,	95.176,	100]
+            self.LookTab_OCV = [3.3159,	3.4384,	3.4774,	3.5156,	3.5478,	3.5748,	3.6058,	3.6238,	3.638,	3.6535,	3.6715,	3.6951,	3.7279,	3.7757,	3.8126,	3.8529,	3.8969,	3.9446,	3.9946,	4.0491,	4.109,	4.183]
+        
+        elif celltype==2: #4840
+            self.Capacity = 41
+            self.PackFullChrgVolt=69.99
+            self.CellFullChrgVolt=4.2
+            self.CellVoltNums=14
+            self.CellTempNums=4
+            self.FullChrgSoc=98
+            self.PeakSoc=57
+            self.LookTab_SOC = [0,	    3.5348,	8.3581,	13.181,	18.004,	22.827,	27.651,	32.474,	37.297,	42.120,	46.944,	51.767,	56.590,	61.413,	66.237,	71.060,	75.883,	80.707,	85.530,	90.353,	95.176,	100]
+            self.LookTab_OCV = [3.3159,	3.4384,	3.4774,	3.5156,	3.5478,	3.5748,	3.6058,	3.6238,	3.638,	3.6535,	3.6715,	3.6951,	3.7279,	3.7757,	3.8126,	3.8529,	3.8969,	3.9446,	3.9946,	4.0491,	4.109,	4.183]
+        
+        elif celltype==3:
+            self.Capacity = 51
+            self.PackFullChrgVolt=80
+            self.CellFullChrgVolt=4.2
+            self.CellVoltNums=20
+            self.CellTempNums=4
+            self.FullChrgSoc=98
+            self.PeakSoc=57
+            self.LookTab_SOC = [0,	    5,	    10,	    15,	    20,	    25,	    30,	    35,	    40,	    45,	    50,	    55,	    60,	    65,	    70,	    75,	    80,	    85,	    90,	    95,	    100]
+            self.LookTab_OCV = [3.357, 	3.455, 	3.493, 	3.540, 	3.577, 	3.605, 	3.622, 	3.638, 	3.655, 	3.677, 	3.707, 	3.757, 	3.815, 	3.866, 	3.920, 	3.976, 	4.036, 	4.099, 	4.166, 	4.237, 	4.325]
+        
+        elif celltype==4:
+            self.Capacity = 50
+            self.PackFullChrgVolt=80
+            self.CellFullChrgVolt=4.2
+            self.CellVoltNums=20
+            self.CellTempNums=4
+            self.FullChrgSoc=98
+            self.PeakSoc=57
+            self.LookTab_SOC = [0,	    5,	    10,	    15,	    20,	    25,	    30,	    35,	    40,	    45,	    50,	    55,	    60,	    65,	    70,	    75,	    80,	    85,	    90,	    95,	    100]
+            self.LookTab_OCV = [3.152, 	3.397, 	3.438, 	3.481, 	3.523, 	3.560, 	3.586, 	3.604, 	3.620, 	3.638, 	3.661, 	3.693, 	3.748, 	3.803, 	3.853, 	3.903, 	3.953, 	4.006, 	4.063, 	4.121, 	4.183]
+        
+        elif celltype==99:   #60 Ah LFP cell
+            self.Capacity = 54
+            self.PackFullChrgVolt=69.99
+            self.CellFullChrgVolt=3.5
+            self.OcvInflexionBelow=3.285
+            self.OcvInflexion2=3.296
+            self.OcvInflexion3=3.328
+            self.OcvInflexionAbove=3.4
+            self.CellVoltNums=20
+            self.CellTempNums=4
+            self.FullChrgSoc=98
+            self.PeakSoc=60.5
+            self.LookTab_SOC = [0.00, 	2.40, 	6.38, 	10.37, 	14.35, 	18.33, 	22.32, 	26.30, 	30.28, 	35.26, 	40.24, 	45.22, 	50.20, 	54.19, 	58.17, 	60.16, 	65.14, 	70.12, 	75.10, 	80.08, 	84.06, 	88.05, 	92.03, 	96.02, 	100.00]
+            self.LookTab_OCV = [2.7151,	3.0298,	3.1935,	3.2009,	3.2167,	3.2393,	3.2561,	3.2703,	3.2843,	3.2871,	3.2874,	3.2868,	3.2896,	3.2917,	3.2967,	3.3128,	3.3283,	3.3286,	3.3287,	3.3288,	3.3289,	3.3296,	3.3302,	3.3314,	3.3429]
+        
+        else:
+            print('未找到对应电池编号!!!')
+            # sys.exit()
+
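The OCV/SOC lookup tables above are consumed elsewhere through linear interpolation; a minimal sketch (the 3.70 V input is illustrative):

    import numpy as np
    import BatParam

    param = BatParam.BatParam(1)                                  # celltype 1: 6040 NCM pack
    soc = np.interp(3.70, param.LookTab_OCV, param.LookTab_SOC)   # cell OCV in V -> SOC in %
    print(soc)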

+ 362 - 0
LIB/MIDDLE/CellStateEstimation/Uniform/V1_0_0/CBMSBatUniform.py

@@ -0,0 +1,362 @@
+import pandas as pd
+import numpy as np
+import datetime
+import bisect
+import matplotlib.pyplot as plt
+from LIB.MIDDLE.CellStateEstimation.SOH.V1_0_0 import BatParam
+
+class BatUniform:
+    def __init__(self,sn,celltype,df_bms):  #参数初始化
+
+        self.sn=sn
+        self.celltype=celltype
+        self.param=BatParam.BatParam(celltype)
+        self.df_bms=df_bms
+        self.packcrnt=df_bms['总电流[A]']
+        self.packvolt=df_bms['总电压[V]']
+        self.bms_soc=df_bms['SOC[%]']
+        self.bmstime= pd.to_datetime(df_bms['时间戳'], format='%Y-%m-%d %H:%M:%S')
+
+    def batuniform(self):
+        if self.celltype==1 or self.celltype==2 or self.celltype==3 or self.celltype==4:
+            df_res=self._ncm_uniform()
+            return df_res
+            
+        elif self.celltype==99:
+            df_res=self._lfp_uniform()
+            return df_res
+        
+        else:
+            return pd.DataFrame()
+    
+    def _np_move_avg(self,a, n, mode="same"): #moving-average filter
+        return (np.convolve(a, np.ones((n,)) / n, mode=mode))
+
+    def _celltemp_weight(self,num):   #set the temperature weight and required standing time from the lowest remaining cell temperature (the single lowest reading is discarded first)
+        celltemp = []
+        for j in range(1, self.param.CellTempNums+1):
+            s = str(j)
+            celltemp.append(self.df_bms.loc[num,'单体温度' + s])
+        celltemp.remove(min(celltemp))
+        if self.celltype==99:
+            if min(celltemp)>=20:
+                self.tempweight=1
+                self.StandardStandingTime=600
+            elif min(celltemp)>=10:
+                self.tempweight=0.6
+                self.StandardStandingTime=900
+            elif min(celltemp)>=5:
+                self.tempweight=0.
+                self.StandardStandingTime=1800
+            else:
+                self.tempweight=0.1
+                self.StandardStandingTime=3600
+        else:
+            if min(celltemp)>=20:
+                self.tempweight=1
+                self.StandardStandingTime=300
+            elif min(celltemp)>=10:
+                self.tempweight=0.8
+                self.StandardStandingTime=600
+            elif min(celltemp)>=5:
+                self.tempweight=0.6
+                self.StandardStandingTime=1800
+            else:
+                self.tempweight=0.2
+                self.StandardStandingTime=3600
+
+    def _cellvolt_get(self,num): #return all cell voltages of the given row
+        cellvolt=[]
+        for j in range(1, self.param.CellVoltNums+1): 
+            s = str(j)
+            cellvolt.append(self.df_bms.loc[num,'单体电压' + s]/1000)
+        return(cellvolt)
+
+    def _dvdq_peak(self, time, soc, cellvolt, packcrnt):    #locate the dV/dQ peak and return the Ah-counted SOC at which it occurs
+        cellvolt1 = self._np_move_avg(cellvolt, 5, mode="same")
+        Soc = 0
+        Ah = 0
+        Volt = cellvolt1[0]
+        DV_Volt = []
+        DQ_Ah = []
+        DVDQ = []
+        time1 = []
+        soc1 = []
+        soc2 = []
+        xvolt=[]
+
+        for m in range(1, len(time)):
+            Step = (time[m] - time[m - 1]).total_seconds()
+            Soc = Soc - packcrnt[m] * Step * 100 / (3600 * self.param.Capacity)
+            Ah = Ah - packcrnt[m] * Step / 3600
+            if (cellvolt[m]-Volt)>0.99 and Ah>0:
+                DQ_Ah.append(Ah)
+                DV_Volt.append(cellvolt[m]-Volt)
+                DVDQ.append((DV_Volt[-1])/DQ_Ah[-1])
+                xvolt.append(cellvolt[m]/1000)
+                Volt=cellvolt[m]
+                Ah = 0
+                soc1.append(Soc)
+                time1.append(time[m])
+                soc2.append(soc[m])
+
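+        # Each retained sample stores the voltage/Ah increments (DV_Volt, DQ_Ah), their ratio DVDQ,
+        # and the Ah-counted SOC reached so far (AhSoc); the AhSoc value at the dV/dQ peak is what
+        # _lfp_uniform compares across cells to estimate SOC imbalance.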
+        #trim the samples near the start (10 min) and end (20 min) of the charge
+        df_Data1 = pd.DataFrame({'time': time1,
+                                'SOC': soc2,
+                                'DVDQ': DVDQ,
+                                'AhSoc': soc1,
+                                'DQ_Ah':DQ_Ah,
+                                'DV_Volt':DV_Volt,
+                                'XVOLT':xvolt})
+        start_time=df_Data1.loc[0,'time']
+        start_time=start_time+datetime.timedelta(seconds=600)
+        end_time=df_Data1.loc[len(time1)-1,'time']
+        end_time=end_time-datetime.timedelta(seconds=1200)
+        if soc2[0]<40 and soc2[-1]>93:
+            df_Data1=df_Data1[(df_Data1['SOC']>44) & (df_Data1['SOC']<90)]
+        elif soc2[0]<40 and soc2[-1]<=93:
+            df_Data1=df_Data1[(df_Data1['SOC']>44) & (df_Data1['time']<end_time)]
+        elif soc2[0]>=40 and soc2[-1]>93:
+            df_Data1=df_Data1[(df_Data1['time']>start_time) & (df_Data1['SOC']<90)]
+        else:
+            df_Data1=df_Data1[(df_Data1['time']>start_time) & (df_Data1['time']<end_time)]
+
+        # ax1 = plt.subplot(3, 1, 1)
+        # plt.plot(df_Data1['XVOLT'],df_Data1['DVDQ'],'r*-')
+        # plt.xlabel('Volt/V')
+        # plt.ylabel('DV/DQ')
+        # plt.legend()
+        # ax1 = plt.subplot(3, 1, 2)
+        # plt.plot(df_Data1['SOC'],df_Data1['XVOLT'],'y*-')
+        # plt.xlabel('SOC/%')
+        # plt.ylabel('Volt/V')
+        # plt.legend()
+        # ax1 = plt.subplot(3, 1, 3)
+        # plt.plot(df_Data1['SOC'], df_Data1['DVDQ'], 'r*-')
+        # plt.xlabel('SOC/%')
+        # plt.ylabel('DV/DQ')
+        # plt.legend()
+        # plt.show()
+
+        if len(df_Data1)>2:     #locate the peak only if more than two samples remain
+            PeakIndex = df_Data1['DVDQ'].idxmax()
+            df_Data2 = df_Data1[(df_Data1['SOC'] > (df_Data1['SOC'][PeakIndex] - 0.5)) & (df_Data1['SOC'] < (df_Data1['SOC'][PeakIndex] + 0.5))]
+            if len(df_Data2) > 2:
+                return df_Data1['AhSoc'][PeakIndex]
+            else:
+                df_Data1 = df_Data1.drop([PeakIndex])
+                PeakIndex = df_Data1['DVDQ'].idxmax()
+                df_Data2 = df_Data1[(df_Data1['SOC'] > (df_Data1['SOC'][PeakIndex] - 0.5)) & (df_Data1['SOC'] < (df_Data1['SOC'][PeakIndex] + 0.5))]
+                if len(df_Data2) > 2:
+                    return df_Data1['AhSoc'][PeakIndex]
+                else:
+                    return 0
+        else:
+            return 0
+ 
+    def _ncm_uniform(self):
+        column_name=['time','sn','cellsoc_diff','cellvolt_diff','cellmin_num','cellmax_num']
+        df_res=pd.DataFrame(columns=column_name)
+        standingtime=0
+
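+        # A consistency sample is taken from the cell OCVs whenever one of three triggers fires:
+        # the rest is about to end (current rises above 0.3 A), the rest has lasted more than 12 h,
+        # or the data is about to end; the SOC spread of the min/max cells is read off the OCV-SOC table.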
+        for i in range(1,len(self.df_bms)-2):
+
+            if abs(self.packcrnt[i]) < 0.3:     #current effectively zero: the pack is resting
+                delttime=(self.bmstime[i+1]-self.bmstime[i]).total_seconds()
+                standingtime=standingtime+delttime
+                self._celltemp_weight(i)     #set the required standing time for the current cell temperature
+
+                if standingtime>self.StandardStandingTime:      #standing time long enough
+                    if abs(self.packcrnt[i+2]) >= 0.3:
+                        standingtime=0
+                        cellvolt_now=self._cellvolt_get(i)     #cell voltages of the current row
+                        cellvolt_min=min(cellvolt_now)
+                        cellvolt_max=max(cellvolt_now)
+                        cellmin_num=cellvolt_now.index(cellvolt_min)+1
+                        cellmax_num=cellvolt_now.index(cellvolt_max)+1
+                        cellsoc_min=np.interp(cellvolt_min,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        cellsoc_max=np.interp(cellvolt_max,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        cellvolt_diff=(cellvolt_max-cellvolt_min)*1000
+                        cellsoc_diff=cellsoc_max-cellsoc_min
+                        cellsoc_diff=eval(format(cellsoc_diff,'.1f'))
+                        cellvolt_diff=eval(format(cellvolt_diff,'.0f'))
+                        df_res.loc[len(df_res)]=[self.bmstime[i], self.sn, cellsoc_diff, cellvolt_diff, cellmin_num, cellmax_num]
+                    elif standingtime>3600*12:
+                        standingtime=0                    
+                        cellvolt_now=self._cellvolt_get(i)     #获取当前行电压数据
+                        cellvolt_min=min(cellvolt_now)
+                        cellvolt_max=max(cellvolt_now)
+                        cellmin_num=cellvolt_now.index(cellvolt_min)+1
+                        cellmax_num=cellvolt_now.index(cellvolt_max)+1
+                        cellsoc_min=np.interp(cellvolt_min,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        cellsoc_max=np.interp(cellvolt_max,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        cellvolt_diff=(cellvolt_max-cellvolt_min)*1000
+                        cellsoc_diff=cellsoc_max-cellsoc_min
+                        cellsoc_diff=eval(format(cellsoc_diff,'.1f'))
+                        cellvolt_diff=eval(format(cellvolt_diff,'.0f'))
+                        df_res.loc[len(df_res)]=[self.bmstime[i], self.sn, cellsoc_diff, cellvolt_diff, cellmin_num, cellmax_num]
+                    elif i>=len(self.df_bms)-4:
+                        standingtime=0
+                        cellvolt_now=self._cellvolt_get(i)     #获取当前行电压数据
+                        cellvolt_min=min(cellvolt_now)
+                        cellvolt_max=max(cellvolt_now)
+                        cellmin_num=cellvolt_now.index(cellvolt_min)+1
+                        cellmax_num=cellvolt_now.index(cellvolt_max)+1
+                        cellsoc_min=np.interp(cellvolt_min,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        cellsoc_max=np.interp(cellvolt_max,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        cellvolt_diff=(cellvolt_max-cellvolt_min)*1000
+                        cellsoc_diff=cellsoc_max-cellsoc_min
+                        cellsoc_diff=eval(format(cellsoc_diff,'.1f'))
+                        cellvolt_diff=eval(format(cellvolt_diff,'.0f'))
+                        df_res.loc[len(df_res)]=[self.bmstime[i], self.sn, cellsoc_diff, cellvolt_diff, cellmin_num, cellmax_num]
+                        break
+                    else:
+                        continue
+                else:
+                    continue
+            else:
+                standingtime=0
+                continue
+
+        if df_res.empty:    #return the result
+            return pd.DataFrame()
+        else:
+            return df_res
+
+    def _lfp_uniform(self):
+        column_name=['time','sn','cellsoc_diff','cellvolt_diff','cellmin_num','cellmax_num']
+        df_res=pd.DataFrame(columns=column_name)
+        standingtime=0
+        chrg_start=[]
+        chrg_end=[]
+        charging=0
+
+        for i in range(3,len(self.df_bms)-3):
+
+            #cell consistency from rest (OCV) voltages
+            if abs(self.packcrnt[i]) < 0.2:     #current effectively zero: the pack is resting
+                delttime=(self.bmstime[i+1]-self.bmstime[i]).total_seconds()
+                standingtime=standingtime+delttime
+                self._celltemp_weight(i)     #set the required standing time for the current cell temperature
+
+                if standingtime>self.StandardStandingTime:      #standing time long enough
+                    cellvolt_now=self._cellvolt_get(i)     #cell voltages of the current row
+                    cellvolt_min=min(cellvolt_now)
+                    cellvolt_max=max(cellvolt_now)
+                    if abs(self.packcrnt[i+2]) >= 0.2 and cellvolt_max < self.param.OcvInflexionBelow:     
+                        standingtime=0                   
+                        cellmin_num=cellvolt_now.index(cellvolt_min)+1
+                        cellmax_num=cellvolt_now.index(cellvolt_max)+1
+                        cellsoc_min=np.interp(cellvolt_min,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        cellsoc_max=np.interp(cellvolt_max,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        cellvolt_diff=(cellvolt_max-cellvolt_min)*1000
+                        cellsoc_diff=cellsoc_max-cellsoc_min
+                        cellsoc_diff=eval(format(cellsoc_diff,'.1f'))
+                        cellvolt_diff=eval(format(cellvolt_diff,'.0f'))
+                        df_res.loc[len(df_res)]=[self.bmstime[i], self.sn, cellsoc_diff, cellvolt_diff, cellmin_num, cellmax_num]
+                    elif i>=len(self.df_bms)-4 and cellvolt_max < self.param.OcvInflexionBelow:
+                        standingtime=0
+                        cellmin_num=cellvolt_now.index(cellvolt_min)+1
+                        cellmax_num=cellvolt_now.index(cellvolt_max)+1
+                        cellsoc_min=np.interp(cellvolt_min,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        cellsoc_max=np.interp(cellvolt_max,self.param.LookTab_OCV,self.param.LookTab_SOC)
+                        cellvolt_diff=(cellvolt_max-cellvolt_min)*1000
+                        cellsoc_diff=cellsoc_max-cellsoc_min
+                        cellsoc_diff=eval(format(cellsoc_diff,'.1f'))
+                        cellvolt_diff=eval(format(cellvolt_diff,'.0f'))
+                        df_res.loc[len(df_res)]=[self.bmstime[i], self.sn, cellsoc_diff, cellvolt_diff, cellmin_num, cellmax_num]
+                    else:
+                        pass
+                else:
+                    pass
+            else:
+                standingtime=0
+                pass   
+
+            #获取DVDQ算法所需数据——开始
+            if i==3 and self.packcrnt[1]<=-1 and self.packcrnt[2]<=-1 and self.packcrnt[3]<=-1:
+                chrg_start.append(i)
+                charging=1
+            elif self.packcrnt[i-1]>-1 and self.packcrnt[i]<=-1 and self.packcrnt[i+1]<=-1 and self.packcrnt[i+2]<=-1:     #判断充电开始
+                if self.bms_soc[i]<45:
+                    charging=1
+                    if len(chrg_start)>len(chrg_end):
+                        chrg_start[-1]=i
+                    else:
+                        chrg_start.append(i)
+                else:
+                    pass
+            else:
+                pass
+
+            if charging==1: #充电中
+                if (self.bmstime[i+1]-self.bmstime[i]).total_seconds()>180:  #如果充电过程中时间间隔>180s,则舍弃该次充电
+                    chrg_start.remove(chrg_start[-1])
+                    charging=0
+                    continue
+                elif self.packcrnt[i]<=-1 and self.packcrnt[i+1]<=-1 and  self.packcrnt[i+2]>-1:  #判断电流波动时刻
+                    cellvolt_now=self._cellvolt_get(i+1)
+                    if max(cellvolt_now)>self.param.CellFullChrgVolt:   #电压>满充电压
+                        chrg_end.append(i+1)
+                        charging=0
+                        continue
+                    else:
+                        pass
+                elif self.packcrnt[i+1]>-0.1 and self.packcrnt[i+2]>-0.1 and self.packcrnt[i+3]>-0.1:   #判断充电结束
+                    charging=0
+                    if len(chrg_start)>len(chrg_end):
+                        if self.bms_soc[i]>90:
+                            chrg_end.append(i)
+                        else:
+                            chrg_start.remove(chrg_start[-1])
+                            continue
+                    else:
+                        continue
+                elif i==len(self.packcrnt)-4 and self.packcrnt[i+1]<-1 and self.packcrnt[i+2]<-1:
+                    charging=0
+                    if len(chrg_start)>len(chrg_end):
+                        if self.bms_soc[i]>90:   #soc>90
+                            chrg_end.append(i)
+                            continue
+                        else:
+                            chrg_start.remove(chrg_start[-1])
+                            continue
+                    else:
+                        continue   
+            else:
+                pass
+            #获取DVDQ算法所需数据——结束
+
+        if chrg_end:    #SOC spread from the dV/dQ method
+            peaksoc_list=[]
+            for i in range(len(chrg_end)):
+                peaksoc_list = []
+                for j in range(1, self.param.CellVoltNums + 1):
+                    s = str(j)
+                    cellvolt = self.df_bms['单体电压' + s]
+                    cellvolt = list(cellvolt[chrg_start[i]:chrg_end[i]])
+                    time = list(self.bmstime[chrg_start[i]:chrg_end[i]])
+                    packcrnt = list(self.packcrnt[chrg_start[i]:chrg_end[i]])
+                    soc = list(self.bms_soc[chrg_start[i]:chrg_end[i]])
+                    peaksoc = self._dvdq_peak(time, soc, cellvolt, packcrnt)
+                    if peaksoc>1:
+                        peaksoc_list.append(peaksoc)    #Ah-counted SOC needed to reach the dV/dQ peak
+                    else:
+                        pass
+                if len(peaksoc_list)>11:
+                    peaksoc_max=max(peaksoc_list)
+                    peaksoc_min=min(peaksoc_list)
+                    peaksoc_maxnum=peaksoc_list.index(peaksoc_min)+1
+                    peaksoc_minnum=peaksoc_list.index(peaksoc_max)+1
+                    cellsoc_diff=peaksoc_max-peaksoc_min
+                    cellsoc_diff=eval(format(cellsoc_diff,'.1f'))
+                    df_res.loc[len(df_res)]=[self.bmstime[chrg_start[i]], self.sn, cellsoc_diff, 0, peaksoc_minnum, peaksoc_maxnum]
+                else:
+                    pass
+
+        if df_res.empty:
+            return pd.DataFrame()
+        else:
+            df_res.sort_values(by='time', ascending=True, inplace=True)
+            return df_res

BIN
LIB/MIDDLE/CellStateEstimation/Uniform/V1_0_0/Uniform表单.xlsx


+ 24 - 0
LIB/MIDDLE/CellStateEstimation/Uniform/V1_0_0/log.py

@@ -0,0 +1,24 @@
+import logging
+import traceback
+
+class Mylog:
+
+    def __init__(self,log_name,log_level):
+        self.name=log_name
+        self.level=log_level
+    
+    def logcfg(self):
+        Level=logging.ERROR
+        if len(self.level) > 0:
+            if self.level == 'debug':
+                Level=logging.DEBUG
+            elif self.level == 'info':
+                Level=logging.INFO
+            elif self.level == 'warning':
+                Level=logging.WARNING
+            else:
+                Level=logging.ERROR
+        logging.basicConfig(filename=self.name, level=Level,format='%(asctime)s - %(levelname)s - %(message)s')
+
+    def logopt(self,*info):
+        logging.error(info)
+        logging.error(traceback.format_exc())

+ 73 - 0
LIB/MIDDLE/CellStateEstimation/Uniform/V1_0_0/main.py

@@ -0,0 +1,73 @@
+#coding=utf-8
+import os
+import sys
+import time
+import datetime
+import pandas as pd
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from urllib import parse
+
+import CBMSBatUniform
+import log
+from LIB.BACKEND import DBManager, Log
+# from LIB.MIDDLE import SignalMonitor
+
+dbManager = DBManager.DBManager()
+if __name__ == "__main__":
+    SNdata_6040 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6040骑享')
+    SNdata_6060 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6060')
+    SNdata_4840 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='4840骑享')
+    SNdata_7250 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='7250')
+    SNnums_6060=SNdata_6060['SN号']
+    SNnums_6040=SNdata_6040['SN号']
+    SNnums_4840=SNdata_4840['SN号']
+    SNnums_7250=SNdata_7250['SN号']
+
+    SNnums=SNnums_6040.tolist()+SNnums_6060.tolist()+SNnums_4840.tolist()+SNnums_7250.tolist()
+    now_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    now_time=datetime.datetime.strptime(now_time,'%Y-%m-%d %H:%M:%S')
+    start_time=now_time-datetime.timedelta(days=5)
+    end_time=str(now_time)
+    start_time=str(start_time)
+
+    #log信息配置
+    mylog=log.Mylog('log.txt','error')
+    mylog.logcfg()
+
+    for sn in SNnums:
+        try:
+            if 'PK500' in sn:
+                celltype=1 #6040 NCM cell
+            elif 'PK502' in sn:
+                celltype=2 #4840 NCM cell
+            elif 'PK504' in sn:
+                celltype=99    #60 Ah LFP cell
+            elif 'MGMLXN750' in sn:
+                celltype=3 #Lixin 50 Ah NCM cell
+            elif 'MGMCLN750' in sn:
+                celltype=4 #CATL 50 Ah NCM cell
+            else:
+                print('未找到对应电池编号!!!')
+                sys.exit()
+            
+            # sn='PK50001A100000035'
+            # start_time='2021-08-10 9:49:37'
+            # end_time='2021-08-29 19:49:37'
+
+            dbManager = DBManager.DBManager()
+            df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms'])
+            df_bms = df_data['bms']
+            # df_bms.to_csv('BMS_'+sn+'.csv',encoding='GB18030')
+
+            BatUniform=CBMSBatUniform.BatUniform(sn,celltype,df_bms)
+            df_res=BatUniform.batuniform()
+            df_res.to_csv('CBMS_Uniform_'+sn+'.csv',encoding='GB18030')
+        
+        
+        except IndexError as e:
+            print(repr(e))
+            mylog.logopt(sn,e)
+            pass

+ 71 - 0
LIB/MIDDLE/CellStateEstimation/Uniform/main.py

@@ -0,0 +1,71 @@
+#coding=utf-8
+import CBMSBatUniform
+import log
+
+import os
+import sys
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+# from LIB.MIDDLE import SignalMonitor
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time, datetime
+from urllib import parse
+
+dbManager = DBManager.DBManager()
+if __name__ == "__main__":
+    SNdata_6040 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6040骑享')
+    SNdata_6060 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6060')
+    SNdata_4840 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='4840骑享')
+    SNdata_7250 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='7250')
+    SNnums_6060=SNdata_6060['SN号']
+    SNnums_6040=SNdata_6040['SN号']
+    SNnums_4840=SNdata_4840['SN号']
+    SNnums_7250=SNdata_7250['SN号']
+
+    SNnums=SNnums_6040.tolist()+SNnums_6060.tolist()+SNnums_4840.tolist()+SNnums_7250.tolist()
+    now_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    now_time=datetime.datetime.strptime(now_time,'%Y-%m-%d %H:%M:%S')
+    start_time=now_time-datetime.timedelta(days=5)
+    end_time=str(now_time)
+    start_time=str(start_time)
+
+    #log信息配置
+    mylog=log.Mylog('log.txt','error')
+    mylog.logcfg()
+
+    for sn in SNnums:
+        try:
+            if 'PK500' in sn:
+                celltype=1 #6040三元电芯
+            elif 'PK502' in sn:
+                celltype=2 #4840三元电芯
+            elif 'PK504' in sn:
+                celltype=99    #60Ah磷酸铁锂电芯
+            elif 'MGMLXN750' in sn:
+                celltype=3 #力信50ah三元电芯
+            elif 'MGMCLN750' in sn: 
+                celltype=4 #CATL 50ah三元电芯
+            else:
+                print('未找到对应电池编号!!!')
+                sys.exit()
+            
+            # sn='PK50001A100000035'
+            # start_time='2021-08-10 9:49:37'
+            # end_time='2021-08-29 19:49:37'
+
+            dbManager = DBManager.DBManager()
+            df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms'])
+            df_bms = df_data['bms']
+            # df_bms.to_csv('BMS_'+sn+'.csv',encoding='GB18030')
+
+            BatUniform=CBMSBatUniform.BatUniform(sn,celltype,df_bms)
+            df_res=BatUniform.batuniform()
+            df_res.to_csv('CBMS_Uniform_'+sn+'.csv',encoding='GB18030')
+        
+        
+        except IndexError as e:
+            print(repr(e))
+            mylog.logopt(sn,e)
+            pass

+ 422 - 0
LIB/MIDDLE/DrivingRange/UpdtFct.py

@@ -0,0 +1,422 @@
+import pandas as pd
+import pymysql
+from sqlalchemy import create_engine
+import datetime
+from sqlalchemy.orm import sessionmaker
+import pdb
+
+# #建立引擎
+# engine = create_engine(str(r"mysql+mysqldb://%s:" + '%s' + "@%s/%s") % ('root', 'pengmin', 'localhost', 'qixiangdb'))
+# #连接到qx数据库
+# conn_qx = pymysql.connect(
+#         host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com',
+#         user='qx_cas',
+#         password='Qx@123456',#Qx@123456
+#         database='qx_cas',
+#         charset='utf8'
+#     )
+# #连接到本地数据库,输出物
+# conn_local = pymysql.connect(
+#         host='localhost',
+#         user='root',
+#         password='pengmin',
+#         database='qixiangdb',
+#         charset='utf8'
+#     )
+
+#计算下一个soc
+def getNextSoc(start_soc):
+    '''输入当前的soc,寻找目标soc函数'''
+    if start_soc>80:
+        next_soc=80
+    elif start_soc>60:
+        next_soc=60
+    elif start_soc>40:
+        next_soc=40
+    elif start_soc>20:
+        next_soc=20
+    else:
+        next_soc=5#下一次目标soc
+    return next_soc
+#更新全部5个区间段的factor
+def updtSnFct(sn_factor_df,end_soc,delta_range,range_soc):
+    '''输入当前的soc区间段、里程变化量和单位soc行驶里程,输出新的df
+    sn_factor_df为dataframe,delta_range单位为km,range_soc单位为km/persoc'''
+    if end_soc==80:
+        updtFctByCol(sn_factor_df,'a0',delta_range,range_soc)
+    elif end_soc==60:
+        updtFctByCol(sn_factor_df,'a1',delta_range,range_soc)
+    elif end_soc==40:
+        updtFctByCol(sn_factor_df,'a2',delta_range,range_soc)
+    elif end_soc==20:
+        updtFctByCol(sn_factor_df,'a3',delta_range,range_soc)
+    elif end_soc<20:
+        updtFctByCol(sn_factor_df,'a4',delta_range,range_soc)
+    return sn_factor_df
+#更新一列的factor
+def updtFctByCol(sn_factor_df,colmun_name,delta_range,range_soc):
+    '''更新指定列的factor,sn_factor_df为dataframe,新的系数更新到第一行。delta_range单位为km,
+    range_soc单位为km/persoc,默认按照100km更新续驶里程权重'''
+    range_soc_old=sn_factor_df.loc[0,colmun_name]#读取第0行的老factor
+    debounce_range=100#更新权重
+    new_factor=range_soc*((delta_range)/debounce_range)+range_soc_old*(1-delta_range/debounce_range)
+    #在第1行,存储新的factor
+    sn_factor_df.loc[1,colmun_name]=new_factor
+    return sn_factor_df
+#更新今日的factor
+def updtTodayFct(factor_input,sn_day_df):
+    '''更新今日的Factor***'''
+    sn_factor_df_last=factor_input
+    start_soc=sn_day_df.loc[0,'soc']#首行soc
+    next_soc=getNextSoc(start_soc)#下一个目标soc
+    start_range=sn_day_df.loc[0,'vehodo']#首行vehodo
+    sn=sn_day_df.loc[0,'name']#sn号
+
+    for index in range(len(sn_day_df)-1):
+    #寻找分割点,
+        index_soc=sn_day_df.loc[index,'soc']#当前行soc
+        next_index_soc=sn_day_df.loc[index+1,'soc']#下一行soc
+
+        if (index_soc>=next_soc)&(next_index_soc<next_soc):
+            #当前行soc>目标soc,下一行低soc<目标soc,说明到达了分割点80-60-40-20
+            delta_soc_tonext=start_soc-next_soc#两个距离点的soc差,单位为%
+            delta_range_tonext=sn_day_df.loc[index,'vehodo']-start_range#两个时间点的距离差,单位为m
+            delta_range_tonext_km=delta_range_tonext/1000#两个时间点的距离差,单位为km
+            range_soc_tonext=(delta_range_tonext_km)/delta_soc_tonext#单位soc可行驶的公里数
+
+            # print(sn+'start_soc: '+str(start_soc),'next_soc: '+str(next_soc),'delta_vehodo; '+str(round(delta_range_tonext_km,3))
+            # +'km'+' range_soc:'+str(round(range_soc_tonext,3)))#调试用语句,看单次factor变化量
+
+            if (delta_range_tonext_km>1)&(delta_range_tonext_km<5*delta_soc_tonext):
+                #里程变化量>1km。且<5倍的soc变化量,大于此值认为不合理。
+                sn_factor_df_last=updtSnFct(sn_factor_df_last,next_soc,delta_range_tonext_km,range_soc_tonext)
+            
+            start_soc=next_index_soc#变更开始soc
+            next_soc=getNextSoc(start_soc)#变更结束soc
+            start_range=sn_day_df.loc[index+1,'vehodo']#变更开始里程    
+
+    return sn_factor_df_last
+#对driveinfo进行预处理
+def snDayDfPreProcess(sn_day_df):
+    '''预处理,判断是否在dirvemode,获取drivemode条件下的累计行驶距离。
+    增加delta_soc列,drive_flg列,vehodo列'''
+    sn_day_df=sn_day_df.reset_index(drop=True)#重置index
+    #增加列,计算delta_soc
+    for index in range(len(sn_day_df)):
+        if index==0:
+            sn_day_df.loc[index,'delta_soc']=0
+        else:
+            sn_day_df.loc[index,'delta_soc']=sn_day_df.loc[index,'soc']-sn_day_df.loc[index-1,'soc']
+    #增加列,判断是否在drive状态
+    drive_flg=False
+    accum_distance=0
+    for index in range(len(sn_day_df)):
+        if index==0:
+            sn_day_df.loc[index,'drive_status']=drive_flg
+            sn_day_df.loc[index,'vehodo']=0
+        else:
+            if (sn_day_df.loc[index,'delta_soc']<-0.1)|\
+                ((sn_day_df.loc[index,'delta_soc']<=0)&(sn_day_df.loc[index,'distance']>500)):#soc处于下降状态,说明在drive
+                drive_flg=True#置true
+            elif sn_day_df.loc[index,'delta_soc']>0.1:#soc处于上升状态,说明不在drive
+                drive_flg=False#置false
+                accum_distance=0#清零
+            sn_day_df.loc[index,'drive_flg']=drive_flg
+            accum_distance+=sn_day_df.loc[index,'distance']#对行驶里程进行累加
+            sn_day_df.loc[index,'vehodo']=accum_distance
+    #筛选所有的drive信息行
+    sn_day_drive_df=sn_day_df.loc[sn_day_df['drive_flg']==True,:]
+    #按时间进行一次筛选,此处丢弃了晚上0点以后的行车数据
+
+    sn_day_drive_df=sn_day_drive_df.reset_index(drop=True)#重置index
+    
+    return sn_day_drive_df 
+
+
+#更新所有sn,连读多天的的factor
+def updtAllSnFct(start_date,end_date, db_engine, db_local, db_qx, sn_table_name='tb_sn_factor'):
+    '''计算开始时间到结束时间的,所有sn的factor'''
+    start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')#开始时间
+    end_date_datetime=datetime.datetime.strptime(end_date,'%Y-%m-%d')#开始时间
+    delta_day=(end_date_datetime-start_date_datetime).days#间隔天数
+    i=1
+    while i<=delta_day:
+        end_date=(start_date_datetime+datetime.timedelta(days=i)).strftime("%Y-%m-%d")
+        updtAllSnTodayFct(start_date,end_date, db_engine, db_local, db_qx, sn_table_name)#调用函数
+        # print('update all sn factor from '+start_date+" to "+end_date)
+        start_date=end_date
+        i+=1#自加
+
+#更新所有sn,一天的factor
+def updtAllSnTodayFct(start_date,end_date, db_engine, db_local, db_qx, sn_table_name):
+    '''更新今天所有sn的factor信息,start_date和end_date相隔一天。此处还可优化'''
+    # conn_local = pymysql.connect(
+    #     host='localhost',
+    #     user='root',
+    #     password='pengmin',
+    #     database='qixiangdb',
+    #     charset='utf8'
+    #     )
+    
+    start_date_str="'"+start_date+"'"
+    end_date_str="'"+end_date+"'"
+    sql_cmd="select * from drive_info where time between "+start_date_str+" and "+end_date_str+" and distance!=0;"
+    range_soc_df = pd.read_sql(sql_cmd, db_qx)#使用read_sql方法查询qx数据库
+
+    #筛选出所有当日数据之后,筛选当日有更新的sn
+    today_sn_list=range_soc_df['name'].unique().tolist()#[:100]#先一次更新5个
+    #建立空的dataframe,用于承接所有更新的factor信息
+    today_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])
+
+    for sn in today_sn_list:
+        #寻找factor_df,里面是否有sn号,如果没有sn对应信息,则新增信息。
+        sn_str="'"+sn+"'"
+        update_today_factor_flg=True
+        sql_cmd3="select sn,date,a0,a1,a2,a3,a4 from {} where date=".format(sn_table_name)+start_date_str+" and sn="+sn_str
+        factor_today_df=pd.read_sql(sql_cmd3, db_local)#使用read_sql方法查询local数据库
+        if len(factor_today_df)>=1:
+            # print(sn+' '+start_date_str+' factor exist in table! Factor not update.')
+            update_today_factor_flg=False
+
+        sql_cmd2="select sn,date,a0,a1,a2,a3,a4 from {} where date<".format(sn_table_name)+start_date_str+" and sn="+sn_str
+        #此处可以限定每次查询的数量,例如不高于5行
+        factor_df=pd.read_sql(sql_cmd2, db_local)#使用read_sql方法查询local数据库
+
+        #按照sn号和日期进行去重,避免运行时重复产生factor数据,保留第一次出现的行。
+        factor_df=factor_df.drop_duplicates(subset=['sn','date'],keep='first')
+
+        if len(factor_df)==0:
+            #如果没有搜索到factor历史数据,则声明一个新的进行初始化
+            start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')
+            yesterday=(start_date_datetime+datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
+            #为sn申请一个新的factor,初始值为1
+            factor_df=pd.DataFrame({'sn':sn,'date':yesterday,'a0':[1],'a1':[1],'a2':[1],'a3':[1],'a4':[1]})
+        sn_factor_df=factor_df.loc[factor_df['sn']==sn,:]#筛选sn对应的factor
+        sn_factor_df=sn_factor_df.sort_values(by='date',ascending=True)#按照日期排序
+
+        sn_factor_df_last=sn_factor_df.tail(1).copy()#寻找最后一行,代表最近日期
+        sn_factor_df_last=sn_factor_df_last.append(sn_factor_df_last)#新增加一行,用于存储新的factor
+        sn_factor_df_last=sn_factor_df_last.reset_index(drop=True)#重置index
+        sn_factor_df_last.loc[1,'date']=start_date#更改后一行的date为当前日期
+        #筛选对应车辆的信息
+        condition_sn=(range_soc_df['name']==sn)
+        sn_day_df=range_soc_df.loc[condition_sn,:].copy()
+        sn_day_df=sn_day_df.reset_index(drop=True)
+        #使用updtTodayFct函数更新今天的factor
+        if len(sn_day_df)>=2:
+            #使用process函数,进行预处理
+            sn_day_df=snDayDfPreProcess(sn_day_df)#预处理函数
+            # 临时措施,删除每天晚上0点以后的数据,5点以前的数据,防止对驾驶cycle判断产生影响。
+            day_start_time=datetime.datetime.strptime(start_date,'%Y-%m-%d')
+            day_morning_time=day_start_time+datetime.timedelta(hours=5)
+            morning_time_str=day_morning_time.strftime('%Y-%m-%d %H:%M:%S')
+            sn_day_df=sn_day_df.loc[sn_day_df['time']>morning_time_str,:]#去除掉了每天晚上0点以后的数据,短期措施
+            sn_day_df=sn_day_df.reset_index(drop=True)#重置index
+
+            if len(sn_day_df)>=2:
+                sn_factor_df_new=updtTodayFct(sn_factor_df_last,sn_day_df)#
+                if (len(sn_factor_df_new)>=2)&(update_today_factor_flg):#如果factor
+                    today_sn_fct_df=today_sn_fct_df.append(sn_factor_df_new.loc[1,:])#筛选第一行,进行拼接,最后写入到数据库中
+    
+    #将today_sn_fct_df写入到数据库中,今天所有factor更新的系数,一次写入。
+    if len(today_sn_fct_df)>=1:
+        today_sn_fct_df.to_sql(sn_table_name,con=db_engine,chunksize=10000,if_exists='append',index=False)
+
+#更新一个sn,连续多天的factor
+def updtOneSnFct(sn,start_date,end_date,db_engine, db_local, db_qx, sn_table_name='tb_sn_factor'):
+    '''计算开始时间到结束时间的,一个sn的所有factor。
+    重复多次调用,updtOneSnTodayFct。
+    '''
+    start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')#开始时间
+    end_date_datetime=datetime.datetime.strptime(end_date,'%Y-%m-%d')#开始时间
+    delta_day=(end_date_datetime-start_date_datetime).days#间隔天数
+    i=1
+    while i<=delta_day:
+        end_date=(start_date_datetime+datetime.timedelta(days=i)).strftime("%Y-%m-%d")
+        # print('update one '+sn+'factor from '+start_date+" to "+end_date)
+        updtOneSnTodayFct(sn,start_date,end_date,db_engine, db_local, db_qx, sn_table_name)#调用函数,更新当日的factor。
+        start_date=end_date
+        i+=1#自加
+#更新一个sn,一天的factor
+def updtOneSnTodayFct(sn,start_date,end_date,db_engine, db_local, db_qx, sn_table_name):
+    '''更新一个sn,一天的factor。'''
+    #重新建立连接,更新数据库
+    # conn_local = pymysql.connect(
+    #     host='localhost',
+    #     user='root',
+    #     password='pengmin',
+    #     database='qixiangdb',
+    #     charset='utf8'
+    #     )
+
+    start_date_str="'"+start_date+"'"
+    end_date_str="'"+end_date+"'"
+    sn_str="'"+sn+"'"
+    sql_cmd="select * from drive_info where time between "+start_date_str+" and "+end_date_str+\
+    " and distance!=0 and name="+sn_str
+    range_soc_df = pd.read_sql(sql_cmd, db_qx)#使用read_sql方法查询qx数据库
+
+    if len(range_soc_df)>0:
+        #筛选出所有当日数据之后,筛选当日有更新的sn
+        today_sn_list=range_soc_df['name'].unique().tolist()
+        #建立空的dataframe,用于承接所有更新的factor信息
+        today_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])
+
+        for sn in today_sn_list:
+            #寻找factor_df,里面是否有sn号,如果没有sn对应信息,则新增信息。
+            sn_str="'"+sn+"'"
+
+            update_today_factor_flg=True
+            sql_cmd3="select sn,date,a0,a1,a2,a3,a4 from {} where date=".format(sn_table_name)+start_date_str+" and sn="+sn_str
+            factor_today_df=pd.read_sql(sql_cmd3, db_local)#使用read_sql方法查询local数据库
+            if len(factor_today_df)>=1:
+                # print(sn+' '+start_date_str+' factor exist in table! Factor not update.')
+                update_today_factor_flg=False
+
+            sql_cmd2="select sn,date,a0,a1,a2,a3,a4 from {} where date<=".format(sn_table_name)+start_date_str+" and sn="+sn_str
+            factor_df=pd.read_sql(sql_cmd2, db_local)#使用read_sql方法查询local数据库
+            #按照sn号和日期进行去重,避免运行时重复产生factor数据,保留第一次出现的行。
+            factor_df=factor_df.drop_duplicates(subset=['sn','date'],keep='first')
+            # pdb.set_trace()
+            if len(factor_df)==0:
+                #如果没有搜索到factor历史数据,则声明一个新的进行初始化
+                start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')
+                yesterday=(start_date_datetime+datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
+                factor_df=pd.DataFrame({'sn':sn,'date':yesterday,'a0':[1],'a1':[1],'a2':[1],'a3':[1],'a4':[1]})
+                today_sn_fct_df=today_sn_fct_df.append(factor_df.loc[0,:])#将初始化的行记录到数据库
+
+            sn_factor_df=factor_df.loc[factor_df['sn']==sn,:]#筛选sn对应的factor
+            sn_factor_df=sn_factor_df.sort_values(by='date',ascending=True)#按照日期排序
+
+            sn_factor_df_last=sn_factor_df.tail(1).copy()#寻找最后一行,代表最近日期
+            sn_factor_df_last=sn_factor_df_last.append(sn_factor_df_last)#新增加一行,用于存储新的factor
+            sn_factor_df_last=sn_factor_df_last.reset_index(drop=True)#重置index
+            sn_factor_df_last.loc[1,'date']=start_date#更改后一行的date为当前日期
+            #筛选对应车辆的信息
+            condition_sn=(range_soc_df['name']==sn)
+            sn_day_df=range_soc_df.loc[condition_sn,:].copy()
+            sn_day_df=sn_day_df.reset_index(drop=True)
+            #使用updtTodayFct函数更新今天的factor
+            if len(sn_day_df)>=2:
+                #使用process函数,进行预处理
+                sn_day_df=snDayDfPreProcess(sn_day_df)#!!!!!!!!!!!增加
+                # 临时措施,删除每天晚上0点以后的数据,5点以前的数据,防止对驾驶cycle判断产生影响。
+                day_start_time=datetime.datetime.strptime(start_date,'%Y-%m-%d')
+                day_morning_time=day_start_time+datetime.timedelta(hours=5)
+                morning_time_str=day_morning_time.strftime('%Y-%m-%d %H:%M:%S')
+                sn_day_df=sn_day_df.loc[sn_day_df['time']>morning_time_str,:]#去除掉了每天晚上0点以后的数据,短期措施
+                sn_day_df=sn_day_df.reset_index(drop=True)#重置index
+
+                if len(sn_day_df)>=2:
+                    sn_factor_df_new=updtTodayFct(sn_factor_df_last,sn_day_df)#更新fator的主函数
+
+                    if (len(sn_factor_df_new)>=2)&(update_today_factor_flg):#如果今日factor没有更新
+                        today_sn_fct_df=today_sn_fct_df.append(sn_factor_df_new.loc[1,:])#筛选第一行,进行拼接,最后写入到数据库中
+        
+        # #将today_sn_fct_df写入到数据库中
+        if len(today_sn_fct_df)>=1:
+            today_sn_fct_df.to_sql(sn_table_name,con=db_engine,chunksize=10000,if_exists='append',index=False)
+            # print(sn+' factor will be update in table tb_sn_factor!')
+        return sn_factor_df_new
+
+
+#更新最新的factor,一天调用一次。
+def updtNewestFctTb(current_time, db_local, sn_table_name='tb_sn_factor'):
+
+    '''更新tb_sn_factor_newest,只保留最新日期的factor。
+    从tb_sn_factor中,筛选最新的日期。
+    函数每天运行一次,从tb_sn_factor中筛选最新日期的factor。'''
+
+    current_time=current_time#当前时间
+    current_time_str=current_time.strftime('%Y-%m-%d %H:%M:%S')#时间格式化为字符串,年-月-日 时-分-秒
+    current_time_str="'"+current_time_str+"'"
+
+    sql_cmd_4="select sn,date,a0,a1,a2,a3,a4 from {} where date<".format(sn_table_name)+current_time_str
+    factor_all_df = pd.read_sql(sql_cmd_4, db_local)#使用read_sql方法查询qx数据库
+    #筛选今天之前的所有factor,只保留最近的一天。
+    sn_list=factor_all_df['sn'].unique().tolist()#筛选sn序列
+    newest_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])#声明空df
+
+    for sn in sn_list:
+        condition_sn=(factor_all_df['sn']==sn)
+        factor_pick_df=factor_all_df.loc[condition_sn,:]#按照sn进行筛选
+        factor_pick_df=factor_pick_df.sort_values(by='date')#按照日期排序
+        factor_last_df=factor_pick_df.tail(1)#选择最后日期
+        newest_sn_fct_df=newest_sn_fct_df.append(factor_last_df)#拼接到空df中
+    
+
+    #按照日期排序,只保留最近的一天,输出factor_unique_df,方法为replace。
+    #本函数,每天需要运行一次,用于更新factor。
+    # newest_sn_fct_df.to_sql(sn_newest_table_name,con=db_engine,chunksize=10000,\
+    #     if_exists='replace',index=False)
+    return newest_sn_fct_df
+#使用factor和soc推荐剩余续驶里程
+def calDistFromFct(input_df):
+    '''根据sn-time-soc-a0-a1-a2-a3-a4,使用factor正向计算计算VehElecRng。'''
+    row_df=input_df.copy()
+    soc=row_df['soc']#获取soc
+    factor=[]
+    factor.append(row_df['a4'])#0~20之间的factor
+    factor.append(row_df['a3'])#20~40之间的factor
+    factor.append(row_df['a2'])#40~60之间的factor
+    factor.append(row_df['a1'])#60~80之间的factor
+    factor.append(row_df['a0'])#80~100之间的factor
+
+    gap=20
+    yushu=soc%gap#余数部分
+    zhengshu=soc//gap#整数部分
+    i=0
+    range=0
+    while i<zhengshu:
+        dur_factor=factor[i]#当前权重
+        range+=dur_factor*gap#分段累加里程
+        i=i+1
+    if yushu>0.01:#避免soc=100时报错
+        range=range+yushu*factor[zhengshu]#最后把余项对应的里程加上
+    row_df['vehelecrng']=range#给VehElecRng列赋值
+    return row_df
+#更新当前时间对应的里程,每5min调用一次
+def updtVehElecRng(db_qx, db_local, sn_newest_table_name='tb_sn_factor_newest', input_time='2021-07-29 12:01:00'):
+    '''更新续驶里程,到tb_sn_factor_soc_range。
+    部署时设置每5min更新一次。
+    '''
+    #设置一个时间作为结束时间
+    # current_time=datetime.datetime.now()
+    current_time_raw=input_time#当前时间
+    current_time=datetime.datetime.strptime(current_time_raw,'%Y-%m-%d %H:%M:%S')#字符串转时间
+
+    #结束时间往前4min,59s,作为起始时间
+    before6min_time_str=(current_time+datetime.timedelta(minutes=-4,seconds=-59)).strftime('%Y-%m-%d %H:%M:%S')#6min前
+    before6min_time_str="'"+before6min_time_str+"'"
+    current_time_str=current_time.strftime('%Y-%m-%d %H:%M:%S')#时间格式化为字符串
+    current_time_str="'"+current_time_str+"'"
+
+    #从drive_info里面读取,该时间段内的name,time,soc三列
+    sql_cmd="select name,time,soc from drive_info where time between "+before6min_time_str+" and "+current_time_str
+    # print(sql_cmd)
+    range_soc_df = pd.read_sql(sql_cmd, db_qx)#使用read_sql方法查询qx数据库
+    range_soc_df.rename(columns={'name':'sn'},inplace=True)#将name列重命名为sn列
+
+    #任务2,从tb_sn_factor_newest里面读取最新的factor,获取距离今天最近的一个factor list
+    sql_cmd_1="select sn,a0,a1,a2,a3,a4 from {}".format(sn_newest_table_name)
+    # print(sql_cmd_1)
+    sn_factor_newest_df_raw = pd.read_sql(sql_cmd_1, db_local)#使用read_sql方法查询qx数据库
+
+    #任务3,将range_soc_df和sn_factor_newest_df_raw,双表合并成为一个新表格。
+    sn_soc_factor_df=pd.merge(range_soc_df,sn_factor_newest_df_raw,how='left',on='sn')
+    sn_soc_factor_df.fillna(1,inplace=True)#如果range_soc_df中有sn号,但sn_factor_newest_df_raw中没有。用1填充。
+    # sn_soc_factor_df.head()
+    #填充完成后,sn-time-soc-a0-a1-a2-a3-a4都已经齐全。
+
+    #任务4,调用函数,将VehElecRng计算出来
+    sn_soc_factor_range_df=pd.DataFrame([],columns=['sn','time','soc','a0','a1','a2','a3','a4','vehelecrng'])
+    for index in sn_soc_factor_df.index.tolist():
+        input_df=sn_soc_factor_df.loc[index,:]#挑选
+        sn_soc_factor_range_row=calDistFromFct(input_df)#计算VehElecRng
+        sn_soc_factor_range_df=sn_soc_factor_range_df.append(sn_soc_factor_range_row)#拼接
+
+    ##任务5,将sn_soc_factor_range_df写入到tb_sn_factor_soc_range中,使用替换关系。
+    # sn_soc_factor_range_df.to_sql(range_table_name,con=db_engine,chunksize=10000,\
+    #     if_exists='replace',index=False)    
+    return sn_soc_factor_range_df
+
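A quick numerical check of calDistFromFct above on a hand-built row; all values are invented and only illustrate the piecewise accumulation over 20%-SOC segments:

    import pandas as pd
    from UpdtFct import calDistFromFct

    row = pd.Series({'sn': 'SN-demo', 'time': '2021-07-29 12:00:00', 'soc': 50,
                     'a0': 1.0, 'a1': 1.0, 'a2': 1.2, 'a3': 1.1, 'a4': 0.9})
    out = calDistFromFct(row)
    # 0-20% uses a4, 20-40% uses a3, the remaining 10% uses a2:
    # 20*0.9 + 20*1.1 + 10*1.2 = 52.0 km
    print(out['vehelecrng'])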

+ 12 - 0
LIB/MIDDLE/DrivingRange/UpdtFctTable.py

@@ -0,0 +1,12 @@
+import pandas as pd
+import pymysql
+from sqlalchemy import create_engine
+import datetime
+from UpdtFct import *
+
+#调度周期:每天运行一次。
+
+#更新所有sn,连读多日的factor,如果start_date和end_date相隔一天,代表更新start_date的factor。
+start_date="2021-07-23"
+end_date="2021-07-28"
+updtAllSnFct(start_date,end_date)
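updtAllSnFct as defined in UpdtFct.py also expects db_engine, db_local and db_qx handles (plus an optional sn_table_name). A wiring sketch using the connection parameters that appear commented out at the top of UpdtFct.py; adjust them to the real deployment:

    import pymysql
    from sqlalchemy import create_engine
    from UpdtFct import updtAllSnFct

    db_engine = create_engine("mysql+mysqldb://root:pengmin@localhost/qixiangdb")   # engine used for to_sql writes
    db_local = pymysql.connect(host='localhost', user='root', password='pengmin',
                               database='qixiangdb', charset='utf8')                # local factor tables
    db_qx = pymysql.connect(host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com',
                            user='qx_cas', password='Qx@123456',
                            database='qx_cas', charset='utf8')                      # drive_info source

    updtAllSnFct("2021-07-23", "2021-07-28", db_engine, db_local, db_qx)            # sn_table_name defaults to 'tb_sn_factor'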

+ 11 - 0
LIB/MIDDLE/DrivingRange/UpdtFctTableNewest.py

@@ -0,0 +1,11 @@
+import pandas as pd
+import pymysql
+from sqlalchemy import create_engine
+import datetime
+from UpdtFct import *
+
+#调度周期:在UpdtFctTable运行结束之后,运行一次,不需要输入参数。
+
+#更新factor到最新状态,只保留最新的。
+
+updtNewestFctTb()
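updtNewestFctTb likewise takes the current time and a local DB handle and returns the newest factor rows instead of writing them (its internal to_sql is commented out). A sketch that reuses the db_local / db_engine handles from the previous example:

    import datetime
    from UpdtFct import updtNewestFctTb

    newest_df = updtNewestFctTb(datetime.datetime.now(), db_local)
    newest_df.to_sql('tb_sn_factor_newest', con=db_engine, chunksize=10000,
                     if_exists='replace', index=False)   # keep only the latest factor per SN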

+ 16 - 0
LIB/MIDDLE/DrivingRange/UpdtVehElecRng.py

@@ -0,0 +1,16 @@
+import pandas as pd
+import pymysql
+from sqlalchemy import create_engine
+import datetime
+from UpdtFct import *
+
+#调度周期:程序每5分钟运行一次
+
+#更新剩余里程,每5min一次,几秒钟运行结束。
+test_time=datetime.datetime.now()#当前系统时间
+input_time=datetime.datetime.strftime(test_time,'%Y-%m-%d %H:%M:%S')
+
+# input_time='2021-07-29 11:59:00'#手动设定一个时间
+
+#函数每5min调度一次,input_time为当前时间,更新tb_sn_factor_soc_range表格
+updtVehElecRng(input_time)
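The signature in UpdtFct.py is updtVehElecRng(db_qx, db_local, sn_newest_table_name=..., input_time=...), so the DB handles have to be supplied as well. A sketch, again reusing the handles from the UpdtFctTable example and persisting the returned frame the way the commented-out code suggests:

    from UpdtFct import updtVehElecRng

    result_df = updtVehElecRng(db_qx, db_local, input_time=input_time)
    result_df.to_sql('tb_sn_factor_soc_range', con=db_engine, chunksize=10000,
                     if_exists='replace', index=False)   # refreshed on every 5-minute run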

BIN
LIB/MIDDLE/DrivingRange/计算续驶里程程序介绍.docx


+ 6 - 6
LIB/MIDDLE/IndexStaByOneCycle.py

@@ -200,13 +200,13 @@ class IndexStaByOneCycle():
 基于单一状态(一次行车、一次静置、一次充电)的指标统计库
 
 '''
-__author__ = 'Wang Liming'
+__author__ = 'lmstack'
 
-import CONFIGURE.PathSetting as PathSetting
-import sys
-sys.path.append(PathSetting.backend_path)
-import datetime
-import Tools
+# import CONFIGURE.PathSetting as PathSetting
+# import sys
+# sys.path.append(PathSetting.backend_path)
+# import datetime
+# import Tools
 import pandas as pd
 import numpy as np
 

+ 6 - 6
LIB/MIDDLE/IndexStaByPeriod.py

@@ -175,17 +175,17 @@ class IndexStaByPeriod():
 基于某个周期(一天,一周...)的指标统计库
 
 '''
-__author__ = 'Wang Liming'
+__author__ = 'lmstack'
 
-import CONFIGURE.PathSetting as PathSetting
-import sys
-sys.path.append(PathSetting.backend_path)
-sys.path.append(PathSetting.middle_path)
+# import CONFIGURE.PathSetting as PathSetting
+# import sys
+# sys.path.append(PathSetting.backend_path)
+# sys.path.append(PathSetting.middle_path)
 import datetime
 import Tools
 import pandas as pd
 import numpy as np
-import IndexStaByOneCycle
+from LIB.MIDDLE import IndexStaByOneCycle
 
 class IndexStaByPeriod():
     def __init__(self):

+ 160 - 0
LIB/MIDDLE/LeakCurrent/LFPLeakCurrent20210812.py

@@ -0,0 +1,160 @@
+# 获取数据
+from LIB.BACKEND import DBManager
+
+import os
+import pandas as pd
+import numpy as np
+import bisect
+import datetime
+# import matplotlib.pyplot as plt
+
+#参数输入
+Capacity = 53.6
+PackFullChrgVolt=69.99
+CellFullChrgVolt=3.37
+CellVoltNums=20
+CellTempNums=4
+FullChrgSoc=98
+CellVoltPort=[3.357,3.358,3.359,3.36,3.361]
+PeakSoc=57
+# #40Ah-OCV
+# LookTab_SOC = [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95, 100]
+# LookTab_OCV = [3.3159, 3.4502, 3.4904, 3.5277, 3.5590, 3.5888, 3.6146, 3.6312, 3.6467, 3.6642, 3.6865, 3.7171, 3.7617,
+#                3.8031, 3.8440, 3.8888, 3.9376, 3.9891, 4.0451, 4.1068, 4.1830]
+#55Ah-OCV
+LookTab_SOC = [0,  10,  20,  30,  40,  50,  60,  70,  80,  90,  100]
+LookTab_OCV = [3.1820, 3.2250, 3.2730, 3.2840, 3.2860, 3.2920, 3.3210, 3.3260, 3.3270, 3.3270, 3.3640]
+
+# 获取数据时间段
+def cal_LFPLeakCurrent(sn, end_time, start_time):
+    end_time = end_time
+    strat_time = start_time
+
+    sn = sn
+    st = strat_time
+    et = end_time
+
+    dbManager = DBManager.DBManager()
+    df_data = dbManager.get_data(sn=sn, start_time=st, end_time=et, data_groups=['bms'])
+    df_bms = df_data['bms']
+
+    #寻找电压最大值
+    packcrnt=df_bms['总电流[A]']
+    SOC=df_bms['SOC[%]']
+    bmsstat=df_bms['充电状态']
+    time= pd.to_datetime(df_bms['时间戳'], format='%Y-%m-%d %H:%M:%S')
+
+    #第一步:筛选充电数据
+    ChgStart=[]
+    ChgEnd=[]
+    for i in range(3, len(time) - 3):
+        if i==3 and bmsstat[i]==2 and bmsstat[i+1]==2 and bmsstat[i+2]==2:
+            ChgStart.append(i)
+        elif bmsstat[i-2]!=2 and bmsstat[i-1]!=2 and bmsstat[i]==2:
+            ChgStart.append(i)
+        elif bmsstat[i-1]==2 and bmsstat[i]!=2 and bmsstat[i+1]!=2:
+            ChgEnd.append(i)
+        elif i == (len(time) - 4) and bmsstat[len(bmsstat)-1] == 2 and bmsstat[len(bmsstat)-2] == 2:
+            ChgEnd.append(len(time)-1)
+
+    #第二步:筛选充电起始Soc<45%,且单体最小电压>3.37V的数据
+    ChgStartValid=[]
+    ChgEndValid=[]
+    if ChgStart:
+        for i in range(len(ChgEnd)):
+            #寻找最小电压值
+            cellvolt = []
+            for j in range(1, CellVoltNums+1):
+                s = str(j)
+                volt = df_bms['单体电压' + s]/1000
+                cellvolt.append(max(volt[ChgStart[i]:ChgEnd[i]]))
+            if min(cellvolt)>CellFullChrgVolt and SOC[ChgStart[i]]<40 and (ChgEnd[i]-ChgStart[i])>10:
+                if ((time[ChgEnd[i]]-time[ChgStart[i]]).total_seconds())/(ChgEnd[i]-ChgStart[i])<30:
+                    ChgStartValid.append(ChgStart[i])
+                    ChgEndValid.append(ChgEnd[i])
+
+    #第三步:计算充电每个单体到达3.368V的Ah差
+    #定义寻找电压3.368V的数据点
+    def data_search(data1,data2,data3,data4):
+        '''Walk the voltage series data1 and return (accumulated Soc change, row index) at the first point above data4.'''
+        Soc=0
+        for m in range(1,len(data1)):
+            t=(data2[m]-data2[m-1]).total_seconds()
+            Soc=Soc-data3[m]*t/(3600*Capacity)
+            if data1[m]>data4:
+                return Soc,m
+        return Soc,len(data1)-1    # fallback when the threshold is never reached, so callers never unpack None
+    
+    if ChgStartValid:
+        df_DetaTime=pd.DataFrame()
+        df_DetaTime1=pd.DataFrame()
+        df_detatime=pd.DataFrame()
+        for i in range(len(ChgStartValid)):
+            DetaSoc1=[]
+            DetaSoc2 = []
+            DetaSoc=[]
+            a=list(range(5))
+            b=list(range(5))
+            #计算1-10号电芯到达特定电压值得时间和SOC
+            for j in range(1, CellVoltNums-9):
+                s = str(j)
+                cellvolt = df_bms['单体电压' + s]/1000
+                cellvolt=list(cellvolt[ChgStartValid[i]:ChgEndValid[i]])
+                Time=list(time[ChgStartValid[i]:ChgEndValid[i]])
+                Packcrnt=list(packcrnt[ChgStartValid[i]:ChgEndValid[i]])
+                for k in range(len(CellVoltPort)):
+                    a[k],b[k]=data_search(cellvolt,Time,Packcrnt,CellVoltPort[k])
+                DetaSoc1.append(np.mean(a))  #计算到达3.368V的时长
+                # DetaT.append((Time[b]-Time[0]).total_seconds())
+
+            #计算1-10号电芯到达特定电压值的平均Soc
+            Socmean1=(sum(DetaSoc1)-max(DetaSoc1)-min(DetaSoc1))/(len(DetaSoc1)-2)
+            # Tmean=np.mean(DetaT)
+
+            ##计算11-20号电芯到达特定电压值得时间和SOC
+            for j in range(11, CellVoltNums+1):
+                s = str(j)
+                cellvolt = df_bms['单体电压' + s]/1000
+                cellvolt=list(cellvolt[ChgStartValid[i]:ChgEndValid[i]])
+                Time=list(time[ChgStartValid[i]:ChgEndValid[i]])
+                Packcrnt=list(packcrnt[ChgStartValid[i]:ChgEndValid[i]])
+                for k in range(len(CellVoltPort)):
+                    a[k],b[k]=data_search(cellvolt,Time,Packcrnt,CellVoltPort[k])
+                DetaSoc2.append(np.mean(a))  #计算到达3.368V的时长
+
+            #计算11-20号电芯到达特定电压值的平均Soc
+            Socmean2=(sum(DetaSoc2)-max(DetaSoc2)-min(DetaSoc2))/(len(DetaSoc2)-2)
+
+            #计算每个电芯的Soc差
+
+            DetaSoc3=DetaSoc1+DetaSoc2
+            for j in range(len(DetaSoc3)):
+                if j<10:
+                    Socmean=Socmean1
+                else:
+                    Socmean=Socmean2
+                DetaSoc.append(DetaSoc3[j]-Socmean)
+                # DetaSoc.append((DetaT[j]-Tmean)*9.5/(Capacity*3600))
+            df_DetaTime[time[ChgStartValid[i]]]=DetaSoc
+        #漏电流计算
+        column=[]
+        time1=[]
+        sn1=[]
+
+        for index, row in df_DetaTime.iteritems():
+            column.append(index) #提取列名称
+
+        for  i in range(1,len(column)):#计算漏电流值
+            df_DetaTime1[column[i]] = df_DetaTime.apply(lambda x: (x[column[i-1]] -  x[column[i]])*1000*Capacity*3600/((column[i]-column[i-1]).total_seconds()), axis=1)
+            time1.append(column[i])
+            sn1.append(sn)
+        df_detatime['time']=time1
+        df_detatime['sn']=sn1
+
+        for i in range(CellVoltNums):
+            cell=[]
+            for j in range(1,len(column)):
+                cell.append(df_DetaTime1[column[j]][i])
+            df_detatime['cell'+str(i+1)]=cell
+        return df_detatime
+    return pd.DataFrame()
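A unit check on the leak-current expression above: the SOC offsets are fractions, Capacity is in Ah and the time delta in seconds, so the factor of 1000*3600 yields mA. The numbers below are illustrative only:

    delta_soc = 0.001              # 0.1% drift of a cell's SOC offset relative to the pack average
    capacity_ah = 53.6             # same Capacity constant as in the script
    delta_t_s = 30 * 24 * 3600     # drift observed over 30 days
    leak_ma = delta_soc * 1000 * capacity_ah * 3600 / delta_t_s
    print(round(leak_ma, 3))       # ~0.074 mA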

+ 23 - 0
LIB/MIDDLE/LeakCurrent/LeakCurrent表头及数据类型.xlsx

@@ -0,0 +1,23 @@
+表头	名称	数据类型
+time	time	timestamps
+SN	sn	str
+cell1	cell1	float64
+cell2	cell2	float64
+cell3	cell3	float64
+cell4	cell4	float64
+cell5	cell5	float64
+cell6	cell6	float64
+cell7	cell7	float64
+cell8	cell8	float64
+cell9	cell9	float64
+cell10	cell10	float64
+cell11	cell11	float64
+cell12	cell12	float64
+cell13	cell13	float64
+cell14	cell14	float64
+cell15	cell15	float64
+cell16	cell16	float64
+cell17	cell17	float64
+cell18	cell18	float64
+cell19	cell19	float64
+cell20	cell20	float64

+ 27 - 0
LIB/MIDDLE/LeakCurrent/main.py

@@ -0,0 +1,27 @@
+#coding=utf-8
+import os
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+from LIB.MIDDLE import SignalMonitor
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time, datetime
+import traceback
+import LFPLeakCurrent20210812 as LFPLeakCurrent
+
+from urllib import parse
+
+dbManager = DBManager.DBManager()
+if __name__ == "__main__":
+    SNdata_6060 = pd.read_excel('骑享资产梳理-20210621.xlsx', sheet_name='6060')
+    SNnums_6060=SNdata_6060['SN号']
+    now_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    now_time=datetime.datetime.strptime(now_time,'%Y-%m-%d %H:%M:%S')
+    start_time=now_time-datetime.timedelta(days=31)
+    end_time=str(now_time)
+    start_time=str(start_time)
+
+    for sn in SNnums_6060.tolist():
+        res = LFPLeakCurrent.cal_LFPLeakCurrent(sn, end_time, start_time)
+        res.to_csv('BMS_LeakCurrent_'+sn+'.csv',encoding='GB18030')

+ 56 - 31
LIB/MIDDLE/SignalMonitor.py

@@ -1,15 +1,15 @@
 import datetime
-import os
+# import os
 import pandas as pd
-import Tools
-import sys
-import xlutils
+# import Tools
+# import sys
+# import xlutils
 from xlrd import open_workbook
 from xlutils.copy import copy
 
-import CONFIGURE.PathSetting as PathSetting
-sys.path.append(PathSetting.backend_path)
-import DBManager
+# import CONFIGURE.PathSetting as PathSetting
+# sys.path.append(PathSetting.backend_path)
+from LIB.BACKEND import DBManager
 dbManager = DBManager.DBManager()
 
 class SignalMonitor():
@@ -75,7 +75,7 @@ class SignalMonitor():
         return df_state
     
     @staticmethod
-    def _judge_offline_state_between_messages(sn, PackState_new, PackState_old, Timestamp_new, Timestamp_old, df_res, mode):
+    def _judge_offline_state_between_messages(sn, PackState_new, PackState_old, Timestamp_new, Timestamp_old, lat, long, df_res, mode):
         delta_time = (Timestamp_new - Timestamp_old).total_seconds()
         max_state = max(PackState_new, PackState_old)
         if max_state == 0:
@@ -108,8 +108,12 @@ class SignalMonitor():
             LineState = 2
         
         if LineState > 0:
-            df_res = df_res.append({'sn':sn[0], 'PackState':PackState_new*16+PackState_old, 'LineState':LineState, 'StartTime':Timestamp_old, 
+            if mode == 'BMS':
+                df_res = df_res.append({'sn':sn[0], 'PackState':PackState_new*16+PackState_old, 'LineState':LineState, 'StartTime':Timestamp_old, 
                         'EndTime':Timestamp_new, 'OfflineTime':delta_time}, ignore_index=True)
+            elif mode == 'GPS':
+                df_res = df_res.append({'sn':sn[0], 'PackState':PackState_new*16+PackState_old, 'LineState':LineState, 'StartTime':Timestamp_old, 
+                    'EndTime':Timestamp_new, 'OfflineTime':delta_time, 'latitude':lat, 'longitude':long}, ignore_index=True)
         return LineState, df_res
 
     @staticmethod
@@ -119,17 +123,35 @@ class SignalMonitor():
             df_state.loc[0,'LineState'] = 0
             while index < len(df_state)-1:
                 index = index + 1
-                LineState, df_res = SignalMonitor._judge_offline_state_between_messages(sn, df_state.loc[index, 'PackState'], df_state.loc[index-1, 'PackState'], 
-                df_state.loc[index, 'Timestamp'], df_state.loc[index-1, 'Timestamp'], df_res, mode=mode)
+                if mode == 'BMS':
+                    LineState, df_res = SignalMonitor._judge_offline_state_between_messages(sn, df_state.loc[index, 'PackState'], df_state.loc[index-1, 'PackState'], 
+                    df_state.loc[index, 'Timestamp'], df_state.loc[index-1, 'Timestamp'], None, None,
+                    df_res, mode=mode)
+                elif mode == 'GPS':
+                    LineState, df_res = SignalMonitor._judge_offline_state_between_messages(sn, df_state.loc[index, 'PackState'], df_state.loc[index-1, 'PackState'], 
+                    df_state.loc[index, 'Timestamp'], df_state.loc[index-1, 'Timestamp'], df_state.loc[index-1, 'latitude'], df_state.loc[index-1, 'longitude'],
+                    df_res, mode=mode)
                 df_state.loc[index, 'LineState'] = LineState
         else:
             df_last_info = df_last_state.loc[len(df_last_state) - 1]
-            df_state.loc[0,'LineState'], df_res = SignalMonitor._judge_offline_state_between_messages(sn, df_state.loc[0, 'PackState'], df_last_info['PackState'], 
-                df_state.loc[0, 'Timestamp'], df_last_info['Timestamp'], df_res, mode=mode)
+            if mode == 'BMS':
+                df_state.loc[0,'LineState'], df_res = SignalMonitor._judge_offline_state_between_messages(sn, df_state.loc[0, 'PackState'], df_last_info['PackState'], 
+                    df_state.loc[0, 'Timestamp'], df_last_info['Timestamp'], None, None,
+                    df_res, mode=mode)
+            elif mode == 'GPS':
+                df_state.loc[0,'LineState'], df_res = SignalMonitor._judge_offline_state_between_messages(sn, df_state.loc[0, 'PackState'], df_last_info['PackState'], 
+                    df_state.loc[0, 'Timestamp'], df_last_info['Timestamp'], df_state.loc[0, 'latitude'], df_state.loc[0, 'longitude'],
+                    df_res, mode=mode)
             while index < len(df_state)-1:
                 index = index + 1
-                LineState, df_res = SignalMonitor._judge_offline_state_between_messages(sn, df_state.loc[index, 'PackState'], df_state.loc[index-1, 'PackState'], 
-                df_state.loc[index, 'Timestamp'], df_state.loc[index-1, 'Timestamp'], df_res, mode=mode)
+                if mode == 'BMS':
+                    LineState, df_res = SignalMonitor._judge_offline_state_between_messages(sn, df_state.loc[index, 'PackState'], df_state.loc[index-1, 'PackState'], 
+                    df_state.loc[index, 'Timestamp'], df_state.loc[index-1, 'Timestamp'], None, None,
+                    df_res, mode=mode)
+                elif mode == 'GPS':
+                    LineState, df_res = SignalMonitor._judge_offline_state_between_messages(sn, df_state.loc[index, 'PackState'], df_state.loc[index-1, 'PackState'], 
+                    df_state.loc[index, 'Timestamp'], df_state.loc[index-1, 'Timestamp'], df_state.loc[index-1, 'latitude'], df_state.loc[index-1, 'longitude'],
+                    df_res, mode=mode)
                 df_state.loc[index, 'LineState'] = LineState
         # SignalMonitor._file_write(r'D:\result_03.xls', df_state)
         return df_res
@@ -137,22 +159,22 @@ class SignalMonitor():
     @staticmethod
     def _set_gps_working_states(df_state, df_state_gps):
         for i in range(0, len(df_state_gps)):
-                if df_state_gps.loc[i, 'Timestamp'] <= df_state.loc[0, 'Timestamp']:
-                    df_state_gps.loc[i, 'PackState'] = df_state.loc[0, 'PackState']
-                elif df_state_gps.loc[i, 'Timestamp'] >= df_state.loc[len(df_state)-1, 'Timestamp']:
-                    df_state_gps.loc[i:len(df_state_gps)-1, 'PackState'] = df_state.loc[len(df_state)-1, 'PackState']
-                    break
+            if df_state_gps.loc[i, 'Timestamp'] <= df_state.loc[0, 'Timestamp']:
+                df_state_gps.loc[i, 'PackState'] = df_state.loc[0, 'PackState']
+            elif df_state_gps.loc[i, 'Timestamp'] >= df_state.loc[len(df_state)-1, 'Timestamp']:
+                df_state_gps.loc[i:len(df_state_gps)-1, 'PackState'] = df_state.loc[len(df_state)-1, 'PackState']
+                break
+            else:
+                index0 = max(df_state[df_state['Timestamp'] <= df_state_gps.loc[i, 'Timestamp']].index)
+                index1 = min(df_state[df_state['Timestamp'] >= df_state_gps.loc[i, 'Timestamp']].index)
+                front = (df_state_gps.loc[i, 'Timestamp'] - df_state.loc[index0, 'Timestamp']).total_seconds()
+                back = (df_state.loc[index1, 'Timestamp'] - df_state_gps.loc[i, 'Timestamp']).total_seconds()
+                if front > back:
+                    df_state_gps.loc[i, 'PackState'] = df_state.loc[index1, 'PackState']
+                elif front == back:
+                    df_state_gps.loc[i, 'PackState'] = max(df_state.loc[index1, 'PackState'], df_state.loc[index0, 'PackState'])
                 else:
-                    index0 = max(df_state[df_state['Timestamp'] <= df_state_gps.loc[i, 'Timestamp']].index)
-                    index1 = min(df_state[df_state['Timestamp'] >= df_state_gps.loc[i, 'Timestamp']].index)
-                    front = (df_state_gps.loc[i, 'Timestamp'] - df_state.loc[index0, 'Timestamp']).total_seconds()
-                    back = (df_state.loc[index1, 'Timestamp'] - df_state_gps.loc[i, 'Timestamp']).total_seconds()
-                    if front > back:
-                        df_state_gps.loc[i, 'PackState'] = df_state.loc[index1, 'PackState']
-                    elif front == back:
-                        df_state_gps.loc[i, 'PackState'] = max(df_state.loc[index1, 'PackState'], df_state.loc[index0, 'PackState'])
-                    else:
-                        df_state_gps.loc[i, 'PackState'] = df_state.loc[index0, 'PackState']
+                    df_state_gps.loc[i, 'PackState'] = df_state.loc[index0, 'PackState']
         return df_state_gps
     
     @staticmethod
@@ -209,7 +231,7 @@ class SignalMonitor():
         return df_res,df_state, df_last_state
     
     def get_gps_offline_stat(self,sn, st, et, df_state, df_res_gps, df_last_state_gps, cal_Period=24):    # 计算一段时间内GPS信号统计数据
-        df_state_gps = pd.DataFrame(columns=['sn', 'Timestamp', 'PackState', 'LineState'])
+        df_state_gps = pd.DataFrame(columns=['sn', 'Timestamp', 'PackState', 'LineState', 'latitude', 'longitude'])
         # print("start_time is {}, limit_time is {}".format(st, limit_time))
         end_time = st + datetime.timedelta(hours=cal_Period)    # 结束时间
         start_time_str = st.strftime('%Y-%m-%d %H:%M:%S')
@@ -223,6 +245,9 @@ class SignalMonitor():
 
 
         df_state_gps['Timestamp'] = df_gps['时间戳']
         df_state_gps['sn'] = sn[0]
+        df_state_gps['latitude'] = df_gps['纬度']
+        df_state_gps['longitude'] = df_gps['经度']
+
 
         if len(df_state_gps) > 0:    # 无数据则不计算    
             df_state_gps = SignalMonitor._set_gps_working_states(df_state, df_state_gps)    # 根据同时间段内BMS状态计算GPS数据对应的BMS状态
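With the latitude/longitude columns added above, a GPS-mode offline record now carries the last known position. The dict below only sketches the shape of one appended row; every value is invented:

    offline_row = {'sn': 'SN-demo', 'PackState': 2*16 + 1, 'LineState': 2,
                   'StartTime': '2021-07-29 11:40:00', 'EndTime': '2021-07-29 12:01:00',
                   'OfflineTime': 1260.0, 'latitude': 31.23, 'longitude': 121.47}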

+ 125 - 0
LIB/MIDDLE/odo/CalDist.py

@@ -0,0 +1,125 @@
+from math import radians, cos, sin, asin, sqrt
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+from GpsRank import *
+from ProcessDfBms import *
+from ProcessDfGps import *
+
+from LIB.BACKEND import DBManager
+#####################################配置环境分割线#################################################
+
+def GetDistInfo(input_sn,input_starttime,input_endtime):
+
+    #####################################配置参数分割线#################################################
+    dbManager = DBManager.DBManager()
+    data_raw = dbManager.get_data(sn=input_sn, start_time=input_starttime, 
+        end_time=input_endtime)
+    #拆包预处理
+    df_bms_raw=data_raw['bms']
+    df_gps_raw=data_raw['gps']
+    df_bms=preprocess_Df_Bms(df_bms_raw)
+    df_gps=preprocess_Df_Gps(df_gps_raw)
+    
+    #####################################数据预处理分割线#################################################
+
+    # mode: 0:正常取数; 1:7255 取数
+    if input_sn[0:2] == 'UD' or input_sn[0:2] == 'MG':
+        mode = 1
+    else:
+        mode = 0
+    #获取状态表,mode默认为0,mode=1放电时电流为负,mode=0充电时电流为正
+
+    df_bms_drive_timetable=get_bms_drive_timetable(df_bms,mode)
+    df_gps_drive_cycle_accum=pd.DataFrame()
+    if len(df_bms_drive_timetable)>0:
+        for index in range(len(df_bms_drive_timetable)):
+            #筛选drivecycle数据
+            drive_start_time=df_bms_drive_timetable.loc[index,'drive_start_time']#开始时间
+            drive_end_time=df_bms_drive_timetable.loc[index,'drive_end_time']#结束时间
+
+            time_condition=(df_gps['time']>drive_start_time)&(df_gps['time']<drive_end_time)#时间判断条件
+            df_gps_drive_cycle=df_gps.loc[time_condition,:].copy()
+            df_gps_drive_cycle=df_gps_drive_cycle.reset_index(drop=True)#重置index
+            #计算drivecycle GPS累计里程,存入表格
+            condition_a=df_gps_drive_cycle['deltatime']>60*3
+            condition_b=(df_gps_drive_cycle['deltatime']>90*1)&(df_gps_drive_cycle['distance']>1000)
+            drive_cycle_dist_array=df_gps_drive_cycle.loc[~(condition_a|condition_b),'distance'].values
+            drive_cycle_dist_array=drive_cycle_dist_array[np.where((drive_cycle_dist_array>=1)&(drive_cycle_dist_array<1000))[0]]
+            gps_dist=drive_cycle_dist_array.sum()
+            df_bms_drive_timetable.loc[index,'gps_dist']=gps_dist#得到GPS路径
+            #计算头-尾的空缺时间段对应的预估SOC
+            if len(df_gps_drive_cycle)>2:
+                gps_starttime=df_gps_drive_cycle.loc[1,'time']#gps开始时间
+                gps_endtime=df_gps_drive_cycle.loc[df_gps_drive_cycle.index[-1],'time']#gps结束时间
+                #从drive_start_time到gps开始时间,使用SOC计算的里程
+                #gps结束时间到drive_end_time,使用SOC计算的里程
+                unrecorded_odo_head=cal_deltasoc(df_bms,drive_start_time,gps_starttime)
+                unrecorded_odo_tail=cal_deltasoc(df_bms,gps_endtime,drive_end_time)
+            else:
+                #计算数据丢失行unrecordeodo
+                unrecorded_odo_head=cal_deltasoc(df_bms,drive_start_time,drive_end_time)
+                unrecorded_odo_tail=0
+            #计算中间的预估SOC
+            predict_dist=cal_unrecorded_gps(df_gps_drive_cycle,df_bms)
+            #计算总的预估SOC
+            totaldist=predict_dist+unrecorded_odo_head+ unrecorded_odo_tail#得到GPS路径
+            df_bms_drive_timetable.loc[index,'predict_dist']=totaldist
+    else :
+        pass
+
+    #####################################统计行驶里程End#################################################
+    #打印输出结果#
+    index_list=list(range(len(df_bms_drive_timetable)))
+
+    dist_gps=0
+    dist_predict=0
+    day_start_time=''#当日开始时间
+    day_end_time=''#当日结束时间
+    day_start_soc=0#当日开始soc
+    day_end_soc=0#当日结束soc
+    day_min_soc=101#当日最低soc
+    drive_accum_soc=0#累计使用SOC
+
+    if len(df_bms_drive_timetable)>0:
+        #开始行(第一段行车)
+        day_start_soc=df_bms_drive_timetable.iloc[0]['drive_start_soc']#开始soc
+        day_start_time=df_bms_drive_timetable.iloc[0]['drive_start_time']#开始时间
+        #结束行(最后一段行车)
+        day_end_time=df_bms_drive_timetable.iloc[-1]['drive_end_time']#结束时间
+        day_end_soc=df_bms_drive_timetable.iloc[-1]['drive_end_soc']#结束soc
+
+    for index in index_list:
+        '''汇总里程'''
+        dist_gps+=df_bms_drive_timetable.loc[index,'gps_dist']/1000#计算GPS里程
+        dist_predict+=df_bms_drive_timetable.loc[index,'predict_dist']#计算预估里程
+        drive_start_soc=df_bms_drive_timetable.loc[index,'drive_start_soc']#驾驶周期开始的soc
+        drive_end_soc=df_bms_drive_timetable.loc[index,'drive_end_soc']#驾驶周期结束的soc
+        day_min_soc=min(day_min_soc,drive_start_soc,drive_end_soc)
+
+        delta_soc=drive_start_soc-drive_end_soc#驾驶周期SOC变化量
+        drive_accum_soc+=abs(delta_soc)#所有drive cycle累计消耗的soc
+
+    # gps_score=get_df_gps_score(input_starttime,input_endtime,df_gps)
+    # gps_score=round(gps_score,1)
+    #计算总里程
+    dist_gps=round(dist_gps,3)
+    dist_predict=round(dist_predict,3)
+    dist_all=round(dist_gps+dist_predict,3)
+    #输出统计结果
+    # print ('为您查询到,从'+input_starttime+'到'+input_endtime+'时间段内:')
+    # print('SOC变化量:'+str(df_bms['bmspacksoc'].max()-df_bms['bmspacksoc'].min())+' %')
+    # print('行驶总里程:'+str(dist_all)+' km')
+
+    return {'SN':input_sn,'range':dist_all,'accum_soc':drive_accum_soc,'day_start_soc':day_start_soc,
+    'day_end_soc':day_end_soc,'day_start_time':day_start_time,'day_end_time':day_end_time,
+    'day_min_soc':day_min_soc}
+    # print('其中GPS信号在线时里程:'+str(dist_gps)+' km')
+    # print('其中GPS信号掉线时预估里程:'+str(dist_predict)+' km')
+    # print('GPS信号质量评分为:'+str(gps_score),'分\n')
+
+    #####################################打印结果End#################################################
+
+
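An illustrative single-day call of GetDistInfo above; the SN is the one shown commented out elsewhere in this commit and the date is arbitrary:

    from CalDist import GetDistInfo

    info = GetDistInfo('PK50001A100000035', '2021-07-31 00:00:00', '2021-07-31 23:59:00')
    print(info['range'], info['accum_soc'], info['day_min_soc'])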

+ 68 - 0
LIB/MIDDLE/odo/CalDist_Batch.py

@@ -0,0 +1,68 @@
+from math import radians, cos, sin, asin, sqrt
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+from GpsRank import *
+from ProcessDfBms import *
+from ProcessDfGps import *
+from CalDist import *
+from LIB.BACKEND import DBManager
+import pdb
+
+asset_table_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\asset_table.xlsx'
+drive_info_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\drive_info.xlsx'
+asset_sheet_num=1
+usecols_list=[4,5]
+
+asset_table=pd.read_excel(asset_table_path,sheet_name=asset_sheet_num,skiprows=1,usecols=usecols_list)
+SN_list=asset_table['SN号'].values.tolist()
+print('从6060sheet读取到:'+str(len(SN_list))+'行')
+asset_table=asset_table.rename(columns={'SN号':'SN','状态':'state'})
+
+asset_table.set_index(["SN"],inplace=True)
+col_name=asset_table.columns.tolist()
+col_name.extend(['range','accum_soc','day_start_soc','day_end_soc','day_start_time','day_end_time'])
+asset_table=asset_table.reindex(columns=col_name)
+
+start_hour='00:00:00'#每日查询最早时间
+end_hour='23:59:00'#每日查询最晚时间
+
+
+date_index=pd.date_range('2021-07-31','2021-07-31')
+for date in date_index:
+    '''遍历日期'''
+
+    str_date=str(date)[:10]
+    input_starttime=str_date+' '+start_hour
+    input_endtime=str_date+' '+end_hour
+    test_day=str_date[5:10]#月-日,用于建立sheet
+    drive_info_path='D:\\work\\Qixiang\\data_analyze_platform\\pengmin\\AllCarDist\\6060\\drive_info'+test_day+'_50_end_'+'.xlsx'
+
+    print(input_starttime)
+
+    drive_info_aday=pd.DataFrame()
+    SN_list_short=SN_list#先选择了0:50,50:end
+
+    for SN in SN_list_short:
+        '''遍历SN号'''
+        SN=SN.strip('\t')
+        SN=SN.strip('\n')
+
+        try:
+            range=GetDistInfo(SN,input_starttime,input_endtime)
+            range_df=pd.DataFrame([range])
+            drive_info_aday=pd.concat([drive_info_aday,range_df],axis=0)
+
+        except:
+            print(SN+' '+test_day+'fail')
+        else:
+            pass
+            #print(SN+' '+test_day+'success')
+
+    drive_info_aday.to_excel(drive_info_path,sheet_name=test_day)#sheet名称为testday
+    
+    
+
+

+ 77 - 0
LIB/MIDDLE/odo/GpsRank.py

@@ -0,0 +1,77 @@
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+def cal_gps_score(df):
+    '''在获取信号,优、良、合格、掉线的比例之后,计算gps的总评分'''
+    score=0
+    for index in range(len(df)):
+        time_percent=df.loc[index,'累计时间占比']
+        if df.loc[index,'GPS质量']=='优':
+            score+=time_percent*0
+        elif df.loc[index,'GPS质量']=='良':
+            score+=time_percent*0.3
+        elif df.loc[index,'GPS质量']=='合格':
+            score+=time_percent*0.5
+        elif df.loc[index,'GPS质量']=='掉线':
+            score+=time_percent*1
+    return (1-score)*100
+
+def gps_rank(df_gps_signal_table,df_gps,signal_rank,dist_factor):
+    '''gps信号质量分析函数,需要输入表格,df_gps,信号等级,权重'''
+    gps_signal_condition=(df_gps['gps_signal']==signal_rank)
+    dist=df_gps.loc[gps_signal_condition,'distance'].values.sum()
+    deltatime=df_gps.loc[gps_signal_condition,'deltatime'].values.sum()
+    df_gps_signal_table_condition=(df_gps_signal_table['gps_signal']==signal_rank)
+    df_gps_signal_table.loc[df_gps_signal_table_condition,'accum_distance']=dist/1000
+    df_gps_signal_table.loc[df_gps_signal_table_condition,'accum_deltatime']=deltatime
+    df_gps_signal_table.loc[df_gps_signal_table_condition,'accum_distance_factor']=dist/1000*dist_factor
+    return df_gps_signal_table
+
+def get_df_gps_score(starttime,endtime,df_gps):
+    '''对df_gps中的gps质量进行评分,返回一个数值'''
+    test_start_time=starttime#'2021-05-29 17:16:39'
+    test_end_time=endtime#'2021-05-29 20:08:08'
+
+    test_time_condition=(df_gps['time']>test_start_time)&(df_gps['time']<test_end_time)
+    df_gps_test=df_gps.loc[test_time_condition,:].copy()
+    df_gps_test=df_gps_test.reset_index(drop=True)#重置index
+    #按照deltatime打标签
+    gps_deltatime_bins=[0,30,60,120,10000]#优-良-合格-掉线
+    name=['优','良','合格','掉线']
+    df_gps_test['gps_signal']=pd.cut(df_gps_test['deltatime'], gps_deltatime_bins,labels=name)
+    df_gps_test['gps_signal'].value_counts()
+    #声明一个gps信号按类别统计table
+    df_gps_signal_table=pd.DataFrame()
+    df_gps_signal_table['gps_signal']=df_gps_test['gps_signal'].value_counts().index.tolist()
+    df_gps_signal_table['num']=df_gps_test['gps_signal'].value_counts().values.tolist()
+
+    #分类进行统计
+    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'优',1.00)
+    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'良',1.05)
+    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'合格',1.2)
+    df_gps_signal_table=gps_rank(df_gps_signal_table,df_gps_test,'掉线',1)
+
+    #次数占比,时间占比
+    all_num=df_gps_signal_table['num'].sum()
+    df_gps_signal_table['num_percent']=df_gps_signal_table['num']/all_num
+    all_accum_deltatime=df_gps_signal_table['accum_deltatime'].sum()
+    df_gps_signal_table['accum_deltatime_percent']=df_gps_signal_table['accum_deltatime']/all_accum_deltatime
+
+    #选择参数
+    df_gps_signal_table=df_gps_signal_table[['gps_signal','num','num_percent','accum_distance',
+                                            'accum_distance_factor','accum_deltatime','accum_deltatime_percent']]
+    df_gps_signal_table=df_gps_signal_table.rename(columns={'gps_signal':'GPS质量','num':'数量','num_percent':'数量占比',
+                                                        'accum_distance':'累计里程','accum_distance_factor':'累计里程修正值',
+                                                        'accum_deltatime':'累计时间','accum_deltatime_percent':'累计时间占比'})
+
+    df_gps_signal_table.loc[:,['GPS质量','累计时间','累计时间占比']]
+    gps_score=cal_gps_score(df_gps_signal_table)#调用函数计算gps评分
+    
+    #输出结果,评分
+    #print('From '+test_start_time+'  to '+test_end_time)
+    #print('GPS信号质量评分:'+str(gps_score))
+
+    return gps_score
+
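A worked example for cal_gps_score above; the time shares are made up, while the weights (0 / 0.3 / 0.5 / 1) are the ones hard-coded in the function:

    import pandas as pd
    from GpsRank import cal_gps_score

    df_demo = pd.DataFrame({'GPS质量': ['优', '良', '合格', '掉线'],
                            '累计时间占比': [0.7, 0.1, 0.1, 0.1]})
    # penalty = 0.7*0 + 0.1*0.3 + 0.1*0.5 + 0.1*1 = 0.18, so the score is (1 - 0.18)*100 ≈ 82
    print(cal_gps_score(df_demo))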

+ 159 - 0
LIB/MIDDLE/odo/ProcessDfBms.py

@@ -0,0 +1,159 @@
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+
+def get_bms_drive_timetable(df_bms,battery_mode):
+    '''对df_bms进行处理,得到行车的时间表。'''
+
+    #####################step1 先使用电流做充电状态的判断#############################################
+    if battery_mode==0:#mode=0,电流为正代表放电
+        condition_chrg=df_bms['bmspackcrnt']<0##根据电流,挑选充电状态
+        df_bms.loc[condition_chrg,'bscsta']='chrg'
+        condition_drive=df_bms['bmspackcrnt']>0.01##根据电流,挑选行驶状态
+        df_bms.loc[condition_drive,'bscsta']='drive'
+        df_bms.loc[~(condition_drive|condition_chrg),'bscsta']='idle'#静置状态
+    else :#mode=1,电流为负代表放电
+        condition_chrg=df_bms['bmspackcrnt']>0##根据电流,挑选充电状态
+        df_bms.loc[condition_chrg,'bscsta']='chrg'
+        condition_drive=df_bms['bmspackcrnt']<-0.01##根据电流,挑选行驶状态
+        df_bms.loc[condition_drive,'bscsta']='drive'
+        df_bms.loc[~(condition_drive|condition_chrg),'bscsta']='idle'#静置状态
+
+    #####################step2 对drive进行debounce,进入时立即进入,退出时debounce,5分钟。##########
+    index=0
+    debounce_row=10#debounce判断持续10行
+    debounce_time=300#debounce判断持续300秒
+    #对上一步初步状态进行二次处理
+    while index<(len(df_bms)-debounce_row):
+        mode_0=df_bms.loc[index,'bscsta']
+        mode_1=df_bms.loc[index+1,'bscsta']
+        #如果发现了边界行,则进行debounce判断
+        if (mode_0=='drive')&(mode_1!='drive'):#如果从drive变为idle
+            accum_subtime=0#累计时间初始化
+
+            for sub_index in range(debounce_row):#往下处理10行
+                sub_time=df_bms.loc[index+sub_index,'deltatime']
+                accum_subtime+=sub_time
+                #如果累计时间不到300秒,则设置为drive
+                if accum_subtime<debounce_time:
+                    df_bms.loc[index+sub_index,'bscsta']='drive'
+            index=index+debounce_row#处理10行以后的数据
+        #如果从idle变为drivemode,则将idle变为drive,包容前一行
+        elif (mode_0!='drive')&(mode_1=='drive'): 
+            df_bms.loc[index,'bscsta']='drive'
+            index=index+1
+        else:
+            index=index+1
+
+
+    #######################step3 对drivemode的时间进行分段###########################################
+    not_drive_flg=0#初始化
+    #输出drivemode的时间段,包含开始时间、结束时间
+    df_bms_drive_timetable_index=0
+    df_bms_drive_timetable=pd.DataFrame([],columns=['drive_start_time','drive_end_time',
+                                                    'gps_dist','predict_dist','drive_start_soc','drive_end_soc'])#use a list so the column order stays deterministic
+    for index in range(len(df_bms)):
+        temp_bscsta=df_bms.loc[index,'bscsta']
+        
+        if (temp_bscsta=='drive')&(not_drive_flg==0):
+            drive_start_time=df_bms.loc[index,'time']
+            not_drive_flg=1
+            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_start_time']=drive_start_time
+            #startsoc
+            drive_start_soc=df_bms.loc[index,'bmspacksoc']
+            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_start_soc']=drive_start_soc
+
+        elif (temp_bscsta!='drive')&(not_drive_flg==1):
+            drive_end_time=df_bms.loc[index,'time']
+            not_drive_flg=0
+            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_end_time']=drive_end_time
+            #endsoc
+            drive_end_soc=df_bms.loc[index,'bmspacksoc']
+            df_bms_drive_timetable.loc[df_bms_drive_timetable_index,'drive_end_soc']=drive_end_soc
+            df_bms_drive_timetable_index+=1#index++
+
+    #删除时间信息不齐全的行
+    df_bms_drive_timetable=df_bms_drive_timetable.dropna(subset=['drive_end_time','drive_start_time'])
+    
+    return df_bms_drive_timetable
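
A hand-built usage sketch for the state labelling and timetable extraction above, assuming this module is importable as ProcessDfBms; all values are made up.

import pandas as pd
from datetime import datetime, timedelta
from ProcessDfBms import get_bms_drive_timetable   # assumption: module is on the import path

t0 = datetime(2021, 8, 1, 8, 0, 0)
df_bms = pd.DataFrame({
    'time':        [t0 + timedelta(seconds=10*i) for i in range(6)],
    'bmspackcrnt': [5, 6, 5, 0, -8, -8],    # battery_mode=0: positive current = discharge
    'bmspacksoc':  [90, 89, 88, 88, 88, 89],
    'deltatime':   [0, 10, 10, 10, 10, 10],
})
print(get_bms_drive_timetable(df_bms, battery_mode=0))   # one drive segment covering rows 0-2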
+
+
+def read_df_bms(path):
+    '''从路径中读取df_bms,进行预处理'''
+    df_bms=pd.read_csv(path, encoding='gbk')#编码方式gbk
+    #筛选表头,重命名
+    bms_columns=['时间戳','总电流[A]','总电压[V]','SOC[%]']
+    df_bms=df_bms.loc[:,bms_columns].copy()
+    df_bms.rename(columns = {"时间戳": "time", "总电流[A]": "bmspackcrnt", 
+                             "总电压[V]": "bmspackvol", "SOC[%]": "bmspacksoc"},inplace=True)#表头替换
+    #时间格式调整
+    df_bms['time']=df_bms['time'].apply(lambda x:datetime.strptime(x,'%Y-%m-%d %H:%M:%S'))#时间格式调整
+    #进行预处理
+    df_bms=df_add_deltatime(df_bms)#增加deltatime列 
+    return df_bms
+
+def preprocess_Df_Bms(df_bms):
+    '''对获得的df_bms,进行预处理'''
+    #筛选表头,重命名
+    bms_columns=['时间戳','总电流[A]','总电压[V]','SOC[%]']
+    df_bms=df_bms.loc[:,bms_columns].copy()
+    df_bms.rename(columns = {"时间戳": "time", "总电流[A]": "bmspackcrnt", 
+                             "总电压[V]": "bmspackvol", "SOC[%]": "bmspacksoc"},inplace=True)#表头替换
+    #删除空行
+    df_bms=df_bms.dropna(subset=['time'])
+    #删除时间重复的行,保留第一次出现的行
+    df_bms=df_bms.drop_duplicates(subset=['time'],keep='first')
+    #时间格式调整
+    df_bms['time']=df_bms['time'].apply(lambda x:datetime.strptime(x,'%Y-%m-%d %H:%M:%S'))#时间格式调整
+    #进行预处理
+    df_bms=df_add_deltatime(df_bms)#增加deltatime列 
+    return df_bms
+
+
+def df_add_deltatime(df_in):
+    '''Add a columns:deltatime,input df must have time column.'''
+    for i in range(len(df_in)):
+        #The first row defaults to 0
+        if i==0:
+            df_in.loc[i,'deltatime']=0
+        #From the second row on, compute the time difference between row i and row i-1
+        else:
+            time1=df_in.loc[i-1,'time']
+            time2=df_in.loc[i,'time']
+            deltatime=time_interval(time1,time2)#time difference, returned in seconds
+            df_in.loc[i,'deltatime']=deltatime
+    return df_in
+
+
+def time_interval(time1,time2):
+    """
+    Calculate the time interval between two times,
+    return the seconds
+    """
+    deltatime=time2-time1
+    return deltatime.total_seconds()#total_seconds() also covers gaps longer than one day; .seconds would wrap at 86400
+
+
+def cal_deltasoc(df_bms,start_time,end_time):
+    '''Given a start time and an end time, return the SOC drop in between; delta_soc*1 is used directly as the unrecorded odometer distance.'''
+    time_condition=(df_bms['time']>start_time)&(df_bms['time']<end_time)
+    df_bms_sub=df_bms.loc[time_condition,:].copy()
+    if len(df_bms_sub)>=2:
+        
+        df_bms_head=df_bms_sub.head(1).copy()#首行
+        df_bms_startsoc=df_bms_head['bmspacksoc'].values[0]
+        df_bms_tail=df_bms_sub.tail(1).copy()#尾行
+        df_bms_endsoc=df_bms_tail['bmspacksoc'].values[0]
+        delta_soc=df_bms_startsoc-df_bms_endsoc
+        
+        if delta_soc>0:
+            #如果df_bms出现时间不连续,则先计算deltasoc,deltasoc每变化1,续驶里程增加1,
+            unrecorded_odo=delta_soc*1
+            #print('From '+str(start_time)+' to  '+str(end_time)+' soc decrease:  '+str(delta_soc))
+        else:
+            unrecorded_odo=0#如果deltasoc不大于0,说明在充电,或者静置不动    
+    #如果行数少于2,无法计算
+    else:
+        unrecorded_odo=0
+    return unrecorded_odo
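
A hedged end-to-end sketch for this module: load a raw BMS export and build the drive timetable. The CSV path is an assumption, and its columns must match the Chinese headers expected by read_df_bms above.

from ProcessDfBms import read_df_bms, get_bms_drive_timetable

df_bms = read_df_bms(r'data/bms_export.csv')                  # hypothetical export from the platform
timetable = get_bms_drive_timetable(df_bms, battery_mode=0)   # 0: positive current means discharge
print(timetable[['drive_start_time', 'drive_end_time', 'drive_start_soc', 'drive_end_soc']])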

+ 139 - 0
LIB/MIDDLE/odo/ProcessDfGps.py

@@ -0,0 +1,139 @@
+import pandas as pd
+import numpy as np
+from datetime import datetime
+from datetime import timedelta
+from ProcessDfBms import *
+from math import radians, cos, sin, asin, sqrt
+
+def cal_unrecorded_gps(df_in,df_bms):
+    '''筛选出现gps时间断点的数据,用df_bms数据补齐,df_in为df_gps表格。'''
+    #未记录到的odo总和
+    accum_unrecorded_odo=0
+
+    #Set the loss-detection conditions and collect the indices of rows where the signal was lost
+    condition1=df_in['deltatime']>60*3#time gap longer than 3 minutes: the data stream dropped out
+    condition2=(df_in['deltatime']>90*1)&(df_in['distance']>1000)#time gap longer than 90 s with a distance gap over 1000 m: also treated as a dropout
+    signal_start_list=df_in.loc[condition1|condition2,:].index.to_list()#rows where a signal loss starts
+    #If row 0 is itself a loss row, drop it, because the calculation below needs row index-1
+    if 0 in signal_start_list:
+        signal_start_list.remove(0)
+    #Collect the start/end row pairs of every GPS signal gap, then estimate the distance lost in each gap.
+    if len(signal_start_list)>0:
+        signal_end_list=[num-1 for num in signal_start_list]#row just before each loss row (index 0 was already removed above)
+        pick_gps_list=[0]+signal_start_list+signal_end_list+[len(df_in)-1]#first row + last row + gap-start rows + gap-end rows
+        pick_gps_list=sorted(pick_gps_list)#re-sort
+
+        #For each gap, estimate the unrecorded odometer distance from the SOC drop over the same interval
+        for start_time_index,end_time_index in zip(signal_start_list,signal_end_list):
+            last_end_time=df_in.loc[end_time_index,'time']
+            this_start_time=df_in.loc[start_time_index,'time']
+            #print('gps signal loss from: '+str(last_end_time)+'-to-'+str(this_start_time))
+            unrecorded_odo=cal_deltasoc(df_bms,last_end_time,this_start_time)#estimate via the SOC-based helper
+            accum_unrecorded_odo+=unrecorded_odo
+        #print('accum_unrecorded_odo:'+str(accum_unrecorded_odo))
+    
+    return accum_unrecorded_odo
+
+
+def df_add_avgspeed(df_in):
+    '''Add a columns:avgspeed ,input df must have deltatime,distance column.'''
+    for i in range(len(df_in)):
+        #首行默认为0
+        if i==0:
+            df_in.loc[i,'avgspeed']=0
+        #从第二行开始,计算平均速度
+        else:
+            deltatime=df_in.loc[i,'deltatime']
+            distance=df_in.loc[i,'distance']
+            avgspeed=(distance/1000)/(deltatime/3600)
+            df_in.loc[i,'avgspeed']=avgspeed
+    return df_in
+
+
+def read_df_gps(path):
+    df_gps=pd.read_csv(path, encoding='gbk')#编码方式gbk
+    #重置表头
+    df_gps.rename(columns = {"时间戳": "time", "纬度":"lat", "经度":"lng", 
+                             "卫星数":"sat_num", "海拔m":"height","速度[km/h]":"speed"},  inplace=True)
+    #时间格式调整
+    df_gps['time']=pd.to_datetime(df_gps['time'])
+    #对gps进行清洗
+    df_gps=df_add_distance(df_gps)#增加distance列
+    condition=df_gps['distance']<20000#删除GPS漂移过远的点,可能为GPS错误值
+    df_gps=df_gps.loc[condition,:].copy()#删除condition中,avgspd过大的部分,很可能伴随着GPS的漂移。
+    df_gps=df_gps.reset_index(drop=True)#重置index
+    #进行预处理
+    df_gps=df_add_distance(df_gps)#增加distance列,再算一次distance
+    df_gps=df_add_deltatime(df_gps)#增加deltatime列
+    df_gps=df_add_avgspeed(df_gps)#增加avgspeed列
+
+    #df_gps.to_excel('df_gps.xlsx',sheet_name='Sheet1')
+    return df_gps
+
+def preprocess_Df_Gps(df_gps):
+    '''对Df_Gps进行预处理'''
+    #重置表头
+    df_gps.rename(columns = {"时间戳": "time", "纬度":"lat", "经度":"lng", 
+                             "卫星数":"sat_num", "海拔m":"height","速度[km/h]":"speed"},  inplace=True)
+    #删除含有空数据的行
+    df_gps=df_gps.dropna(subset=['time','lat','lng'])
+    #删除时间重复的行,保留第一次出现的行
+    df_gps=df_gps.drop_duplicates(subset=['time'],keep='first')
+    #时间格式调整
+    df_gps['time']=pd.to_datetime(df_gps['time'])
+    
+    #对gps进行清洗
+    df_gps=df_add_distance(df_gps)#增加distance列
+    condition=df_gps['distance']<20000#删除GPS漂移过远的点,可能为GPS错误值
+    df_gps=df_gps.loc[condition,:].copy()#删除condition中,avgspd过大的部分,很可能伴随着GPS的漂移。
+    df_gps=df_gps.reset_index(drop=True)#重置index
+    #进行预处理
+    df_gps=df_add_distance(df_gps)#增加distance列,再算一次distance
+    df_gps=df_add_deltatime(df_gps)#增加deltatime列
+    df_gps=df_gps.loc[df_gps['deltatime']>0.01,:].copy()#drop rows with deltatime≈0: identical timestamps would make the average speed undefined
+    df_gps=df_add_avgspeed(df_gps)#增加avgspeed列
+
+    #df_gps.to_excel('df_gps.xlsx',sheet_name='Sheet1')
+    return df_gps
+
+
+def df_add_distance(df_in):
+    '''Add a columns:distance,input df must have lng,lat columns.'''
+    for i in range(len(df_in)):
+        #首行默认为0
+        if i==0:
+            df_in.loc[i,'distance']=0
+        #从第二行开始,计算i行到i-1行,GPS距离之差
+        else:
+            lon1=df_in.loc[i-1,'lng']
+            lat1=df_in.loc[i-1,'lat']
+            lon2=df_in.loc[i,'lng']
+            lat2=df_in.loc[i,'lat']
+            distance=haversine(lon1,lat1,lon2,lat2)#haversine公式计算距离差
+            df_in.loc[i,'distance']=distance    
+    return df_in
+
+
+def haversine(lon1, lat1, lon2, lat2):
+    """
+    Calculate the great circle distance between two points 
+    on the earth (specified in decimal degrees)
+    """
+    # 将十进制度数转化为弧度
+    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
+    # haversine公式
+    dlon = lon2 - lon1 
+    dlat = lat2 - lat1 
+    a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
+    c = 2 * asin(sqrt(a)) 
+    r = 6371 # 地球平均半径,单位为公里
+    return c * r * 1000
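
A quick sanity check of the haversine helper above, assuming this module is imported; the coordinates are arbitrary, and 0.01° of latitude should come out near 1.11 km.

from ProcessDfGps import haversine

d = haversine(116.40, 39.90, 116.40, 39.91)   # same longitude, 0.01 degrees apart in latitude
print(round(d, 1), 'm')                       # ≈ 1111.9 m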

+ 293 - 0
LIB/MIDDLE/odo/UpdtFct.py

@@ -0,0 +1,293 @@
+import pandas as pd
+import pymysql
+from sqlalchemy import create_engine
+import datetime
+
+#建立引擎
+engine = create_engine(str(r"mysql+mysqldb://%s:" + '%s' + "@%s/%s") % ('root', 'pengmin', 'localhost', 'qixiangdb'))
+
+conn_qx = pymysql.connect(
+        host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com',
+        user='qx_cas',
+        password='Qx@123456',#Qx@123456
+        database='qx_cas',
+        charset='utf8'
+    )
+
+conn_local = pymysql.connect(
+        host='localhost',
+        user='root',
+        password='pengmin',
+        database='qixiangdb',
+        charset='utf8'
+    )
+
+def getNextSoc(start_soc):
+    '''输入当前的soc,寻找目标soc函数'''
+    if start_soc>80:
+        next_soc=80
+    elif start_soc>60:
+        next_soc=60
+    elif start_soc>40:
+        next_soc=40
+    elif start_soc>20:
+        next_soc=20
+    else:
+        next_soc=1
+    return next_soc
+
+def updtSnFct(sn_factor_df,end_soc,delta_range,range_soc):
+    '''Given the soc level just crossed (end_soc), the driven distance and the km-per-soc value, return the updated factor dataframe.
+    sn_factor_df is a dataframe; delta_range is in km, range_soc in km per unit of soc.'''
+    if end_soc==80:
+        updtFctByCol(sn_factor_df,'a0',delta_range,range_soc)
+    elif end_soc==60:
+        updtFctByCol(sn_factor_df,'a1',delta_range,range_soc)
+    elif end_soc==40:
+        updtFctByCol(sn_factor_df,'a2',delta_range,range_soc)
+    elif end_soc==20:
+        updtFctByCol(sn_factor_df,'a3',delta_range,range_soc)
+    elif end_soc<20:
+        updtFctByCol(sn_factor_df,'a4',delta_range,range_soc)
+    return sn_factor_df
+
+def updtFctByCol(sn_factor_df,colmun_name,delta_range,range_soc):
+    '''Update the factor in the specified column. sn_factor_df is a dataframe; the new factor is written to row 1.
+    delta_range is in km, range_soc in km per unit of soc; the driven distance is weighted against the debounce_range window (200 km here).'''
+    range_soc_old=sn_factor_df.loc[0,colmun_name]#读取第0行的老factor
+    debounce_range=200#更新权重
+    new_factor=range_soc*((delta_range)/debounce_range)+range_soc_old*(1-(delta_range)/debounce_range)
+    #在第1行,存储新的factor
+    sn_factor_df.loc[1,colmun_name]=new_factor
+    return sn_factor_df
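
A worked example of the blending rule above (numbers are illustrative): with a 200 km window, a 50 km segment moves the stored factor only a quarter of the way toward the newly observed km-per-soc value.

range_soc_old  = 5.0    # factor currently stored in the table, km per unit soc
range_soc      = 4.0    # km per unit soc observed in the segment just processed
delta_range    = 50.0   # km actually driven in that segment
debounce_range = 200    # blending window used by updtFctByCol

new_factor = range_soc*(delta_range/debounce_range) + range_soc_old*(1 - delta_range/debounce_range)
print(new_factor)       # 4.75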
+
+def updtTodayFct(factor_input,sn_day_df):
+    '''更新今日的Factor***'''
+    sn_factor_df_last=factor_input
+    start_soc=sn_day_df.loc[0,'soc']
+    next_soc=getNextSoc(start_soc)
+    start_range=sn_day_df.loc[0,'vehodo']
+    sn=sn_day_df.loc[0,'name']
+
+    for index in range(len(sn_day_df)-1):
+    #寻找分割点,
+        index_soc=sn_day_df.loc[index,'soc']#当前行soc
+        next_index_soc=sn_day_df.loc[index+1,'soc']#下一行soc
+
+        if (index_soc>=next_soc)&(next_index_soc<next_soc):#当前行高,下一行低
+            delta_soc_tonext=start_soc-next_soc#两个距离点的soc差,单位为%
+            delta_range_tonext=sn_day_df.loc[index,'vehodo']-start_range#两个时间点的距离差,单位为m
+            delta_range_tonext_km=delta_range_tonext/1000#两个时间点的距离差,单位为km
+            range_soc_tonext=(delta_range_tonext/1000)/delta_soc_tonext#单位soc可行驶的公里数
+            print(sn+'start_soc: '+str(start_soc),'next_soc: '+str(next_soc),'delta_vehodo; '+str(round(delta_range_tonext_km,3))
+            +'km'+' range_soc:'+str(round(range_soc_tonext,3)))
+
+            if (delta_range_tonext_km)>1:
+                sn_factor_df_last=updtSnFct(sn_factor_df_last,next_soc,delta_range_tonext_km,range_soc_tonext)
+            
+            start_soc=next_index_soc#变更开始soc
+            next_soc=getNextSoc(start_soc)#变更结束soc
+            start_range=sn_day_df.loc[index+1,'vehodo']#变更开始里程    
+
+    return sn_factor_df_last
+
+def snDayDfPreProcess(sn_day_df):
+    '''Pre-process a one-day slice: decide whether each row is in drive mode and accumulate the distance driven while in drive mode.
+    Adds the delta_soc, drive_flg and vehodo columns.'''
+    sn_day_df=sn_day_df.reset_index(drop=True)#重置index
+    #增加列,计算delta_soc
+    for index in range(len(sn_day_df)):
+        if index==0:
+            sn_day_df.loc[index,'delta_soc']=0
+        else:
+            sn_day_df.loc[index,'delta_soc']=sn_day_df.loc[index,'soc']-sn_day_df.loc[index-1,'soc']
+    #增加列,判断是否在drive状态
+    drive_flg=False
+    accum_distance=0
+    for index in range(len(sn_day_df)):
+        if index==0:
+            sn_day_df.loc[index,'drive_flg']=drive_flg#first row defaults to not driving
+            sn_day_df.loc[index,'vehodo']=0
+        else:
+            if (sn_day_df.loc[index,'delta_soc']<-0.1)|\
+                ((sn_day_df.loc[index,'delta_soc']<=0)&(sn_day_df.loc[index,'distance']>500)):#soc处于下降状态,说明在drive
+                drive_flg=True#置true
+            elif sn_day_df.loc[index,'delta_soc']>0.1:#soc处于上升状态,说明不在drive
+                drive_flg=False#置false
+                accum_distance=0#清零
+            sn_day_df.loc[index,'drive_flg']=drive_flg
+            accum_distance+=sn_day_df.loc[index,'distance']#对行驶里程进行累加
+            sn_day_df.loc[index,'vehodo']=accum_distance
+    #筛选所有的drive信息行
+    sn_day_drive_df=sn_day_df.loc[sn_day_df['drive_flg']==True,:]
+    sn_day_drive_df=sn_day_drive_df.reset_index(drop=True)#重置index
+    
+    return sn_day_drive_df 
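
A small hand-built example of the pre-processing above; values are made up, and the sketch assumes snDayDfPreProcess (defined above) is already in scope, since importing the whole module would also open its database connections. SOC falls while distance accumulates, then a charge resets the drive flag.

import pandas as pd

sn_day_df = pd.DataFrame({
    'name':     ['demo_sn'] * 5,                  # hypothetical serial number
    'soc':      [80.0, 79.5, 79.0, 79.0, 81.0],
    'distance': [0, 600, 700, 100, 0],            # metres since the previous row
})
drive_df = snDayDfPreProcess(sn_day_df)
print(drive_df[['soc', 'delta_soc', 'drive_flg', 'vehodo']])   # only the drive rows remain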
+
+def updtAllSnFct(start_date,end_date):
+    '''计算开始时间到结束时间的,所有sn的factor'''
+    start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')#start date
+    end_date_datetime=datetime.datetime.strptime(end_date,'%Y-%m-%d')#end date
+    delta_day=(end_date_datetime-start_date_datetime).days#number of days in between
+    i=1
+    while i<=delta_day:
+        end_date=(start_date_datetime+datetime.timedelta(days=i)).strftime("%Y-%m-%d")
+        updtAllSnTodayFct(start_date,end_date)#调用函数
+        print('update all sn factor from '+start_date+" to "+end_date)
+        start_date=end_date
+        i+=1#自加
+
+def updtAllSnTodayFct(start_date,end_date):
+    '''Update today's factor for every sn; start_date and end_date are one day apart. This could still be optimized.'''
+    start_date_str="'"+start_date+"'"
+    end_date_str="'"+end_date+"'"
+    sql_cmd="select * from drive_info where time between "+start_date_str+" and "+end_date_str+" and distance!=0;"
+    range_soc_df = pd.read_sql(sql_cmd, conn_qx)#使用read_sql方法查询qx数据库
+
+    #筛选出所有当日数据之后,筛选当日有更新的sn
+    today_sn_list=range_soc_df['name'].unique().tolist()#optionally slice here (e.g. [:100]) to update only part of the fleet at a time
+    #建立空的dataframe,用于承接所有更新的factor信息
+    today_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])
+
+    for sn in today_sn_list:
+        #寻找factor_df,里面是否有sn号,如果没有sn对应信息,则新增信息。
+        sn_str="'"+sn+"'"
+        sql_cmd2="select sn,date,a0,a1,a2,a3,a4 from tb_sn_factor where date<"+start_date_str+" and sn="+sn_str
+        #此处可以限定每次查询的数量,例如不高于5行
+        factor_df=pd.read_sql(sql_cmd2, conn_local)#使用read_sql方法查询local数据库
+
+        #按照sn号和日期进行去重,避免运行时重复产生factor数据,保留第一次出现的行。
+        factor_df=factor_df.drop_duplicates(subset=['sn','date'],keep='first')
+
+        if len(factor_df)==0:
+            #如果没有搜索到factor历史数据,则声明一个新的进行初始化
+            start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')
+            yesterday=(start_date_datetime+datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
+            #为sn申请一个新的factor,初始值为1
+            factor_df=pd.DataFrame({'sn':sn,'date':yesterday,'a0':[1],'a1':[1],'a2':[1],'a3':[1],'a4':[1]})
+        sn_factor_df=factor_df.loc[factor_df['sn']==sn,:]#筛选sn对应的factor
+        sn_factor_df=sn_factor_df.sort_values(by='date',ascending=True)#sort by date (ascending expects a bool)
+
+        sn_factor_df_last=sn_factor_df.tail(1).copy()#寻找最后一行,代表最近日期
+        sn_factor_df_last=sn_factor_df_last.append(sn_factor_df_last)#新增加一行,用于存储新的factor
+        sn_factor_df_last=sn_factor_df_last.reset_index(drop=True)#重置index
+        sn_factor_df_last.loc[1,'date']=start_date#更改后一行的date为当前日期
+        #筛选对应车辆的信息
+        condition_sn=(range_soc_df['name']==sn)
+        sn_day_df=range_soc_df.loc[condition_sn,:].copy()
+        sn_day_df=sn_day_df.reset_index(drop=True)
+        #使用updtTodayFct函数更新今天的factor
+        if len(sn_day_df)>=2:
+            #使用process函数,进行预处理
+            sn_day_df=snDayDfPreProcess(sn_day_df)#预处理函数
+            if len(sn_day_df)>=2:
+                sn_factor_df_new=updtTodayFct(sn_factor_df_last,sn_day_df)#
+                today_sn_fct_df=today_sn_fct_df.append(sn_factor_df_new.loc[1,:])#筛选第一行,进行拼接,最后写入到数据库中
+    
+    #将today_sn_fct_df写入到数据库中,今天所有factor更新的系数,一次写入。
+    if len(today_sn_fct_df)>=1:
+        today_sn_fct_df.to_sql('tb_sn_factor',con=engine,chunksize=10000,if_exists='append',index=False)
+
+def updtOneSnFct(sn,start_date,end_date):
+    '''计算开始时间到结束时间的,一个sn的所有factor'''
+    start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')#start date
+    end_date_datetime=datetime.datetime.strptime(end_date,'%Y-%m-%d')#end date
+    delta_day=(end_date_datetime-start_date_datetime).days#number of days in between
+    i=1
+    while i<=delta_day:
+        end_date=(start_date_datetime+datetime.timedelta(days=i)).strftime("%Y-%m-%d")
+        updtOneSnTodayFct(sn,start_date,end_date)#调用函数
+        print('update one sn factor from '+start_date+" to "+end_date)
+        start_date=end_date
+        i+=1#自加
+
+def updtOneSnTodayFct(sn,start_date,end_date):
+    start_date_str="'"+start_date+"'"
+    end_date_str="'"+end_date+"'"
+    sn_str="'"+sn+"'"
+    sql_cmd="select * from drive_info where time between "+start_date_str+" and "+end_date_str+\
+    " and distance!=0 and name="+sn_str
+    range_soc_df = pd.read_sql(sql_cmd, conn_qx)#使用read_sql方法查询qx数据库
+
+    if len(range_soc_df)>0:
+        #筛选出所有当日数据之后,筛选当日有更新的sn
+        today_sn_list=range_soc_df['name'].unique().tolist()
+        #建立空的dataframe,用于承接所有更新的factor信息
+        today_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])
+
+        for sn in today_sn_list:
+            #寻找factor_df,里面是否有sn号,如果没有sn对应信息,则新增信息。
+            sn_str="'"+sn+"'"
+            sql_cmd2="select sn,date,a0,a1,a2,a3,a4 from tb_sn_factor where date<"+start_date_str+" and sn="+sn_str
+            factor_df=pd.read_sql(sql_cmd2, conn_local)#使用read_sql方法查询local数据库
+
+            #按照sn号和日期进行去重,避免运行时重复产生factor数据,保留第一次出现的行。
+            factor_df=factor_df.drop_duplicates(subset=['sn','date'],keep='first')
+
+            if len(factor_df)==0:
+                #如果没有搜索到factor历史数据,则声明一个新的进行初始化
+                start_date_datetime=datetime.datetime.strptime(start_date,'%Y-%m-%d')
+                yesterday=(start_date_datetime+datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
+                factor_df=pd.DataFrame({'sn':sn,'date':yesterday,'a0':[1],'a1':[1],'a2':[1],'a3':[1],'a4':[1]})
+                today_sn_fct_df=today_sn_fct_df.append(factor_df.loc[0,:])#将初始化的行记录到数据库
+
+            sn_factor_df=factor_df.loc[factor_df['sn']==sn,:]#筛选sn对应的factor
+            
+            sn_factor_df=sn_factor_df.sort_values(by='date',ascending=True)#sort by date (ascending expects a bool)
+            sn_factor_df_last=sn_factor_df.tail(1).copy()#寻找最后一行,代表最近日期
+            sn_factor_df_last=sn_factor_df_last.append(sn_factor_df_last)#新增加一行,用于存储新的factor
+            sn_factor_df_last=sn_factor_df_last.reset_index(drop=True)#重置index
+            sn_factor_df_last.loc[1,'date']=start_date#更改后一行的date为当前日期
+            #筛选对应车辆的信息
+            condition_sn=(range_soc_df['name']==sn)
+            sn_day_df=range_soc_df.loc[condition_sn,:].copy()
+            sn_day_df=sn_day_df.reset_index(drop=True)
+            #使用updtTodayFct函数更新今天的factor
+            if len(sn_day_df)>=2:
+                #使用process函数,进行预处理
+                sn_day_df=snDayDfPreProcess(sn_day_df)#pre-process the day slice first
+                if len(sn_day_df)>=2:
+                    sn_factor_df_new=updtTodayFct(sn_factor_df_last,sn_day_df)#
+                    today_sn_fct_df=today_sn_fct_df.append(sn_factor_df_new.loc[1,:])#筛选第一行,进行拼接,最后写入到数据库中
+        
+        # #将today_sn_fct_df写入到数据库中
+        if len(today_sn_fct_df)>=1:
+            today_sn_fct_df.to_sql('tb_sn_factor',con=engine,chunksize=10000,if_exists='append',index=False)
+            # print(sn+' factor will be update in table tb_sn_factor!')
+        return today_sn_fct_df
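
As an aside, the queries above are assembled by string concatenation; a hedged sketch of the same per-sn query with bound parameters instead, assuming the same drive_info schema and that conn_qx, sn and the date strings are in scope as above.

import pandas as pd

sql_cmd = ("select * from drive_info "
           "where time between %s and %s and distance != 0 and name = %s")
range_soc_df = pd.read_sql(sql_cmd, conn_qx, params=[start_date, end_date, sn])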
+
+
+
+
+
+# def updtASnTodayFct(start_date,end_date,today_sn_list):
+
+#     sql_cmd="select * from qixiang_test where time>='"+start_date+"' and time<='"+end_date+"'"
+#     range_soc_df = pd.read_sql(sql_cmd, conn)#使用read_sql方法查询数据库
+
+#     sql_cmd2="select sn,date,a0,a1,a2,a3,a4 from tb_sn_factor where date<'"+start_date+"'"
+#     factor_df=pd.read_sql(sql_cmd2, conn)#使用read_sql方法查询数据库
+
+#     #筛选出所有当日数据之后,筛选当日有更新的sn
+#     # today_sn_list=range_soc_df['sn'].unique().tolist()
+#     # today_sn_list=today_sn_list[:10]#更新若干个
+#     #建立空的dataframe,用于承接所有更新的factor信息
+#     today_sn_fct_df=pd.DataFrame([],columns=['sn','date','a0','a1','a2','a3','a4'])
+
+#     for sn in today_sn_list:
+#         sn_factor_df=factor_df.loc[factor_df['sn']==sn,:]#筛选sn对应的factor
+#         sn_factor_df=sn_factor_df.sort_values(by='date',ascending='True')#按照日期排序
+#         sn_factor_df_last=sn_factor_df.tail(1).copy()#寻找最后一行,代表最近日期
+#         sn_factor_df_last=sn_factor_df_last.append(sn_factor_df_last)#新增加一行,用于存储新的factor
+#         sn_factor_df_last=sn_factor_df_last.reset_index(drop=True)#重置index
+#         sn_factor_df_last.loc[1,'date']=start_date#更改后一行的date为当前日期
+#         #筛选对应车辆的信息
+#         condition_sn=(range_soc_df['sn']==sn)
+#         sn_day_df=range_soc_df.loc[condition_sn,:].copy()
+#         sn_day_df=sn_day_df.reset_index(drop=True)
+#         #使用updtTodayFct函数更新今天的factor
+#         sn_factor_df_new=updtTodayFct(sn_factor_df_last,sn_day_df)
+#         today_sn_fct_df=today_sn_fct_df.append(sn_factor_df_new.loc[1,:])#筛选第一行,进行拼接,最后写入到数据库中
+    
+#     #将today_sn_fct_df写入到数据库中
+#     today_sn_fct_df.to_sql('tb_sn_factor',con=engine,chunksize=10000,if_exists='append',index=False)

+ 28 - 0
LIB/MIDDLE/odo/UpdtFct_Main.py

@@ -0,0 +1,28 @@
+import pandas as pd
+import pymysql
+from sqlalchemy import create_engine
+import datetime
+from UpdtFct import *
+
+
+conn_qx = pymysql.connect(
+        host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com',
+        user='qx_cas',
+        password='Qx@123456',#Qx@123456
+        database='qx_cas',
+        charset='utf8'
+    )
+
+conn_local = pymysql.connect(
+        host='localhost',
+        user='root',
+        password='pengmin',
+        database='qixiangdb',
+        charset='utf8'
+    )
+
+#指定开始时间,结束时间,更新所有sn的factor
+start_date="2021-07-18"
+end_date="2021-08-01"
+
+updtAllSnFct(start_date,end_date)
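
To backfill a single battery instead of the whole fleet, the same script can call updtOneSnFct; a one-line sketch with a made-up serial number.

updtOneSnFct('demo_sn_0001', start_date, end_date)   # hypothetical sn; dates as configured above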

+ 28 - 0
LIB/MIDDLE/算法类别(模板)/算法名/V_1_0_0/core_algorithm.py

@@ -0,0 +1,28 @@
+import other_algorithm
+
+class Algo:
+
+     def __init__(self):
+          pass
+
+     # 算法内部调用的函数
+     def _fun1(self):
+          pass
+
+     # 算法内部调用的函数
+     def _fun2(self):
+          pass
+
+     # 算法对外提供的函数
+     def core_algorithm(self, data1, data2, param1, param2, ....):
+
+          #核心算法逻辑
+          _fun1();
+          _fun2();
+          data3 = other_algorithm(data1);
+          res1 = f(data1, data2, param1,...)
+          res2 = f(data1, data3, param2,...)
+
+          return [res1, res2]
+
+

+ 28 - 0
LIB/MIDDLE/算法类别(模板)/算法名/V_1_0_1/core_algorithm.py

@@ -0,0 +1,28 @@
+import other_algorithm
+
+class Algo:
+
+     def __init__(self):
+          pass
+
+     # 算法内部调用的函数
+     def _fun1(self):
+          pass
+
+     # 算法内部调用的函数
+     def _fun2(self):
+          pass
+
+     # 算法对外提供的函数
+     def core_algorithm(self, data1, data2, param1, param2, ....):
+
+          #核心算法逻辑
+          _fun1();
+          _fun2();
+          data3 = other_algorithm(data1);
+          res1 = f(data1, data2, param1,...)
+          res2 = f(data1, data3, param2,...)
+          
+          return [res1, res2]
+
+

+ 38 - 0
LIB/MIDDLE/算法类别(模板)/算法名/main.py

@@ -0,0 +1,38 @@
+import get_data
+import get_data_by_sql
+import get_data_from_other_algorithm
+import data_process
+import core_algorithm
+
+# 数据库连接参数
+conn = connect(host, ......)
+# 准备算法输入参数
+parameter1 = * 
+parameter2 = *
+
+# 多次调用核心算法时,将循环写在外面
+for (i=1:n){
+     
+     # 获取数据
+     data1 = get_data(sn[i], start_time, end_time, ....) # 函数取数
+     data2 = get_data_by_sql(sn[i], start_time, end_time, ....) # sql 语句直接数据库取数
+     data3 = get_data_from_other_algorithm(data1, parameter1, ....)  # 调用其他人的算法得到数据
+
+     # 通用数据预处理 (可选,由算法说明文档说明算法输入数据是否需要预处理)
+     data1 = data_process(data1)
+     data2 = data_process(data2)
+     
+     # 调用核心算法
+     [res1, res2] = core_algorithm(data1, data2, data3, parameter1, parameter2, ....)
+
+     # 使用结果
+     res1 = res1.append(res)
+     res1.to_csv(...)
+     res1.to_sql(...)
+}
+
+# 批量使用结果
+res.to_csv(...)
+res1.to_sql(...)
+
+
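For orientation, a minimal runnable Python rendering of the loop in this template; every helper below is a stub standing in for the template's placeholders, not a real project API.

import pandas as pd

def get_data(sn, start_time, end_time):        return pd.DataFrame({'sn': [sn], 'v': [1.0]})
def get_data_by_sql(sn, start_time, end_time): return pd.DataFrame({'sn': [sn], 'i': [0.5]})
def data_process(df):                          return df.dropna()
def core_algorithm(d1, d2, p1, p2):            return d1.assign(score=p1), d2.assign(score=p2)

parameter1, parameter2 = 1.0, 2.0
res_all = pd.DataFrame()
for sn in ['sn_001', 'sn_002']:                          # hypothetical serial numbers
    data1 = data_process(get_data(sn, None, None))
    data2 = data_process(get_data_by_sql(sn, None, None))
    res1, res2 = core_algorithm(data1, data2, parameter1, parameter2)
    res_all = pd.concat([res_all, res1], ignore_index=True)
res_all.to_csv('result.csv', index=False)                # batch output, as in the template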

+ 28 - 0
LIB/MIDDLE/算法类别(模板)/算法名2/V_1_0_0/core_algorithm.py

@@ -0,0 +1,28 @@
+import other_algorithm
+
+class Algo:
+
+     def __init__(self):
+          pass
+
+     # 算法内部调用的函数
+     def _fun1(self):
+          pass
+
+     # 算法内部调用的函数
+     def _fun2(self):
+          pass
+
+     # 算法对外提供的函数
+     def core_algorithm(self, data1, data2, param1, param2, ....):
+
+          #核心算法逻辑
+          _fun1();
+          _fun2();
+          data3 = other_algorithm(data1);
+          res1 = f(data1, data2, param1,...)
+          res2 = f(data1, data3, param2,...)
+
+          return [res1, res2]
+
+

+ 28 - 0
LIB/MIDDLE/算法类别(模板)/算法名2/V_1_0_1/core_algorithm.py

@@ -0,0 +1,28 @@
+import other_algorithm
+
+class Algo:
+
+     def __init__(self):
+          pass
+
+     # 算法内部调用的函数
+     def _fun1(self):
+          pass
+
+     # 算法内部调用的函数
+     def _fun2(self):
+          pass
+
+     # 算法对外提供的函数
+     def core_algorithm(self, data1, data2, param1, param2, ....):
+
+          #核心算法逻辑
+          _fun1();
+          _fun2();
+          data3 = other_algorithm(data1);
+          res1 = f(data1, data2, param1,...)
+          res2 = f(data1, data3, param2,...)
+
+          return [res1, res2]
+
+

+ 38 - 0
LIB/MIDDLE/算法类别(模板)/算法名2/main.py

@@ -0,0 +1,38 @@
+import get_data
+import get_data_by_sql
+import get_data_from_other_algorithm
+import data_process
+import core_algorithm
+
+# 数据库连接参数
+conn = connect(host, ......)
+# 准备算法输入参数
+parameter1 = * 
+parameter2 = *
+
+# 多次调用核心算法时,将循环写在外面
+for (i=1:n){
+     
+     # 获取数据
+     data1 = get_data(sn[i], start_time, end_time, ....) # 函数取数
+     data2 = get_data_by_sql(sn[i], start_time, end_time, ....) # sql 语句直接数据库取数
+     data3 = get_data_from_other_algorithm(data1, parameter1, ....)  # 调用其他人的算法得到数据
+
+     # 通用数据预处理 (可选,由算法说明文档说明算法输入数据是否需要预处理)
+     data1 = data_process(data1)
+     data2 = data_process(data2)
+     
+     # 调用核心算法
+     [res1, res2] = core_algorithm(data1, data2, data3, parameter1, parameter2, ....)
+
+     # 使用结果
+     res1 = res1.append(res)
+     res1.to_csv(...)
+     res1.to_sql(...)
+}
+
+# 批量使用结果
+res.to_csv(...)
+res1.to_sql(...)
+
+

+ 39 - 2
demo.ipynb

@@ -116,11 +116,48 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 2,
    "source": [
     "# 生成pydoc 说明文档\r\n",
-    "! python -m pydoc -w LIB\\BACKEND\\Tools.py"
+    "!python -m pydoc -w LIB\\BACKEND\\DataPreProcess.py"
    ],
+   "outputs": [
+    {
+     "output_type": "stream",
+     "name": "stdout",
+     "text": [
+      "problem in LIB\\BACKEND\\DataPreProcess.py - ModuleNotFoundError: No module named 'DBManager'\n"
+     ]
+    }
+   ],
+   "metadata": {}
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "source": [
+    "from LIB.BACKEND import DBManager, Log\r\n",
+    "log = Log.Mylog(log_name='signal_monitor', log_level = 'info')\r\n",
+    "log.set_file_hl(file_name='info.log', log_level='info')\r\n",
+    "log.set_file_hl(file_name='error.log', log_level='error')\r\n",
+    "logger = log.get_logger()\r\n"
+   ],
+   "outputs": [],
+   "metadata": {}
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "source": [
+    "logger.error(\"ttt5\")"
+   ],
+   "outputs": [],
+   "metadata": {}
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "source": [],
    "outputs": [],
    "outputs": [],
    "metadata": {}
    "metadata": {}
   }
   }

+ 28 - 0
demo.py

@@ -0,0 +1,28 @@
+#LIB/MIDDLE/算法名/main.py   该文件调用核心算法
+
+# 准备算法输入参数
+parameter1 = * 
+parameter2 = *
+
+
+# 多次调用核心算法时,将循环写在外面
+for (i=1:n){
+     
+     # 获取数据
+     data1 = get_data(sn[i], start_time, end_time, ....)
+     data2 = get_data_by_sql(sn[i], start_time, end_time, ....)
+
+     # 通用数据预处理 (可选,由算法说明文档说明算法输入数据是否需要预处理)
+     data1 = data_process(data1)
+     data2 = data_process(data2)
+     
+     # 调用核心算法
+     [res1, res2] = core_algorithm(data1, data2, parameter1, parameter2)
+
+     # 使用结果
+     res1 = res1.append(res)
+     res1.to_csv(...)
+}
+
+# 批量使用结果
+res.to_csv(...)

+ 1 - 1
函数说明/DBManager.html

@@ -107,5 +107,5 @@ Data descriptors defined here:<br>
 <font color="#ffffff" face="helvetica, arial"><big><strong>Author</strong></big></font></td></tr>
     
 <tr><td bgcolor="#7799ee"><tt>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</tt></td><td>&nbsp;</td>
-<td width="100%">wlm</td></tr></table>
+<td width="100%">lmstack</td></tr></table>
 </body></html>

+ 1 - 1
函数说明/DataPreProcess.html

@@ -147,5 +147,5 @@ Data descriptors defined here:<br>
 <font color="#ffffff" face="helvetica, arial"><big><strong>Author</strong></big></font></td></tr>
     
 <tr><td bgcolor="#7799ee"><tt>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</tt></td><td>&nbsp;</td>
-<td width="100%">wlm</td></tr></table>
+<td width="100%">lmstack</td></tr></table>
 </body></html>

+ 1 - 1
函数说明/IndexStaByOneCycle.html

@@ -141,5 +141,5 @@ Data descriptors defined here:<br>
 <font color="#ffffff" face="helvetica, arial"><big><strong>Author</strong></big></font></td></tr>
     
 <tr><td bgcolor="#7799ee"><tt>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</tt></td><td>&nbsp;</td>
-<td width="100%">wlm</td></tr></table>
+<td width="100%">lmstack</td></tr></table>
 </body></html>

+ 1 - 1
函数说明/IndexStaByPeriod.html

@@ -111,5 +111,5 @@ Data descriptors defined here:<br>
 <font color="#ffffff" face="helvetica, arial"><big><strong>Author</strong></big></font></td></tr>
     
 <tr><td bgcolor="#7799ee"><tt>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</tt></td><td>&nbsp;</td>
-<td width="100%">wlm</td></tr></table>
+<td width="100%">lmstack</td></tr></table>
 </body></html>

+ 1 - 1
函数说明/Log.html

@@ -76,5 +76,5 @@ Data descriptors defined here:<br>
 <font color="#ffffff" face="helvetica, arial"><big><strong>Author</strong></big></font></td></tr>
     
 <tr><td bgcolor="#7799ee"><tt>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</tt></td><td>&nbsp;</td>
-<td width="100%">wlm</td></tr></table>
+<td width="100%">lmstack</td></tr></table>
 </body></html>

+ 1 - 1
函数说明/Tools.html

@@ -82,5 +82,5 @@ Data descriptors defined here:<br>
 <font color="#ffffff" face="helvetica, arial"><big><strong>Author</strong></big></font></td></tr>
     
 <tr><td bgcolor="#7799ee"><tt>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</tt></td><td>&nbsp;</td>
-<td width="100%">wlm</td></tr></table>
+<td width="100%">lmstack</td></tr></table>
 </body></html>

BIN
数据分析平台手册.doc


BIN
数据分析平台手册.pdf