
update voltage outlier detection

lmstack 3 years ago
commit fe63229adc

+ 3 - 3
LIB/FRONTEND/CellStateEstimation/InterShort/deploy.py

@@ -26,7 +26,7 @@ if __name__ == "__main__":
     
     history_run_flag = False # flag: run on historical data
     
-
+    
     # refresh the SN list
     host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
     port=3306
@@ -100,9 +100,9 @@ if __name__ == "__main__":
             
             logger.info("pid-{} celltype-{} SN: {} START!".format(os.getpid(), celltype, sn))
             
-            DBRead=DBDownload.DBDownload(host, port, db, user, password)
+            DBRead=DBDownload.DBDownload(host, port, db, user, password, 1)
             with DBRead as DBRead:
-                df_soh=DBRead.getdata('time_st','time_sp','sn','method','soh','cellsoh', tablename=tablename, sn=sn)
+                df_soh=DBRead.getdata('time_st','time_sp','sn','method','soh','cellsoh', tablename=tablename, sn=sn, timename='',st='',sp='')
             dbManager = DBManager.DBManager()
             
             # handle historical-data runs

+ 2 - 1
LIB/FRONTEND/CellStateEstimation/InterShort/run.bat

@@ -1,3 +1,4 @@
 cd /d D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\CellStateEstimation\InterShort
 title cal_intershort
-D:\env\py_pro\python.exe D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\CellStateEstimation\InterShort\deploy.py
+D:\env\py_pro\python.exe D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\CellStateEstimation\InterShort\deploy.py
+pause

+ 2 - 2
LIB/FRONTEND/CellStateEstimation/SOH/deploy.py

@@ -114,9 +114,9 @@ if __name__ == "__main__":
                     else:
                         start_time = pd.to_datetime(str(df_first_data_time.loc[df_first_data_time[df_first_data_time['sn']==sn].index, 'first_data_time'].values[0])).strftime("%Y-%m-%d 00:00:00")
             
-            DBRead=DBDownload.DBDownload(host, port, db, user, password)
+            DBRead=DBDownload.DBDownload(host, port, db, user, password, 1)
             with DBRead as DBRead:
-                df_soh=DBRead.getdata('time_st','time_sp','sn','method','soh','cellsoh', tablename=tablename, sn=sn)
+                df_soh=DBRead.getdata('time_st','time_sp','sn','method','soh','cellsoh', tablename=tablename, sn=sn, timename='',st='',sp='')
                 
             df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms','accum'])
             df_bms = df_data['bms']

+ 47 - 0
LIB/FRONTEND/OutlierDetection/VoltOutlier/create_table.py

@@ -0,0 +1,47 @@
+'''
+Define the table schema and create the corresponding table in the database.
+'''
+__author__ = 'lmstack'
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, String, create_engine, Integer, DateTime, BigInteger, FLOAT, TIMESTAMP, func, Text
+from urllib import parse
+Base = declarative_base()
+
+
+class VoltOutlier(Base):
+    __tablename__ = "outlierdetection_volt"
+    __table_args__ = ({'comment': 'voltage outlier detection'})  # table comment
+
+    id = Column(Integer, primary_key=True, autoincrement=True, comment="primary key")
+    add_time = Column(TIMESTAMP(True), server_default=func.now(), comment='record creation time') # creation time
+    update_time = Column(TIMESTAMP(True), nullable=False, server_default=func.now(), onupdate=func.now(), comment='record update time') # update time
+    
+    sn = Column(String(64), comment="sn")
+    time = Column(TIMESTAMP(True), comment="alarm time")
+
+    cellnum = Column(String(64), comment="alarm cell number")
+    value = Column(FLOAT, comment="zscore")
+    type = Column(String(64), comment="alarm type")
+
+
+    # def __init__(self, sn, current, time_stamp, pack_state, line_state):
+    #     self.sn = sn
+    #     self.current = current
+    #     self.time_stamp = time_stamp
+    #     self.pack_state = pack_state
+    #     self.line_state = line_state
+
+# Run this file to create the table in the target database
+if __name__ == "__main__":
+    host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
+    port = 3306
+    user = 'qx_cas'
+    password = parse.quote_plus('Qx@123456')
+    database = 'qx_cas'
+    
+    db_engine = create_engine(
+        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
+            user, password, host, port, database
+        ))
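+    # Note: create_all only creates tables that do not already exist; existing tables are left untouched (checkfirst defaults to True).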
+    Base.metadata.create_all(db_engine)

+ 125 - 0
LIB/FRONTEND/OutlierDetection/VoltOutlier/deploy.py

@@ -0,0 +1,125 @@
+
+__author__ = 'lmstack'
+#coding=utf-8
+import os
+import datetime
+import pandas as pd
+from LIB.BACKEND import DBManager, Log
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import time
+import dateutil.relativedelta
+import traceback
+from LIB.MIDDLE.OutlierDetection.VoltOutlier.V_1_0_0 import sta
+from urllib import parse
+import pymysql
+import pdb
+dbManager = DBManager.DBManager()
+if __name__ == "__main__":
+    
+    # time window setup
+    now_time = datetime.datetime.now()
+    pre_time = now_time + dateutil.relativedelta.relativedelta(days=-8) # roughly the last week (8 days back)
+    end_time=datetime.datetime.strftime(now_time,"%Y-%m-%d 00:00:00")
+    start_time=datetime.datetime.strftime(pre_time,"%Y-%m-%d 00:00:00")
+    
+    history_run_flag = False # flag: run on historical data
+    
+    
+    # refresh the SN list
+    host='rm-bp10j10qy42bzy0q7.mysql.rds.aliyuncs.com'
+    port=3306
+    db='qixiang_oss'
+    user='qixiang_oss'
+    password='Qixiang2021'
+    conn = pymysql.connect(host=host, port=port, user=user, password=password, database=db)
+    cursor = conn.cursor()
+    cursor.execute("select sn, imei, add_time from app_device")
+    res = cursor.fetchall()
+    df_sn = pd.DataFrame(res, columns=['sn', 'imei', 'add_time'])
+    df_sn = df_sn.reset_index(drop=True)
+    conn.close()
+    
+    # database configuration
+    host = 'rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
+    port = 3306
+    user = 'qx_cas'
+    password = parse.quote_plus('Qx@123456')
+    database = 'qx_cas'
+
+    db_engine = create_engine(
+        "mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8".format(
+            user, password, host, port, database
+        ))
+    DbSession = sessionmaker(bind=db_engine)
+    
+    # configuration for historical-data runs
+    
+    df_first_data_time = pd.read_sql("select * from bat_first_data_time", db_engine)
+
+    
+    # logging configuration
+    now_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()).replace(":","_")
+    log_path = 'log/' + now_str
+    if not os.path.exists(log_path):
+        os.makedirs(log_path)
+    log = Log.Mylog(log_name='voltoutlier', log_level = 'info')
+    log.set_file_hl(file_name='{}/info.log'.format(log_path), log_level='info', size=1024* 1024 * 100)
+    log.set_file_hl(file_name='{}/error.log'.format(log_path), log_level='error', size=1024* 1024 * 100)
+    logger = log.get_logger()
+
+    logger.info("pid is {}".format(os.getpid()))
+    
+
+
+    # algorithm / read-database parameters (not referenced further in this script)
+    host='rm-bp10j10qy42bzy0q77o.mysql.rds.aliyuncs.com'
+    port=3306
+    db='qx_cas'
+    user='qx_read'
+    password='Qx@123456'
+    tablename='cellStateEstimation_soh'
+
+    for i in range(0, len(df_sn)):
+        try:
+            sn = df_sn.loc[i, 'sn']
+            
+            logger.info("pid-{} SN: {} START!".format(os.getpid(), sn))
+            
+            
+            # handle historical-data runs
+            if (history_run_flag):
+                this_sn = df_first_data_time[df_first_data_time['sn']==sn]
+                if (len(this_sn) == 0):
+                    start_time = pd.to_datetime(str(df_sn.loc[df_sn[df_sn['sn']==sn].index, 'add_time'].values[0])).strftime("%Y-%m-%d 00:00:00")
+                else:
+                    first_data_time = df_first_data_time.loc[df_first_data_time[df_first_data_time['sn']==sn].index, 'first_data_time'].values[0]
+                    if pd.isnull(first_data_time):
+                        start_time = "2018-01-01 00:00:00"
+                    else:
+                        start_time = pd.to_datetime(str(df_first_data_time.loc[df_first_data_time[df_first_data_time['sn']==sn].index, 'first_data_time'].values[0])).strftime("%Y-%m-%d 00:00:00")
+
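+            # pull roughly the last week of BMS data for this SN and keep only the timestamp ('时间戳') and per-cell voltage ('单体电压') columns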
+            df_data = dbManager.get_data(sn=sn, start_time=start_time, end_time=end_time, data_groups=['bms'])
+            df_bms = df_data['bms']
+            volt_column = [x for x in df_bms.columns if '单体电压' in x]
+            columns = ['时间戳']
+            columns.extend(volt_column)
+            
+            df_ori = df_bms[columns]
+            df_ori.rename(columns = {'时间戳':'time'}, inplace=True)
+            df = df_ori.drop_duplicates(subset=['time']).reset_index(drop=True) # drop rows with duplicate timestamps
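+            # sliding-window statistics from the sta module: cal_voltdiff_uniform works on cell-voltage differences, cal_volt_uniform on the raw cell voltages; per the table schema, the reported value is a z-score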
+            df_result_1,time_list_1 = sta.cal_voltdiff_uniform(df,volt_column, window=50, step=10, window2=5, step2=3)
+            df_result_2,time_list_2 = sta.cal_volt_uniform(df,volt_column, window=50, step=10)
+
+            df_result_1['time'] = time_list_1
+            df_result_2['time'] = time_list_2
+            
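+            # instorage is expected to reshape both result sets into rows matching the outlierdetection_volt table (sn, time, cellnum, value, type)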
+            df_all_result = sta.instorage(sn, df_result_1, df_result_2)
+
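+            # append-only write; the table defines no unique key, so re-running the same time window will insert duplicate rows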
+            if not df_all_result.empty:
+                df_all_result.to_sql("outlierdetection_volt",con=db_engine, if_exists="append",index=False)
+            logger.info("pid-{} SN: {} DONE!".format(os.getpid(), sn))
+        except:
+            logger.error(traceback.format_exc())
+            logger.error(u"{} :{},{} task run error\n".format(sn,start_time,end_time), exc_info=True)
+

+ 36 - 0
LIB/FRONTEND/OutlierDetection/VoltOutlier/main.ipynb

File diff suppressed because it is too large


+ 4 - 0
LIB/FRONTEND/OutlierDetection/VoltOutlier/run.bat

@@ -0,0 +1,4 @@
+cd /d D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\OutlierDetection\VoltOutlier
+title cal_outliervolt
+D:\env\py_pro\python.exe D:\deploy\python_platform\data_analyze_platform\LIB\FRONTEND\OutlierDetection\VoltOutlier\deploy.py
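+rem pause keeps the window open after the script exits so output and errors remain visible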
+pause

+ 2 - 2
LIB/FRONTEND/odo/DailyMileageEstimation/deploy.py

@@ -10,7 +10,7 @@ from sqlalchemy.orm import sessionmaker
 import time, datetime
 import dateutil.relativedelta
 import traceback
-from LIB.MIDDLE.odo.DailyMileageEstimation.V1_0_2 import cal_mileage
+from LIB.MIDDLE.odo.DailyMileageEstimation.V1_0_3 import cal_mileage
 from urllib import parse
 import pymysql
 import pdb
@@ -108,7 +108,7 @@ if __name__ == "__main__":
             #             start_time = pd.to_datetime(str(df_first_data_time.loc[df_first_data_time[df_first_data_time['sn']==sn].index, 'first_data_time'].values[0])).strftime("%Y-%m-%d 00:00:00")
 
             if not df_res.empty:
-                df_res.columns = ['time', 'sn', 'current', 'soc', 'charging_status', 'latitude', 'longitude', 'energy_consump', 'voltage', 'mileage']
+                df_res.columns = ['time', 'sn', 'current', 'voltage', 'soc', 'charging_status', 'latitude', 'longitude', 'energy_consump',  'mileage']
                 df_res.to_sql("odo_dailyMileageEstimation",con=db_engine, if_exists="append",index=False)
             logger.info("pid-{} SN: {} DONE!".format(os.getpid(), sn))
         except:

+ 2 - 2
LIB/FRONTEND/odo/DailyMileageEstimation/deployhistory.py

@@ -10,7 +10,7 @@ from sqlalchemy.orm import sessionmaker
 import time, datetime
 import dateutil.relativedelta
 import traceback
-from LIB.MIDDLE.odo.DailyMileageEstimation.V1_0_2 import cal_mileage
+from LIB.MIDDLE.odo.DailyMileageEstimation.V1_0_3 import cal_mileage
 from urllib import parse
 import pymysql
 import pdb
@@ -112,7 +112,7 @@ if __name__ == "__main__":
                 # df_res.to_csv('Mileage_'+sn+'.csv')
                 
             if not df_res.empty:
-                df_res.columns = ['time', 'sn', 'current', 'soc', 'charging_status', 'latitude', 'longitude', 'energy_consump', 'voltage', 'mileage']
+                df_res.columns = ['time', 'sn', 'current', 'voltage', 'soc', 'charging_status', 'latitude', 'longitude', 'energy_consump',  'mileage']
                 df_res.to_sql("odo_dailyMileageEstimation",con=db_engine, if_exists="append",index=False)
             logger.info("pid-{} SN: {} DONE!".format(os.getpid(), sn))
         except:

Some files were not shown because too many files changed in this diff