import time import datetime import requests import numpy as np import pandas as pd from pathlib import Path from bs4 import BeautifulSoup import pandas as pd import matplotlib.pyplot as plt import matplotlib.colors as mcolors from sqlalchemy import create_engine from sqlalchemy import MetaData, Table, Column, String, Float, Integer, DateTime API_URL = "https://energy-iot.chinatowercom.cn/api/device/device/historyPerformance" API_HEADER = { "accept": "application/json, text/plain, */*", "Accept-Encoding": "gzip, deflate, br, zstd", "accept-language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6", "Connection": "keep-alive", "Content-Length": "170", "content-type": "application/json;charset=UTF-8", "Cookie": "HWWAFSESID=455f2793ca86a3aaf0; HWWAFSESTIME=1734509454212; dc04ed2361044be8a9355f6efb378cf2=WyIyODM4MDM2MDcxIl0", "authorization": "Bearer eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiIl0sInVzZXJfbmFtZSI6IndlYl9tYW5hZ2V8d2FuZ2xlaTQiLCJzY29wZSI6WyJhbGwiXSwiZXhwIjoxNzM0NjYzNDQ5LCJ1c2VySWQiOjI0Mjg1LCJqdGkiOiJhZmZhNmY1My05ZDA4LTQ2ODUtODU3MS05YzA5ODAxMGJjZWYiLCJjbGllbnRfaWQiOiJ3ZWJfbWFuYWdlIn0.q0X4qrgL4wRUTZL8c_5oTIUGW0Lsivxw8pYQ1iMIqLnyJrUeS7IQKNRavMhc4NEdQ9uG6ZgFVHIj80HbzH8DHCxssCPLdv9_TXksI5podU2aU6Vjh6AaN1THFYAE2uflj1saBnQ5_gKiK0-cAcXDeJNSt_u6Cd9hI1ejEUPPzO_hLg-NLzch7yIB-HPvhoDNnl0n5pSYoQpT8XaKT14HezL3VQrLII69Vme38S2dMmmkiAeIyhHQi56kXZ11K45Lu5bHXv6fDg2Mfr9VgVuTleZldiO69BAmG0h1-HqTQuGE39jtGWrrCnFduRZR6VsaOWWJy3qyqUbXWMOli1Yy1g", "Host": "energy-iot.chinatowercom.cn", "Origin": "https://energy-iot.chinatowercom.cn", "Sec-Fetch-Dest": "empty", "Sec-Fetch-Mode": "cors", "Sec-Fetch-Site": "same-origin", "sec-ch-ua": "\"Microsoft Edge\";v=\"131\", \"Chromium\";v=\"131\", \"Not_A Brand\";v=\"24\"", "sec-ch-ua-mobile": "?0", "sec-ch-ua-platform": "Windows", "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0", } SemaMap_adapter = { 'facturer': ('0305113001', 'adapter', False, "厂家"), 'version': ('0305114001', 'adapter', False, "软件版本"), 'model': ('0305115001', 'adapter', False, "型号"), 'status': ('0305116001', 'adapter', False, "开关机状态"), 'temp': ('0305117001', 'adapter', True, "温度"), 'volt_in': ('0305118001', 'adapter', True, "输入电压"), 'curr_in': ('0305119001', 'adapter', True, "输入电流"), 'volt_out': ('0305120001', 'adapter', True, "输出电压"), 'curr_out': ('0305121001', 'adapter', True, "输出电流"), 'power_out': ('0305122001', 'adapter', True, "输出功率"), } semamap_combiner = { 'IMSI': ('0305102001', 'combiner', False, "IMSI"), 'ICCID': ('0305103001', 'combiner', False, "SIM卡ICCID"), 'MSISDN': ('0305104001', 'combiner', False, "MSISDN"), 'dev_type': ('0305101001', 'combiner', False, "系统类型"), 'facturer': ('0305107001', 'combiner', False, "汇流箱厂家"), 'model': ('0305108001', 'combiner', False, "汇流箱型号"), 'ver_software': ('0305105001', 'combiner', False, "软件版本"), 'ver_hardware': ('0305106001', 'combiner', False, "硬件版本"), 'power_total': ('0305109001', 'combiner', True, "系统总功率"), 'energy_total': ('0305110001', 'combiner', True, "系统累计发电量"), 'energy_daily': ('0305111001', 'combiner', True, "系统日发电量"), } SemaMap_meter = { 'mtr_id': ('0305123001', 'meter', False, "电表号"), 'mtr_volt': ('0305124001', 'meter', True, "直流电压"), 'mtr_curr': ('0436101001', 'meter', True, "直流总电流"), 'mtr_power': ('0436102001', 'meter', True, "总有功功率"), 'mtr_energy_total': ('0305125001', 'meter', True, "总有功电能"), 'mtr_energy_daily': ('0305126001', 'meter', True, "日有功电能"), 'mtr_energy_total_T': ('0305127001', 'meter', True, "尖时段总正向有功电能"), 'mtr_energy_total_P': ('0305128001', 'meter', True, "峰时段总正向有功电能"), 'mtr_energy_total_F': ('0305129001', 'meter', True, "平时段总正向有功电能"), 'mtr_energy_total_V': ('0305130001', 'meter', True, "谷时段总正向有功电能"), 'mtr_energy_daily_T': ('0305131001', 'meter', True, "尖时段日正向有功电能"), 'mtr_energy_daily_P': ('0305132001', 'meter', True, "峰时段日正向有功电能"), 'mtr_energy_daily_F': ('0305133001', 'meter', True, "平时段日正向有功电能"), 'mtr_energy_daily_V': ('0305134001', 'meter', True, "谷时段日正向有功电能"), } API_Map = { 'refreshToken': ['https://energy-iot.chinatowercom.cn/api/auth/refreshToken', None], 'search_stn': ['https://energy-iot.chinatowercom.cn/api/device/station/list', None], 'search_dev': ['https://energy-iot.chinatowercom.cn/api/device/device/page', None], 'dev_info': ['https://energy-iot.chinatowercom.cn/api/device/device/devInfo', None], 'perf_real': ['https://energy-iot.chinatowercom.cn/api/device/device/perfReal', None], 'history': ['https://energy-iot.chinatowercom.cn/api/device/device/historyPerformance', [SemaMap_adapter, SemaMap_meter]], 'page': ['https://energy-iot.chinatowercom.cn/api/device/device/page', None], 'station': ['https://energy-iot.chinatowercom.cn/api/device/station/detail/', None], } class Lamina_Data(object): """ 叠光主站数据分析 """ def __init__(self, database="sqlite:///:memory", header=None): """ 初始化 """ self.engine = create_engine(database) metadata = MetaData() metadata.reflect(bind=self.engine) if 'history' not in metadata.tables: history_table = Table( 'history', metadata, Column('dev', String(50)), Column('mid', String(50)), Column('time', DateTime), Column('value', Float) ) metadata.create_all(self.engine) if 'log' not in metadata.tables: log_table = Table( 'log', metadata, Column('dev', String(50)), Column('mid', String(50)), Column('Timestamp_start', DateTime), Column('Timestamp_end', DateTime), ) metadata.create_all(self.engine) self.data = { 'history': pd.read_sql_table('history', self.engine), } self.api_origin = { 'domain': 'https://energy-iot.chinatowercom.cn/api', 'header': API_HEADER, } def save_history_data(self): """ 保存历史数据 """ data_memory = self.data['history'] data_file = pd.read_sql_table('history', self.engine) merged_df = pd.merge(data_memory, data_file[['dev', 'mid', 'time']], on=['dev', 'mid', 'time'], how='left', indicator=True) filtered_data_memory = merged_df[merged_df['_merge'] == 'left_only'].drop(columns='_merge') filtered_data_memory.to_sql('history', self.engine, if_exists='append', index=False) print(f"成功插入 {len(filtered_data_memory)} 条数据") return len(filtered_data_memory) def save_data(func): """ 保存函数返回数据 """ def wrapper(*args, **kwds): self: Lamina_Data = args[0] result = func(*args, **kwds) if isinstance(result, pd.DataFrame): if result.shape[0] != 0: self.data['history'] = pd.concat([self.data['history'], result], ignore_index=True) return result return wrapper def get_histoty_data_by_database(self, device_id, data_type, time_start:int, time_end:int): """ 读取历史数据 """ database = self.data['history'] filter_data = database[database['dev'] == device_id & database['mid'] == SemaMap_adapter[data_type][0] & database['time'].between(time_start, time_end)] return filter_data @save_data def get_history_data_by_net(self, device_id, data_type, time_start:int, time_end:int, header=None): """ 读取信号量历史数据, 返回接口json数据 """ if header is None: header = self.api_origin['header'] body = { "businessType": "7", "startTimestamp": int(time_start * 1000), "endTimestamp": int(time_end * 1000), "deviceCode": f"{device_id}", "mid": f"{data_type[0]}", "pageNum": 1, "pageSize": 10, "total": 0 } req = requests.post(API_URL, json=body, headers=header) json_data = req.json() if json_data['code'] == 200: """ 数据读取成功 """ print(f"Get data success, mid={data_type[0]}, len={len(json_data['data'])}") table_data = pd.DataFrame(json_data['data'], columns=['collectTime', 'mid', 'midName', 'value']) table_data['dev'] = device_id table_data['time'] = pd.to_datetime(table_data.collectTime) table_data['value'] = pd.to_numeric(table_data.value) return table_data[['dev', 'mid', 'time', 'value']] else: print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}") raise ValueError(f"{json_data['message']}") def get_real_data_by_net(self, device_id, fsu_id=None, header=None): """ 读取设备当前数据, 返回接口json数据 """ if header is None: header = self.api_origin['header'] body = { "businessType": "7", "devType": device_id[3:7], "deviceCodes": device_id, "type": "遥测" } if device_id[3:7] != "0101": if fsu_id is None: raise ValueError(f"Missing required parameters: fsu_id={fsu_id}") body["fsuCode"] = fsu_id req = requests.post(API_Map['perf_real'][0], json=body, headers=header) json_data = req.json() if json_data['code'] == 200: """ 数据读取成功 """ print(f"Get data success, len={len(json_data['data'])}") table_data = pd.DataFrame(json_data['data']) column_name = sorted(table_data.columns) table_data['dev'] = device_id table_data['time'] = table_data['updateTime'].apply(lambda x: int(time.mktime(time.strptime(x, r"%Y-%m-%d %H:%M:%S")))) table_data = table_data[['time', 'dev', *column_name]].drop(columns='updateTime') return table_data else: print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}") raise ValueError(f"{json_data['message']}") def get_devinfo_data_by_net(self, device_id, data_type, time_start:int, time_end:int, header=None): """ 读取设备信息, 返回接口json数据 """ if header is None: header = self.api_origin['header'] body = { "businessType": "7", "id": int(data_type), } req = requests.post(API_Map['dev_info'][0], json=body, headers=header) json_data = req.json() if json_data['code'] == 200: """ 数据读取成功 """ print(f"Get data success, len={len(json_data['data'])}") table_data = pd.DataFrame(json_data['data']) return table_data else: print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}") raise ValueError(f"{json_data['message']}") def spider_adapter(self, device_id:str, time_start:int, time_end:int): """ 爬取适配器数据 """ result = {} columns_adapter = list(filter(lambda x: SemaMap_adapter[x][2], SemaMap_adapter.keys())) data_adapter = pd.DataFrame([], columns=['time', 'device', *columns_adapter]) for k in columns_adapter: result[k] = self.get_history_data_by_net(device_id, SemaMap_adapter[k], time_start, time_end) if data_adapter.empty: data_adapter.time = result[k].time data_adapter.device = device_id data_adapter[k] = result[k].value.apply(float) return data_adapter def spider_meter(self, device_id:str, time_start:int, time_end:int): """ 爬取电表数据 """ result = {} columns_meter = list(map(lambda x: x[4:], filter(lambda x: SemaMap_meter[x][2], SemaMap_meter.keys()))) data_meter = pd.DataFrame([], columns=['time', 'device', *columns_meter]) for k, v in SemaMap_meter.items(): if v[2]: result[k] = self.get_history_data_by_net(device_id, v, time_start, time_end) if data_meter.empty: data_meter.time = result[k].time data_meter.device = device_id data_meter[k[4:]] = result[k].value.apply(float) return data_meter def spider_search_devices(self, device_id:str, header=None): if header is None: header = self.api_origin['header'] body = { "devType": "", "accessPointId": "", "pageNum": 1, "pageSize": 10, "businessType": "7", "devCode": device_id, "deptIds": [] } req = requests.post(API_Map['search_dev'][0], json=body, headers=header) json_data = req.json() if json_data['code'] != 200: """ 数据读取失败 """ print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}") return "" elif search_dev := json_data['rows']: print(f"Search device success, len={len(search_dev)}") return search_dev[0]['stationCode'] else: print(f"Search device fail.") return "" def spider_search_station(self, name:str, header=None): if header is None: header = self.api_origin['header'] body = { "pageNum": 1, "pageSize": 10, "provinceId": "", "cityId": "", "countId": "", "name": name, "code": "", "rsSource": "", "businessType": "7", "status": "", "onlineStatus": "", "maintenancePerson": "", "deptIds": [] } req = requests.post(API_Map['search_stn'][0], json=body, headers=header) json_data = req.json() if json_data['code'] != 200: """ 数据读取失败 """ print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}") return "" elif search_stn := json_data['rows']: print(f"Search station success, len={len(search_stn)}") return search_stn[0]['code'] else: print(f"Search station fail.") return "" def spider_station(self, search:str, time_start:int, time_end:int, header=None): """ 爬取站点数据 """ if header is None: header = self.api_origin['header'] if search[:3] == "TTE": """ 设备编号 """ station_id = self.spider_search_devices(search, header=header) else: """ 站点名称 """ station_id = self.spider_search_station(search, header=header) if station_id == "": print(f"Search station fail.") return {'result': False} body = { "businessType": "7", "stationCode": station_id, } time.sleep(0.5) print(f"Get Data for Station: {station_id}") req = requests.post(API_Map['page'][0], json=body, headers=header) json_data = req.json() if json_data['code'] != 200: """ 数据读取失败 """ print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}") return "" dev_meter = [] dev_adapter = [] dev_info = [] try: for dev in sorted(json_data['rows'], key=lambda x: x['devCode']): print(f"Dev: {dev['devTypeName']}, id={dev['devCode']}") time.sleep(0.5) fsu_id = dev['parentCode'] if 'parentCode' in dev.keys() else None dev_info.append(self.get_real_data_by_net(dev['devCode'], fsu_id, header=header)) time.sleep(0.5) match dev['devType']: case "0101": fsu_id = dev['devCode'] case "0102": dev_adapter.append(self.spider_adapter(dev['devCode'], time_start, time_end)) case "0103": dev_meter.append(self.spider_meter(dev['devCode'], time_start, time_end)) self.save_history_data() except Exception as e: print(f"Get data fail, msg=\n\t{e}") return {'result': False, 'token': e.args[0]} result = { 'result': True, 'station': station_id, 'information': pd.concat(dev_info, ignore_index=True), 'adapter': pd.concat(dev_adapter, ignore_index=True), 'meter': pd.concat(dev_meter, ignore_index=True), } print(f"Station Done.") return result def spider(self, device_id:str, time_start:int, time_end:int): """ 通用爬虫 """ if device_id[:8] == "TTE0102DX": """ 适配器数据 """ self.spider_adapter(device_id, time_start, time_end) elif device_id[:8] == "TTE0103DX": """ 电表数据 """ self.spider_meter(device_id, time_start, time_end) def graphs_adapter(self, device_id, time_start:int|str, time_end:int|str): """ 绘制图表-适配器数据 """ if type(time_start) is str: time_start = time.mktime(time.strptime(time_start, r"%Y-%m-%d %H:%M:%S")) if type(time_end) is str: time_end = time.mktime(time.strptime(time_end, r"%Y-%m-%d %H:%M:%S")) data = self.spider_adapter(device_id, time_start, time_end) self.chart_adapter(data) return data def chart_adapter(self, data_adapter): """ 绘制适配器信息图表 """ fig, ax1 = plt.subplots() ax1.plot(data_adapter['time'], data_adapter['volt_in'], color='green', label='Input Voltage') ax1.plot(data_adapter['time'], data_adapter['volt_out'], color='red', label='Output Voltage') ax2 = ax1.twinx() ax2.plot(data_adapter['time'], data_adapter['power_out'], color='gray', label='Output Power') # # 绘制斜线阴影 # for i in range(len(table_apt) - 1): # ax1.fill_between( # [table_apt['time'].iloc[i], table_apt['time'].iloc[i + 1]], # [table_apt['power_out'].iloc[i], table_apt['power_out'].iloc[i + 1]], # color='red', alpha=0.5) lines, labels = ax1.get_legend_handles_labels() shadows, shadow_labels = ax2.get_legend_handles_labels() ax1.legend(lines + shadows, labels + shadow_labels, loc='upper left') ax1.set_title('Device Data Visualization') ax1.set_xlabel('Time') ax1.set_ylabel('Voltage (V)') ax2.set_ylabel('Power (W)') plt.ioff() plt.show() plt.savefig('output.png') # plt.close() plt.ion() def sim_data_apt(times:tuple[int, int]): """ 模拟数据 """ t_start = time.mktime(time.strptime(times[0], r"%Y-%m-%d %H:%M:%S")) t_end = time.mktime(time.strptime(times[1], r"%Y-%m-%d %H:%M:%S")) count_data = (t_end - t_start) / (10 * 60) time_list = range(int(t_start), int(t_end), 20 * 60) time_list = tuple(map(lambda x: time.strftime(r"%Y-%m-%d %H:%M:%S", time.localtime(x)), time_list)) data = { 'time': time_list, 'volt_in': 10 + 10 * np.random.random(len(time_list)), 'curr_in': 1 + 2 * np.random.random(len(time_list)), 'volt_out': 54 + 2 * np.random.random(len(time_list)), } data['power_out'] = tuple(map(lambda x: x[0] * x[1], zip(data['volt_in'],data['curr_in']))) return pd.DataFrame(data) def save_station_by_file1(data_lamina: Lamina_Data): """ 依据文件爬取所需站点数据 """ time_start = datetime.datetime(2024, 12, 24, 0, 0, 0) time_end = datetime.datetime(2024, 12, 26, 0, 0, 0) time_start_timestamp = time.mktime(time_start.timetuple()) time_end_timestamp = time.mktime(time_end.timetuple()) stations = pd.read_excel(Path(r'C:\Users\wrqal\Documents\Obsidian Vault\附件\25号0发电适配器.xlsx')) output_file = Path(r'result/output.xlsx') if output_file.exists(): finished_station = pd.read_excel(output_file, sheet_name=None) finished_station["Station"]['station'] = finished_station["Station"]['station'].astype('str') finished_station["Adatper"]['station'] = finished_station["Adatper"]['station'].astype('str') finished_station["Meter"]['station'] = finished_station["Meter"]['station'].astype('str') merged_df = pd.merge(stations['点位名称'], finished_station['Station']['点位名称'], how='left', indicator=True) remain_station = merged_df[merged_df['_merge'] == 'left_only'].drop(columns='_merge') else: remain_station = stations['点位名称'] dataset = [] for name in remain_station['点位名称']: print(f"Station: {name}") data = data_lamina.spider_station(name, time_start_timestamp, time_end_timestamp) if data['result']: dataset.append(data) print(f"Done.") # 使用 ExcelWriter 将多个 DataFrame 保存到不同的工作表中 df_station = pd.DataFrame([], columns=['station', '点位名称']) df_station.station = [data['station'] for data in dataset] df_station.点位名称 = remain_station['点位名称'][:len(dataset)].values df_adapter = pd.concat([data['adapter'].assign(station=data['station']) for data in dataset], ignore_index=True) df_meter = pd.concat([data['meter'].assign(station=data['station']) for data in dataset], ignore_index=True) column_adapter = ['time', 'station', *df_adapter.columns[1:-1]] column_meter = ['time', 'station', *df_meter.columns[1:-1]] if output_file.exists(): """ 连接文件 """ df_station = pd.concat([finished_station['Station'], df_station], ignore_index=True) df_adapter = pd.concat([finished_station['Adatper'], df_adapter], ignore_index=True) df_meter = pd.concat([finished_station['Meter'], df_meter], ignore_index=True) with pd.ExcelWriter(output_file) as writer: df_station.to_excel(writer, sheet_name='Station', index=False) df_adapter.to_excel(writer, sheet_name='Adatper', index=False, columns=column_adapter) df_meter.to_excel(writer, sheet_name='Meter', index=False, columns=column_meter) print(f"数据已成功保存到 {output_file}") def save_station_by_file2(data_lamina: Lamina_Data, file_path): """ 依据文件爬取所需站点数据 """ file_input = Path(file_path) file_output = file_input.parent / (file_input.stem + '_output.xlsx') df_input = pd.read_excel(file_input) if file_output.exists(): finished_station = pd.read_excel(file_output, sheet_name=None) finished_station["Station"]['station'] = finished_station["Station"]['station'].astype('str') finished_station["Adatper"]['station'] = finished_station["Adatper"]['station'].astype('str') finished_station["Meter"]['station'] = finished_station["Meter"]['station'].astype('str') merged_df = pd.merge(df_input['点位名称'], finished_station['Station']['点位名称'], how='left', indicator=True) remain_station = merged_df[merged_df['_merge'] == 'left_only'].drop(columns='_merge') else: remain_station = df_input dataset = [] df_input = df_input.set_index('点位名称') for name in remain_station['点位名称']: print(f"Station: {name}") time_start_timestamp = df_input['开始时间'][name].tz_localize('Asia/Shanghai').timestamp() time_end_timestamp = df_input['结束时间'][name].tz_localize('Asia/Shanghai').timestamp() data = data_lamina.spider_station(name, time_start_timestamp, time_end_timestamp) if data['result']: dataset.append(data) analysis_info1(data) plt.waitforbuttonpress() elif data['token']: """ Token 失效 """ data_lamina.api_origin['header']['authorization'] = data['token'] print(f"Done.") # 使用 ExcelWriter 将多个 DataFrame 保存到不同的工作表中 df_station = pd.DataFrame([], columns=['station', '点位名称']) df_station.station = [data['station'] for data in dataset] df_station.点位名称 = remain_station['点位名称'][:len(dataset)].values df_adapter = pd.concat([data['adapter'].assign(station=data['station']) for data in dataset], ignore_index=True) df_meter = pd.concat([data['meter'].assign(station=data['station']) for data in dataset], ignore_index=True) column_adapter = ['time', 'station', *df_adapter.columns[1:-1]] column_meter = ['time', 'station', *df_meter.columns[1:-1]] if file_output.exists(): """ 连接文件 """ df_station = pd.concat([finished_station['Station'], df_station], ignore_index=True) df_adapter = pd.concat([finished_station['Adatper'], df_adapter], ignore_index=True) df_meter = pd.concat([finished_station['Meter'], df_meter], ignore_index=True) with pd.ExcelWriter(file_output) as writer: df_station.to_excel(writer, sheet_name='Station', index=False) df_adapter.to_excel(writer, sheet_name='Adatper', index=False, columns=column_adapter) df_meter.to_excel(writer, sheet_name='Meter', index=False, columns=column_meter) print(f"数据已成功保存到 {file_output}") return result def analysis_info(df_station: pd.DataFrame): """ 站点Log数据分析 """ map_mid = {} for k, v in SemaMap_adapter.items(): map_mid[v[0]] = v[3] for k, v in SemaMap_meter.items(): map_mid[v[0]] = v[3] map_dev = { 'TTE0102': 'Adapter', 'TTE0103': 'Meter', } data = df_station.assign( timestamp = lambda df: pd.to_datetime(df['time'], unit='s', utc=True).apply(lambda x: x.tz_convert('Asia/Shanghai')), type = lambda df: df['dev'].apply(lambda x: map_dev[x[:7]]), date = lambda df: df['timestamp'].apply(lambda x: x.date()), name = lambda df: df['mid'].map(map_mid), value = lambda df: pd.to_numeric(df['value']) ) data_daliy = data.loc[(data['dev'] == 'TTE0102DX2406272727') & (data['date'] == np.datetime64('2024-12-25')) & (data['type'] == 'Adapter')] fig, axes = plt.subplots(3, 2) axes = axes.flatten() i = 0 for name, df_plot in data_daliy.set_index('timestamp').sort_index()[['name', 'value']].groupby('name'): df_plot.plot(ax=axes[i], title=name) i += 1 plt.show() def analysis_info1(data_station: dict): """ 站点spider返回数据分析 """ # 创建双色颜色过渡 color_map = mcolors.LinearSegmentedColormap.from_list("mycmap", ["blue", "red"]) for dev_id in data_station['information']['dev'].unique(): data_dev = data_station['information'].loc[data_station['information']['dev'] == dev_id] print(f"Device: {dev_id}") match dev_id[:7]: case "TTE0101": # 汇流箱 pass case "TTE0102": # 适配器 pass history_dev = data_station['adapter'].assign( date = lambda df: df['time'].apply(lambda x: x.date()), ) case "TTE0103": # 电表 pass history_dev = data_station['meter'].assign( date = lambda df: df['time'].apply(lambda x: x.date()), id_group = lambda df: df['date'].diff().ne(0).cumsum(), ) # 按日期分组并绘制折线图 fig, axs = plt.subplots(3, 1) axs = axs.flatten() for date, group in history_dev.groupby('date'): # 计算当天的起始时间 start_time = pd.Timestamp(date) # 调整时间索引,使其从当天的起始时间开始 adjusted_time = group['time'] - start_time # 计算颜色和不透明度 color = color_map(group['id_group'] / history_dev['id_group'][-1]) alpha = 0.5 group.set_index(adjusted_time)['volt'].plot(ax=axs[0], label=str(date), color=color, alpha=alpha) group.set_index(adjusted_time)['curr'].plot(ax=axs[1], label=str(date), color=color, alpha=alpha) group.set_index(adjusted_time)['power'].plot(ax=axs[2], label=str(date), color=color, alpha=alpha) # 添加图例 axs[0].legend(title='Date') # 添加标题和标签 axs[0].set_title('Value over Time by Date') axs[0].set_xlabel('Timestamp') axs[0].set_ylabel('Value') plt.show() plt.savefig(Path(f"result\Analysis\{dev_id}.png")) print(data_dev.head()) if __name__=='__main__': """ 主体调用流程 """ # plt中文显示 plt.rcParams['font.sans-serif'] = ['SimHei'] # 坐标轴负数显示 plt.rcParams['axes.unicode_minus'] = False if not hasattr(__builtins__,"__IPYTHON__") and 0: import pickle path_data1 = Path(r"result\Analysis\station_data1.pkl") with open(path_data1, 'rb') as f: loaded_data = pickle.load(f) analysis_info1(loaded_data) if hasattr(__builtins__,"__IPYTHON__"): path_db = '../result/chinatowercom.db' else: path_db = 'result/chinatowercom.db' if not (file_db:= Path(path_db)).exists(): file_db.touch() API_HEADER['Cookie'] = "HWWAFSESTIME=1737167522632; HWWAFSESID=6cb0288b7bc75e5a66; dc04ed2361044be8a9355f6efb378cf2=WyIzNTI0NjE3OTgzIl0" API_HEADER['authorization'] = 'Bearer eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiIl0sInVzZXJfbmFtZSI6IndlYl9tYW5hZ2V8d2FuZ2xlaTQiLCJzY29wZSI6WyJhbGwiXSwiZXhwIjoxNzM3MzQxNDg4LCJ1c2VySWQiOjI0Mjg1LCJqdGkiOiIwOGFlZDdjYy1hZGE2LTQ4ZWQtYmQyZS0xYjY3NGRkZmVmMWMiLCJjbGllbnRfaWQiOiJ3ZWJfbWFuYWdlIn0.CnfJh2ie0D0dOG1yELiQPuXCwez_nzeYD8rXTL0ILSeq31kmTnhOJJTA6aI8JTEtDVgFyqC084uDR1KvDgwKL5aXXzKwCNqBxziJQbA2AuBRdDgdWXM0r_3qrBGL-0MuYB2jygJaNwue2GIh_3PYsMQGRqHBeyJ9JUgdiWYUVpmbYInSyOlY2l_QtzQTFlz8L7eUC0sDeAWSPNamvYczLas0MtuQquH6JM_-WaFfc-6TblmFp6qSxZHJT-0dy7LLTw5zpXbh3QnbjgBARCaOvzLaDtfArgU20Hq3AqAIwvTVOQFeI4jChFIRvyXwnnUDX-IrFru_sOYLX1jcc88cPA' data_lamina = Lamina_Data('sqlite:///' + path_db) # 依据站点内设备爬取整个站点的实时与历史数据 # today = datetime.datetime.today() # yesterday = today - datetime.timedelta(days=30) # today_midnight = today.replace(hour=0, minute=0, second=0, microsecond=0) # yesterday_midnight = yesterday.replace(hour=0, minute=0, second=0, microsecond=0) # today_midnight_timestamp = time.mktime(today_midnight.timetuple()) # yesterday_midnight_timestamp = time.mktime(yesterday_midnight.timetuple()) # data = data_lamina.spider_station("乐亭后庞河村", yesterday_midnight_timestamp, today_midnight_timestamp) # 读取站点历史数据 # save_station_by_file1(data_lamina) result = save_station_by_file2(data_lamina, "result\station_Q0120.xlsx") # 网站令牌更新 body = { "appId": "pjxNHUmFrMuj82pJenTmEc3Uvzj1cAO/qXs3zKMTjsG7Quk59cyjBCQM4miupyXv1At4e3deTn1cF9c4/WveDaeJCwEB+Dslom9yufrVPziOmRrQj1iAo8QVWSUnT1k70soDst+JN6japzOt7vjibru0uS/xezHrhuLSyNxkqzs=", "refreshToken": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiIl0sInVzZXJfbmFtZSI6IndlYl9tYW5hZ2V8d2FuZ2xlaTQiLCJzY29wZSI6WyJhbGwiXSwiYXRpIjoiNGU4NTY1NTAtZDE5Ni00YjY4LWI1OGYtMzBkOTY0YjIyOGNkIiwiZXhwIjoxNzM1OTcyNTA0LCJ1c2VySWQiOjI0Mjg1LCJqdGkiOiIwMTY1NDg1MC1mZjIwLTRkMzQtYTQ4ZC03NmRiZTk3MmQ3YWQiLCJjbGllbnRfaWQiOiJ3ZWJfbWFuYWdlIn0.PFT8JlTvWay1GUI5TC2Ht25rZWkAnQT3nxs-dOcAVIN9To06rG8EDspZ5eFxmNuEraurNxHCOLPfQZ-bCzJ8ywlA747PyJxyMPBhRhgXSDHYHX7ZqHEUdQdQo_Wkf75I8ko8_szchyhItjtgDUCzud9TlxKeuBQuerpYV8tkUVWobp4ulnnHEg0kqZFDeXrI-84Lyy-kodCDI-r3KuMBC5Rvbce0hqMcs2l-2U7M-V7LUT2VhBEvQd8l_Agx8hqWcK-d-dMVhlNjcvcb0AKmcX845D0bD5tKVKim_5JX4Er9-NANzSmgO0SRnsFVuxHhXiNqSkTB7pIdyi9r-ui23Q", "accessToken": API_HEADER['authorization'] } body1 = { "appId": "ePUxoRrHClb7+Wxk7NAJpiJhoVAcJbZ5NPJEak8ZTFrETrfA0JAIjbqiDuaow1Jdyg1FLjUAwlBXrLoKh514oTTZSp1U91ewVj+8ZvNi2vtbQkU03WdyxyHXiyTNjC88O1JRm13hRnIm1vRMoxsudm8CPCpUIsU9yYABZ+/w3A4=", "refreshToken": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiIl0sInVzZXJfbmFtZSI6IndlYl9tYW5hZ2V8d2FuZ2xlaTQiLCJzY29wZSI6WyJhbGwiXSwiYXRpIjoiNGU4NTY1NTAtZDE5Ni00YjY4LWI1OGYtMzBkOTY0YjIyOGNkIiwiZXhwIjoxNzM1OTcyNTA0LCJ1c2VySWQiOjI0Mjg1LCJqdGkiOiIwMTY1NDg1MC1mZjIwLTRkMzQtYTQ4ZC03NmRiZTk3MmQ3YWQiLCJjbGllbnRfaWQiOiJ3ZWJfbWFuYWdlIn0.PFT8JlTvWay1GUI5TC2Ht25rZWkAnQT3nxs-dOcAVIN9To06rG8EDspZ5eFxmNuEraurNxHCOLPfQZ-bCzJ8ywlA747PyJxyMPBhRhgXSDHYHX7ZqHEUdQdQo_Wkf75I8ko8_szchyhItjtgDUCzud9TlxKeuBQuerpYV8tkUVWobp4ulnnHEg0kqZFDeXrI-84Lyy-kodCDI-r3KuMBC5Rvbce0hqMcs2l-2U7M-V7LUT2VhBEvQd8l_Agx8hqWcK-d-dMVhlNjcvcb0AKmcX845D0bD5tKVKim_5JX4Er9-NANzSmgO0SRnsFVuxHhXiNqSkTB7pIdyi9r-ui23Q", "accessToken": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiIl0sInVzZXJfbmFtZSI6IndlYl9tYW5hZ2V8d2FuZ2xlaTQiLCJzY29wZSI6WyJhbGwiXSwiZXhwIjoxNzM1OTA5NTE1LCJ1c2VySWQiOjI0Mjg1LCJqdGkiOiI0ZTg1NjU1MC1kMTk2LTRiNjgtYjU4Zi0zMGQ5NjRiMjI4Y2QiLCJjbGllbnRfaWQiOiJ3ZWJfbWFuYWdlIn0.KxGBpvuPIP3CHfVT41wxE_v9vlHNC9GL6sfaIta8cI2qlMpTCVg9dg-4DgPlXuMrtI0YzrSbAywCQmFLGcBgh3HD_UuIAH-k3Y8__osZEgc4bUcJ58W-uukuEu3MEwbV6ZcxTq7dxf3iqu9aXGrawYY_iL-jIRH1v8Zcr4qUPA9Mlzl8LvZdzZ05XgntbxE8IQRmt1M5rWdWLV4tvbUEYR5eDGs3az0w-MFXQ8qNHo8KLJc68WvbilmOMWkhK2k_xQQTdNx_jPktjYfClZa6l9-6rYAb5MMqwt77fY0_JE87u3w5YbU_GRyBI2mjnJe1qKdMjUEpQwWqt3DLJWLe7Q" } req = requests.post(API_Map['refreshToken'][0], json=body1, headers=API_HEADER) # data = sim_data_apt(('2024-10-1 00:00:00', '2024-10-1 12:00:00')) # chart_apt(data) if not hasattr(__builtins__,"__IPYTHON__"): table_apt = data_lamina.graphs_adapter('TTE0102DX2406180988', '2024-11-23 00:00:00', '2024-11-26 00:00:00') while True: plt.waitforbuttonpress()