Accumulated changes;

何 泽隆
2025-01-04 21:20:27 +08:00
parent 00ddd6d68c
commit 5022bf802e
3 changed files with 251 additions and 45 deletions

View File

@@ -9,7 +9,7 @@ import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from sqlalchemy import create_engine
from sqlalchemy import MetaData, Table, Column, String, Float, Integer
from sqlalchemy import MetaData, Table, Column, String, Float, Integer, DateTime
API_URL = "https://energy-iot.chinatowercom.cn/api/device/device/historyPerformance"
@@ -63,6 +63,8 @@ SemaMap_meter = {
}
API_Map = {
'refreshToken': ['https://energy-iot.chinatowercom.cn/api/auth/refreshToken', None],
'search_stn': ['https://energy-iot.chinatowercom.cn/api/device/station/list', None],
'search_dev': ['https://energy-iot.chinatowercom.cn/api/device/device/page', None],
'dev_info': ['https://energy-iot.chinatowercom.cn/api/device/device/devInfo', None],
'perf_real': ['https://energy-iot.chinatowercom.cn/api/device/device/perfReal', None],
@@ -84,7 +86,7 @@ class Lamina_Data(object):
'history', metadata,
Column('dev', String(50)),
Column('mid', String(50)),
Column('time', Integer),
Column('time', DateTime),
Column('value', Float)
)
metadata.create_all(self.engine)
@@ -94,8 +96,8 @@ class Lamina_Data(object):
'log', metadata,
Column('dev', String(50)),
Column('mid', String(50)),
Column('Timestamp_start', Integer),
Column('Timestamp_end', Integer),
Column('Timestamp_start', DateTime),
Column('Timestamp_end', DateTime),
)
metadata.create_all(self.engine)
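Both table definitions above move the time columns from epoch integers to SQLAlchemy's DateTime. A minimal sketch of the same pattern in isolation, assuming an in-memory SQLite engine (table contents here are illustrative, not from this repo):

import pandas as pd
from sqlalchemy import create_engine, MetaData, Table, Column, String, Float, DateTime

engine = create_engine('sqlite:///:memory:')
metadata = MetaData()
history = Table(  # same column layout as the 'history' table above
    'history', metadata,
    Column('dev', String(50)),
    Column('mid', String(50)),
    Column('time', DateTime),
    Column('value', Float),
)
metadata.create_all(engine)

# pandas datetime64 columns map straight onto the DateTime column on write.
df = pd.DataFrame({
    'dev': ['demo'], 'mid': ['m1'],
    'time': [pd.Timestamp('2024-12-25 08:00:00')], 'value': [1.5],
})
df.to_sql('history', engine, if_exists='append', index=False)
print(pd.read_sql_table('history', engine).dtypes)  # 'time' round-trips as datetime64[ns]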
@@ -128,8 +130,8 @@ class Lamina_Data(object):
self: Lamina_Data = args[0]
result = func(*args, **kwds)
if isinstance(result, pd.DataFrame):
if result.shape[0] != 0:
self.data['history'] = pd.concat([self.data['history'], result], ignore_index=True)
self.save_history_data()
return result
return wrapper
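The reworked wrapper above now persists after every non-empty fetch instead of only concatenating. A standalone sketch of that accumulate-on-return decorator, with the save step stubbed out (class and data here are illustrative):

import functools
import pandas as pd

def accumulate_history(func):
    """ Append any non-empty DataFrame result to self.data['history'], then persist. """
    @functools.wraps(func)
    def wrapper(*args, **kwds):
        self = args[0]
        result = func(*args, **kwds)
        if isinstance(result, pd.DataFrame) and not result.empty:
            self.data['history'] = pd.concat([self.data['history'], result], ignore_index=True)
            self.save_history_data()  # persist immediately after each successful fetch
        return result
    return wrapper

class Demo:
    def __init__(self):
        self.data = {'history': pd.DataFrame(columns=['dev', 'value'])}
    def save_history_data(self):
        pass  # stub; the real class writes to SQL
    @accumulate_history
    def fetch(self):
        return pd.DataFrame({'dev': ['d1'], 'value': [1.0]})

d = Demo()
d.fetch()
d.fetch()
print(len(d.data['history']))  # 2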
@@ -164,12 +166,13 @@ class Lamina_Data(object):
""" 数据读取成功 """
print(f"Get data success, mid={data_type[0]}, len={len(json_data['data'])}")
table_data = pd.DataFrame(json_data['data'], columns=['collectTime', 'mid', 'midName', 'value'])
table_data['time'] = table_data['collectTime'].apply(lambda x: int(time.mktime(time.strptime(x, r"%Y-%m-%d %H:%M:%S"))))
table_data['dev'] = device_id
table_data['time'] = pd.to_datetime(table_data.collectTime)
table_data['value'] = pd.to_numeric(table_data.value)
return table_data[['dev', 'mid', 'time', 'value']]
else:
print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}")
return pd.DataFrame([], columns=['dev', 'mid', 'time', 'value'])
raise ValueError(f"{json_data['message']}")
def get_real_data_by_net(self, device_id, fsu_id=None, header=None):
""" 读取设备当前数据, 返回接口json数据 """
@@ -200,7 +203,7 @@ class Lamina_Data(object):
return table_data
else:
print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}")
return pd.DataFrame([])
raise ValueError(f"{json_data['message']}")
def get_devinfo_data_by_net(self, device_id, data_type, time_start:int, time_end:int, header=None):
""" 读取设备信息, 返回接口json数据 """
@@ -221,17 +224,18 @@ class Lamina_Data(object):
return table_data
else:
print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}")
return pd.DataFrame([], columns=['dev', 'mid', 'time', 'value'])
raise ValueError(f"{json_data['message']}")
def spider_adapter(self, device_id:str, time_start:int, time_end:int):
""" 爬取适配器数据 """
result = {}
columns_adapter = list(filter(lambda x: SemaMap_adapter[x][2], SemaMap_adapter.keys()))
data_adapter = pd.DataFrame([], columns=['time', *columns_adapter])
data_adapter = pd.DataFrame([], columns=['time', 'device', *columns_adapter])
for k in columns_adapter:
result[k] = self.get_history_data_by_net(device_id, SemaMap_adapter[k], time_start, time_end)
if data_adapter.empty:
data_adapter.time = result[k].time
data_adapter.device = device_id
data_adapter[k] = result[k].value.apply(float)
return data_adapter
@@ -239,17 +243,17 @@ class Lamina_Data(object):
""" 爬取电表数据 """
result = {}
columns_meter = list(map(lambda x: x[4:], filter(lambda x: SemaMap_meter[x][2], SemaMap_meter.keys())))
data_meter = pd.DataFrame([], columns=['time', *columns_meter])
data_meter = pd.DataFrame([], columns=['time', 'device', *columns_meter])
for k, v in SemaMap_meter.items():
if v[2]:
result[k] = self.get_history_data_by_net(device_id, v, time_start, time_end)
if data_meter.empty:
data_meter.time = result[k].time
data_meter.device = device_id
data_meter[k[4:]] = result[k].value.apply(float)
return data_meter
def spider_station(self, device_id:str, time_start:int, time_end:int, header=None):
""" 爬取站点数据 """
def spider_search_devices(self, device_id:str, header=None):
if header is None:
header = self.api_origin['header']
@@ -268,22 +272,80 @@ class Lamina_Data(object):
if json_data['code'] != 200:
""" 数据读取失败 """
print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}")
return pd.DataFrame([])
return ""
elif search_dev := json_data['rows']:
print(f"Search device success, len={len(search_dev)}")
station_id = search_dev[0]['stationCode']
return search_dev[0]['stationCode']
else:
print(f"Search device fail.")
return pd.DataFrame([])
return ""
def spider_search_station(self, name:str, header=None):
if header is None:
header = self.api_origin['header']
body = {
"pageNum": 1,
"pageSize": 10,
"provinceId": "",
"cityId": "",
"countId": "",
"name": name,
"code": "",
"rsSource": "",
"businessType": "7",
"status": "",
"onlineStatus": "",
"maintenancePerson": "",
"deptIds": []
}
req = requests.post(API_Map['search_stn'][0], json=body, headers=header)
json_data = req.json()
if json_data['code'] != 200:
""" 数据读取失败 """
print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}")
return ""
elif search_stn := json_data['rows']:
print(f"Search station success, len={len(search_stn)}")
return search_stn[0]['code']
else:
print(f"Search station fail.")
return ""
def spider_station(self, search:str, time_start:int, time_end:int, header=None):
""" 爬取站点数据 """
if header is None:
header = self.api_origin['header']
if search[:3] == "TTE":
""" 设备编号 """
station_id = self.spider_search_devices(search, header=header)
else:
""" 站点名称 """
station_id = self.spider_search_station(search, header=header)
if station_id == "":
print(f"Search station fail.")
return {'result': False}
body = {
"businessType": "7",
"stationCode": station_id,
}
time.sleep(0.5)
print(f"Get Data for Station: {station_id}")
req = requests.post(API_Map['page'][0], json=body, headers=header)
json_data = req.json()
if json_data['code'] != 200:
""" 数据读取失败 """
print(f"Get data fail, code={json_data['code']}, msg=\n\t{json_data['message']}")
return ""
dev_meter = []
dev_adapter = []
try:
for dev in sorted(json_data['rows'], key=lambda x: x['devCode']):
print(f"Dev: {dev['devTypeName']}, id={dev['devCode']}")
time.sleep(0.5)
@@ -294,10 +356,21 @@ class Lamina_Data(object):
case "0101":
fsu_id = dev['devCode']
case "0102":
self.spider_adapter(dev['devCode'], time_start, time_end)
dev_adapter.append(self.spider_adapter(dev['devCode'], time_start, time_end))
case "0103":
self.spider_meter(dev['devCode'], time_start, time_end)
dev_meter.append(self.spider_meter(dev['devCode'], time_start, time_end))
self.save_history_data()
except Exception as e:
print(f"Get data fail, msg=\n\t{e}")
return {'result': False, 'token': e.args[0]}
result = {
'result': True,
'station': station_id,
'adapter': pd.concat(dev_adapter, ignore_index=True),
'meter': pd.concat(dev_meter, ignore_index=True),
}
print(f"Station Done.")
return result
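spider_station now reports through a dict: 'result' flags success, 'station'/'adapter'/'meter' carry the payload, and on a raised token error the ValueError message rides along under 'token'. A hypothetical consumer of that contract (not code from this repo):

def handle_station(data_lamina, search: str, t0: int, t1: int):
    """ Illustrative caller for the spider_station result dict. """
    data = data_lamina.spider_station(search, t0, t1)
    if data['result']:
        return data['adapter'], data['meter']
    if data.get('token'):
        # On auth failure the exception message is reused as a replacement token.
        data_lamina.api_origin['header']['authorization'] = data['token']
    return None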
def spider(self, device_id:str, time_start:int, time_end:int):
""" 通用爬虫 """
@@ -368,6 +441,102 @@ def sim_data_apt(times:tuple[int, int]):
return pd.DataFrame(data)
def save_station_by_file1(data_lamina: Lamina_Data):
""" 依据文件爬取所需站点数据 """
time_start = datetime.datetime(2024, 12, 24, 0, 0, 0)
time_end = datetime.datetime(2024, 12, 26, 0, 0, 0)
time_start_timestamp = time.mktime(time_start.timetuple())
time_end_timestamp = time.mktime(time_end.timetuple())
stations = pd.read_excel(Path(r'C:\Users\wrqal\Documents\Obsidian Vault\附件\25号0发电适配器.xlsx'))
output_file = Path(r'result/output.xlsx')
if output_file.exists():
finished_station = pd.read_excel(output_file, sheet_name=None)
finished_station["Station"]['station'] = finished_station["Station"]['station'].astype('str')
finished_station["Adatper"]['station'] = finished_station["Adatper"]['station'].astype('str')
finished_station["Meter"]['station'] = finished_station["Meter"]['station'].astype('str')
merged_df = pd.merge(stations['点位名称'], finished_station['Station']['点位名称'], how='left', indicator=True)
remain_station = merged_df[merged_df['_merge'] == 'left_only'].drop(columns='_merge')
else:
remain_station = stations[['点位名称']]
dataset = []
for name in remain_station['点位名称']:
print(f"Station: {name}")
data = data_lamina.spider_station(name, time_start_timestamp, time_end_timestamp)
if data['result']:
dataset.append(data)
print(f"Done.")
# Use ExcelWriter to save multiple DataFrames to separate worksheets
df_station = pd.DataFrame([], columns=['station', '点位名称'])
df_station.station = [data['station'] for data in dataset]
df_station.点位名称 = remain_station['点位名称'][:len(dataset)].values
df_adapter = pd.concat([data['adapter'].assign(station=data['station']) for data in dataset], ignore_index=True)
df_meter = pd.concat([data['meter'].assign(station=data['station']) for data in dataset], ignore_index=True)
column_adapter = ['time', 'station', *df_adapter.columns[1:-1]]
column_meter = ['time', 'station', *df_meter.columns[1:-1]]
if output_file.exists():
""" 连接文件 """
df_station = pd.concat([finished_station['Station'], df_station], ignore_index=True)
df_adapter = pd.concat([finished_station['Adapter'], df_adapter], ignore_index=True)
df_meter = pd.concat([finished_station['Meter'], df_meter], ignore_index=True)
with pd.ExcelWriter(output_file) as writer:
df_station.to_excel(writer, sheet_name='Station', index=False)
df_adapter.to_excel(writer, sheet_name='Adapter', index=False, columns=column_adapter)
df_meter.to_excel(writer, sheet_name='Meter', index=False, columns=column_meter)
print(f"数据已成功保存到 {output_file}")
def save_station_by_file2(data_lamina: Lamina_Data, file_path):
""" 依据文件爬取所需站点数据 """
file_input = Path(file_path)
file_output = file_input.parent / (file_input.stem + '_output.xlsx')
df_input = pd.read_excel(file_input)
time_start_timestamp = df_input['开始时间'][0].tz_localize('Asia/Shanghai').timestamp()
time_end_timestamp = df_input['结束时间'][0].tz_localize('Asia/Shanghai').timestamp()
if file_output.exists():
finished_station = pd.read_excel(file_output, sheet_name=None)
finished_station["Station"]['station'] = finished_station["Station"]['station'].astype('str')
finished_station["Adatper"]['station'] = finished_station["Adatper"]['station'].astype('str')
finished_station["Meter"]['station'] = finished_station["Meter"]['station'].astype('str')
merged_df = pd.merge(df_input['点位名称'], finished_station['Station']['点位名称'], how='left', indicator=True)
remain_station = merged_df[merged_df['_merge'] == 'left_only'].drop(columns='_merge')
else:
remain_station = df_input
dataset = []
for name in remain_station['点位名称']:
print(f"Station: {name}")
data = data_lamina.spider_station(name, time_start_timestamp, time_end_timestamp)
if data['result']:
dataset.append(data)
elif data.get('token'):
""" Token 失效 """
data_lamina.api_origin['header']['authorization'] = data['token']
print(f"Done.")
# Use ExcelWriter to save multiple DataFrames to separate worksheets
df_station = pd.DataFrame([], columns=['station', '点位名称'])
df_station.station = [data['station'] for data in dataset]
df_station.点位名称 = remain_station['点位名称'][:len(dataset)].values
df_adapter = pd.concat([data['adapter'].assign(station=data['station']) for data in dataset], ignore_index=True)
df_meter = pd.concat([data['meter'].assign(station=data['station']) for data in dataset], ignore_index=True)
column_adapter = ['time', 'station', *df_adapter.columns[1:-1]]
column_meter = ['time', 'station', *df_meter.columns[1:-1]]
if file_output.exists():
""" 连接文件 """
df_station = pd.concat([finished_station['Station'], df_station], ignore_index=True)
df_adapter = pd.concat([finished_station['Adapter'], df_adapter], ignore_index=True)
df_meter = pd.concat([finished_station['Meter'], df_meter], ignore_index=True)
with pd.ExcelWriter(file_output) as writer:
df_station.to_excel(writer, sheet_name='Station', index=False)
df_adapter.to_excel(writer, sheet_name='Adapter', index=False, columns=column_adapter)
df_meter.to_excel(writer, sheet_name='Meter', index=False, columns=column_meter)
print(f"数据已成功保存到 {file_output}")
if __name__=='__main__':
""" 主体调用流程 """
if hasattr(__builtins__,"__IPYTHON__"):
@@ -375,19 +544,38 @@ if __name__=='__main__':
else:
path_db = 'result/chinatowercom.db'
API_HEADER['Cookie'] = "HWWAFSESTIME=1735108780906; HWWAFSESID=1c91597e07b0014c4d; dc04ed2361044be8a9355f6efb378cf2=WyIzNTI0NjE3OTgzIl0"
API_HEADER['authorization'] = 'Bearer eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiIl0sInVzZXJfbmFtZSI6IndlYl9tYW5hZ2V8d2FuZ2xlaTQiLCJzY29wZSI6WyJhbGwiXSwiZXhwIjoxNzM1MjI2NDAyLCJ1c2VySWQiOjI0Mjg1LCJqdGkiOiJmNzA1ZTlkZC1mNTA5LTQwYzUtODFhNi0zYzdlMzhhZjE0ODgiLCJjbGllbnRfaWQiOiJ3ZWJfbWFuYWdlIn0.e8p-hKWoFyLJbtqKEvlIbLpz-_OB7Ak32d8qdTHNZEny12lUrUE0YYrWQTu0gGtT-eRNDJ62q51IUYOkM_5Ou0Qk2HLouR9-iygtgtjIno72466bv_ao5wvD2PZihXKaKet_c9mnpOqDpvaaApAU4_rk0u6Pg7uJG4stV-akaaMMqRLR-cK5ARePeyHophyGUx80kkSlnhYfGP2rJjEFva36iPaCzM6oiezObMoXWtAPw67vPS-5saTWnjYLrzxr3_s5Idk1pwWPNWfSa6Rl_YMKKiTdtWAEepyrxxOWVfMaeAQYt-ndHhxyBPjRluDTSwUViWmDidoFxkKPMQixVw'
if not (file_db:= Path(path_db)).exists():
file_db.touch()
API_HEADER['Cookie'] = "HWWAFSESID=ac80388ba903b6ae67; HWWAFSESTIME=1735517672864; dc04ed2361044be8a9355f6efb378cf2=WyIzNTI0NjE3OTgzIl0"
API_HEADER['authorization'] = 'Bearer eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiIl0sInVzZXJfbmFtZSI6IndlYl9tYW5hZ2V8d2FuZ2xlaTQiLCJzY29wZSI6WyJhbGwiXSwiYXRpIjoiMzkzMTUzZGYtZDBkYS00YmM1LWJiZmMtZjdjMTQxYTEzOTU0IiwiZXhwIjoxNzM1OTcyNTA0LCJ1c2VySWQiOjI0Mjg1LCJqdGkiOiIwMTY1NDg1MC1mZjIwLTRkMzQtYTQ4ZC03NmRiZTk3MmQ3YWQiLCJjbGllbnRfaWQiOiJ3ZWJfbWFuYWdlIn0.diNffgdMwBp7_IsjTAwN-YYp8_zpRp5pGn7HJAar51R3jpJyeROFV1g8T6jC_d-bfqS59N649llL3kJPguFfTrwJSu7NS4oeD2YNPwuhpQIeMbszp48NZ3zKyct6COpMPTWRVMVygcfsKfkYnDJQSMyanfQCtLinfDhNq3ektLtPFxLsf54noBu8R_MJ-Pkdjiip3koA8Jhl2eL2Af4U-nd9UAjNaMT5HI0scC9ViEJshzgyxvOi4Pabzw-JI5ZRYzWd1rNZo7b1_s7OAYcqj2hteW1JdC7_6hTHZxpEjzRBvhuvpwHDtsBvAgGUI6hPoYkOpGNPG2QfW7Hyc7TWtA'
data_lamina = Lamina_Data('sqlite:///' + path_db)
today = datetime.datetime.today()
yesterday = today - datetime.timedelta(days=1)
today_midnight = today.replace(hour=0, minute=0, second=0, microsecond=0)
yesterday_midnight = yesterday.replace(hour=0, minute=0, second=0, microsecond=0)
today_midnight_timestamp = time.mktime(today_midnight.timetuple())
yesterday_midnight_timestamp = time.mktime(yesterday_midnight.timetuple())
# Crawl the whole station's real-time and historical data via its devices
data = data_lamina.spider_station('TTE0102DX2406272727', yesterday_midnight_timestamp, today_midnight_timestamp)
# today = datetime.datetime.today()
# yesterday = today - datetime.timedelta(days=1)
# today_midnight = today.replace(hour=0, minute=0, second=0, microsecond=0)
# yesterday_midnight = yesterday.replace(hour=0, minute=0, second=0, microsecond=0)
# today_midnight_timestamp = time.mktime(today_midnight.timetuple())
# yesterday_midnight_timestamp = time.mktime(yesterday_midnight.timetuple())
# data = data_lamina.spider_station('TTE0102DX2410091439', yesterday_midnight_timestamp, today_midnight_timestamp)
# Read station history data
save_station_by_file1(data_lamina)
# save_station_by_file2(data_lamina, r"result\station_Q0103.xlsx")
# Refresh the site token
body = {
"appId": "pjxNHUmFrMuj82pJenTmEc3Uvzj1cAO/qXs3zKMTjsG7Quk59cyjBCQM4miupyXv1At4e3deTn1cF9c4/WveDaeJCwEB+Dslom9yufrVPziOmRrQj1iAo8QVWSUnT1k70soDst+JN6japzOt7vjibru0uS/xezHrhuLSyNxkqzs=",
"refreshToken": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiIl0sInVzZXJfbmFtZSI6IndlYl9tYW5hZ2V8d2FuZ2xlaTQiLCJzY29wZSI6WyJhbGwiXSwiYXRpIjoiNGU4NTY1NTAtZDE5Ni00YjY4LWI1OGYtMzBkOTY0YjIyOGNkIiwiZXhwIjoxNzM1OTcyNTA0LCJ1c2VySWQiOjI0Mjg1LCJqdGkiOiIwMTY1NDg1MC1mZjIwLTRkMzQtYTQ4ZC03NmRiZTk3MmQ3YWQiLCJjbGllbnRfaWQiOiJ3ZWJfbWFuYWdlIn0.PFT8JlTvWay1GUI5TC2Ht25rZWkAnQT3nxs-dOcAVIN9To06rG8EDspZ5eFxmNuEraurNxHCOLPfQZ-bCzJ8ywlA747PyJxyMPBhRhgXSDHYHX7ZqHEUdQdQo_Wkf75I8ko8_szchyhItjtgDUCzud9TlxKeuBQuerpYV8tkUVWobp4ulnnHEg0kqZFDeXrI-84Lyy-kodCDI-r3KuMBC5Rvbce0hqMcs2l-2U7M-V7LUT2VhBEvQd8l_Agx8hqWcK-d-dMVhlNjcvcb0AKmcX845D0bD5tKVKim_5JX4Er9-NANzSmgO0SRnsFVuxHhXiNqSkTB7pIdyi9r-ui23Q",
"accessToken": API_HEADER['authorization']
}
body1 = {
"appId": "ePUxoRrHClb7+Wxk7NAJpiJhoVAcJbZ5NPJEak8ZTFrETrfA0JAIjbqiDuaow1Jdyg1FLjUAwlBXrLoKh514oTTZSp1U91ewVj+8ZvNi2vtbQkU03WdyxyHXiyTNjC88O1JRm13hRnIm1vRMoxsudm8CPCpUIsU9yYABZ+/w3A4=",
"refreshToken": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiIl0sInVzZXJfbmFtZSI6IndlYl9tYW5hZ2V8d2FuZ2xlaTQiLCJzY29wZSI6WyJhbGwiXSwiYXRpIjoiNGU4NTY1NTAtZDE5Ni00YjY4LWI1OGYtMzBkOTY0YjIyOGNkIiwiZXhwIjoxNzM1OTcyNTA0LCJ1c2VySWQiOjI0Mjg1LCJqdGkiOiIwMTY1NDg1MC1mZjIwLTRkMzQtYTQ4ZC03NmRiZTk3MmQ3YWQiLCJjbGllbnRfaWQiOiJ3ZWJfbWFuYWdlIn0.PFT8JlTvWay1GUI5TC2Ht25rZWkAnQT3nxs-dOcAVIN9To06rG8EDspZ5eFxmNuEraurNxHCOLPfQZ-bCzJ8ywlA747PyJxyMPBhRhgXSDHYHX7ZqHEUdQdQo_Wkf75I8ko8_szchyhItjtgDUCzud9TlxKeuBQuerpYV8tkUVWobp4ulnnHEg0kqZFDeXrI-84Lyy-kodCDI-r3KuMBC5Rvbce0hqMcs2l-2U7M-V7LUT2VhBEvQd8l_Agx8hqWcK-d-dMVhlNjcvcb0AKmcX845D0bD5tKVKim_5JX4Er9-NANzSmgO0SRnsFVuxHhXiNqSkTB7pIdyi9r-ui23Q",
"accessToken": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiIl0sInVzZXJfbmFtZSI6IndlYl9tYW5hZ2V8d2FuZ2xlaTQiLCJzY29wZSI6WyJhbGwiXSwiZXhwIjoxNzM1OTA5NTE1LCJ1c2VySWQiOjI0Mjg1LCJqdGkiOiI0ZTg1NjU1MC1kMTk2LTRiNjgtYjU4Zi0zMGQ5NjRiMjI4Y2QiLCJjbGllbnRfaWQiOiJ3ZWJfbWFuYWdlIn0.KxGBpvuPIP3CHfVT41wxE_v9vlHNC9GL6sfaIta8cI2qlMpTCVg9dg-4DgPlXuMrtI0YzrSbAywCQmFLGcBgh3HD_UuIAH-k3Y8__osZEgc4bUcJ58W-uukuEu3MEwbV6ZcxTq7dxf3iqu9aXGrawYY_iL-jIRH1v8Zcr4qUPA9Mlzl8LvZdzZ05XgntbxE8IQRmt1M5rWdWLV4tvbUEYR5eDGs3az0w-MFXQ8qNHo8KLJc68WvbilmOMWkhK2k_xQQTdNx_jPktjYfClZa6l9-6rYAb5MMqwt77fY0_JE87u3w5YbU_GRyBI2mjnJe1qKdMjUEpQwWqt3DLJWLe7Q"
}
req = requests.post(API_Map['refreshToken'][0], json=body1, headers=API_HEADER)
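# The script posts the refresh request but does not yet consume the response.
# Guarded sketch of applying a refreshed token; the response schema is an
# assumption here (field names are hypothetical, not confirmed by this API):
# resp = req.json()
# if resp.get('code') == 200 and isinstance(resp.get('data'), dict):
#     if new_token := resp['data'].get('accessToken'):  # assumed field name
#         API_HEADER['authorization'] = f"Bearer {new_token}"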
# data = sim_data_apt(('2024-10-1 00:00:00', '2024-10-1 12:00:00'))
# chart_apt(data)

View File

@@ -231,8 +231,20 @@ if __name__ == '__main__':
"dev16": {'device_id': 'TTE0101DX2409210027', # 2017-YD定州北只东
'frame_print': True,
'time_out': 6, 'retry': 1},
"dev17": {'device_id': 'TTE0101DX2406280009', # 壮志村委会-光伏
'frame_print': True,
'time_out': 6, 'retry': 1},
"dev18": {'device_id': 'TTE0101DX2406260013', # 开发区竹行机房-光伏
'frame_print': True,
'time_out': 6, 'retry': 1},
"dev19": {'device_id': 'TTE0101DX2409210093', # 2016-YD:定州北木庄
'frame_print': True,
'time_out': 6, 'retry': 1},
"dev20": {'device_id': 'TTE0101DX2409270062', # 内丘中张村北
'frame_print': True,
'time_out': 6, 'retry': 1},
}
dev_lamina = LaminaStation(**mode_config["dev16"])
dev_lamina = LaminaStation(**mode_config["dev19"])
dev_lamina.frame_read(0x0000, 0x20)
time.sleep(2)

View File

@@ -429,14 +429,20 @@ def display_data(address: int, data: bytes, modbus_map: dict=modbus_map) -> dict
def print_display(output_data: dict):
""" 格式化表示输出数据 """
blank_chars = ' \t\000'
print("Parse Result:")
label_len_max = max(map(lambda x: len(x[0]), output_data.values()))
data_len_max = max(map(lambda x: len(str(x[1])), output_data.values()))
for key, value in output_data.items():
label = value[0]
data = "-".join(map(str, value[1])) if type(value) == list else value[1]
match value:
case (str() as label, list() as data):
print(f"{tools.ByteConv.display_hex(key, 4)}: {'-'.join(map(str, data)):<{data_len_max}} {label:<{label_len_max}}")
case (str() as label, str() as data):
print(f"{tools.ByteConv.display_hex(key, 4)}: {data.rstrip(bank_chars):<{data_len_max}} {label:<{label_len_max}}")
case (str() as label, data):
print(f"{tools.ByteConv.display_hex(key, 4)}: {data:<{data_len_max}} {label:<{label_len_max}}")
case _:
raise ValueError(f"Key: {key}, Unkown Value: {value}")