将从缓存文件中读取改成从内存数组中进行读取

This commit is contained in:
您的pengqi
2025-10-10 09:49:30 +08:00
parent 5233d6ceaa
commit d3964811e9
5 changed files with 147 additions and 149 deletions

View File

@ -10,12 +10,9 @@
import threading
import time
import logging
from datetime import datetime
from snap7.util import get_real, get_int, get_bool, get_word, get_dint # 导入snap7解析工具
class PLCDataReaderThread(threading.Thread):
def __init__(self, plc_client, plc_name, area_config, update_interval=0.03, output_file_prefix="area_"):
def __init__(self, plc_client, plc_name, area_config, update_interval=0.03, max_array_size=10000):
"""
初始化PLC数据读取线程配置驱动支持多区域
参数:
@ -23,13 +20,13 @@ class PLCDataReaderThread(threading.Thread):
area_config: 单个区域的配置来自config.json的plcs[].areas
示例:{"name":"DB100_Read", "type":"read", "db_number":100, "offset":0, "size":6000, "structure":[...]}
update_interval: 读取间隔默认30ms
output_file_prefix: 输出文件前缀,最终文件名为“前缀+区域名.log”
max_array_size: 自定义数组的最大存储条数超过后自动删除最早数据默认1000条
"""
# 线程名包含区域名,便于日志区分(如"PLCDataReader_DB100_Read"
thread_name = f"{plc_name}_Reader_{area_config['name']}"
thread_name = f"{plc_name}_{area_config['name']}"
super().__init__(name=thread_name, daemon=True)
# 1. 核心依赖PLC客户端+区域配置)
# 核心依赖PLC客户端+区域配置)
self.plc_client = plc_client
self.plc_name = plc_name
self.area_config = area_config # 动态区域配置不再硬编码DB100
@ -39,16 +36,21 @@ class PLCDataReaderThread(threading.Thread):
self.size = area_config["size"]
self.area_type = area_config["type"] # 区分read/read_write/write
# 2. 线程与输出配置
self.update_interval = update_interval
self.output_file = f"{self.plc_name}_{output_file_prefix}{self.area_name}.log" # 输出文件名}{output_file_prefix}DB{self.db_number}.log" # 每个区域独立文件
# 自定义内存数据配置
self.custom_array_name = f"{self.plc_name}_{self.area_name}"
self.max_array_size = max_array_size
# 动态创建自定义名称的内存数组
setattr(self, self.custom_array_name, [])
# 3. 数据缓存(新增结构化数据存储)
# 线程与输出配置
self.update_interval = update_interval
# 数据缓存(新增结构化数据存储)
self.running = False
self._latest_data = None # 格式:(timestamp, data_info, raw_bytes, parsed_data)
self._latest_byte_data = None # 格式:(timestamp, data_info, raw_bytes, parsed_data)
self._data_lock = threading.Lock() # 线程安全锁
# 4. 日志
# 日志
self.logger = logging.getLogger(f"PLCDataReader.{self.area_name}")
def start(self):
@ -58,10 +60,16 @@ class PLCDataReaderThread(threading.Thread):
self.logger.warning(f"跳过启动:区域类型为{self.area_type}(无需循环读取)")
return
# 验证自定义数组是否创建成功
if not hasattr(self, self.custom_array_name):
self.logger.error(f"❌ 自定义数组创建失败!数组名:{self.custom_array_name}")
return
self.running = True
super().start()
self.logger.info(f"✅ 线程启动成功DB{self.db_number}{self.area_type}")
self.logger.info(f"🔧 配置:间隔{self.update_interval * 1000}ms读取{self.size}字节,输出{self.output_file}")
self.logger.info(f"🔧 内存配置:自定义数组名={self.custom_array_name},最大存储条数={self.max_array_size}")
self.logger.info(f"🔧 配置:间隔{self.update_interval * 1000}ms读取{self.size}字节,存储到{self.custom_array_name}数组中")
def stop(self):
"""停止线程(优雅清理)"""
@ -70,9 +78,12 @@ class PLCDataReaderThread(threading.Thread):
self.join(timeout=2.0)
if self.is_alive():
self.logger.warning("⚠️ 线程未正常退出,强制终止")
# 停止时输出数组统计信息
custom_array = getattr(self, self.custom_array_name, [])
self.logger.info(f"🛑 线程已停止DB{self.db_number}")
self.logger.info(f"📊 内存数组统计:数组名={self.custom_array_name},总存储条数={len(custom_array)}")
def get_latest_data(self):
def get_latest_byte_data(self):
"""
线程安全获取最新数据(返回原始字节+解析后的结构化数据)
返回示例:
@ -84,20 +95,34 @@ class PLCDataReaderThread(threading.Thread):
}
"""
with self._data_lock:
if self._latest_data is None:
self.logger.debug("⚠️ 无最新数据缓存")
if self._latest_byte_data is None:
self.logger.debug(f"⚠️ 自定义数组{self.custom_array_name}无最新数据")
return None
# 返回字节数据的副本,避免外部修改
return self._latest_byte_data.copy()
timestamp, data_info, raw_bytes, parsed_data = self._latest_data
return {
"timestamp": timestamp,
"data_info": data_info.copy(),
"raw_bytes": raw_bytes.copy()
}
def get_custom_array(self, max_records=None):
"""
线程安全获取自定义数组数据
参数:
max_records: 可选,指定返回的最大记录数
返回:
字节数据列表
"""
with self._data_lock:
custom_array = getattr(self, self.custom_array_name, [])
return custom_array[-1:] if custom_array else [] # 返回所有记录的副本
def clear_custom_array(self):
"""清空自定义数组"""
with self._data_lock:
custom_array = getattr(self, self.custom_array_name, [])
custom_array.clear()
self.logger.info(f"🧹 已清空自定义数组:{self.custom_array_name}")
def run(self):
"""线程主循环读PLC→解析数据→更新缓存→写文件"""
self.logger.debug(f"📌 主循环启动DB{self.db_number}")
self.logger.debug(f"📌 主循环启动DB{self.db_number},数组名:{self.custom_array_name}")
while self.running:
cycle_start = time.time()
try:
@ -111,24 +136,18 @@ class PLCDataReaderThread(threading.Thread):
# 步骤2处理读取结果缓存+解析+写文件)
if cache_success and self.plc_client.data_cache is not None:
raw_data = self.plc_client.data_cache # 原始字节
data_len = len(raw_data)
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
# 构造数据基本信息
data_info = {
"area_name": self.area_name,
"db_number": self.db_number,
"offset_range": f"0-{self.size - 1}",
"actual_length": data_len,
"area_type": self.area_type
}
# 步骤3线程安全更新内存缓存
with self._data_lock:
self._latest_data = (timestamp, data_info, raw_data.copy())
self._latest_byte_data = raw_data
# 步骤4写入文件含原始字节+解析后数据
self._write_latest_data_to_file(timestamp, data_info, raw_data)
# 获取自定义数组并添加新的字节数据
custom_array = getattr(self, self.custom_array_name)
custom_array.clear() # 清空旧数据
custom_array.append(raw_data)
# 日志显示当前数组状态仅1条
self.logger.debug(f"✅ 已更新最新PLC记录数组{self.custom_array_name}当前记录数1")
else:
self.logger.warning(f"⚠️ 数据读取失败DB{self.db_number}),跳过本次更新")
@ -142,24 +161,3 @@ class PLCDataReaderThread(threading.Thread):
except Exception as e:
self.logger.error(f"🔴 循环读取出错: {str(e)}", exc_info=True)
time.sleep(self.update_interval)
def _write_latest_data_to_file(self, timestamp, data_info, raw_data):
"""
写入文件:含原始字节+解析后的结构化数据(每个区域独立文件)
"""
try:
# 处理原始字节为列表(便于查看)
data_list = list(raw_data) # 只显示前50字节避免文件过大
data_str = f"{data_list} (共{len(raw_data)}字节)"
# 覆盖写入文件
with open(self.output_file, "w", encoding="utf-8") as f:
f.write(f"[{timestamp}] 📝 {self.area_name} 最新数据\n")
f.write(
f" - 区域信息DB{data_info['db_number']}{data_info['offset_range']}),类型{data_info['area_type']}\n")
f.write(f" - 原始字节数据:{data_str}\n")
f.write("=" * 120 + "\n")
self.logger.debug(f"📤 最新DB{self.db_number}数据已覆盖写入文件:{self.output_file}")
except Exception as e:
self.logger.error(f"🔴 写入DB{self.db_number}数据到文件出错: {str(e)}", exc_info=True)