删除批处理开机自启,修改为 Windows 后台服务

This commit is contained in:
xiongyi
2025-11-19 16:25:48 +08:00
parent d755264f8e
commit 3da220586c
6 changed files with 190 additions and 52 deletions

23
main.py
View File

@ -11,21 +11,20 @@ from tcp.server import TCPServer
from opc.client import OPCClient
from config.settings import (
ACCESS_DB_PATH, ACCESS_DB_PASSWORD,
TCP_HOST, TCP_PORT, TCP_CLIENT_HOST, TCP_CLIENT_PORT,
TCP_HOST, TCP_PORT, TCP_CLIENT_HOST, TCP_CLIENT_PORT,
CHECK_INTERVAL, MAX_AGE_HOURS
)
from utils.helpers import cleanup_old_timestamps
# Stub expected to be supplied by a colleague; currently only logs the call.
def save_to_custom_table(misid, flag, task_id, produce_mix_id, project_name, beton_grade, adjusted_volume, artifact_id):
    """Persist one record to the custom data table (placeholder: prints instead of writing)."""
    summary = f"保存到自定义数据表: MISID={misid}, Flag={flag}, TaskID={task_id}, 调整后方量={adjusted_volume}"
    print(summary)
def start_api_service():
    """Launch the mix-weight API service on the local loopback interface.

    Blocking call; intended to be run in a background thread. Binds to
    127.0.0.1:5001 with threaded request handling and debug disabled.
    """
    service = MixWeightAPI()
    service.run(host='127.0.0.1', port=5001, debug=False, threaded=True)
def main():
global tcp_server, data_client
api_thread = threading.Thread(target=start_api_service)
@ -82,13 +81,13 @@ def main():
# 获取所有未浇筑信息
tasks, artifact_list, send_list, half_volume = task_service.process_not_pour_info()
# 如果API调用失败等待一段时间再重试
if tasks is None or artifact_list is None:
print("获取未浇筑信息失败,稍后重试...")
time.sleep(10)
continue
current_artifact_ids = {task["artifact_id"] for task in tasks}
# 检查artifact_list是否发生变化
@ -103,7 +102,7 @@ def main():
for task in tasks:
if task["artifact_id"] in new_artifact_ids:
task_info = api_client.get_task_info(task["beton_task_id"])
# 如果获取任务信息失败,跳过该任务
if task_info is None:
print(f"无法获取任务信息,跳过任务: {task['artifact_id']}")
@ -124,15 +123,15 @@ def main():
# 检查 block_number 是否为 "补方"
if task["block_number"] == "补方":
print(f"任务 {task['artifact_id']} 的 block_number 为 '补方',跳过派单")
task_service.insert_into_produce_table(sql_db, task_info, task["beton_volume"], erp_id,
task["artifact_id"], half_volume, 0)
task_service.insert_into_produce_table(sql_db, task_info, task["beton_volume"],
erp_id,task["artifact_id"], half_volume, 0)
continue
print(f"处理新任务: {task['artifact_id']}")
# 获取任务单信息
task_service.insert_into_produce_table(sql_db, task_info, task["beton_volume"], erp_id,
task["artifact_id"], half_volume, 1)
task_service.insert_into_produce_table(sql_db, task_info, task["beton_volume"],
erp_id,task["artifact_id"], half_volume, 1)
with monitoring_service.tasks_lock:
monitoring_service.monitored_tasks.add(erp_id)
monitoring_service.inserted_tasks[erp_id] = task["artifact_id"]