v0.2.0.5 (2024/07/31)

This is a fairly large release: the shared code has been consolidated and moved into a new commons folder, and every custom module now uses the logging module to record important information.
1. [t_change_ui: clibs.py]
   - Reorganized the code structure: this module is new, and the shared functions and classes have been merged into it
   - Moved a number of constants into this module
   - Introduced the logging and concurrent_log_handler modules and initialized them for use by the other modules; log files rotate at 50 MB, keeping at most 10 backups (see the logging sketch after this list)
   - Rewrote the part of prj_to_xcore that sets the project name, fixing an issue where projects containing more than one .prj file might fail to run
2. [t_change_ui: openapi.py]
   - Completely rewrote the get_from_id function to make its matching more precise
   - Added a logger to the msg_storage function so that all response messages are retained
   - Removed the log-saving code from the heartbeat function
   - Changed the heartbeat interval back to 2 s (see the heartbeat/watchdog sketch after this list)
3. [t_change_ui: aio.py]
   - Added the logging initialization
   - In detect_network, changed the interval for re-instantiating HR to 4 s, matching the heartbeat
4. [t_change_ui: do_brake.py]
   - Worked around the OOM problem by keeping the diagnosis curve open the whole time, and changed the data handling to process only the last 12 s (see the tail-trimming sketch after this list)
5. [t_change_ui: do_current.py]
   - For the holding-current test, only the last 15 s of data are processed
6. [t_change_ui: all modules]: introduced the commons package and customized the logging output; further optimization to follow
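
The logging setup referred to in item 1 can be pictured with the following minimal sketch. It assumes an illustrative log file name, logger wiring, and format string; only the use of concurrent_log_handler and the rotation policy (50 MB per file, at most 10 backups) come from this changelog, so the actual clibs.py code may differ.

    # Sketch only: log path, logger wiring and format are assumptions.
    import logging
    from concurrent_log_handler import ConcurrentRotatingFileHandler

    def init_logging(log_file='t_change_ui.log'):
        handler = ConcurrentRotatingFileHandler(
            log_file,
            maxBytes=50 * 1024 * 1024,  # rotate once a file reaches 50 MB
            backupCount=10,             # keep at most 10 rotated files
            encoding='utf-8',
        )
        handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
        root = logging.getLogger()
        root.setLevel(logging.INFO)
        root.addHandler(handler)

    # Each module then obtains its own logger, as the diff below does:
    #     from logging import getLogger
    #     logger = getLogger(__file__)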
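
Items 2 and 3 tie the 2 s heartbeat in openapi.py to the 4 s re-instantiation interval in detect_network. One plausible reading is a watchdog that rebuilds the HR client after two missed heartbeats; the make_hr_client factory and the last_heartbeat attribute below are assumed names for illustration, not the project's actual API.

    # Sketch only: the client factory and last_heartbeat attribute are assumptions.
    import time

    HEARTBEAT_INTERVAL = 2  # openapi.py sends a heartbeat every 2 s
    RECONNECT_AFTER = 4     # aio.py re-creates HR after 4 s of silence (two missed beats)

    def detect_network(make_hr_client):
        hr = make_hr_client()
        while True:
            time.sleep(1)
            if time.time() - hr.last_heartbeat > RECONNECT_AFTER:
                # Two consecutive heartbeats missed: treat the connection as dead
                # and build a fresh client instead of reusing the stale one.
                hr = make_hr_client()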
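
Items 4 and 5 both limit memory use by processing only the tail of the recorded curve: the last 12 s for the brake test and the last 15 s for the holding-current test. A minimal sketch of that kind of tail trimming, assuming a fixed sampling period rather than the real PDO data layout:

    # Sketch only: assumes a fixed sampling period; the real data layout in
    # do_brake.py / do_current.py may differ.
    def keep_last_seconds(samples, seconds, sample_period=0.001):
        """Return only the newest `seconds` worth of samples."""
        n = int(seconds / sample_period)
        return samples[-n:] if len(samples) > n else list(samples)

    # do_brake.py keeps the last 12 s, do_current.py the last 15 s:
    #     brake_tail   = keep_last_seconds(curve_data, 12)
    #     current_tail = keep_last_seconds(curve_data, 15)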
2024-07-31 08:05:36 +08:00
parent af68f19d53
commit 04bd1238d2
20 changed files with 424 additions and 586 deletions


@@ -1 +0,0 @@
__all__ = ['factory_test']


@@ -1,7 +1,4 @@
from sys import argv
from os.path import exists, dirname
from os import scandir
from paramiko import SSHClient, AutoAddPolicy
from json import loads
from time import sleep, time, strftime, localtime
from pandas import DataFrame
@@ -9,11 +6,12 @@ from openpyxl import load_workbook
from math import sqrt
from numpy import power
from csv import writer
from logging import getLogger
from commons import clibs
tab_name = 'Durable Action'
logger = getLogger(__file__)
tab_name = clibs.tab_names['da']
count = 0
durable_data_current_xlsx = f'{dirname(__file__)}/../../assets/templates/durable/durable_data_current.xlsx'
durable_data_current_max_xlsx = f'{dirname(__file__)}/../../assets/templates/durable/durable_data_current_max.xlsx'
display_pdo_params = [
# {"name": "hw_joint_vel_feedback", "channel": 0},
# {"name": "hw_joint_vel_feedback", "channel": 1},
@@ -34,22 +32,6 @@ title = [
]
def traversal_files(path, w2t):
if not exists(path):
msg = f'Data folder {path} does not exist, please check and try again...'
w2t(msg, 0, 1, 'red', tab_name=tab_name)
else:
dirs = []
files = []
for item in scandir(path):
if item.is_dir():
dirs.append(item.path)
elif item.is_file():
files.append(item.path)
return dirs, files
def check_files(data_dirs, data_files, w2t):
if len(data_dirs) != 0 or len(data_files) != 2:
w2t('No folders are allowed under the initial path, and only the following files may exist there; check and run again!\n1. target.zip\n2. configs.xlsx', 0, 10, 'red', tab_name)
@@ -63,50 +45,10 @@ def check_files(data_dirs, data_files, w2t):
return data_files
def prj_to_xcore(prj_file):
ssh = SSHClient()
ssh.set_missing_host_key_policy(AutoAddPolicy())
ssh.connect('192.168.0.160', 22, username='luoshi', password='luoshi2019')
sftp = ssh.open_sftp()
sftp.put(prj_file, '/tmp/target.zip')
cmd = 'cd /tmp; rm -rf target/; mkdir target; unzip -d target/ -q target.zip; '
cmd += 'chmod 777 -R target/; rm target.zip'
ssh.exec_command(cmd)
cmd = 'sudo rm -rf /home/luoshi/bin/controller/projects/target; '
cmd += 'sudo mv /tmp/target/ /home/luoshi/bin/controller/projects/'
stdin, stdout, stderr = ssh.exec_command(cmd, get_pty=True)
stdin.write('luoshi2019' + '\n')
stdin.flush()
print(stdout.read().decode()) # stdout must be read here, otherwise the sudo command does not execute properly
print(stderr.read().decode()) # read stderr as well while we are at it
cmd = 'cd /home/luoshi/bin/controller/; '
cmd += 'sudo mv projects/target/_build/*.prj projects/target/_build/target.prj'
stdin, stdout, stderr = ssh.exec_command(cmd, get_pty=True)
stdin.write('luoshi2019' + '\n')
stdin.flush()
print(stdout.read().decode()) # stdout must be read here, otherwise the sudo command does not execute properly
print(stderr.read().decode()) # read stderr as well while we are at it
ssh.close()
def execution(cmd, hr, w2t, **kwargs):
_id = hr.execution(cmd, **kwargs)
_msg = hr.get_from_id(_id)
if not _msg:
w2t(f"无法获取{_id}请求的响应信息", 0, 7, 'red', tab_name=tab_name)
else:
_response = loads(_msg)
if not _response:
w2t(f"无法获取{id}请求的响应信息", 0, 1, 'red', tab_name=tab_name)
return _response
def run_rl(path, config_file, data_all, hr, md, w2t):
# 1. Open the diagnosis curves, trigger a soft e-stop and then release it, so that any robot that may still be running comes to a stop; switch to manual mode and power off
_response = execution('diagnosis.open', hr, w2t, open=True, display_open=True)
_response = execution('diagnosis.set_params', hr, w2t, display_pdo_params=display_pdo_params)
clibs.execution('diagnosis.open', hr, w2t, tab_name, open=True, display_open=True)
clibs.execution('diagnosis.set_params', hr, w2t, tab_name, display_pdo_params=display_pdo_params)
md.trigger_estop()
md.reset_estop()
md.write_act(False)
@@ -114,13 +56,13 @@ def run_rl(path, config_file, data_all, hr, md, w2t):
# 2. After reloading the project, run pp2main, then switch to automatic mode and power on
prj_path = 'target/_build/target.prj'
_response = execution('overview.reload', hr, w2t, prj_path=prj_path, tasks=['current'])
_response = execution('rl_task.pp_to_main', hr, w2t, tasks=['current'])
_response = execution('state.switch_auto', hr, w2t)
_response = execution('state.switch_motor_on', hr, w2t)
clibs.execution('overview.reload', hr, w2t, tab_name, prj_path=prj_path, tasks=['current'])
clibs.execution('rl_task.pp_to_main', hr, w2t, tab_name, tasks=['current'])
clibs.execution('state.switch_auto', hr, w2t, tab_name)
clibs.execution('state.switch_motor_on', hr, w2t, tab_name)
# 3. Start running the program
_response = execution('rl_task.run', hr, w2t, tasks=['current'])
clibs.execution('rl_task.run', hr, w2t, tab_name, tasks=['current'])
_t_start = time()
while True:
if md.read_ready_to_go() == 1:
@@ -178,8 +120,8 @@ def get_durable_data(path, data, scenario_time, wait_time, rcs, hr, md, w2t):
_data_list.insert(0, loads(_msg))
else:
hr.c_msg_xs.clear()
if len(hr.c_msg) > 240:
del hr.c_msg[240:]
if len(hr.c_msg) > 270:
del hr.c_msg[270:]
# with open(f'{path}\\log.txt', 'w', encoding='utf-8') as f_obj:
# for _ in _data_list:
@@ -253,8 +195,8 @@ def get_durable_data(path, data, scenario_time, wait_time, rcs, hr, md, w2t):
while True:
if not hr.durable_lock:
hr.durable_lock = 1
_df_1.to_excel(durable_data_current_xlsx, index=False)
_df_2.to_excel(durable_data_current_max_xlsx, index=False)
_df_1.to_excel(clibs.durable_data_current_xlsx, index=False)
_df_2.to_excel(clibs.durable_data_current_max_xlsx, index=False)
hr.durable_lock = 0
break
else:
@@ -272,28 +214,10 @@ def get_durable_data(path, data, scenario_time, wait_time, rcs, hr, md, w2t):
def main(path, hr, md, w2t):
durable_data_current = {
'time': list(range(1, 19)),
'axis1': [0 for _ in range(18)],
'axis2': [0 for _ in range(18)],
'axis3': [0 for _ in range(18)],
'axis4': [0 for _ in range(18)],
'axis5': [0 for _ in range(18)],
'axis6': [0 for _ in range(18)],
}
durable_data_current_max = {
'time': list(range(1, 19)),
'axis1': [0 for _ in range(18)],
'axis2': [0 for _ in range(18)],
'axis3': [0 for _ in range(18)],
'axis4': [0 for _ in range(18)],
'axis5': [0 for _ in range(18)],
'axis6': [0 for _ in range(18)],
}
data_all = [durable_data_current, durable_data_current_max]
data_dirs, data_files = traversal_files(path, w2t)
data_all = [clibs.durable_data_current, clibs.durable_data_current_max]
data_dirs, data_files = clibs.traversal_files(path, w2t)
config_file, prj_file = check_files(data_dirs, data_files, w2t)
prj_to_xcore(prj_file)
clibs.prj_to_xcore(prj_file)
run_rl(path, config_file, data_all, hr, md, w2t)