1. [APIs: do_brake.py]: 精简程序,解决 OOM 问题 2. [APIs: do_current.py]: 精简程序,解决 OOM 问题 3. [APIs: factory_test.py]: 精简程序,解决 OOM 问题 4. [APIs: openapi.py] - 心跳修改为 1 s,因为 OOM 问题的解决依赖于长久的打开曲线开关,此时对于 hr.c_msg 的定时清理是个挑战,将心跳缩短,有利于清理日志后,避免丢失心跳 - 新增 diagnosis.save 命令,但是执行时,有问题,待解决
302 lines
12 KiB
Python
302 lines
12 KiB
Python
from sys import argv
|
||
from os.path import exists, dirname
|
||
from os import scandir
|
||
from paramiko import SSHClient, AutoAddPolicy
|
||
from json import loads
|
||
from time import sleep, time, strftime, localtime
|
||
from pandas import DataFrame
|
||
from openpyxl import load_workbook
|
||
from math import sqrt
|
||
from numpy import power
|
||
from csv import writer
|
||
|
||
# GUI tab this module logs into via w2t().
tab_name = 'Durable Action'
# Global sampling counter, incremented once per successful data harvest.
count = 0

# Rolling xlsx snapshots read by the GUI (paths relative to this file).
durable_data_current_xlsx = f'{dirname(__file__)}/../../assets/templates/durable/durable_data_current.xlsx'
durable_data_current_max_xlsx = f'{dirname(__file__)}/../../assets/templates/durable/durable_data_current_max.xlsx'

# PDO channels requested from the diagnosis curve interface: servo torque
# feedback for all six axes.  (Joint velocity feedback channels were disabled
# here previously; re-add {"name": "hw_joint_vel_feedback", "channel": 0..5}
# entries if they are ever needed again.)
display_pdo_params = [
    {"name": "device_servo_trq_feedback", "channel": channel} for channel in range(6)
]

# Column header of results.csv: timestamp, per-axis RMS torque, per-axis peak.
title = (
    ['time']
    + [f'trq-{axis}' for axis in range(1, 7)]
    + [f'trq-max-{axis}' for axis in range(1, 7)]
)
|
||
|
||
|
||
def traversal_files(path, w2t):
|
||
if not exists(path):
|
||
msg = f'数据文件夹{path}不存在,请确认后重试......'
|
||
w2t(msg, 0, 1, 'red', tab_name=tab_name)
|
||
else:
|
||
dirs = []
|
||
files = []
|
||
for item in scandir(path):
|
||
if item.is_dir():
|
||
dirs.append(item.path)
|
||
elif item.is_file():
|
||
files.append(item.path)
|
||
|
||
return dirs, files
|
||
|
||
|
||
def check_files(data_dirs, data_files, w2t):
    """Validate that the data folder holds exactly configs.xlsx and target.zip.

    :param data_dirs: sub-directories found by traversal_files()
    :param data_files: files found by traversal_files()
    :param w2t: GUI logger used to report a layout violation
    :return: *data_files* sorted in place, i.e. [configs.xlsx, target.zip]
    """
    # Any sub-directory, or a file count other than two, is a layout error.
    if data_dirs or len(data_files) != 2:
        w2t('初始路径下不允许有文件夹,且初始路径下只能存在如下文件,确认后重新运行!\n1. target.zip\n2. configs.xlsx', 0, 10, 'red', tab_name)

    # Compare basenames (Windows separators assumed throughout this module).
    basenames = sorted([data_files[0].split('\\')[-1], data_files[1].split('\\')[-1]])
    if basenames != ['configs.xlsx', 'target.zip']:
        w2t('初始路径下只能存在如下文件,确认后重新运行!\n1. target.zip\n2. configs.xlsx', 0, 10, 'red', tab_name)

    # Sorting puts configs.xlsx before target.zip, matching the caller's
    # `config_file, prj_file = ...` unpacking.
    data_files.sort()
    return data_files
|
||
|
||
|
||
def prj_to_xcore(prj_file):
    """Upload *prj_file* to the controller over SFTP and install it.

    The zip is pushed to /tmp, unpacked, moved into the controller's projects
    directory, and the built ``*.prj`` is renamed to ``target.prj``.

    :param prj_file: local path of target.zip
    """
    ssh = SSHClient()
    ssh.set_missing_host_key_policy(AutoAddPolicy())
    # NOTE(review): hard-coded host/credentials; consider moving to config.
    ssh.connect('192.168.0.160', 22, username='luoshi', password='luoshi2019')
    try:
        sftp = ssh.open_sftp()
        sftp.put(prj_file, '/tmp/target.zip')

        # Unpack in /tmp (no sudo needed there).
        ssh.exec_command(
            'cd /tmp; rm -rf target/; mkdir target; unzip -d target/ -q target.zip; '
            'chmod 777 -R target/; rm target.zip'
        )

        # Replace the installed project, then normalise the built prj name.
        _sudo_exec(ssh, 'sudo rm -rf /home/luoshi/bin/controller/projects/target; '
                        'sudo mv /tmp/target/ /home/luoshi/bin/controller/projects/')
        _sudo_exec(ssh, 'cd /home/luoshi/bin/controller/; '
                        'sudo mv projects/target/_build/*.prj projects/target/_build/target.prj')
    finally:
        # Fix: close the SSH connection even if an upload/exec step raises.
        ssh.close()


def _sudo_exec(ssh, cmd):
    """Run *cmd* on an open SSHClient with a pty, feeding the sudo password.

    stdout must be drained for sudo to actually complete, so both streams are
    read (and echoed) before returning.
    """
    stdin, stdout, stderr = ssh.exec_command(cmd, get_pty=True)
    stdin.write('luoshi2019' + '\n')
    stdin.flush()
    print(stdout.read().decode())  # draining stdout is required for sudo to run
    print(stderr.read().decode())
|
||
|
||
|
||
def execution(cmd, hr, w2t, **kwargs):
|
||
_id = hr.execution(cmd, **kwargs)
|
||
_msg = hr.get_from_id(_id)
|
||
if not _msg:
|
||
w2t(f"无法获取{_id}请求的响应信息", 0, 7, 'red', tab_name=tab_name)
|
||
else:
|
||
_response = loads(_msg)
|
||
if not _response:
|
||
w2t(f"无法获取{id}请求的响应信息", 0, 1, 'red', tab_name=tab_name)
|
||
return _response
|
||
|
||
|
||
def run_rl(path, config_file, data_all, hr, md, w2t):
    """Drive one durable-current test run end to end: reset the robot, load
    and start the RL project, wait for a valid scenario cycle time, then
    sample torque data forever via get_durable_data().

    :param path: output folder for results.csv (Windows-style paths)
    :param config_file: configs.xlsx with wait time and rated-current factors
    :param data_all: [rolling-average table, rolling-max table] from main()
    :param hr: controller API handle (used through execution())
    :param md: register/IO handle (e-stop, ready flag, scenario time)
    :param w2t: GUI logger; presumably the non-zero exit codes (10/111) make
                it abort the run -- TODO confirm, otherwise loops below spin
    """
    # 1. Open the diagnosis curves, then trigger + release a soft e-stop so
    #    any running robot stops; drop actuation and let things settle.
    _response = execution('diagnosis.open', hr, w2t, open=True, display_open=True)
    _response = execution('diagnosis.set_params', hr, w2t, display_pdo_params=display_pdo_params)
    md.trigger_estop()
    md.reset_estop()
    md.write_act(False)
    sleep(1)  # give the curve switch time to fully take effect

    # 2. Reload the project, pp-to-main, switch to auto mode and power on.
    prj_path = 'target/_build/target.prj'
    _response = execution('overview.reload', hr, w2t, prj_path=prj_path, tasks=['current'])
    _response = execution('rl_task.pp_to_main', hr, w2t, tasks=['current'])
    _response = execution('state.switch_auto', hr, w2t)
    _response = execution('state.switch_motor_on', hr, w2t)

    # 3. Start the RL task, then wait for the robot's ready-to-go register.
    _response = execution('rl_task.run', hr, w2t, tasks=['current'])
    _t_start = time()
    while True:
        if md.read_ready_to_go() == 1:
            md.write_act(True)
            break
        else:
            # `// 20 > 1` means more than ~40 s have elapsed without a signal.
            if (time() - _t_start) // 20 > 1:
                w2t("20s内未收到机器人的运行信号,需要确认RL程序编写正确并正常执行...", 0, 111, 'red', tab_name)
            else:
                sleep(1)

    # 4. Wait (up to ~4 min) for a plausible scenario cycle time (> 1 s).
    sleep(20)  # let the scenario-time register initialise to 0 first
    _t_start = time()
    while True:
        scenario_time = md.read_scenario_time()
        if float(scenario_time) > 1:
            w2t(f"场景的周期时间:{scenario_time}s", 0, 0, 'green', tab_name)
            break
        else:
            if (time() - _t_start) // 60 > 3:
                w2t(f"未收到场景的周期时间,需要确认RL程序编写正确并正常执行...", 0, 111, 'red', tab_name)
            else:
                sleep(5)
    # Re-read after one second: the value read immediately above can be stale.
    sleep(1)
    scenario_time = float(md.read_scenario_time())
    sleep(scenario_time*0.2)

    # 6. Write the CSV header, read the sampling wait time (J2) and per-axis
    #    rated-current coefficients (row 6, cols B..G) from configs.xlsx,
    #    then take the first sample.  (Step 5 does not exist in the original.)
    with open(f'{path}\\results.csv', mode='a+', newline='') as f_csv:
        csv_writer = writer(f_csv)
        csv_writer.writerow(title)
    _wb = load_workbook(config_file, read_only=True)
    _ws = _wb['Target']
    wait_time = float(_ws.cell(row=2, column=10).value)
    rcs = []
    for i in range(6):
        rcs.append(float(_ws.cell(row=6, column=i + 2).value))

    get_durable_data(path, data_all, scenario_time, wait_time, rcs, hr, md, w2t)

    # 7. Sample forever at a fixed interval: wait, then harvest a window of
    #    curve data and append the processed results.  No exit condition here;
    #    termination presumably comes from w2t/hr.durable_quit -- confirm.
    while True:
        sleep(wait_time+scenario_time+7)
        get_durable_data(path, data_all, scenario_time, wait_time, rcs, hr, md, w2t)
|
||
|
||
|
||
def get_durable_data(path, data, scenario_time, wait_time, rcs, hr, md, w2t):
    """Harvest one window of torque feedback from hr.c_msg, reduce it to
    per-axis RMS and peak current, and append the results to results.csv and
    the two rolling xlsx snapshots.

    :param path: output folder (Windows-style paths)
    :param data: [rolling-average table, rolling-max table] built by main();
                 both are mutated in place as 18-slot rolling windows
    :param scenario_time: one RL scenario cycle, in seconds
    :param wait_time: configured idle time between samplings
    :param rcs: per-axis rated-current coefficients from configs.xlsx
    :param hr: controller handle; c_msg/c_msg_xs caches and durable_* flags
    :param md: register/IO handle (used for the e-stop on errors)
    :param w2t: GUI logger
    """
    # Collect diagnosis.result frames from a copy of the message cache
    # (insert(0) restores chronological order from the newest-first cache).
    _data_list = []
    _c_msg = hr.c_msg.copy()
    for _msg in _c_msg:
        if 'diagnosis.result' in _msg:
            _data_list.insert(0, loads(_msg))
        else:
            # OOM housekeeping: drop the auxiliary cache and cap c_msg at 240
            # entries.  NOTE(review): this runs once per non-diagnosis message
            # in the loop -- presumably intentional best-effort pruning.
            hr.c_msg_xs.clear()
            if len(hr.c_msg) > 240:
                del hr.c_msg[240:]

    # with open(f'{path}\\log.txt', 'w', encoding='utf-8') as f_obj:
    #     for _ in _data_list:
    #         f_obj.write(f"{_}\n")

    # Per-axis torque sample buffers, keyed by channel index 0..5.
    _d2d_trq = {0: [], 1: [], 2: [], 3: [], 4: [], 5: []}

    for line in _data_list:
        for item in line['data']:
            for i in range(6):
                # NOTE(review): reverse() executes on every channel test (6x
                # per item), so the sample order flips with parity -- looks
                # unintentional; confirm against the canonical source.
                item['value'].reverse()
                if item.get('channel', None) == i and item.get('name', None) == 'device_servo_trq_feedback':
                    _d2d_trq[i].extend(item['value'])

    # Samples arrive at ~1 kHz, so len/1000 is seconds of data; require a bit
    # more than one full scenario cycle before processing.
    if len(_d2d_trq[0]) / 1000 > scenario_time + 1:
        this_time = strftime("%Y-%m-%d %H:%M:%S", localtime(time()))
        next_time = strftime("%Y-%m-%d %H:%M:%S", localtime(time()+wait_time+10+scenario_time)).split()[-1]
        _df = DataFrame(_d2d_trq)
        _flg = 0  # set once the timestamp columns have been rolled this pass
        _res = []  # one csv row: [time, rms1, max1, rms2, max2, ...] pre-reorder
        for i in range(6):
            def overmax_data(df, index, number, flag):
                # Safety cut-out: a value beyond 100 e-stops the robot, asks
                # the outer loop to quit, and dumps the raw frame for review.
                if number > 100:
                    md.trigger_estop()
                    hr.durable_quit = 1
                    df.to_excel(f'{path}\\{this_time}.xlsx')
                    w2t(f"[{this_time}] {flag}-axis-{index} 数据过大错误,需要检查确定。", 0, 10, 'red', tab_name)

            # RMS current for axis i: sqrt(mean((rc * sample / 1000)^2)).
            try:
                _ = sqrt(_df[i].apply(lambda x: power((rcs[i]*x/1000), 2)).sum()/len(_df[i]))
            except:
                # NOTE(review): bare except -- e-stop and dump data on any
                # numeric failure; consider narrowing the exception type.
                md.trigger_estop()
                _df.to_excel(path+"\\err_data.xlsx")
                w2t(f"{i}calculate error", 0, 11, 'red', tab_name)

            # Roll the shared 18-slot 'time' windows once per pass (first axis
            # only), recording the wall-clock of this sample.
            if not _flg:
                del data[0]['time'][0]
                data[0]['time'].append(this_time.split()[-1])
                del data[1]['time'][0]
                data[1]['time'].append(this_time.split()[-1])
                _res.append(this_time)
                _flg = 1

            # Roll the RMS window for this axis and record the value.
            del data[0][f"axis{i + 1}"][0]
            overmax_data(_df, i, _, 'trq')
            data[0][f"axis{i + 1}"].append(_)
            _res.append(_)

            # Peak current for axis i, same scaling as the RMS above.
            _ = rcs[i] * _df[i].abs().max() / 1000
            overmax_data(_df, i, _, 'trq-max')
            del data[1][f"axis{i + 1}"][0]
            data[1][f"axis{i + 1}"].append(_)
            _res.append(_)

        _df_1 = DataFrame(data[0])
        _df_2 = DataFrame(data[1])
        with open(f'{path}\\results.csv', mode='a+', newline='') as f_csv:
            def change_order(res):
                # _res interleaves [time, rms1, max1, rms2, max2, ...]; the csv
                # wants [time, rms1..rms6, max1..max6] to match `title`.
                _time = res[0:1]
                _trq = []
                _trq_max = []
                for _item in res[1::2]:
                    _trq.append(_item)
                for _item in res[2::2]:
                    _trq_max.append(_item)
                return _time + _trq + _trq_max

            csv_writer = writer(f_csv)
            csv_writer.writerow(change_order(_res))

        # Spin until the cross-thread excel lock is free, then persist both
        # rolling tables for the GUI.  NOTE(review): flag-based lock is not
        # atomic -- presumably single-writer in practice; confirm.
        while True:
            if not hr.durable_lock:
                hr.durable_lock = 1
                _df_1.to_excel(durable_data_current_xlsx, index=False)
                _df_2.to_excel(durable_data_current_max_xlsx, index=False)
                hr.durable_lock = 0
                break
            else:
                sleep(1)

        global count
        count += 1
        w2t(f"[{this_time}] 当前次数:{count:09d} | 预计下次数据更新时间:{next_time}", 0, 0, '#008B8B', tab_name)
        # NOTE(review): the mangled source shows a stray `break` after this
        # line; it is syntactically invalid at this level (no enclosing loop)
        # and was treated as extraction noise -- confirm against the original.
    else:
        # Not enough samples for one full cycle: e-stop the robot and dump the
        # raw channel-0 buffer for inspection.
        md.trigger_estop()
        with open(f'{path}\\device_servo_trq_feedback_0.txt', 'w', encoding='utf-8') as f_obj:
            for _ in _d2d_trq[0]:
                f_obj.write(f"{_}\n")
        w2t("采集的数据时间长度不够,需要确认。", 0, 10, 'red', tab_name)
||
|
||
|
||
def main(path, hr, md, w2t):
    """Entry point of the durable-current test: validate the data folder,
    deploy the project to the controller, and start the sampling loop.

    :param path: folder containing target.zip and configs.xlsx
    :param hr: controller API handle
    :param md: register/IO handle
    :param w2t: GUI logger
    """
    def _blank_table():
        # One rolling 18-slot window: 'time' pre-filled with 1..18 placeholder
        # labels, each axis pre-filled with 18 zero samples.  (Previously two
        # identical 8-line dict literals were duplicated inline.)
        table = {'time': list(range(1, 19))}
        for axis in range(1, 7):
            table[f'axis{axis}'] = [0 for _ in range(18)]
        return table

    # data_all[0]: rolling RMS current, data_all[1]: rolling peak current.
    data_all = [_blank_table(), _blank_table()]
    data_dirs, data_files = traversal_files(path, w2t)
    config_file, prj_file = check_files(data_dirs, data_files, w2t)
    prj_to_xcore(prj_file)
    run_rl(path, config_file, data_all, hr, md, w2t)
|
||
|
||
|
||
# Script entry: forwards positional CLI arguments to main().
# NOTE(review): argv yields strings only, while main() expects hr/md/w2t
# objects -- presumably the tool is normally launched from an importing GUI
# rather than this CLI path; confirm.
if __name__ == '__main__':
    main(*argv[1:])
|