1. [APIs: do_brake.py]
   - Adjusted the ready_to_go signal receiving logic to support heavy-payload robot models
2. [APIs: do_current.py]
   - Adjusted the ready_to_go signal receiving logic to support heavy-payload robot models
   - Set the single-axis test duration to 35 s to suit heavy-payload models, and set the stall-current hold time to 15 s, moderately shortening the overall test
   - Moved setting the act signal to False into initialization, improving program robustness (see the sketch below)
   - Renamed all output files by inserting a timestamp before the extension
This commit is contained in:
parent a66a55bcd3
commit d76ee3d223
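The do_brake.py / do_current.py changes listed above are not part of the hunks below (this diff only covers the changelog, the version metadata, and current.py), so the following is purely an illustrative sketch of the act-signal change: moving the reset into initialization. The class, method, and callback names are hypothetical and are not the project's actual API.

import time

class CurrentTestRunner:
    """Hypothetical sketch only; names do not come from the repository."""

    def __init__(self, write_signal):
        # write_signal(name, value) stands in for whatever mechanism the tool
        # actually uses to drive controller I/O.
        self.write_signal = write_signal
        # The robustness change: clear act here, during initialization, so a run
        # that aborted while act was still True cannot poison the next test.
        self.write_signal('act', False)

    def run_single_axis(self, axis, duration_s=35, stall_s=15):
        # Durations taken from the commit message (35 s per axis, 15 s stall-current hold).
        self.write_signal('act', True)
        try:
            time.sleep(duration_s)  # placeholder for the real measurement loop
        finally:
            self.write_signal('act', False)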
@@ -472,4 +472,6 @@ v0.1.9.2(2024/07/13)
 - Adjusted the ready_to_go signal receiving logic to support heavy-payload robot models
 - Set the single-axis test duration to 35 s to suit heavy-payload models, and set the stall-current hold time to 15 s, moderately shortening the overall test
 - Moved setting the act signal to False into initialization, improving program robustness
+- Renamed all output files by inserting a timestamp before the extension
+3. [current: current.py]: in the find_point function, continue execution instead of aborting when a valid point cannot be found
 
@@ -6,8 +6,8 @@ VSVersionInfo(
   ffi=FixedFileInfo(
     # filevers and prodvers should be always a tuple with four items: (1, 2, 3, 4)
     # Set not needed items to zero 0.
-    filevers=(0, 1, 9, 1),
-    prodvers=(0, 1, 9, 1),
+    filevers=(0, 1, 9, 2),
+    prodvers=(0, 1, 9, 2),
     # Contains a bitmask that specifies the valid bits 'flags'r
     mask=0x3f,
     # Contains a bitmask that specifies the Boolean attributes of the file.
@@ -31,12 +31,12 @@ VSVersionInfo(
         '040904b0',
         [StringStruct('CompanyName', 'Rokae - https://www.rokae.com/'),
         StringStruct('FileDescription', 'All in one automatic toolbox'),
-        StringStruct('FileVersion', '0.1.9.1 (2024-07-12)'),
+        StringStruct('FileVersion', '0.1.9.2 (2024-07-13)'),
         StringStruct('InternalName', 'AIO.exe'),
         StringStruct('LegalCopyright', '© 2024-2024 Manford Fan'),
         StringStruct('OriginalFilename', 'AIO.exe'),
         StringStruct('ProductName', 'AIO'),
-        StringStruct('ProductVersion', '0.1.9.1 (2024-07-12)')])
+        StringStruct('ProductVersion', '0.1.9.2 (2024-07-13)')])
       ]),
     VarFileInfo([VarStruct('Translation', [1033, 1200])])
   ]
@@ -1 +1 @@
-0.1.9.1 @ 07/12/2024
+0.1.9.2 @ 07/13/2024
@@ -129,7 +129,7 @@ def data_proc_regular(path, filename, channel, scenario_time):
         df1 = pandas.DataFrame.from_dict(_d2d_vel)
         df2 = pandas.DataFrame.from_dict(_d2d_trq)
         df = pandas.concat([df1, df2], axis=1)
-        _filename = f'{path}\\single\\j{channel+1}_single.data'
+        _filename = f'{path}\\single\\j{channel+1}_single_{time()}.data'
         df.to_csv(_filename, sep='\t', index=False)
     elif channel in list(range(6, 9)):
         with open(filename, 'r', encoding='utf-8') as f_obj:
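The timestamp inserted before the .data extension above is a bare time() call, presumably the standard library's time.time imported elsewhere in this module (the import is not visible in this diff); it yields an epoch-seconds float, so every run writes to a fresh file instead of overwriting the previous one. A minimal sketch of the resulting naming pattern, using a hypothetical helper name:

import os
from time import time

def timestamped_name(directory: str, stem: str, ext: str = '.data') -> str:
    # Mirrors the naming used in the diff: "<stem>_<epoch seconds><ext>".
    # time() returns a float, so the result looks like "j1_single_1720857600.123456.data".
    return os.path.join(directory, f'{stem}_{time()}{ext}')

# Hypothetical usage (paths are illustrative only):
# timestamped_name(r'D:\output\single', 'j1_single')
#   -> 'D:\\output\\single\\j1_single_1720857600.123456.data'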
@@ -178,37 +178,37 @@ def data_proc_regular(path, filename, channel, scenario_time):
         df_01 = pandas.DataFrame.from_dict(_d2d_vel_0)
         df_02 = pandas.DataFrame.from_dict(_d2d_trq_0)
         df = pandas.concat([df_01, df_02], axis=1)
-        _filename = f'{path}\\s_{channel-5}\\j1_s_{channel-5}_{scenario_time}.data'
+        _filename = f'{path}\\s_{channel-5}\\j1_s_{channel-5}_{scenario_time}_{time()}.data'
         df.to_csv(_filename, sep='\t', index=False)

         df_01 = pandas.DataFrame.from_dict(_d2d_vel_1)
         df_02 = pandas.DataFrame.from_dict(_d2d_trq_1)
         df = pandas.concat([df_01, df_02], axis=1)
-        _filename = f'{path}\\s_{channel-5}\\j2_s_{channel-5}_{scenario_time}.data'
+        _filename = f'{path}\\s_{channel-5}\\j2_s_{channel-5}_{scenario_time}_{time()}.data'
         df.to_csv(_filename, sep='\t', index=False)

         df_01 = pandas.DataFrame.from_dict(_d2d_vel_2)
         df_02 = pandas.DataFrame.from_dict(_d2d_trq_2)
         df = pandas.concat([df_01, df_02], axis=1)
-        _filename = f'{path}\\s_{channel-5}\\j3_s_{channel-5}_{scenario_time}.data'
+        _filename = f'{path}\\s_{channel-5}\\j3_s_{channel-5}_{scenario_time}_{time()}.data'
         df.to_csv(_filename, sep='\t', index=False)

         df_01 = pandas.DataFrame.from_dict(_d2d_vel_3)
         df_02 = pandas.DataFrame.from_dict(_d2d_trq_3)
         df = pandas.concat([df_01, df_02], axis=1)
-        _filename = f'{path}\\s_{channel-5}\\j4_s_{channel-5}_{scenario_time}.data'
+        _filename = f'{path}\\s_{channel-5}\\j4_s_{channel-5}_{scenario_time}_{time()}.data'
         df.to_csv(_filename, sep='\t', index=False)

         df_01 = pandas.DataFrame.from_dict(_d2d_vel_4)
         df_02 = pandas.DataFrame.from_dict(_d2d_trq_4)
         df = pandas.concat([df_01, df_02], axis=1)
-        _filename = f'{path}\\s_{channel-5}\\j5_s_{channel-5}_{scenario_time}.data'
+        _filename = f'{path}\\s_{channel-5}\\j5_s_{channel-5}_{scenario_time}_{time()}.data'
         df.to_csv(_filename, sep='\t', index=False)

         df_01 = pandas.DataFrame.from_dict(_d2d_vel_5)
         df_02 = pandas.DataFrame.from_dict(_d2d_trq_5)
         df = pandas.concat([df_01, df_02], axis=1)
-        _filename = f'{path}\\s_{channel-5}\\j6_s_{channel-5}_{scenario_time}.data'
+        _filename = f'{path}\\s_{channel-5}\\j6_s_{channel-5}_{scenario_time}_{time()}.data'
         df.to_csv(_filename, sep='\t', index=False)
     elif channel in list(range(9, 15)):
         with open(filename, 'r', encoding='utf-8') as f_obj:
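Each of the six per-joint blocks in this hunk repeats the same DataFrame-assembly and save pattern, differing only in the joint index and the suffix of the source dicts. Purely as an illustration of that pattern (this is not code from the repository; only the dict names _d2d_vel_0 … _d2d_trq_5 and the filename scheme come from the diff, the function name and parameters are assumed), the repetition could be expressed as a loop:

from time import time
import pandas

def save_scenario_data(path, channel, scenario_time, vel_dicts, trq_dicts):
    # vel_dicts / trq_dicts are assumed to be [_d2d_vel_0, ..., _d2d_vel_5] and
    # [_d2d_trq_0, ..., _d2d_trq_5]: one velocity and one torque dict per joint.
    for joint, (vel, trq) in enumerate(zip(vel_dicts, trq_dicts), start=1):
        df = pandas.concat([pandas.DataFrame.from_dict(vel),
                            pandas.DataFrame.from_dict(trq)], axis=1)
        # Same naming scheme as the diff: scenario folder s_<n>, joint prefix j<n>,
        # the scenario time, then the run timestamp before the extension.
        _filename = f'{path}\\s_{channel-5}\\j{joint}_s_{channel-5}_{scenario_time}_{time()}.data'
        df.to_csv(_filename, sep='\t', index=False)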
@@ -227,7 +227,7 @@ def data_proc_regular(path, filename, channel, scenario_time):
         df1 = pandas.DataFrame.from_dict(_d2d_vel)
         df2 = pandas.DataFrame.from_dict(_d2d_trq)
         df = pandas.concat([df1, df2], axis=1)
-        _filename = f'{path}\\single\\j{channel-8}_hold.data'
+        _filename = f'{path}\\single\\j{channel-8}_hold_{time()}.data'
         df.to_csv(_filename, sep='\t', index=False)


@@ -248,7 +248,7 @@ def data_proc_inertia(path, filename, channel):
         df1 = pandas.DataFrame.from_dict(_d2d_vel)
         df2 = pandas.DataFrame.from_dict(_d2d_trq)
         df = pandas.concat([df1, df2], axis=1)
-        _filename = f'{path}\\inertia\\j{channel+4}_inertia.data'
+        _filename = f'{path}\\inertia\\j{channel+4}_inertia_{time()}.data'
         df.to_csv(_filename, sep='\t', index=False)


@@ -223,7 +223,8 @@ def find_point(data_file, pos, flag, df, _row_s, _row_e, w2t, exitcode, threshol
             else:
                 return _row_s, _row_e
         else:
-            w2t(f"[{pos}] {data_file}数据有误,需要检查,无法找到第{exitcode}个有效点...", 0, exitcode, 'red')
+            # w2t(f"[{pos}] {data_file}数据有误,需要检查,无法找到第{exitcode}个有效点...", 0, exitcode, 'red')
+            w2t(f"[{pos}] {data_file}数据有误,需要检查,无法找到第{exitcode}个有效点...", 0, 0, 'red')
     elif flag == 'gt':
         while _row_e > end_point:
             speed_avg = df.iloc[_row_s:_row_e, 0].abs().mean()
@@ -234,7 +235,8 @@ def find_point(data_file, pos, flag, df, _row_s, _row_e, w2t, exitcode, threshol
             else:
                 return _row_s, _row_e
         else:
-            w2t(f"[{pos}] {data_file}数据有误,需要检查,无法找到有效起始点或结束点...", 0, exitcode, 'red')
+            # w2t(f"[{pos}] {data_file}数据有误,需要检查,无法找到有效起始点或结束点...", 0, exitcode, 'red')
+            w2t(f"[{pos}] {data_file}数据有误,需要检查,无法找到有效起始点或结束点...", 0, 0, 'red')


 def p_single(wb, single, vel, trq, rpms, w2t):
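The only functional change in these two current.py hunks is the last-but-one argument to w2t: the exitcode previously passed through is replaced with 0, and the old call is kept as a comment (the error message itself, "the data file is wrong and needs checking; a valid start/end point could not be found", is unchanged). Combined with the changelog entry "continue execution instead of aborting when a valid point cannot be found", a nonzero exitcode evidently makes w2t terminate the run after logging. The w2t implementation is not part of this diff, so the following is only a hedged sketch of that assumed behaviour; the signature and internals are guesses reconstructed from the call sites:

import sys

def w2t(message, wait=0, exitcode=0, color='black'):
    # Assumed shape of the project's logging helper, inferred only from how it
    # is called in this diff; in the real tool the output presumably goes to the GUI log.
    print(f'[{color}] {message}')
    if exitcode != 0:
        sys.exit(exitcode)  # old call path: abort the whole run
    # exitcode == 0 (new call path): only log the warning, so find_point returns
    # and processing continues with the next data file.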