v0.2.0.6(2024/08/09)
1. [t_change_ui: all files] - Reworked the logger implementation - in particular clibs.py: logging is now configured from a logging dict and the log-recording logic was rewritten
This commit is contained in:
parent 340d48876b
commit 3814d163c5

.gitignore (vendored)
@@ -12,3 +12,4 @@ aio/code/durable_action/__pycache__/
aio/assets/templates/durable/
aio/assets/templates/.__c_msg.lock
aio/code/commons/__pycache__/
aio/assets/templates/debug.log
@@ -596,3 +596,8 @@ v0.2.0.5(2024/07/31)
- Changed the logic for obtaining the initial speed: only speeds within the time window configured in the configs file are collected
- Added the configs parameter single_brake, which allows testing against a specific condition
10. [APIs: all]: Added logger.setLevel(INFO); only with this call does the level take effect within an individual module

v0.2.0.6(2024/08/09)
1. [t_change_ui: all files]
- Reworked the logger implementation
- In particular clibs.py: logging is now configured from a logging dict (dictConfig) and the log-recording logic was rewritten
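The rewrite described above replaces the per-module `getLogger(__file__)` / `logger.setLevel(INFO)` pair with a single dict-based configuration in clibs.py. A minimal sketch of that pattern, with placeholder handler names and file paths (the project's actual dict appears in the clibs.py hunk further down):

```python
# Sketch only: dict-based logging setup; 'app.log' and the logger names are placeholders.
from logging import getLogger
from logging.config import dictConfig

dictConfig({
    'version': 1,
    'formatters': {'std': {'format': '%(asctime)s # %(levelname)s-%(module)s # %(message)s'}},
    'handlers': {
        'file': {'class': 'logging.FileHandler', 'filename': 'app.log',
                 'encoding': 'utf-8', 'formatter': 'std', 'level': 'INFO'},
    },
    'loggers': {'normal': {'handlers': ['file'], 'level': 'DEBUG', 'propagate': False}},
})

log_prod = getLogger('normal')   # modules then simply do: logger = clibs.log_prod
log_prod.info("configured via dictConfig")
```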
@@ -14,16 +14,14 @@ from commons import openapi, clibs
from matplotlib.pyplot import rcParams, figure, subplots_adjust, close
from matplotlib import use
from pandas import DataFrame, read_excel
from logging import getLogger, INFO

with open(clibs.log_data, 'w') as _:
with open(clibs.log_data_hmi, 'w') as _hmi, open(clibs.log_data_debug, 'w', encoding='utf-8') as _debug:
    for i in range(1, 11):
        try:
            remove(f'{clibs.log_data}.{i}')
            remove(f'{clibs.log_data_hmi}.{i}')
        except FileNotFoundError:
            pass
logger = getLogger(__file__)
logger.setLevel(INFO)
logger = clibs.log_prod
logger.info("日志文件初始化完成...")

use('Agg')
@@ -1,11 +1,8 @@
from json import loads
from sys import argv
from logging import getLogger, INFO
from commons import clibs

tab_name = clibs.tab_names['at']
logger = getLogger(__file__)
logger.setLevel(INFO)
logger = clibs.log_prod


def trigger_estop(md, w2t):
@@ -40,7 +37,7 @@ def get_state(hr, w2t):


def warning_info(hr, w2t):
    for postfix in ['', '.2', '.3', '.4', '.5', '.6', '.7', '.8', '.9', '.10']:
        log_name = clibs.log_data + postfix
        log_name = clibs.log_data_hmi + postfix
        try:
            with open(log_name, 'r', encoding='utf-8') as f_log:
                for line in f_log:
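The warning_info hunk above is cut off mid-loop by the diff; a self-contained sketch of the rotated-log scan it performs, assuming the clibs.log_data_hmi path and the postfix list shown, with a hypothetical keyword filter:

```python
# Sketch only: scan the current HMI log plus its rotated backups for warning lines.
# The postfix list mirrors the diff above; 'warning_keyword' is illustrative.
from commons import clibs

def collect_warnings(warning_keyword='alarm'):
    hits = []
    for postfix in ['', '.2', '.3', '.4', '.5', '.6', '.7', '.8', '.9', '.10']:
        log_name = clibs.log_data_hmi + postfix
        try:
            with open(log_name, 'r', encoding='utf-8') as f_log:
                for line in f_log:
                    if warning_keyword in line:
                        hits.append(line.strip())
        except FileNotFoundError:
            pass  # that backup has not been created yet
    return hits
```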
@@ -5,12 +5,10 @@ from paramiko import SSHClient, AutoAddPolicy
from json import loads
from openpyxl import load_workbook
from pandas import DataFrame, concat
from logging import getLogger, INFO
from commons import clibs

tab_name = clibs.tab_names['at']
logger = getLogger(__file__)
logger.setLevel(INFO)
logger = clibs.log_prod


def check_files(path, loadsel, data_dirs, data_files, w2t):
@@ -107,6 +105,7 @@ def run_rl(path, loadsel, hr, md, config_file, result_dirs, w2t):
    write_diagnosis = float(ws.cell(row=3, column=10).value)
    get_init_speed = float(ws.cell(row=4, column=10).value)
    single_brake = str(ws.cell(row=5, column=10).value)
    logger.info(f"write_diagnosis = {write_diagnosis}, get_init_speed = {get_init_speed}, single_brake = {single_brake}")

    if ws.cell(row=1, column=1).value == 'positive':
        md.write_pon(1)
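The run_rl hunk reads the new configuration values (write_diagnosis, get_init_speed, and the single_brake parameter mentioned in the changelog) from fixed cells of the configs workbook. A hedged sketch of that read, assuming a hypothetical configs.xlsx path and sheet:

```python
# Sketch only: reading the three parameters from column J (column=10) of the
# configs workbook; the file path and sheet choice are assumptions.
from openpyxl import load_workbook

wb = load_workbook('configs.xlsx', data_only=True)
ws = wb.active  # or wb['<sheet name>'] if known

write_diagnosis = float(ws.cell(row=3, column=10).value)
get_init_speed = float(ws.cell(row=4, column=10).value)
single_brake = str(ws.cell(row=5, column=10).value)
print(write_diagnosis, get_init_speed, single_brake)
```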
@@ -4,12 +4,9 @@ from sys import argv
from paramiko import SSHClient, AutoAddPolicy
from json import loads
from pandas import DataFrame, concat
from logging import getLogger, INFO
from commons import clibs

logger = getLogger(__file__)
logger.setLevel(INFO)

logger = clibs.log_prod
tab_name = clibs.tab_names['at']
display_pdo_params = [
    {"name": "hw_joint_vel_feedback", "channel": 0},
@@ -4,10 +4,12 @@ from time import sleep
from os.path import exists
from paramiko import SSHClient, AutoAddPolicy
from socket import setdefaulttimeout
from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL, Formatter, StreamHandler, basicConfig
from concurrent_log_handler import ConcurrentRotatingFileHandler
from logging import getLogger
from logging.config import dictConfig
import concurrent_log_handler

ip_addr = '192.168.0.160'
ip_addr = '192.168.0.160'  # for product
# ip_addr = '192.168.84.129'  # for test
RADIAN = 57.3  # 180 / 3.1415926
MAX_FRAME_SIZE = 1024
TIMEOUT = 5
@@ -15,7 +17,8 @@ setdefaulttimeout(TIMEOUT)
tab_names = {'dp': 'Data Process', 'at': 'Automatic Test', 'da': 'Duration Action', 'op': 'openapi'}
# PREFIX = ''  # for pyinstaller packaging
PREFIX = '../assets/'  # for source code debug
log_data = f'{PREFIX}templates/c_msg.log'
log_data_hmi = f'{PREFIX}templates/c_msg.log'
log_data_debug = f'{PREFIX}templates/debug.log'
heartbeat = f'{PREFIX}templates/heartbeat'
durable_data_current_xlsx = f'{PREFIX}templates/durable/durable_data_current.xlsx'
durable_data_current_max_xlsx = f'{PREFIX}templates/durable/durable_data_current_max.xlsx'
@@ -38,16 +41,80 @@ durable_data_current_max = {
    'axis6': [0 for _ in range(18)],
}

file_handler = ConcurrentRotatingFileHandler(filename=log_data, backupCount=10, maxBytes=50*1024*1024, encoding='utf-8')
file_handler.setLevel(INFO)
console_handler = StreamHandler()
console_handler.setLevel(WARNING)

basicConfig(  # level=INFO,
    datefmt='%Y-%m-%dT%H:%M:%S',
    format='%(asctime)s # %(levelname)s-%(filename)s-%(funcName)s # %(message)s',
    handlers=[file_handler, console_handler],
)
# version: the schema version, an integer starting at 1. This key is required; all other keys are optional.
# formatters: log formatters; the value is a dict in which each key/value pair defines one Formatter. The key is the Formatter ID (user-defined) and the value is a dict describing how to configure that Formatter instance. The default format is '%(message)s'.
# filters: log filters; the value is a dict in which each key/value pair defines one Filter. The key is the Filter ID (user-defined) and the value is a dict describing how to configure that Filter instance.
# handlers: log handlers; the value is a dict in which each key/value pair defines one Handler. The key is the Handler ID (user-defined) and the value is a dict describing how to configure that Handler instance, using these keys:
#     class (required): fully qualified name of the handler class
#     level (optional): the minimum level this handler processes; records below this level are not handled by it. level may be an integer log level or an uppercase level-name string; the mapping between names and numbers is:
#         CRITICAL 50
#         ERROR    40
#         WARNING  30
#         INFO     20
#         DEBUG    10
#         NOTSET    0
f_complex = '%(asctime)s # %(name)s-%(levelname)s-%(module)s-%(funcName)s-%(lineno)d # %(message)s'
f_simple = '%(levelname)s-%(module)s-%(funcName)s-%(lineno)d: %(message)s'
log_dicts = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'standard': {
            'format': f_complex,
            'style': '%',
            'datefmt': '%Y-%m-%dT%H:%M:%S',
        },
        'test': {
            'format': f_simple,
            'style': '%',
            'datefmt': '%Y-%m-%dT%H:%M:%S',
        },
    },
    'filters': {},
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'test',
        },
        'hmi.log': {
            'level': 'DEBUG',
            'class': 'concurrent_log_handler.ConcurrentRotatingFileHandler',
            'filename': log_data_hmi,
            'maxBytes': 1024*1024*50,
            'backupCount': 10,
            'encoding': 'utf-8',
            'formatter': 'standard',
        },
        'debug.log': {
            'level': 'INFO',
            'class': 'logging.FileHandler',
            'filename': log_data_debug,
            'encoding': 'utf-8',
            'formatter': 'standard',
        },
    },
    'loggers': {
        'normal': {
            'handlers': ['hmi.log', 'debug.log'],
            'level': 'DEBUG',
            'propagate': False
        },
        'debug': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False
        },
        '': {
            'handlers': ['hmi.log', 'debug.log'],
            'level': 'DEBUG',
            'propagate': False
        },
    }
}
dictConfig(log_dicts)
log_prod = getLogger('normal')
log_debug = getLogger('debug')


class GetThreadResult(Thread):
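With the dict applied by dictConfig, downstream modules no longer configure logging themselves; they import the pre-built logger objects, as the per-module hunks in this commit show. A minimal usage sketch:

```python
# Sketch only: consuming the shared loggers configured in clibs.py.
from commons import clibs

logger = clibs.log_prod                  # 'normal' logger -> hmi.log + debug.log handlers
logger.info("recorded by both file handlers (levels permitting)")
clibs.log_debug.debug("console only")    # 'debug' logger -> StreamHandler
```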
@@ -6,11 +6,9 @@ from time import time, sleep
from pymodbus.client.tcp import ModbusTcpClient
from pymodbus.payload import BinaryPayloadDecoder, BinaryPayloadBuilder
from pymodbus.constants import Endian
from logging import getLogger, INFO
from commons import clibs

logger = getLogger(__file__)
logger.setLevel(INFO)
logger = clibs.log_prod


class ModbusRequest(object):
@@ -263,7 +261,7 @@ class HmiRequest(object):
    def msg_storage(self, response, flag=0):
        # response is the decoded string
        messages = self.c_msg if flag == 0 else self.c_msg_xs
        logger.info(f"{loads(response)}")
        logger.debug(f"{loads(response)}")
        if 'move.monitor' in response:
            pass
        elif len(messages) < 10000:
@@ -504,7 +502,7 @@ class HmiRequest(object):

    def get_from_id(self, msg_id, flag=0):
        for i in range(3):
            with open(clibs.log_data, mode='r', encoding='utf-8') as f_log:
            with open(clibs.log_data_hmi, mode='r', encoding='utf-8') as f_log:
                for line in f_log:
                    if msg_id in line.strip():
                        return line
@@ -512,7 +510,7 @@ class HmiRequest(object):
        else:  # try the previously rotated log, once only
            sleep(1)
            try:
                with open(clibs.log_data+'.1', mode='r', encoding='utf-8') as f_log:
                with open(clibs.log_data_hmi+'.1', mode='r', encoding='utf-8') as f_log:
                    for line in f_log:
                        if msg_id in line.strip():
                            return line
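get_from_id above retries the current HMI log a few times and then falls back once to the most recent rotated backup (`.1`). A standalone sketch of that lookup, with the retry count and sleep interval taken from the hunks and everything else illustrative:

```python
# Sketch only: look up a message id in the current HMI log, then fall back to
# the '.1' backup produced by the rotating handler.
from time import sleep
from commons import clibs

def find_message(msg_id, retries=3):
    for _ in range(retries):
        with open(clibs.log_data_hmi, mode='r', encoding='utf-8') as f_log:
            for line in f_log:
                if msg_id in line.strip():
                    return line
        sleep(1)
    try:
        with open(clibs.log_data_hmi + '.1', mode='r', encoding='utf-8') as f_log:
            for line in f_log:
                if msg_id in line.strip():
                    return line
    except FileNotFoundError:
        pass
    return None
```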
@@ -617,7 +615,7 @@ class HmiRequest(object):
            self.c.send(self.package(cmd))
            sleep(0.5)
        except Exception as Err:
            self.w2t(f"{cmd}\n请求发送失败...{Err}", 0, 0, 'red', tab_name=self.tab_name)
            self.w2t(f"{cmd}: 请求发送失败...{Err}", 0, 0, 'red', tab_name=self.tab_name)

        return req['id']
@@ -1,15 +1,12 @@
# coding: utf-8
from os.path import isfile
from sys import argv
from openpyxl import load_workbook
from time import time, sleep, strftime, localtime
from threading import Thread
from pandas import read_csv
from logging import getLogger, INFO
from commons import clibs

logger = getLogger(__file__)
logger.setLevel(INFO)
logger = clibs.log_prod


def check_files(path, raw_data_dirs, result_files, w2t):
@@ -5,11 +5,9 @@ from re import match
from threading import Thread
from time import sleep
from csv import reader, writer
from logging import getLogger, INFO
from commons import clibs

logger = getLogger(__file__)
logger.setLevel(INFO)
logger = clibs.log_prod


def w2t_local(msg, wait, w2t):
@@ -1,13 +1,10 @@
# _*_ encoding:utf-8 _*_
import pdfplumber
from openpyxl import load_workbook
from os import remove
from sys import argv
from logging import getLogger, INFO
from commons import clibs

logger = getLogger(__file__)
logger.setLevel(INFO)
logger = clibs.log_prod


def p_iso(file, p_files, ws, tmpfile):
@@ -2,11 +2,9 @@ from pandas import read_csv
from csv import reader
from sys import argv
from openpyxl import Workbook
from logging import getLogger, INFO
from commons import clibs

logger = getLogger(__file__)
logger.setLevel(INFO)
logger = clibs.log_prod


def find_point(bof, step, pos, data_file, flag, df, row, w2t):
@@ -6,12 +6,9 @@ from openpyxl import load_workbook
from math import sqrt
from numpy import power
from csv import writer
from logging import getLogger, INFO
from commons import clibs

logger = getLogger(__file__)
logger.setLevel(INFO)

logger = clibs.log_prod
tab_name = clibs.tab_names['da']
count = 0
display_pdo_params = [