v0.2.0.6(2024/08/09)

1. [t_change_ui: all files]
   - 修改了 logger 的实现
   - 尤其是 clibs.py,使用日志字典,重写了日志记录的功能
This commit is contained in:
gitea 2024-08-09 10:47:22 +08:00
parent 340d48876b
commit 3814d163c5
13 changed files with 106 additions and 57 deletions

1
.gitignore vendored
View File

@ -12,3 +12,4 @@ aio/code/durable_action/__pycache__/
aio/assets/templates/durable/ aio/assets/templates/durable/
aio/assets/templates/.__c_msg.lock aio/assets/templates/.__c_msg.lock
aio/code/commons/__pycache__/ aio/code/commons/__pycache__/
aio/assets/templates/debug.log

View File

@ -596,3 +596,8 @@ v0.2.0.5(2024/07/31)
   - 修改获取初始速度的逻辑,只获取configs文件中配置的时间内的速度    - 修改获取初始速度的逻辑,只获取configs文件中配置的时间内的速度
   - 新增 configs 参数 single_brake,可针对特定条件做测试    - 新增 configs 参数 single_brake,可针对特定条件做测试
10. [APIs: all]: 添加了 logger.setLevel(INFO),只有添加这个,单个模块内才生效 10. [APIs: all]: 添加了 logger.setLevel(INFO),只有添加这个,单个模块内才生效
v0.2.0.6(2024/08/09)
1. [t_change_ui: all files]
- 修改了 logger 的实现
   - 尤其是 clibs.py,使用日志字典,重写了日志记录的功能

View File

@ -14,16 +14,14 @@ from commons import openapi, clibs
from matplotlib.pyplot import rcParams, figure, subplots_adjust, close from matplotlib.pyplot import rcParams, figure, subplots_adjust, close
from matplotlib import use from matplotlib import use
from pandas import DataFrame, read_excel from pandas import DataFrame, read_excel
from logging import getLogger, INFO
with open(clibs.log_data, 'w') as _: with open(clibs.log_data_hmi, 'w') as _hmi, open(clibs.log_data_debug, 'w', encoding='utf-8') as _debug:
for i in range(1, 11): for i in range(1, 11):
try: try:
remove(f'{clibs.log_data}.{i}') remove(f'{clibs.log_data_hmi}.{i}')
except FileNotFoundError: except FileNotFoundError:
pass pass
logger = getLogger(__file__) logger = clibs.log_prod
logger.setLevel(INFO)
logger.info("日志文件初始化完成...") logger.info("日志文件初始化完成...")
use('Agg') use('Agg')

View File

@ -1,11 +1,8 @@
from json import loads
from sys import argv from sys import argv
from logging import getLogger, INFO
from commons import clibs from commons import clibs
tab_name = clibs.tab_names['at'] tab_name = clibs.tab_names['at']
logger = getLogger(__file__) logger = clibs.log_prod
logger.setLevel(INFO)
def trigger_estop(md, w2t): def trigger_estop(md, w2t):
@ -40,7 +37,7 @@ def get_state(hr, w2t):
def warning_info(hr, w2t): def warning_info(hr, w2t):
for postfix in ['', '.2', '.3', '.4', '.5', '.6', '.7', '.8', '.9', '.10']: for postfix in ['', '.2', '.3', '.4', '.5', '.6', '.7', '.8', '.9', '.10']:
log_name = clibs.log_data + postfix log_name = clibs.log_data_hmi + postfix
try: try:
with open(log_name, 'r', encoding='utf-8') as f_log: with open(log_name, 'r', encoding='utf-8') as f_log:
for line in f_log: for line in f_log:

View File

@ -5,12 +5,10 @@ from paramiko import SSHClient, AutoAddPolicy
from json import loads from json import loads
from openpyxl import load_workbook from openpyxl import load_workbook
from pandas import DataFrame, concat from pandas import DataFrame, concat
from logging import getLogger, INFO
from commons import clibs from commons import clibs
tab_name = clibs.tab_names['at'] tab_name = clibs.tab_names['at']
logger = getLogger(__file__) logger = clibs.log_prod
logger.setLevel(INFO)
def check_files(path, loadsel, data_dirs, data_files, w2t): def check_files(path, loadsel, data_dirs, data_files, w2t):
@ -107,6 +105,7 @@ def run_rl(path, loadsel, hr, md, config_file, result_dirs, w2t):
write_diagnosis = float(ws.cell(row=3, column=10).value) write_diagnosis = float(ws.cell(row=3, column=10).value)
get_init_speed = float(ws.cell(row=4, column=10).value) get_init_speed = float(ws.cell(row=4, column=10).value)
single_brake = str(ws.cell(row=5, column=10).value) single_brake = str(ws.cell(row=5, column=10).value)
logger.info(f"write_diagnosis = {write_diagnosis}, get_init_speed = {get_init_speed}, single_brake = {single_brake}")
if ws.cell(row=1, column=1).value == 'positive': if ws.cell(row=1, column=1).value == 'positive':
md.write_pon(1) md.write_pon(1)

View File

@ -4,12 +4,9 @@ from sys import argv
from paramiko import SSHClient, AutoAddPolicy from paramiko import SSHClient, AutoAddPolicy
from json import loads from json import loads
from pandas import DataFrame, concat from pandas import DataFrame, concat
from logging import getLogger, INFO
from commons import clibs from commons import clibs
logger = getLogger(__file__) logger = clibs.log_prod
logger.setLevel(INFO)
tab_name = clibs.tab_names['at'] tab_name = clibs.tab_names['at']
display_pdo_params = [ display_pdo_params = [
{"name": "hw_joint_vel_feedback", "channel": 0}, {"name": "hw_joint_vel_feedback", "channel": 0},

View File

@ -4,10 +4,12 @@ from time import sleep
from os.path import exists from os.path import exists
from paramiko import SSHClient, AutoAddPolicy from paramiko import SSHClient, AutoAddPolicy
from socket import setdefaulttimeout from socket import setdefaulttimeout
from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL, Formatter, StreamHandler, basicConfig from logging import getLogger
from concurrent_log_handler import ConcurrentRotatingFileHandler from logging.config import dictConfig
import concurrent_log_handler
ip_addr = '192.168.0.160' ip_addr = '192.168.0.160' # for product
# ip_addr = '192.168.84.129' # for test
RADIAN = 57.3 # 180 / 3.1415926 RADIAN = 57.3 # 180 / 3.1415926
MAX_FRAME_SIZE = 1024 MAX_FRAME_SIZE = 1024
TIMEOUT = 5 TIMEOUT = 5
@ -15,7 +17,8 @@ setdefaulttimeout(TIMEOUT)
tab_names = {'dp': 'Data Process', 'at': 'Automatic Test', 'da': 'Duration Action', 'op': 'openapi'} tab_names = {'dp': 'Data Process', 'at': 'Automatic Test', 'da': 'Duration Action', 'op': 'openapi'}
# PREFIX = '' # for pyinstaller packaging # PREFIX = '' # for pyinstaller packaging
PREFIX = '../assets/' # for source code debug PREFIX = '../assets/' # for source code debug
log_data = f'{PREFIX}templates/c_msg.log' log_data_hmi = f'{PREFIX}templates/c_msg.log'
log_data_debug = f'{PREFIX}templates/debug.log'
heartbeat = f'{PREFIX}templates/heartbeat' heartbeat = f'{PREFIX}templates/heartbeat'
durable_data_current_xlsx = f'{PREFIX}templates/durable/durable_data_current.xlsx' durable_data_current_xlsx = f'{PREFIX}templates/durable/durable_data_current.xlsx'
durable_data_current_max_xlsx = f'{PREFIX}templates/durable/durable_data_current_max.xlsx' durable_data_current_max_xlsx = f'{PREFIX}templates/durable/durable_data_current_max.xlsx'
@ -38,16 +41,80 @@ durable_data_current_max = {
'axis6': [0 for _ in range(18)], 'axis6': [0 for _ in range(18)],
} }
file_handler = ConcurrentRotatingFileHandler(filename=log_data, backupCount=10, maxBytes=50*1024*1024, encoding='utf-8') # version: 表示版本,该键值为从1开始的整数。该key必选,除此之外,其它key都是可选。
file_handler.setLevel(INFO) # formatters: 日志格式化器,其value值为一个字典,该字典的每个键值对都代表一个Formatter,键值对中key代表Formatter ID(自定义ID),value为字典,描述如何配置相应的Formatter实例。默认格式为 %(message)s
console_handler = StreamHandler() # filters: 日志过滤器,其value值为一个字典,该字典的每个键值对都代表一个Filter,键值对中key代表Filter ID(自定义ID),value为字典,描述如何配置相应的Filter实例。
console_handler.setLevel(WARNING) # handlers: 日志处理器,其value值为一个字典,该字典的每个键值对都代表一个Handler,键值对中key代表Handler ID(自定义ID),value为字典,描述如何配置相应的Handler实例,包含以下配置key:
# class (必选):日志处理器类全称
basicConfig( # level=INFO, # level (可选): 指定该日志处理器需要处理哪些级别的日志,低于该级别的日志将不被该handler处理。level可以为代表日志级别的整数,或者表示日志级别的大写字符串,字符串日志级别和数字日志级别对应关系如下:
datefmt='%Y-%m-%dT%H:%M:%S', # CRITICAL 50
format='%(asctime)s # %(levelname)s-%(filename)s-%(funcName)s # %(message)s', # ERROR 40
handlers=[file_handler, console_handler], # WARNING 30
) # INFO 20
# DEBUG 10
# NOTSET 0
f_complex = '%(asctime)s # %(name)s-%(levelname)s-%(module)s-%(funcName)s-%(lineno)d # %(message)s'
f_simple = '%(levelname)s-%(module)s-%(funcName)s-%(lineno)d: %(message)s'
log_dicts = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': f_complex,
'style': '%',
'datefmt': '%Y-%m-%dT%H:%M:%S',
},
'test': {
'format': f_simple,
'style': '%',
'datefmt': '%Y-%m-%dT%H:%M:%S',
},
},
'filters': {},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'test',
},
'hmi.log': {
'level': 'DEBUG',
'class': 'concurrent_log_handler.ConcurrentRotatingFileHandler',
'filename': log_data_hmi,
'maxBytes': 1024*1024*50,
'backupCount': 10,
'encoding': 'utf-8',
'formatter': 'standard',
},
'debug.log': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': log_data_debug,
'encoding': 'utf-8',
'formatter': 'standard',
},
},
'loggers': {
'normal': {
'handlers': ['hmi.log', 'debug.log'],
'level': 'DEBUG',
'propagate': False
},
'debug': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': False
},
'': {
'handlers': ['hmi.log', 'debug.log'],
'level': 'DEBUG',
'propagate': False
},
}
}
dictConfig(log_dicts)
log_prod = getLogger('normal')
log_debug = getLogger('debug')
class GetThreadResult(Thread): class GetThreadResult(Thread):

View File

@ -6,11 +6,9 @@ from time import time, sleep
from pymodbus.client.tcp import ModbusTcpClient from pymodbus.client.tcp import ModbusTcpClient
from pymodbus.payload import BinaryPayloadDecoder, BinaryPayloadBuilder from pymodbus.payload import BinaryPayloadDecoder, BinaryPayloadBuilder
from pymodbus.constants import Endian from pymodbus.constants import Endian
from logging import getLogger, INFO
from commons import clibs from commons import clibs
logger = getLogger(__file__) logger = clibs.log_prod
logger.setLevel(INFO)
class ModbusRequest(object): class ModbusRequest(object):
@ -263,7 +261,7 @@ class HmiRequest(object):
def msg_storage(self, response, flag=0): def msg_storage(self, response, flag=0):
# response是解码后的字符串 # response是解码后的字符串
messages = self.c_msg if flag == 0 else self.c_msg_xs messages = self.c_msg if flag == 0 else self.c_msg_xs
logger.info(f"{loads(response)}") logger.debug(f"{loads(response)}")
if 'move.monitor' in response: if 'move.monitor' in response:
pass pass
elif len(messages) < 10000: elif len(messages) < 10000:
@ -504,7 +502,7 @@ class HmiRequest(object):
def get_from_id(self, msg_id, flag=0): def get_from_id(self, msg_id, flag=0):
for i in range(3): for i in range(3):
with open(clibs.log_data, mode='r', encoding='utf-8') as f_log: with open(clibs.log_data_hmi, mode='r', encoding='utf-8') as f_log:
for line in f_log: for line in f_log:
if msg_id in line.strip(): if msg_id in line.strip():
return line return line
@ -512,7 +510,7 @@ class HmiRequest(object):
else: # 尝试在上一次分割的日志中查找,只做一次 else: # 尝试在上一次分割的日志中查找,只做一次
sleep(1) sleep(1)
try: try:
with open(clibs.log_data+'.1', mode='r', encoding='utf-8') as f_log: with open(clibs.log_data_hmi+'.1', mode='r', encoding='utf-8') as f_log:
for line in f_log: for line in f_log:
if msg_id in line.strip(): if msg_id in line.strip():
return line return line
@ -617,7 +615,7 @@ class HmiRequest(object):
self.c.send(self.package(cmd)) self.c.send(self.package(cmd))
sleep(0.5) sleep(0.5)
except Exception as Err: except Exception as Err:
self.w2t(f"{cmd}\n请求发送失败...{Err}", 0, 0, 'red', tab_name=self.tab_name) self.w2t(f"{cmd}: 请求发送失败...{Err}", 0, 0, 'red', tab_name=self.tab_name)
return req['id'] return req['id']

View File

@ -1,15 +1,12 @@
# coding: utf-8
from os.path import isfile from os.path import isfile
from sys import argv from sys import argv
from openpyxl import load_workbook from openpyxl import load_workbook
from time import time, sleep, strftime, localtime from time import time, sleep, strftime, localtime
from threading import Thread from threading import Thread
from pandas import read_csv from pandas import read_csv
from logging import getLogger, INFO
from commons import clibs from commons import clibs
logger = getLogger(__file__) logger = clibs.log_prod
logger.setLevel(INFO)
def check_files(path, raw_data_dirs, result_files, w2t): def check_files(path, raw_data_dirs, result_files, w2t):

View File

@ -5,11 +5,9 @@ from re import match
from threading import Thread from threading import Thread
from time import sleep from time import sleep
from csv import reader, writer from csv import reader, writer
from logging import getLogger, INFO
from commons import clibs from commons import clibs
logger = getLogger(__file__) logger = clibs.log_prod
logger.setLevel(INFO)
def w2t_local(msg, wait, w2t): def w2t_local(msg, wait, w2t):

View File

@ -1,13 +1,10 @@
# _*_ encoding:utf-8 _*_
import pdfplumber import pdfplumber
from openpyxl import load_workbook from openpyxl import load_workbook
from os import remove from os import remove
from sys import argv from sys import argv
from logging import getLogger, INFO
from commons import clibs from commons import clibs
logger = getLogger(__file__) logger = clibs.log_prod
logger.setLevel(INFO)
def p_iso(file, p_files, ws, tmpfile): def p_iso(file, p_files, ws, tmpfile):

View File

@ -2,11 +2,9 @@ from pandas import read_csv
from csv import reader from csv import reader
from sys import argv from sys import argv
from openpyxl import Workbook from openpyxl import Workbook
from logging import getLogger, INFO
from commons import clibs from commons import clibs
logger = getLogger(__file__) logger = clibs.log_prod
logger.setLevel(INFO)
def find_point(bof, step, pos, data_file, flag, df, row, w2t): def find_point(bof, step, pos, data_file, flag, df, row, w2t):

View File

@ -6,12 +6,9 @@ from openpyxl import load_workbook
from math import sqrt from math import sqrt
from numpy import power from numpy import power
from csv import writer from csv import writer
from logging import getLogger, INFO
from commons import clibs from commons import clibs
logger = getLogger(__file__) logger = clibs.log_prod
logger.setLevel(INFO)
tab_name = clibs.tab_names['da'] tab_name = clibs.tab_names['da']
count = 0 count = 0
display_pdo_params = [ display_pdo_params = [