commit 66b1dd4d703413c10c7d9c617727168b469dc2f0
Author: gitea
Date:   Mon Jun 5 23:04:30 2023 +0800

    [init] initial commit

diff --git a/alert/__pycache__/calendar.cpython-39.pyc b/alert/__pycache__/calendar.cpython-39.pyc
new file mode 100644
index 0000000..0f30d32
Binary files /dev/null and b/alert/__pycache__/calendar.cpython-39.pyc differ
diff --git a/alert/calendar_tips.py b/alert/calendar_tips.py
new file mode 100644
index 0000000..a50cf4e
--- /dev/null
+++ b/alert/calendar_tips.py
@@ -0,0 +1,72 @@
+import json
+import time
+import datetime
+import requests
+
+
+def send_msg_tip(msg_tip):
+    # get the current datetime, used when logging a failed send
+    alert_datetime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
+    # Enterprise WeChat Bot API and the format of the body to send
+    hook_url = 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=ddea3f5f-fbfc-4c21-994a-71e9fc50e4ef'
+    body = {
+        "msgtype": "text",
+        "text": {
+            "content": msg_tip
+        }
+    }
+
+    # get the result of the API call
+    res = requests.post(hook_url, data=json.dumps(body, ensure_ascii=False).encode('utf-8'))
+
+    # if sending failed, log it to the /opt/logs/alert.log file
+    if res.status_code != 200:
+        with open('/opt/logs/alert.log', 'a', encoding='utf-8') as alert_log:
+            alert_log.write(alert_datetime + ' >>>> ')
+            alert_log.write('Failed sending message: ')
+            alert_log.write(msg_tip + '\n')
+
+
+# check today's holiday status
+def holiday_today(app_id, app_secret):
+    week_index = {1: '一', 2: '二', 3: '三', 4: '四', 5: '五', 6: '六', 7: '天', }
+    today = datetime.date.today()
+    today_fmt = str(today).replace('-', '')
+    api_url = f'https://www.mxnzp.com/api/holiday/single/{today_fmt}?ignoreHoliday=false&app_id={app_id}&app_secret={app_secret}'
+    res = requests.get(api_url)
+
+    res_http_code = res.status_code
+    res_text = json.loads(res.text)
+    res_code = res_text['code']
+    res_msg = res_text['msg']
+    if res_http_code != 200 or res_code == 0:
+        msg_tip = res_msg
+    else:
+        res_weekday = res_text['data']['weekDay']
+        res_yeartips = res_text['data']['yearTips']
+        res_chinesezodiac = res_text['data']['chineseZodiac']
+        res_typedes = res_text['data']['typeDes']
+        res_type = res_text['data']['type']
+        res_dayofyear = res_text['data']['dayOfYear']
+        res_weekofyear = res_text['data']['weekOfYear']
+        res_constellation = res_text['data']['constellation']
+        msg_tip = f'{today},{res_yeartips}{res_chinesezodiac}年,星期{week_index[res_weekday]},{res_constellation}。本周是今年的第{res_weekofyear}周,今天是今年的第{res_dayofyear}天,是{res_typedes},'
+        if res_type == 2 or res_type == 1:
+            msg_tip += f"请好好休息,享用美好的一天。"
+        else:
+            msg_tip += f"请努力工作,保持良好的心态。"
+
+    send_msg_tip(msg_tip)
+
+def main():
+    app_id = "nrsngdkvknqkrwko"
+    app_secret = "SFFmQWo2dnNBRjdNYkVSclZxa2ZvUT09"
+    holiday_today(app_id, app_secret)
+
+
+if __name__ == '__main__':
+    main()
+
+
+
+
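A note on the send helper that recurs in every script below: the WeChat Work group-bot webhook typically reports delivery problems in its JSON body (a non-zero errcode) even when the HTTP status is 200, so checking only res.status_code can miss failures. A minimal sketch of a stricter check (the name send_ok and its signature are illustrative, not part of this commit):

    import json
    import requests

    def send_ok(hook_url: str, content: str) -> bool:
        """Post a text message to an Enterprise WeChat bot and report whether it was accepted."""
        body = {"msgtype": "text", "text": {"content": content}}
        res = requests.post(hook_url, data=json.dumps(body, ensure_ascii=False).encode('utf-8'))
        # on success the webhook usually answers HTTP 200 with a body like {"errcode": 0, "errmsg": "ok"}
        return res.status_code == 200 and res.json().get('errcode') == 0
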
diff --git a/alert/docker_alarm.sh b/alert/docker_alarm.sh
new file mode 100644
index 0000000..d3966c4
--- /dev/null
+++ b/alert/docker_alarm.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+docker ps -a --format "table {{.Names}}\t{{.Status}}" > /opt/logs/docker_status.log
+while read line; do
+    echo $line | grep -q 'Exited'
+    if [[ $? -eq 0 ]]; then
+        name=`echo $line | awk '{print $1}'`
+        alarm="Docker Alarm - $name:\nContainer $name has been offline, please check ASAP."
+        bash /opt/scripts/alert/sendmsg.sh "$alarm"
+    fi
+done < /opt/logs/docker_status.log
+
diff --git a/alert/love_words.py b/alert/love_words.py
new file mode 100644
index 0000000..d99376a
--- /dev/null
+++ b/alert/love_words.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+import json
+import time
+import datetime
+import requests
+
+
+def send_msg_tip(msg_tip):
+    # get the current datetime, used when logging a failed send
+    alert_datetime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
+    # Enterprise WeChat Bot API and the format of the body to send
+    hook_url = 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=ddea3f5f-fbfc-4c21-994a-71e9fc50e4ef'
+    body = {
+        "msgtype": "text",
+        "text": {
+            "content": msg_tip
+        }
+    }
+
+    # get the result of the API call
+    res = requests.post(hook_url, data=json.dumps(body, ensure_ascii=False).encode('utf-8'))
+
+    # if sending failed, log it to the /opt/logs/alert.log file
+    if res.status_code != 200:
+        with open('/opt/logs/alert.log', 'a', encoding='utf-8') as alert_log:
+            alert_log.write(alert_datetime + ' >>>> ')
+            alert_log.write('Failed sending message: ')
+            alert_log.write(msg_tip + '\n')
+
+
+# fetch today's recommended daily sentences
+def love_sentence(app_id, app_secret):
+
+    api_url = f'https://www.mxnzp.com/api/daily_word/recommend?count=10&app_id={app_id}&app_secret={app_secret}'
+    res = requests.get(api_url)
+
+    res_http_code = res.status_code
+    res_text = json.loads(res.text)
+    res_code = res_text['code']
+    res_msg = res_text['msg']
+    if res_http_code != 200 or res_code == 0:
+        msg_tip = res_msg
+    else:
+        res_data = res_text['data']
+        msg_tip = ''
+        for item in res_data:
+            msg_tip += f'{item["content"]}\n'
+
+
+    # print('*' * 20)
+    # print(msg_tip)
+
+    send_msg_tip(msg_tip)
+
+def main():
+    app_id = "nrsngdkvknqkrwko"
+    app_secret = "SFFmQWo2dnNBRjdNYkVSclZxa2ZvUT09"
+    love_sentence(app_id, app_secret)
+
+
+if __name__ == '__main__':
+    main()
+
+
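The send_msg_tip()/send_alert_msg() helper above is duplicated verbatim in every Python script in this commit. A minimal sketch of how it could be factored into one shared module (the module name notify.py and its constants are hypothetical, not files in this commit):

    import json
    import time
    import requests

    HOOK_URL = 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=ddea3f5f-fbfc-4c21-994a-71e9fc50e4ef'
    LOG_FILE = '/opt/logs/alert.log'

    def send(content: str) -> None:
        """Send a text message via the Enterprise WeChat Bot; append to LOG_FILE on failure."""
        body = {"msgtype": "text", "text": {"content": content}}
        res = requests.post(HOOK_URL, data=json.dumps(body, ensure_ascii=False).encode('utf-8'))
        if res.status_code != 200:
            stamp = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
            with open(LOG_FILE, 'a', encoding='utf-8') as alert_log:
                alert_log.write(f'{stamp} >>>> Failed sending message: {content}\n')

Each script could then simply do from notify import send instead of carrying its own copy.
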
diff --git a/alert/poem_send.py b/alert/poem_send.py
new file mode 100644
index 0000000..3b51d05
--- /dev/null
+++ b/alert/poem_send.py
@@ -0,0 +1,120 @@
+import requests
+import json
+import time
+import sys
+
+
+def send_alert_msg(alert_msg):
+    """
+    send messages via the Enterprise WeChat Bot with the content of a poem from the Jinrishici API.
+    今日诗词:名句
+
+    【title】-【author】-【dynasty】
+    complete poem
+    :param alert_msg: content in the format shown above
+    :return: None
+    """
+
+    # get the current datetime, used when logging a failed send
+    alert_datetime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
+    # Enterprise WeChat Bot API and the format of the body to send
+    hook_url = 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=ddea3f5f-fbfc-4c21-994a-71e9fc50e4ef'
+    body = {
+        "msgtype": "text",
+        "text": {
+            "content": alert_msg
+        }
+    }
+
+    # get the result of the API call
+    res = requests.post(hook_url, data=json.dumps(body, ensure_ascii=False).encode('utf-8'))
+
+    # if sending failed, log it to the /opt/logs/alert.log file
+    if res.status_code != 200:
+        with open('/opt/logs/alert.log', 'a', encoding='utf-8') as alert_log:
+            alert_log.write(alert_datetime + ' >>>> ')
+            alert_log.write('Failed sending message: ')
+            alert_log.write(alert_msg + '\n')
+
+
+def poem():
+    """
+    get the poem with the Jinrishici API
+    :return: None
+    """
+    # specify the token in the headers
+    headers = {'X-User-Token': 'dNigXSFtjhLbP5nf49piUPzmD7NoNHVz'}
+    api_url = 'https://v2.jinrishici.com/sentence'
+    # try up to 3 times, in case a request for the poem fails
+    for i in range(3):
+        res = requests.get(api_url, headers=headers)
+        # parse the JSON data into a dict
+        dict_substance = json.loads(res.content)
+
+        # when fetching the content failed, try again
+        if res.status_code != 200 or dict_substance['status'] != 'success':
+            continue
+
+        # put the necessary content into specific variables
+        # print(dict_substance)
+        poem_content = dict_substance['data']['content']
+        poem_title = dict_substance['data']['origin']['title']
+        poem_dynasty = dict_substance['data']['origin']['dynasty']
+        poem_author = dict_substance['data']['origin']['author']
+        poem_content_all = dict_substance['data']['origin']['content']
+        poem_translation = dict_substance['data']['origin']['translate']
+        # write the poem translation into /opt/scripts/alert/poem_trans.txt, if it exists
+        with open('/opt/scripts/alert/poem_trans.txt', 'w', encoding='utf-8') as obj_poem_trans:
+            if poem_translation:
+                for item in poem_translation:
+                    obj_poem_trans.write(item + '\n')
+            # if there is no translation, write an empty string into the file
+            else:
+                obj_poem_trans.write('')
+
+        # build the content to be sent via the EWB, aka Enterprise WeChat Bot
+        alert_msg = f"今日诗词:{poem_content}\n\n"
+        alert_msg += f"【{poem_title}】-【{poem_author}】-【{poem_dynasty}】\n"
+        for line in poem_content_all:
+            alert_msg += f"{line}\n"
+
+        # once the needed content is fetched successfully, jump out of the for-loop
+        break
+    # if the content still cannot be fetched after 3 retries, send the following warning message
+    else:
+        alert_msg = '当前无法获取今日诗词,请手动检查如下请求返回是否正确!\n'
+        alert_msg += 'curl "https://v2.jinrishici.com/sentence" -H "X-User-Token:dNigXSFtjhLbP5nf49piUPzmD7NoNHVz"'
+
+    # send it
+    send_alert_msg(alert_msg)
+
+
+def trans():
+    """
+    send the translation of the poem shown this morning, if it exists
+    :return: None
+    """
+    with open('/opt/scripts/alert/poem_trans.txt', 'r', encoding='utf-8') as obj_poem_trans:
+        alert_msg = obj_poem_trans.read()
+    # print(alert_msg)
+    if alert_msg:
+        alert_msg = f'今日诗词的译文:\n{alert_msg}'
+        send_alert_msg(alert_msg)
+
+
+def main():
+    """
+    dispatch according to the command-line parameter
+    :return: None
+    """
+    if sys.argv[1] == 'poem':
+        poem()
+    elif sys.argv[1] == 'trans':
+        trans()
+    else:
+        pass
+
+
+if __name__ == '__main__':
+    main()
+
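poem() above relies on Python's for/else: the else branch runs only when the loop finishes without hitting break, which is how the script falls back to the warning message after three failed attempts. A minimal self-contained illustration of the pattern (try_once is a stand-in for a single API request, not part of this commit):

    import random

    def try_once() -> bool:
        # stand-in for one request attempt; replace with a real API call
        return random.random() < 0.5

    def fetch_with_retry(attempts: int = 3) -> str:
        for _ in range(attempts):
            if try_once():
                result = 'ok'
                break      # success: the else clause below is skipped
        else:
            # reached only when the loop used up all attempts without a break
            result = 'all attempts failed'
        return result
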
diff --git a/alert/poem_trans.txt b/alert/poem_trans.txt
new file mode 100644
index 0000000..e69de29
diff --git a/alert/sendmsg.sh b/alert/sendmsg.sh
new file mode 100644
index 0000000..cbe0e0a
--- /dev/null
+++ b/alert/sendmsg.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+alarm="$1"
+
+curl 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=ddea3f5f-fbfc-4c21-994a-71e9fc50e4ef' \
+    -H 'Content-Type: application/json' \
+    -d '
+    {
+        "msgtype": "text",
+        "text": {
+            "content": "'"$alarm"'"
+        }
+    }' > /dev/null 2>&1
+
+
+
diff --git a/alert/todo_alert.py b/alert/todo_alert.py
new file mode 100644
index 0000000..7d94810
--- /dev/null
+++ b/alert/todo_alert.py
@@ -0,0 +1,95 @@
+import requests
+import json
+import time
+import os
+
+
+def send_alert_msg(alert_msg):
+    """
+    send warning messages to the phone via the Enterprise WeChat Bot API
+    :param alert_msg: messages needed to be sent
+    :return: None
+    """
+
+    # get the current datetime, used when logging a failed send
+    alert_datetime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
+    # Enterprise WeChat Bot API and the format of the body to send
+    hook_url = 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=ddea3f5f-fbfc-4c21-994a-71e9fc50e4ef'
+    body = {
+        "msgtype": "text",
+        "text": {
+            "content": alert_msg
+        }
+    }
+
+    # get the result of the API call
+    res = requests.post(hook_url, data=json.dumps(body, ensure_ascii=False).encode('utf-8'))
+
+    # if sending failed, log it to the /opt/logs/alert.log file
+    if res.status_code != 200:
+        with open('/opt/logs/alert.log', 'a', encoding='utf-8') as alert_log:
+            alert_log.write(alert_datetime + ' >>>> ')
+            alert_log.write('Failed sending message: ')
+            alert_log.write(alert_msg + '\n')
+
+
+def main():
+    """
+    process the /opt/logs/TODO/todo.txt file and get the items that are due within 2 months
+    :return: None
+    """
+
+    # run tt ls in advance, to generate a correct todo.txt file
+    os.system("bash /opt/scripts/todo/todo.sh ls > /dev/null")
+    # initialize alert_msg with an empty string
+    alert_msg = ''
+    # specify the range of days to alert on
+    alert_day = list(range(61))
+    # pretty index for output
+    alert_index = {1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e', 6: 'f'}
+    # prepare a dict with the specified format and content, to receive valid items
+    alert_tasks = dict().fromkeys(alert_day, None)
+    for key in alert_tasks.keys():
+        alert_tasks[key] = []
+
+    with open('/opt/logs/TODO/todo.txt', mode='r', encoding='utf-8') as todo_txt:
+        # process every line of the file, and get the time left for each task
+        for line in todo_txt.readlines():
+            items = line.strip().split('|')
+            time_left = int(float(items[2].strip().split(':')[1]))
+            content = items[1].strip() + ' | ' + items[3].strip()
+
+            # when the time left is smaller than 2 months, put the task into the dict
+            if time_left in alert_day:
+                alert_tasks[time_left].append(content)
+
+    # check every day to see if there are unfinished tasks
+    for time_left, task in alert_tasks.items():
+        # if there is no task for a given day, skip it
+        if task == []:
+            continue
+
+        # use grammatically correct singular/plural forms
+        sp_day = 'days' if time_left > 1 else 'day'
+        sp_task = 'tasks' if len(task) > 1 else 'task'
+
+        # different wording of the alert message
+        if time_left == 0:
+            alert_msg += f'Today, you NEED to finish the following {sp_task}:\n'
+        else:
+            alert_msg += f'{time_left} {sp_day} left to finish the following {sp_task}:\n'
+
+        # for every day, list all tasks that need to be done
+        count = 1
+        for assignment in task:
+            alert_msg += f' {alert_index[count]}. {assignment}\n'
+            count += 1
+        alert_msg += '\n'
+
+    alert_msg += 'So, hurry up!! Go get things done!!'
+    send_alert_msg(alert_msg)
+
+
+if __name__ == '__main__':
+    main()
+
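todo.txt is produced by /opt/scripts/todo/todo.sh, which is not part of this commit, so the exact line format is an assumption inferred from the parsing in main() above: at least four '|'-separated fields, with the third holding a label and the number of days left after a colon. A minimal illustration with a made-up line:

    sample = '3 | write weekly report | left: 1.5 | 2023-06-07'
    items = sample.strip().split('|')
    time_left = int(float(items[2].strip().split(':')[1]))    # -> 1
    content = items[1].strip() + ' | ' + items[3].strip()     # -> 'write weekly report | 2023-06-07'
    print(time_left, content)

Note also that alert_index maps only six entries, so a single day with more than six open tasks would raise a KeyError in the loop above.
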
diff --git a/alert/weather_tips.py b/alert/weather_tips.py
new file mode 100644
index 0000000..ab2d84a
--- /dev/null
+++ b/alert/weather_tips.py
@@ -0,0 +1,98 @@
+import sys
+import json
+import datetime
+import requests
+import time
+
+def send_msg_tip(msg_tip):
+    # get the current datetime, used when logging a failed send
+    alert_datetime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
+    # Enterprise WeChat Bot API and the format of the body to send
+    hook_url = 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=ddea3f5f-fbfc-4c21-994a-71e9fc50e4ef'
+    body = {
+        "msgtype": "text",
+        "text": {
+            "content": msg_tip
+        }
+    }
+
+    # get the result of the API call
+    res = requests.post(hook_url, data=json.dumps(body, ensure_ascii=False).encode('utf-8'))
+
+    # if sending failed, log it to the /opt/logs/alert.log file
+    if res.status_code != 200:
+        with open('/opt/logs/alert.log', 'a', encoding='utf-8') as alert_log:
+            alert_log.write(alert_datetime + ' >>>> ')
+            alert_log.write('Failed sending message: ')
+            alert_log.write(msg_tip + '\n')
+
+
+def weather_forcast(app_id, app_secret, city):
+    api_url = f"https://www.mxnzp.com/api/weather/forecast/{city}?app_id={app_id}&app_secret={app_secret}"
+    res = requests.get(api_url)
+    res_http_code = res.status_code
+    res_text = json.loads(res.text)
+    res_code = res_text['code']
+    res_msg = res_text['msg']
+    # print(res.text)
+    if res_http_code != 200 or res_code == 0:
+        print(f"接口查询失败:{res_msg}")
+    else:
+        # print(res_text['data']['forecasts'])
+        today = datetime.date.today()
+        hour = datetime.datetime.now().hour
+        msg_tip = f"{today} {hour}时,查询到{city}最近四天的天气情况如下:\n"
+        msg_tip += ('*' * 30 + '\n')
+        when = {1: '今天', 2: '明天', 3: '后天', 4: '大后天', }
+        week_index = {1: '一', 2: '二', 3: '三', 4: '四', 5: '五', 6: '六', 7: '天', }
+        count = 1
+        for item in res_text['data']['forecasts']:
+            item_date = item['date']
+            item_dayofweek = week_index[int(item['dayOfWeek'])]
+            if item['dayWeather'] == item['nightWeather']:
+                msg_tip += f"{when[count]}({item_date} 星期{item_dayofweek})全天天气是{item['dayWeather']},"
+            else:
+                msg_tip += f"{when[count]}({item_date} 星期{item_dayofweek})白天天气是{item['dayWeather']},夜间会转为{item['nightWeather']},"
+
+            msg_tip += f"最高温{item['dayTemp']},最低温{item['nightTemp']},"
+
+            difftemp = int(item['dayTemp'].removesuffix('℃')) - int(item['nightTemp'].removesuffix('℃'))
+            if difftemp > 10:
+                msg_tip += f"昼夜温差{difftemp}℃,请注意增减衣物,切勿感冒;"
+
+            if item['dayWindDirection'] == item['nightWindDirection']:
+                msg_tip += f"{when[count]}全天是{item['dayWindDirection']}风,"
+                if item['dayWindPower'] == item['nightWindPower']:
+                    msg_tip += f"风力为{item['dayWindPower']}。\n"
+                else:
+                    msg_tip += f"白天风力为{item['dayWindPower']},夜间风力为{item['nightWindPower']}。\n"
+            else:
+                msg_tip += f"{when[count]}白天是{item['dayWindDirection']}风,夜间会转为{item['nightWindDirection']}风,"
+                if item['dayWindPower'] == item['nightWindPower']:
+                    msg_tip += f"风力为{item['dayWindPower']}。\n"
+                else:
+                    msg_tip += f"白天风力为{item['dayWindPower']},夜间风力为{item['nightWindPower']}。\n"
+            count += 1
+        msg_tip += ('*' * 30 + '\n')
+
+        # print(msg_tip)
+        send_msg_tip(msg_tip)
+
+
+def main():
+    app_id = "nrsngdkvknqkrwko"
+    app_secret = "SFFmQWo2dnNBRjdNYkVSclZxa2ZvUT09"
+    # holiday_today(app_id, app_secret)
+    # ip_self(app_id, app_secret)
+    try:
+        city = sys.argv[1]
+    except Exception as Err:
+        print(f"Error Desc: {Err}. Maybe you need to supply a correct city name next time.")
+        exit(2)
+
+    weather_forcast(app_id, app_secret, city)
+
+
+if __name__ == '__main__':
+    main()
+
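weather_tips.py uses str.removesuffix(), which requires Python 3.9 or newer (consistent with the cpython-39 bytecode committed under alert/__pycache__/). On an older interpreter the same day/night spread could be computed with rstrip, since the suffix is the single character ℃; a minimal sketch (temp_spread is an illustrative name, not part of this commit):

    def temp_spread(day_temp: str, night_temp: str) -> int:
        # '25℃' -> 25; rstrip('℃') behaves like removesuffix('℃') here because the suffix is one character
        return int(day_temp.rstrip('℃')) - int(night_temp.rstrip('℃'))

    assert temp_spread('25℃', '13℃') == 12
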
diff --git a/old/blog_update.sh b/old/blog_update.sh
new file mode 100644
index 0000000..750cb08
--- /dev/null
+++ b/old/blog_update.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+#===================================================================
+# Filename   : jekyll.sh
+# Function   :
+# Usage      :
+# Author     : Manford Fan
+# Date       : 2021-08-29 13:10
+# Version    : Version 0.1
+# Disclaimer : The author is NOT responsible for any loss caused
+#              by the user's own operations.
+# And More   : If you find there are some bugs in this script
+#              or you have better ideas, please do contact me
+#              via E-mail -- mffan0922@163.com
+#===================================================================
+
+# update blog
+echo `date`
+rm -rf /opt/websites/blog
+let numOfAvatar=`ls /opt/websites/nav/assets/images/logos/ | wc -l`
+let randNumber=$RANDOM%$numOfAvatar
+
+cp /opt/websites/nav/assets/images/logos/${randNumber}.jpg /opt/websites/homepage/assets/img/logo.jpg -rf
+cp /opt/websites/nav/assets/images/logos/${randNumber}.jpg /opt/source-code/blog/img/avatar.jpg -rf
+cp /opt/websites/nav/assets/images/logos/${randNumber}.jpg /opt/websites/nav/assets/images/logos/avatar.jpg -rf
+jekyll b -s /opt/source-code/blog/ -d /opt/websites/blog/
+
+# update bash
+rm -rf /opt/websites/just-the-docs/bash
+jekyll b -s /opt/source-code/document/bash -d /opt/websites/just-the-docs/bash
+
+# update python
+rm -rf /opt/websites/just-the-docs/python
+jekyll b -s /opt/source-code/document/python -d /opt/websites/just-the-docs/python
+
+chown -R www-data:www-data /opt/websites
+
diff --git a/old/color.sh b/old/color.sh
new file mode 100644
index 0000000..c273b6b
--- /dev/null
+++ b/old/color.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+nums=(0 1 2 3 4 5 7 8)
+for i in ${nums[@]}
+do
+    for j in `seq 30 37`
+    do
+        for k in `seq 40 47`
+        do
+            echo -e "$i;$j;${k}m -- \e[$i;$j;${k}mHello echo!\e[0m"
+        done
+    done
+done
diff --git a/old/ctc/cdnlog_search_logic.jpg b/old/ctc/cdnlog_search_logic.jpg
new file mode 100644
index 0000000..e8b97e3
Binary files /dev/null and b/old/ctc/cdnlog_search_logic.jpg differ
diff --git a/old/ctc/config.sh b/old/ctc/config.sh
new file mode 100644
index 0000000..9e97c36
--- /dev/null
+++ b/old/ctc/config.sh
@@ -0,0 +1,167 @@
+#!/bin/bash
+# set -e
+# bug-1: cannot find the configuration of a live domain
+# usage
+function usage {
+    echo -e "${c_bc}获取域名详细配置或者对比两个域名的配置异同:${c_e}"
+    echo -e "    config -c domain"
+    echo -e "    config -d domain_1 domain_2\n"
+    exit 100
+}
+
+function onCtrlC () {
+    # when Ctrl+C is captured, kill all background processes silently and exit
+    exec 3>&2           # 3 is now a copy of 2
+    exec 2> /dev/null   # 2 now points to /dev/null
+    kill ${bg_pids} ${progress_pid} >/dev/null 2>&1
+    sleep 1             # sleep to wait for the processes to die
+    exec 2>&3           # restore stderr from the saved copy
+    exec 3>&-           # close the saved copy
+    echo
+    echo -e "${c_bir}Ctrl+C is captured, exiting...\n${c_e}"
+    exit 1
+}
+
+function get_config {
+    # check whether the queried domain is on the platform; the domain.list file is refreshed hourly -- task.sh
+    res=`cat $data/domain.list | grep -w "$domain"`
+    if [[ $res == '' ]]; then
+        echo -e "${c_br}该域名[$domain]未在天翼平台配置,一个小时内新增的域名无法查询,退出...${c_e}"
+        echo -e "${c_br}只接收一个参数,请输入要查询的域名\ne.g. infos www.ctyun.cn\n${c_e}"
+        exit 247
+    fi
+    # ----------------------------------------------------------------------------------------
+    # fetch domain info - part 1
+    curl "https://dcp.ctcdn.cn/traffic-control-api/v2/access/list?access_version_id=1&domain=$domain" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjYzfQ.XZ_nNtRWoBRcPBM-bHAG_ciohkNh69n4AVHwV755r6Q' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_1.log > domain_info_1.response 2>&1
+
+    # check whether the response is HTTP 200
+    cat $trash/domain_info_1.response | grep -Eq 'HTTP/(1.1|2) 200 OK'
+    [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 1失败,退出...${c_e}"; exit 246; }
+
+    python3 /usr/local/script/fanmf11/get_infos.py --map_info domain_info_1.log $domain
+    [[ $? -ne 0 ]] && { echo -e "${c_br}处理域名-part 1信息失败,退出...${c_e}"; exit 242; }
+
+    accid=`cat info.log | awk -F ':' '$1==3 {print $2}'`
+    # ----------------------------------------------------------------------------------------
+    # fetch domain info -- CDN
+    curl "https://confmanager.ctcdn.cn/v1/domain/conf/query_union?precise_query=1&domain=$domain" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjQxNn0.KEFh_yg3J1y8sL7s3X_8jIR8GGq88A89b7J5YhVYVf8' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_4.log > domain_info_4.response 2>&1
+
+    # check whether the response is HTTP 200
+    cat $trash/domain_info_4.response | grep -Eq 'HTTP/(1.1|2) 200 OK'
+    [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 4失败,退出...${c_e}"; exit 243; }
+
+    python3 /usr/local/script/fanmf11/get_infos.py --domain_config_cdn domain_info_4.log $accid $domain
+    r_code=$?
+    if [[ $r_code -eq 204 ]]; then
+        # ----------------------------------------------------------------------------------------
+        # fetch domain info - live
+        curl "https://lasc-new.ctcdn.cn/v1/domain/conf/op_query_union?page_size=20&page=1&domain=$domain" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjE4NH0.1Pgfnky-grT0MN2ic04PqMf32BdnF6iN_PMk3kNOzjY' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_5.log > domain_info_5.response 2>&1
+
+        # check whether the response is HTTP 200
+        cat $trash/domain_info_5.response | grep -Eq 'HTTP/(1.1|2) 200 OK'
+        [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 5失败,退出...${c_e}"; exit 235; }
+
+        python3 /usr/local/script/fanmf11/get_infos.py --domain_info_5 domain_info_5.log $accid
+        [[ $? -ne 0 ]] && { echo -e "${c_br}处理域名-part 5信息失败,退出...${c_e}"; exit 237; }
+
+        # ----------------------------------------------------------------------------------------
+        # fetch domain info - part 6
+        domain_id=`cat info.log | awk -F ':' '$1==4 {print $2}'`
+        curl "https://lasc-new.ctcdn.cn/v1/domain/internal/domain_config?domain_id=$domain_id" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjE4NH0.1Pgfnky-grT0MN2ic04PqMf32BdnF6iN_PMk3kNOzjY' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_6.log > domain_info_6.response 2>&1
+
+        # check whether the response is HTTP 200
+        cat $trash/domain_info_6.response | grep -Eq 'HTTP/(1.1|2) 200 OK'
+        [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 6失败,退出...${c_e}"; exit 238; }
+
+        python3 /usr/local/script/fanmf11/get_infos.py --domain_config_live domain_info_6.log $domain
+        [[ $?
-ne 0 ]] && { echo -e "${c_br}处理域名-part 6信息失败,退出...${c_e}"; exit 236; } + + elif [[ $r_code -ne 0 ]]; then + echo -e "${c_br}处理域名-part 4信息失败,退出...${c_e}" + exit 239 + else + : + fi +} + + +# Self defined color shortcut +c_br='\e[1;31m' # bold red +c_bg='\e[1;32m' # bold green +c_by='\e[1;33m' # bold yellow +c_bb='\e[1;34m' # bold blue +c_bp='\e[1;35m' # bold purple +c_bc='\e[1;36m' # bold cyan +c_bir='\e[1;3;31m' # bold italic red +c_bib='\e[1;3;34m' # bold italic blue +c_bic='\e[1;3;36m' # bold italic cyan +c_e='\e[0m' # reset +# some initialization +stty erase '^H' # allow backspace +data='/usr/local/script/fanmf11/data' +toolbox='/usr/local/script/fanmf11/' +OP="prefix "$@ +dash=`echo $OP | awk '{print $2}'` +first=`echo $OP | awk '{print $3}'` +second=`echo $OP | awk '{print $4}'` +flg=1 # signify if rip is acquired successfully or not, 0 - OK and 1 -NG +TS=`date +%s%N` +host=`whoami` +trash="/usr/local/script/fanmf11/trash/$host/$TS" + +if [[ -d $trash ]]; then + echo -e "${c_br}对于同一个用户,同一时间只能运行一个实例,请重新运行...${c_e}" + exit 245 +else + mkdir -p $trash + cd $trash && cd .. + docs=`ls` + for doc in $docs; do + [[ -f $doc ]] && rm -rf $doc + done + folders=`ls -t` + while [[ `echo $folders | awk '{print NF}'` -gt 29 ]]; do + folder=`ls -t | tail -1` + rm -rf $folder + folders=`ls -t` + done + cd $trash && touch config +fi +# set a trap for Ctrl+C +trap 'onCtrlC' INT + + +if [[ $# -eq 2 && $dash == '-c' ]]; then + domain=$first + get_config + exec 3>&2 && exec 2> log.json + cat $domain | tr -d ' ' | sed 's/"{/{/g' | sed 's/}"/}/g' | sed 's/\\"/"/g' | sed 's/\\n/ /g' | sed 's/\\t//g'| jq -r . | awk -f $toolbox/reformat.awk | jq -r . > log.json 2>&1 + cat log.json | grep -q 'parse error' + [[ $? -eq 0 ]] && { cat $domain | jq -r .; } || { cat log.json | jq -r .; } + exec 2>&3 && exec 3>&- + +elif [[ $# -eq 3 && $dash == '-d' ]]; then + domain=$first + get_config + exec 3>&2 && exec 2> log.json + cat $domain | tr -d ' ' | sed 's/"{/{/g' | sed 's/}"/}/g' | sed 's/\\"/"/g' | sed 's/\\n/ /g' | sed 's/\\t//g'| jq -S . | awk -f $toolbox/reformat.awk | jq -S . > log.json 2>&1 + cat log.json | grep -q 'parse error' + [[ $? -eq 0 ]] && { cat $domain | jq -S -f $toolbox/normalize.jq > first.json; } + jq -S -f $toolbox/normalize.jq log.json > first.json + exec 2>&3 && exec 3>&- + domain=$second + get_config + exec 3>&2 && exec 2> log.json + cat $domain | tr -d ' ' | sed 's/"{/{/g' | sed 's/}"/}/g' | sed 's/\\"/"/g' | sed 's/\\n/ /g' | sed 's/\\t//g'| jq -S . | awk -f $toolbox/reformat.awk | jq -S . > log.json 2>&1 + cat log.json | grep -q 'parse error' + [[ $? 
-eq 0 ]] && { cat $domain | jq -S -f $toolbox/normalize.jq > second.json; } + jq -S -f $toolbox/normalize.jq log.json > second.json + exec 2>&3 && exec 3>&- + jaydiff --json --indent=' ' --slice-myers first.json second.json + echo -e "${c_bic}此对比结果仅供参考,由于不同结构的JSON数据语义可能是相同的。${c_by}可以仔细对比下显示不同的部分,有可能是因为结构不同造成的。${c_bic}另外,可以用JSON在线对比工具做进一步检查如下文件${c_e}" + echo -e "${c_bib} `pwd`/first.json${c_e}" + echo -e "${c_bib} `pwd`/second.json${c_e}\n" +else + usage +fi + diff --git a/old/ctc/dist/get_infos.py b/old/ctc/dist/get_infos.py new file mode 100644 index 0000000..258faa6 --- /dev/null +++ b/old/ctc/dist/get_infos.py @@ -0,0 +1,3 @@ +from pytransform import pyarmor_runtime +pyarmor_runtime() +__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x06\x00\x33\x0d\x0d\x0a\x09\x34\xe0\x02\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\x2c\x59\x00\x00\x00\x00\x00\x18\x25\x5c\xe9\x4f\xb9\x48\x8c\x99\xd2\x34\x53\x13\xfc\x75\xb1\x48\x00\x00\x00\x00\x00\x00\x00\x00\x5e\x19\xcb\xa1\xde\x63\x44\x43\x4c\xa9\x02\x1c\x05\xb9\x2f\x15\xe7\x81\xc9\x03\x71\x80\x19\x2e\x58\xb8\x53\x7f\xcf\x7b\x85\x28\xe9\x8b\xef\xd6\x3b\x0d\x67\x71\x61\xe3\x72\xed\x20\x0b\x96\x55\xb0\xd2\x47\xbe\x5a\x7c\x3c\xc4\xec\xbe\xd8\xb4\x93\x50\x8d\xfe\x8b\x92\x31\x37\xa7\x47\xdd\x52\x1a\xc7\xb8\xe3\x86\x60\x2b\xce\x1a\x00\x01\x76\xa4\x1a\x97\x4c\x4f\xe3\x45\xb1\x5b\x31\xd2\xf3\x0b\xbb\xe3\xe4\xa2\x95\x12\x3e\xdd\x73\xfa\x86\x8e\x03\x0c\x51\xb0\xfb\xeb\x3c\x39\x2c\x29\x4e\x7c\x47\x9f\x3f\x45\xc3\xe0\x1b\x73\x08\x36\xaa\x09\xae\xc5\x99\xd2\x47\x97\x41\x08\xbf\x02\x70\xdc\x1c\x24\x9b\x94\x3b\x6b\x35\x6f\x59\x5f\xb2\x78\xa2\xdb\xa7\xfe\x2b\x03\x9f\x76\x6c\x3f\x22\x10\xfa\xc0\xc1\xa5\x5d\x7b\xd7\x59\xe8\xc8\x5c\xfa\x43\x73\x24\x8f\x34\x76\x0d\x07\xbe\x9a\xf6\x6f\x32\x87\xcb\xfd\xef\x84\x5c\x7c\x28\xfc\xfc\x23\x15\xdf\xaf\x6b\x07\xcc\x38\xef\xc1\x06\x70\x93\x92\x5a\x20\xdb\xae\x09\x49\x04\xb7\xdf\xe2\xcd\x78\x3c\xb8\xac\xec\xb9\x82\xbd\x5a\x50\x63\x9f\xc8\xc3\x82\x6c\x6c\x58\xc7\x08\x95\x6f\x89\xad\x63\x56\x17\xa5\x6f\x74\xd0\xee\x6e\x1f\xd9\xff\x4b\xd4\x74\xec\x23\xc9\x4f\xd7\xd9\x01\xb0\x3d\x77\xa2\x2b\x0e\x50\xa7\x62\xc2\xf9\x4d\x2a\x33\x60\x7b\x53\x49\x61\x47\x2d\x1d\xd4\xa0\xd7\xdb\xf5\x08\x02\xbb\x35\x79\xf5\x8b\xbe\x6c\xfd\xcf\x7e\xb3\x46\xff\x00\x45\x09\x74\x3d\xe7\xc9\x31\x69\xd5\x35\x77\xe1\x63\xe9\xe5\x7e\xd8\xc9\x2f\xb1\x31\x9f\xe5\xf8\xc2\xf7\x9d\xb0\xe4\x91\x8d\xa8\x9b\xad\xd5\x74\x26\xac\x49\x6c\x66\xc7\xca\xe1\xbb\x5a\xf1\x03\x04\x48\x38\x69\x79\x7a\xfe\xbd\x17\x1a\x7f\xe1\xbf\x5b\x89\xc1\x94\xe0\x20\x74\x52\x5a\x47\xac\x21\x4f\x5e\xa9\x7a\x67\xe1\x5d\x85\x01\xf6\xc6\x26\x1b\x8a\xd9\x44\xe0\x4f\x4f\x7a\x8f\x9a\xae\x19\xc8\x3d\x42\x05\x35\x7e\x88\x18\xc2\xce\x9e\xb5\x9f\x9b\x93\xbe\xf1\x23\xab\xf6\xf7\x7a\x6f\x88\x41\x8b\x14\xe2\x09\x3d\x14\xc9\x22\xcd\xdf\x67\x7a\x29\x10\x18\x53\xa4\x1b\xf2\xc7\xa7\x34\x8a\x45\x33\x2e\xe8\xd7\xfd\x96\x8a\xe3\x05\xfa\xc9\x93\xad\x2e\xed\xbd\x1d\xa4\x6f\xcd\x42\xc6\xcd\xac\x50\x50\xf4\x04\x41\xa7\xba\xce\xe9\x59\x30\x21\x1f\x29\x22\x85\x8b\x16\x1b\x06\x40\xa1\xfc\x56\xc1\x0c\xaf\xa2\x94\x3c\xa5\x4d\xc3\xe9\x3b\xb7\x9c\xb9\x6a\x4e\x4a\x3c\x14\x29\xa9\x48\xfc\xec\xaa\xc1\xbc\x14\x78\xca\xa6\x0a\x36\x1f\xee\x7f\x07\xf1\x29\x89\x7b\xaa\x49\x00\x58\xf8\x3f\x01\x1b\x08\x24\xfc\x50\x1a\xca\x5b\xcc\x16\x10\xc2\xe1\xae\x99\x4f\xfd\xb7\x70\x02\x7f\xad\x6f\x62\x5c\x40\xfa\x9f\x90\x38\xd2\xc7\x7e\x77\x45\xc7\xa6\x67\x92\x20\x51\xa4\x46\xb0\x0a\x8e\x97\x4f\x03\xd6\xf5\x38\xe1\x44\xb9\xcc\x27\xbb\x29\xa2\xbb\xfa\xcd\x0c\x83\xc9\x84\xda\x95\x63\x29\x84\xed\x78\x0b\x6e\xf6\x80\x68\xbd\xa0\x0
1\x09\xe8\xe8\xd1\xac\x9e\x87\x8a\x80\x5f\xd9\x20\xde\x54\xb0\x1e\xfb\x05\xcb\x23\x58\xa9\xb3\x1a\x29\x60\x39\x11\x5d\x17\xf3\xd8\x79\xaa\x08\x9d\x5f\x64\xb6\x6f\x08\x37\x55\x84\xed\x5b\x1f\x5b\xbd\x81\x66\x16\x24\x57\x1b\xb9\xd1\x56\x1b\xf1\xa4\x90\xb1\x39\x43\xb0\x8d\x80\x35\x67\x06\x7a\xb9\xbb\x1d\x6f\x20\x0e\x68\x8c\xe6\x28\x38\x6a\x0a\x30\x45\x28\xd3\xb4\xce\x24\xf3\x11\xfa\x6c\x0e\x31\xe6\xea\xf5\x7a\x3e\xc6\x2a\x0d\xd9\x54\xbd\xd0\x33\x98\xe3\x34\x70\x1c\x7f\xc5\xd8\x58\xc5\xe7\xff\x3f\x2c\xd6\x68\x97\x6e\xbf\xb7\xb6\x24\x82\x8e\xe1\xbb\x2a\x32\x88\xc2\x29\x0e\x43\x2f\x13\xc0\x8b\x46\x50\x5d\x6e\xc2\x9f\x16\xea\x5d\x15\x4e\xad\x2e\x7c\xab\x67\xe3\x86\x20\x40\xe4\xdf\xc3\xb5\xed\xf2\xee\xcd\xa8\x48\x60\x58\xcf\x40\x99\x5e\xb4\x43\x6b\x98\xa6\x9c\x4c\x65\x02\x4d\x1f\x88\x1a\x7c\x5e\xcd\xe4\xc6\xcd\x23\x45\x77\x0c\x5b\xea\x52\x95\x3d\xd9\xb9\x00\x11\xec\xd6\x74\x2f\x9b\xb2\x47\xfe\x24\x03\x19\x3f\x65\x07\x26\x0d\xc9\x36\x50\x0f\xc6\xef\x5f\xe8\x1e\x82\x03\x7c\x2f\x8c\xa0\x8d\xf5\x1a\xd2\xe3\x88\xb2\x33\x31\x65\x21\x0d\xe5\x9c\x28\x71\xd4\xe2\x97\x37\x8c\xb6\xb6\xb2\xba\x45\x7c\x6a\x4b\x88\x4c\xf5\xc3\xf7\x63\x47\x38\x94\xc6\xa1\xb8\xbb\xc0\x74\x98\x1f\x61\xe3\xd7\x7f\xf3\x7e\x04\x88\xde\x8d\x1c\xe0\xee\x9f\x47\x93\xaf\x1c\x1f\x77\xf1\xbb\x4e\x05\x51\xf9\x6b\x50\xb4\x29\xdf\x9c\x50\x14\x8f\x26\x08\xad\x2a\x5e\x25\xe4\xf2\xc8\xab\xc0\xe9\xa9\xc0\xd5\xf9\x09\x99\x21\x71\xa6\x5f\x65\x58\x5c\x10\x87\x8c\xbb\x9a\xb9\x70\x37\x67\x9b\xed\x03\x0e\x75\xb3\x08\xc6\xfa\xb3\x3a\xb1\xbb\xd6\x2c\xfc\xf3\x40\x4f\x3f\x16\x66\xb3\x5c\xa8\x87\x84\x2f\x77\x01\x27\x7b\xc7\x95\xcf\xaf\x86\xe1\xa2\x34\x5f\xf1\x40\xb3\x30\x0c\x6c\x45\x8a\x3d\xe6\x58\x54\x78\x25\x5c\xb5\xda\x03\xb8\x9a\x09\x91\x5f\xa1\x93\x3f\x02\x50\x8d\xbf\x72\x98\x3c\xaa\x4b\x5d\xe8\xd2\x9c\x06\x30\x60\x98\x0a\x37\xdf\x13\x00\x47\x54\xa1\x73\xfc\x14\x67\xa0\x0f\x6f\x10\xc6\x44\xad\xcc\xbc\xde\xcb\x0f\x24\x20\x73\xe1\x43\xf9\xfb\xc5\xa0\x12\xf8\x51\xcc\x24\x72\x7b\x07\x3c\xc9\xb8\xde\xb5\x6b\x84\x5b\x8a\xe8\x9a\xd8\xad\x88\x3c\x92\xbf\xe7\x5a\xde\x52\x2b\x67\x19\xd4\x70\xa4\xf2\xcd\xb0\xe4\x79\x43\x94\x70\xef\x30\x43\x8d\x17\x98\x9d\xab\x5f\x12\x86\x08\x7a\xa2\x46\x2b\xc7\x10\xa5\x07\x29\x05\xd9\xb8\xf1\x7d\xef\x18\x93\x71\xe1\x95\x75\x67\xe0\xa3\xb9\x5f\x1e\xf0\x42\xee\x8c\x19\xc3\x1d\x6f\xad\x1c\x45\x06\x3d\xf7\x80\x07\xcb\x25\x27\x3c\xf2\x77\x9c\xfb\x45\xdd\x16\x35\x93\xc9\x33\x84\xfd\xf4\x8a\x44\x04\xa0\x2e\xbf\xb7\x03\x8a\x10\x35\x93\x97\x9b\x0f\xc1\x44\xbc\x29\xda\x0f\x7d\xa0\x00\x91\xb4\x8b\xcf\x4b\x8f\x5b\x99\x51\x43\x19\x42\x52\x63\x56\x36\x80\xaf\x29\x51\x1a\x3e\x69\x83\x29\x9b\xd1\xcd\xdc\x4a\xfd\x9b\xc4\x28\xcc\xb3\x7a\x25\x39\x35\xd6\x3f\xc5\x98\xa1\x32\x82\xda\x44\x0d\xc9\xda\x6d\x5c\x33\x6a\xed\x15\xa6\x06\xa0\xfa\xb2\x0c\x02\xd0\x67\xf6\x2c\x7e\x23\x08\x1e\x76\x95\x39\xa0\x19\x90\x0d\x42\x15\x27\x79\xc6\x8a\x5a\x94\x53\xff\xda\xee\x17\xd8\xbc\xed\x58\x87\xf4\xf1\xfe\xb8\x56\xdc\x85\x84\xc4\x78\x87\x93\xb0\x00\x79\x5e\xb1\xce\x18\xe0\x03\xec\x3f\x30\xeb\x35\x0b\xfd\xc6\x99\x89\xd3\x35\x5e\x2d\x25\xf4\xba\x69\x44\xd8\x77\x5a\x6f\xc7\xe2\xf0\xb9\x42\x05\xe4\x4c\x2a\x16\x7a\x1c\x3f\x76\xa2\x8d\x1e\x80\x21\x7d\xec\xe6\xa6\x17\x34\xad\xf0\xf9\x00\x8a\x5c\xac\xfb\x4c\x2d\x33\x81\xd8\xd1\xc7\x43\x78\xa6\x0a\x06\xfc\x9d\x89\x1d\xfa\xd4\xc6\xb5\x8a\xe5\x86\xa3\xf6\x86\x60\xb3\xd8\x2f\x18\x9f\x96\x23\xff\xfd\xc4\xbe\xf1\x6c\x94\x24\x5e\xed\x3b\x8e\xfa\x14\xc9\x84\x14\xff\xbc\x68\x48\x0e\xe8\x1e\x40\x65\x6c\x60\x07\xae\xad\xbd\x85\x84\xea\xb4\xae\xb5\x2c\x9b\xa9\xcd\x42\x8c\x4e\x52\x41\x82\x6a\x3a\x25\x0a\x40\xeb\x27\xc1\xc0\x2d\x97\xf9\x
bb\xb7\x4d\xd7\x29\xe5\xc9\x17\x1f\x68\xd9\xb2\x0a\x92\x06\xfa\xb9\xdd\x8d\x61\x0c\x6b\xb8\xc3\xf4\x08\xab\xba\x82\x20\x65\x44\xf2\x80\x87\xdd\x9d\xe6\x49\x99\xa8\x3d\x1d\xd5\x95\x49\x56\x74\x82\xef\x6d\xbc\xee\x68\xb3\xa2\xac\x4a\xe7\x2d\xf0\xda\x31\x41\x54\xf8\x44\x41\x1a\x42\xb5\x11\x34\x0a\x5f\x67\x9a\x47\xad\x63\x74\x19\x38\xeb\x79\x4b\x0d\x5d\xa4\x6a\xd7\xb1\x27\x17\x7b\xb5\x96\xb9\xb7\xbf\x7d\x45\xe5\x5a\xd4\xee\x16\x23\xa2\x33\x8d\x99\x6b\xf4\x78\x66\x8f\x4d\x3d\x1c\x98\xe7\xe0\x70\x1d\xef\x54\xf6\x9e\xdc\xa2\x28\x20\x4d\x22\x9b\xe9\xe2\x0f\x6a\x9c\x4f\xfb\xea\xf9\xea\x31\x55\x6f\x00\xdb\xb6\xbc\xab\xc6\xc5\x8e\x19\x4f\xf4\xd6\xf9\x83\xa3\x13\x4a\x8c\x5b\xdb\x92\x61\xfc\xfa\x4b\x32\x20\x09\xd7\x2a\x87\x48\x7e\xa6\x7a\x43\x4a\x1b\x03\x49\xa3\x8e\x96\x64\xab\x5d\x94\x6a\x71\x89\x00\x1f\xd9\x73\x97\xed\x19\x15\x9b\xaa\x57\x56\x01\xd8\x8d\x7b\xa3\xf3\x2d\x9f\x9f\x94\x0c\xa4\x7a\x0d\xff\x02\xa0\x32\x56\xa7\x1e\x87\x44\x17\x93\xd5\xcd\xae\x4e\xf2\xea\xd5\x71\x89\x72\x27\x01\x3d\xa8\xfb\x1a\x66\x3c\x8a\x7c\xb7\x8a\x86\x1b\x70\x44\xf3\x55\x11\xd2\x54\xf5\x8e\x0c\x06\xd2\xe2\xd0\xa6\xa7\xd5\xe6\xaf\x20\x4f\x91\x49\x0c\x46\x81\x5b\xab\xa0\x35\x14\xc3\x5a\xdf\x7f\x49\x3b\x0d\x8f\xee\x30\xc8\xc7\x97\x71\xcc\x64\x44\x56\x0b\xf5\x56\x34\x02\x6f\x29\xc1\x4f\x05\x67\x9e\x89\x89\xd3\x42\x8e\xb9\xfa\x63\xf5\x5a\x0a\xe8\xf0\xea\x9b\xb9\x5c\x14\x73\xce\xc1\x2f\xf4\x9d\x34\x61\xd4\xd4\x81\xb3\x0d\x80\x07\xb6\xf1\x37\x64\x9e\x84\xaa\xaa\x40\xc9\xb6\xdc\xe9\x7a\xec\x43\x38\x24\x31\xa0\xfe\x0a\x42\x61\xb5\xeb\xb8\xe4\x03\x98\x39\x58\x44\x78\x4d\x39\x01\x0d\x20\x72\x1e\xe6\x2f\x9d\x01\x3a\xfd\x03\xce\x70\xa8\xb2\xf1\x1e\x3a\x6c\x8c\x78\xab\x62\xf9\x15\xb5\x87\xf8\x19\xfb\x4f\x30\x9f\xf3\x32\xd7\x76\xce\x00\x64\x4a\x4e\xab\xc5\xc9\x3e\x6d\x63\xd1\x8c\xce\xe0\x32\x5d\xbe\xdd\x79\xff\xf4\x16\x87\xb2\x5d\x63\xef\x16\xde\x3c\xb9\x98\xe0\xef\x70\x03\xd5\x86\x3a\xc6\xe0\xbc\x48\x77\x0a\x73\x4e\x6f\xb5\x8f\x33\xc2\x4f\x79\x41\xf7\xf4\x19\x86\x41\x44\xde\xf2\x49\x0b\x7c\x86\xc1\x51\x6b\x0b\xa6\x18\xc7\x05\x16\x02\xa0\x5d\x4e\xb2\xde\x70\x17\x15\x00\x06\xce\x1c\x89\xda\x8b\xb0\xcc\x61\x7b\x8c\xc3\x54\xa6\xd1\xd8\xbb\x7a\x15\xbf\xc4\x8a\x6c\xbe\xe9\x3d\x68\x87\x7f\x8b\x4a\x22\x91\x64\xab\xbf\x7e\xf8\x4e\xb3\xb7\x37\x06\xd3\x1f\x6b\xb9\x9f\xc2\x05\x2e\x03\x1d\xe0\xbd\x89\x2e\x63\x26\xe3\x4a\xe2\x24\x49\xf8\xa5\x59\x43\x43\xe4\xf0\xd9\xb5\x29\x9d\xcf\x05\x0e\x45\xa3\x69\xa2\x93\xe6\x37\xa8\x6d\x8a\x49\xe1\x75\x08\x3e\xd1\xd4\xfb\x98\xe7\x58\xd6\x55\x85\x03\x75\xb2\x1e\xc8\x9c\x3e\x10\xc5\x3a\xb5\xed\xb6\x6d\x0a\xb4\xee\x3d\xa4\x1e\xdc\x52\xfb\x1d\xc5\xbb\x99\x0d\xed\xf3\xc6\x9a\x67\xdf\xf8\x97\x86\x13\x17\xa3\xa9\xb0\xc8\xcc\xa7\xd5\xe8\x17\x61\x76\x4d\xff\xd0\x9d\x41\xd3\x31\x66\x29\x87\xfe\x03\x3e\x68\x97\xc2\xb5\x65\x7b\x80\xb8\xb8\x0f\xca\x1d\xe5\xbd\x7c\xdb\xf8\xeb\xf2\x61\x9a\xc4\x21\xc4\x5f\x95\x46\x4f\x43\xd1\x80\xa5\x01\xc5\xb6\x67\x38\x74\x5d\xcb\xcd\xf3\x6f\xe8\x4f\x4b\x6f\x4e\x68\x41\xca\x1c\x5b\xc9\xe5\x3d\xab\x8e\xea\x6b\xb4\x15\x4e\x99\x14\x59\x76\xd0\x76\x87\x9e\xd7\x1c\x2c\x10\x08\xbf\x49\xe3\x1c\x5f\x88\x90\xdd\xb0\x0e\xe8\x1f\x67\x31\x6e\x0c\xd0\x3f\x9f\x7c\xb9\xa7\x30\x6b\x55\x42\xb1\x38\x1f\xca\x33\x53\xf6\xb9\x8b\x57\x8f\xf7\xab\xb7\x76\x7f\xfe\x67\x11\xaa\xbf\x2f\xf3\x32\x2d\xc4\xcf\x20\xdd\xfc\x1d\x4e\x23\x3e\x60\xbc\xe8\xed\xa5\x15\x61\x1f\x3d\xd1\x3a\xc4\xbe\xba\x3d\x0f\xa8\xc1\x6b\xfd\x1d\xd0\x1a\xce\x41\x27\x17\x5d\x7a\xf7\xe4\x80\xd1\x8a\x23\x91\x6b\xb6\xad\x9c\x57\x59\x7e\xbc\x4c\x0a\x70\x69\xc5\x4f\x65\x43\x0c\x95\xa5\x2c\x4d\x53\x76\x10\xa3\x54\x52\x64\xa2\x45\x7b\x73\xa4\x1b\x97\x06\x76\
xf9\x33\xde\x0d\xf3\x2a\x4b\x63\x60\x18\x2e\xd1\xbe\xbe\xda\xf7\x6f\x87\xb7\xb1\xb4\x63\x87\xbc\x16\x34\x16\xde\x01\xb2\xee\xc0\x41\x09\x8e\x9e\xa9\xe8\x41\x13\xf6\x42\x90\xeb\x90\x05\x4f\x4b\x98\x9e\xa9\x64\xf7\x9a\xfb\x8a\xfe\xbf\x4c\x5e\x76\x29\x42\xc0\x86\x48\xb1\x95\xf6\xec\x33\x6f\xe9\x4e\xc6\x4c\x78\xf0\x90\x27\x44\xf4\x13\xde\xfa\x2f\x37\xb5\x34\x10\x7d\xff\x97\x77\x3b\x97\x60\x14\xbf\x72\xe5\x88\x0d\xc7\x46\x6d\x79\x26\x81\x20\x0a\xb0\x81\x63\x4a\x5c\x3a\x03\xf9\x61\x75\x6c\xb9\x44\x38\x5b\x79\x60\x5a\x31\xee\x97\x78\x3c\x2a\x1e\xa2\x6b\x77\x97\x82\xca\xf7\x52\x5c\x8e\x9e\x76\x65\xe5\xf5\x61\x30\xbb\xba\xc8\x03\xa1\xb3\x46\xa3\xc5\x63\xea\x46\xed\x52\xd7\xd3\xeb\x60\xeb\xfb\x21\x5d\x2d\x67\xe2\xf1\xc4\x68\x34\x9a\xac\x7d\xdd\x6a\xeb\x8f\x82\xd6\x13\x75\x00\xd3\x77\x24\xd4\x4c\xfc\x2a\xc1\x1b\xed\x64\xfe\xba\x03\x2b\x8c\x18\x1b\xa2\x1d\x00\x6c\x00\x26\xd4\xb1\xb0\xad\x53\xa0\x43\xe4\x60\xe2\x69\xa5\xcb\x36\x5e\x03\xf3\x39\xb5\xd9\x2e\x43\xcf\x4a\x59\x52\x44\xae\xed\xb1\xa6\x3b\x63\xbd\xb3\xe4\x4a\x42\x7b\x23\x06\x2a\x9b\xf9\x07\xcd\xab\x09\x90\x43\xde\xc3\x3c\xb9\xdd\xb5\xa9\x03\x84\x37\x45\xfb\xef\x2f\xb3\x7a\x10\x76\x27\x18\x8b\xd2\x92\x0c\x49\x25\x81\xf8\xd0\xc4\xcd\x04\xfe\xc1\x94\x75\xde\x0d\x77\x99\xcb\x8c\xb7\x8b\xd8\x0b\x26\x91\xcd\x4c\xca\x85\xd5\x1e\x2c\x2a\x03\x53\x48\xc9\x6d\xf8\xd6\x29\x53\x28\x94\xe4\x76\x21\x60\x66\x76\x1b\x0b\xe5\xb0\xde\xdb\x6a\x27\xc7\x3e\xac\x4a\x9f\x79\x31\xb7\x60\x9e\x81\x3c\xfc\x27\x15\x01\x15\x7e\xff\xf1\xd1\xec\xf7\xa8\x46\x99\x96\x77\xdb\x40\x99\x4f\x35\xc5\xc0\xe0\x3f\x74\xf5\xcf\xfa\x20\xb3\xff\x7a\xd3\x89\x74\xfa\x63\x22\x14\x8b\x4f\x1d\x58\xfb\x28\x40\x29\xf3\x2b\xcf\x53\xf5\xa8\x29\x90\xa8\xa8\x74\x71\x54\x3d\x1c\x12\x76\x4c\xec\x5f\xf2\x77\x12\xb8\xb5\x4d\x28\x93\xbe\x89\xbf\x4e\x74\x8a\x24\xdc\xe1\x96\x2d\xc9\xc9\xdd\xe9\xdd\xc2\xca\x2d\x66\x2a\x3b\xda\x76\xb2\x36\x1e\xdb\x1d\xfb\xa6\xef\x57\x76\x2b\x12\xf2\x60\x00\x72\x18\xa1\x99\x64\xe1\x6f\xa8\xfa\x92\xc9\xa1\xdd\xd9\xdb\x5a\x5c\x25\x9c\x28\x87\xf7\x22\xb3\xd3\x68\x8f\xda\x3a\x53\x65\xa0\x40\xcc\xbd\x37\x56\xb3\xb7\xd5\x22\xcf\x5a\x58\x7d\xfc\xff\x69\xc0\x99\xc5\xd9\x28\xdd\xbf\xd3\x20\xf7\x6c\xe6\xc2\x87\x96\xe6\x26\xb1\x3d\x5e\x8d\x84\xb0\x27\x8e\xe3\x92\xa0\x06\x60\x45\xfa\x20\xbd\xc6\x88\xab\x90\x34\x36\x9e\xb0\x16\xda\x03\xa2\xa2\x74\x63\xb7\x18\x2f\x43\x9f\xf1\x53\x15\x32\x2d\x14\xa3\xa2\x20\x3d\x68\x55\xa4\x06\xc5\x19\x43\x23\x37\x61\xa9\x89\x1f\x30\x68\x55\x59\x0d\x49\xce\x86\xf2\x89\x1b\x2e\x61\x5f\x2d\xe6\xd9\x92\xfd\x9c\xde\x38\xc6\x12\x52\x45\xca\xf6\xc4\xbc\x09\x86\xdb\x5a\x4d\xa7\x91\x2c\x6a\x7f\x42\xd0\xac\x42\x23\x2d\xc4\xab\x0f\x65\x2e\x1d\x51\x5b\x95\x0f\xce\xe9\x59\xe8\xa5\xc7\x4a\x41\x9c\x1c\xc2\xe0\xf6\x46\x24\xd9\xfe\xa7\xf1\xa4\x1a\xe3\xba\x72\xbd\xab\xf8\x06\xba\xfa\x7f\x51\xde\x19\xb6\x1a\x0c\x6b\x89\x5f\x13\x0f\xc7\x75\x33\x9f\x2e\xd8\x2d\xe1\x7d\xd7\x09\x21\x8f\x77\x08\x51\x38\x3b\x74\x31\x6b\x8d\x91\xd7\xab\xd6\x83\x41\xb4\x1a\xff\xcf\x66\xfd\xfb\x92\x0f\x76\x90\x41\x5f\x1a\x40\xc4\x55\x70\x34\x8f\x40\x51\x0c\xb9\xe4\xb3\xad\x5c\xe2\x70\x7a\x23\xfe\x54\xa7\x32\x5b\x84\xad\x03\x49\x6b\xb2\xfb\xc8\xa4\xf5\xa8\x36\xa0\x6b\xe6\x5a\x82\xde\x28\x78\xa1\x05\x00\x39\x38\xbe\xe0\xbe\xdf\xd5\x29\x8d\x2e\x9f\x95\x71\x3f\xa0\x37\x7a\x96\xa6\xe4\x77\x10\x31\xe2\x19\x33\x35\x27\x2a\xcf\x58\x82\xe7\xc6\xdd\xbb\xe4\xcb\xe5\xed\xec\xa2\xb4\xe0\x64\x90\xd2\x37\x27\x19\x3a\x81\x24\xe7\xfa\x93\x51\x4a\x70\x9c\x9c\x1b\x55\x5e\x83\x48\x6b\xa7\xee\xc7\x60\x53\xf0\xb6\x34\x15\x4b\xee\xbd\xd9\xd5\x46\x33\x44\x79\x42\x04\x29\x29\xc3\x88\x28\xf3\x51\x55\xc0\xf2\xac\xe5\xf7\x89\xa3
\xe0\xfb\x0c\x91\xe8\xea\xc2\xe7\xc0\xc3\x6d\x4c\x30\x4c\x05\xc5\x18\x5d\x86\xca\x94\xb7\x44\x17\xcd\x10\xb1\x39\x06\xd7\xb6\x64\x24\xb5\xee\x67\xee\xfb\xef\x5d\xae\xc0\x80\x88\x32\xf2\xa0\x7f\x31\x05\xe1\x8f\xad\xe6\x87\xad\xb0\xed\x4e\x1e\xb6\x0d\xf3\xa4\x8f\x0f\x68\x81\x7e\x5e\x2b\xcd\x6d\xc0\x18\x55\x41\x51\x31\x5f\xed\x08\x60\x26\xc0\x4b\x10\x9a\xa5\xcf\xba\x64\xab\xf3\xc7\xc1\xa4\x78\xb7\x71\xa9\x80\x78\x9e\x1c\x56\x62\x07\x2a\x0d\xeb\x6e\xf3\x38\x3b\xab\x85\x80\xa0\xec\x80\x28\xfc\x4c\x70\xb2\xba\x2d\xc8\x43\xfe\x1d\x62\x0b\x69\xa1\xc9\xe7\x6d\xe8\xbc\x40\x00\x98\x13\x8e\x75\x48\xc8\xe3\x4a\x2f\x2f\xba\x79\x75\xa8\x20\x4e\x84\xea\x2d\xe2\x58\x66\x03\x88\xee\xbe\x31\x48\x97\x12\x40\xde\x65\xd4\x1c\x1b\x13\xdb\x1a\xb5\xd8\x66\xa8\x4a\x6a\xde\xb5\xc5\x28\xb0\x73\x61\xb9\xe7\xe9\x82\xad\xea\xc0\x88\x61\x0a\x02\x50\x6c\xb3\x88\xbc\xdc\x7a\xdf\xdd\x4d\xc4\x4d\x14\x60\xb7\xc2\xab\x41\x6d\xe6\xb6\xa2\x8f\x2c\xa5\xef\xa6\x29\x12\x5c\x58\x98\xe5\x98\x33\x0e\x55\x1a\xae\x4c\x14\x62\xb3\x1c\x0a\x45\x61\x54\x23\x7b\x53\x82\xb8\x04\x36\x93\x6e\xc9\x05\xbe\xa7\x8a\xe8\x8a\x61\x28\x41\x47\x77\x6f\x4c\xb0\x07\x35\xab\x90\x0e\x72\x57\x47\x30\x0c\x60\xed\xa9\x83\x22\x7e\x6d\x62\x3e\x81\x67\xe2\xab\x6b\x28\x83\x0a\xe6\x3b\x7c\x01\xcc\xef\xc1\xd8\xce\x27\x67\x15\x7a\x2d\x90\xc9\x8b\x12\x87\x30\x99\xfe\xe2\x4d\x6e\x3a\x76\x20\x15\x20\x64\x27\xb4\x1b\xb3\x36\x55\xc1\xd0\xb2\x7e\x70\xc7\x90\x8c\xcd\xb2\xea\xf5\xdd\x75\xcc\xf6\xda\xc8\x25\x7c\xe8\x69\xdb\x52\xc4\x43\x3a\xe6\x8c\x1d\x21\x5d\xd9\xad\x4a\x9a\x72\x2a\x5f\x75\xc9\x33\xd8\xd5\x38\xa5\x92\x89\xb2\x2e\xe8\x2e\xf7\x7a\x9a\xc6\x18\x5f\x2e\x29\x28\xcf\xd6\x41\x50\xf8\x97\x99\x62\x75\x6d\xa6\x70\xc8\xdb\x7e\x37\xbd\x59\xd2\x67\x33\x16\x18\x5d\xbe\x4c\xd3\xc7\x1e\x86\x8b\xcd\x0d\x55\x1a\x75\xf6\x7a\x88\x84\x09\x0b\x8b\x0c\xb6\xd6\x40\x3a\x36\xc1\xdf\x12\x4d\x42\x59\x7a\x71\x18\x1d\xbd\x12\x43\x32\x98\x66\x23\x9c\x26\x1b\xb6\xe1\xb4\x08\xfb\x1c\x86\xb2\xf4\x1a\x6a\x83\x1d\x1d\x58\x19\x01\x76\xd3\x87\xbd\x74\x76\xdd\x25\xfd\x05\x4d\x5d\x07\x34\xa5\x02\xe3\xe3\x9f\x69\x9d\x1e\x99\x11\x2f\x45\x08\x75\x17\x63\xaf\x39\x2c\x5f\x36\x7e\x33\xef\x54\xd8\x6c\x22\xcd\xa4\x64\x2c\xde\x42\x05\x9f\x9e\x28\xfb\x86\x1c\x51\xb4\x69\xb9\x8d\x02\xf6\xd6\x52\x37\x6b\xd4\x4b\xe7\x10\x2c\x0b\x57\xa7\x18\x55\x29\x31\x26\x10\x1b\xe4\x38\x5b\x7e\x89\xc2\xc4\xe0\x67\x6e\x27\x96\xab\xd3\xc9\x71\x8f\x4c\x86\xf7\x3a\x11\xb1\x48\x64\x63\xec\xf3\x5f\x16\x61\xe5\x87\x6d\x69\xc3\x59\xcc\x00\xcd\xbb\xcd\xdf\x5d\x20\xe7\xfc\x91\xbc\xbc\x27\xa8\x01\xc6\x57\xfc\x65\x0f\xc7\x69\x65\xfa\x43\x16\x78\xd9\x7e\xdb\x50\x8b\x18\x3d\x11\x67\x6b\xa0\x3e\x10\xf8\x8f\x78\xeb\xeb\xff\xa3\x81\xd2\x67\xa6\x0c\x7e\x88\x34\x2d\xdc\x49\x1d\x58\x31\x88\xbe\x40\xc1\x13\xac\xbb\xf2\xf8\xe3\x71\x5c\xd9\x19\x34\x61\xb8\x41\xdc\xac\x90\x5d\xb9\x10\x7d\x61\xe6\xe8\x90\xd0\x80\xbe\xe9\xe1\xf3\xa5\x85\x19\x97\x92\x7d\x1a\x1c\x5d\xa9\xd5\xf1\xb2\x0b\x26\x85\x5a\x9a\xda\x38\x8f\x8a\xd9\xec\xac\xc1\x7d\xfd\xca\x0f\x74\x8d\x7a\xd3\x27\xce\x6f\x26\x65\x8e\xe4\x53\xa4\x81\x53\x5f\xf9\xb5\x74\xe2\x65\xd6\xbc\xb6\xd0\x14\x87\x4e\xc1\xbd\x13\x17\x59\x3e\xb2\xd4\xc0\x1b\x0c\xe3\x17\x51\xb3\xd1\xad\x1b\x05\x20\x53\x92\xea\x57\x71\x3e\xc5\x4e\xa7\x52\xfd\xcc\xa9\x8e\xa7\x50\x6a\x45\xe7\x7d\xf2\x92\xde\xc8\xff\xab\x5e\x64\x79\x7c\xc7\x1d\xc5\x1a\xf6\xe0\x74\x2f\xcd\x64\x22\x99\x7e\x88\xb0\x0c\x07\x2d\xd5\xfb\xba\xd1\x2b\xdd\x40\xf8\xea\x59\x1b\x25\x09\xbf\xaa\x4f\x71\xeb\xe1\x5f\xb8\xbb\x4a\x13\x28\x9b\x1a\xad\x89\x1f\xfa\x55\x0c\x69\x2f\x0b\xb1\xa6\x6a\x87\xcb\x3c\xd1\x8e\xa5\x5b\x19\x57\xdd\xe9\x90\x27\x6e\xd6\x9d\x77\x65\x90\x7
b\x9e\x75\x2f\xd3\x44\x88\x04\x45\x57\xb3\x13\x01\x61\xb9\xab\x97\x79\x3f\xd9\x65\xeb\xac\x17\x75\x10\x64\x25\xea\xd7\xb0\x95\x1f\x22\xf8\x2b\x16\x22\xb4\xf0\x47\xc2\x55\x8c\x9a\x80\x08\x6f\x3e\x24\x8a\x2b\x74\x11\x51\x5c\xc0\x54\xc9\x5f\xd3\xc2\x26\x17\x36\x92\x0b\x7a\x4c\x7b\xe4\xdb\xf0\x2a\xf8\xe0\xd8\x4a\xae\x53\xc9\x9c\xc4\x06\x51\x2f\xe0\x1e\x61\xb7\xea\x9f\x9f\x1d\xfa\xa6\xc7\x95\x01\x0f\x62\x36\x30\xbc\x26\xc1\xab\xbe\xf1\xa7\xaf\x23\x41\x89\x9d\x2d\xeb\x50\xcc\xa6\x22\x01\xd3\x68\x23\xab\x20\xf6\x7f\x29\x60\x5e\xe9\x8b\x8e\xc7\xdf\x16\xeb\x9a\xd9\x0c\xed\xbd\xe5\xc9\xd5\x7a\xaa\x50\x7b\x47\x69\x2c\x56\xc8\xcb\xe4\x4c\xaf\x2b\xb2\xd4\x46\xc0\xce\xb4\x90\xe7\x9a\x48\x80\xfd\x79\x60\x40\x88\x80\x68\xf8\x49\x82\x2b\xe2\x1e\x07\x38\xe1\x46\xfc\x53\x75\x8e\x8b\x15\x22\x13\xcc\xd7\x21\x0a\x57\x59\x08\xee\x4b\xff\x5a\x93\x85\xcd\x81\xa0\x42\x30\x02\x6b\x05\x2a\x8c\xd1\x82\x0b\x4c\xa1\x66\x66\xec\x59\xa6\x40\x16\x9c\x49\x62\xc8\x4d\x36\xb0\xf4\x39\x84\xfe\xc4\x7b\xd3\x12\x76\x36\xa9\xfc\x74\x5b\xba\x77\x25\x83\x01\x0f\x79\x62\x35\x95\xfb\xa5\xf7\x35\xbd\x47\x83\xd9\xa9\x9d\x84\x28\xca\x4c\x57\x59\xe7\x4f\x5c\x39\x0c\xf2\x5a\x67\x72\xf6\x9d\xcc\x57\x88\x97\x89\x0a\x43\x12\xcb\x8e\x4d\x0b\x1b\xb5\x8a\x50\x8c\x7d\x19\xfb\x88\x12\x9f\xa5\xee\x8c\xa8\xf7\x82\xbb\xc2\xe3\xa0\xa4\xea\x5a\x97\x54\x70\xbd\xcc\x0c\xc4\x78\x49\xa5\x38\xe0\x23\xe3\x26\xa1\x89\x60\x4d\xda\x51\x14\x20\x98\x59\x87\x33\x72\xf1\x50\x2b\x17\x61\x7f\x5f\x0f\x33\x6c\x7b\x1a\x41\x77\xc3\x02\x7c\x1e\x9a\xe7\xe4\x32\xb6\x3b\x02\x7a\x2d\xff\x12\xd9\xdb\x9d\x5b\xe4\x49\x97\xd1\x04\x7e\xdd\x02\xb1\x6c\xa2\xe3\x16\x1e\x91\xb6\xcb\xa6\xad\x54\x7b\x24\x77\x30\x29\x0f\x29\xe6\x7b\x8c\xd9\xb0\xc8\x2b\x05\x7b\xc2\xfb\x14\x21\xbc\x8f\x82\x46\x33\xfd\x91\x55\xc2\xdc\xfe\x69\x18\x9d\x70\x4c\xb7\x50\x12\x68\xed\x38\x3d\xb8\x96\xdb\x42\x12\x6b\x67\xf7\x2a\xb0\x52\xcc\xec\xb8\xee\x3f\xbe\x9c\x16\x1c\x3c\xc1\xca\xc1\xb9\xe2\xd4\x77\xe3\x6a\x84\xe6\x9b\x7e\xef\x17\x89\x7e\x5d\x35\x6a\xd8\x83\x56\xde\x4d\xe2\xe3\x39\x8d\x0a\x0e\xd8\xa7\x70\x12\x44\x6c\xc5\x91\xd7\xf5\x06\xf4\xf5\x6e\x71\x1e\x36\xd2\x33\x2a\xc3\x41\x2e\x75\xeb\xb9\xcd\x79\xc0\x5d\xe5\xda\x25\x29\x19\x3a\x03\xf5\xbd\x94\xdf\x00\x92\x59\xed\x8c\xf0\x51\x17\x89\xb5\x44\x6b\xbb\x2e\xdf\x73\x49\x0a\xb3\x41\x86\x09\x63\x07\x73\x39\xea\xbc\xfd\x44\xcd\xf1\x7c\xda\x24\xc1\xec\xcd\x4c\x9e\x42\x4a\xa5\x77\xa2\x51\x6c\x37\x3c\x27\x81\x88\x32\xdd\xf5\xfe\x88\x83\x6a\x81\x39\x89\xb6\xfb\x05\x7e\x74\xce\xa2\x0b\x87\xc8\xf1\x88\xec\x4f\x8a\xf2\xa7\xb3\x9d\x47\xd2\x56\x57\x02\x77\x7e\xdc\x7f\x5f\x16\xa1\x83\x2a\x23\xb1\x2e\xb9\x0f\x59\xed\x06\x81\xa7\x3c\x17\x64\x17\xab\xd8\x77\x47\xe4\xd4\x51\x29\x92\x23\x7b\x37\x91\xd5\x46\xf7\x04\x62\x95\xe0\x50\x66\x41\xa2\x99\x75\xd1\xb1\xdd\x22\x43\x22\x19\x6a\x79\x23\xc3\x87\x5a\xbc\x36\x4d\x91\xda\xe2\x29\x70\x1c\x17\xc4\x56\xdf\xdb\xd5\xe0\x82\x39\x82\x84\x62\x82\x19\xec\x5b\xad\xad\x48\x27\xf4\xb2\xbb\x3f\x73\xe8\xd4\xec\x23\xe6\x4e\xa4\xc9\x16\x9c\xc1\x63\xbf\x90\x04\xc4\xa5\x61\x6e\x88\xa7\xf0\x89\x71\xf8\xc1\xef\x0e\x1c\x30\x1d\x98\x41\xc9\xbd\x43\x42\x5f\x84\xdd\x88\xc6\xfa\x0a\x20\x66\x69\x8b\x75\xce\xa9\x8e\xeb\xa5\x83\x4c\x25\x48\xb8\xd3\xbf\xd4\xaf\x6f\x3d\x70\x99\xa5\xcc\x55\x60\x3b\xa7\x0b\x51\x51\x7e\x5c\xf2\x48\x91\x2b\x7e\xeb\x0a\x6b\x0b\xcd\xc9\xc5\x19\x23\x00\x31\x19\x30\x5e\xfe\xb8\xec\x50\xd5\xb9\xfa\x3d\xbf\x34\x8e\x6f\xb8\xd4\x40\x49\x34\x7a\x45\x61\x92\x8a\x7d\xb9\xe1\xe2\x8f\xbb\x0b\x5a\x0d\xd8\xb7\xff\xc5\x08\x32\x2d\x3f\xf4\x95\x73\xcd\x78\xcb\xbb\x81\xfd\x91\xcf\x4f\xe2\xb1\x69\x28\xad\x3d\xb9\x94\x52\x17\x3b\x06\x7b\x24\x3d\xf3\x
f8\x48\xd3\x7d\x06\x81\xfe\xde\x67\x93\xa0\xa5\x81\xf5\x1b\x8a\xf9\x81\x98\xc6\xb8\x4b\xab\x4e\x28\x66\xf7\x76\x7e\x8f\xb4\x1c\xa6\x2b\x63\x01\x8e\xde\x1e\x2b\xb2\x3c\x93\x66\xe1\x59\xee\xd5\x01\x14\xda\x33\x0e\xfc\x90\x02\x2e\x55\x01\x22\x33\x8c\x42\x67\x95\xaf\x06\x77\x30\x67\xc8\x34\xd9\x2a\x90\x88\x31\x2c\xf2\x97\xc0\x96\xcf\xa5\xd9\x01\x27\x84\x5d\x49\xef\xba\x30\x56\x2f\x0d\xa3\x52\xe1\x18\x45\xd7\xb1\x7d\xc3\x37\x27\x5a\x90\x54\x07\xf2\x52\x8b\x7f\x01\xcc\xc8\xcc\x4f\xd8\xa0\x63\x91\x18\x7a\x2c\x54\xa1\x50\x7d\x22\x20\xde\x90\x9c\x3b\xfc\xe1\xad\x98\x94\x42\x46\xdc\xdc\xa3\x52\x6f\x9c\xcb\xa1\x42\x86\x70\xbd\xea\xd6\xe3\x1a\xe1\x72\xe3\xcd\x47\x2d\xa0\xaa\xf0\x5b\x61\x71\x56\x5a\x9b\x54\x9d\x3c\x8f\xf3\x5d\x84\xe5\x30\x0c\x81\x3e\x0e\x80\xf3\x11\xf6\xf6\xdf\xb4\x45\x79\x2d\xa0\xfb\x2d\xe8\xa8\x93\xb1\xb7\xc9\xfc\x9a\x0a\xd6\x52\x7a\xa5\x21\x2f\xc7\x47\xaf\x56\xf3\x52\xb3\x12\x91\x06\x1f\xdf\x15\x9d\x6b\xf1\xaa\x70\x8a\xa9\x95\x8a\xf5\x93\x17\x1e\xbe\x38\xe9\xc4\x04\xc0\xfa\x89\x5f\xee\x95\x66\xf6\x6d\xf6\xaa\xed\x20\xab\x2a\xcc\xd7\x73\x1f\x84\x2a\x36\x1b\xec\xde\x2a\xd1\xa3\xbc\x21\x31\xb9\xac\x38\x2d\x05\x52\xd5\xb4\x44\xa0\xad\x34\xf0\x54\x24\x7d\xa5\xe8\x54\x5a\x61\xe2\xfe\x8f\x36\x4d\xce\xd1\x18\xd0\xdd\xdf\x12\x61\xfb\x9c\x1a\xf6\xb9\xcb\x94\xd5\xd9\x77\x68\x2f\x1d\x2b\x60\x90\x95\xd2\x1f\x03\x52\x69\xf6\x1f\xbe\x79\xec\x42\x1b\x42\xaa\x59\x49\x2b\x7a\x30\x31\x86\xd7\x7f\x65\x9b\x6b\x0a\xb5\xe7\x96\x35\x9d\x0a\xe8\xea\xb9\x7e\x4a\x05\xfe\xaf\x76\x38\x47\x24\x7b\x9b\x34\x8d\xb5\xdb\x75\x1a\x05\x39\xa8\x79\x6a\x03\xc1\x4a\x6b\xd7\x8c\x6a\x3c\x11\x9b\x0e\x2f\x49\x6b\x4f\xc1\x6b\x50\x7d\x36\x2c\x56\xfe\x3f\xc4\x92\xc6\xf2\x31\x69\x44\x6c\x3d\xbb\x7d\xbd\x4e\xe0\xe6\xa5\x1d\xc6\x17\x98\x60\x3c\x2c\x7a\x60\x9a\x3b\x91\x6f\xb7\x84\xb2\xa4\xb1\x95\x38\xd4\xf4\x50\xed\x3a\xed\x01\xe2\x82\x25\x46\xa3\xe9\x34\xd0\xbe\x78\x2d\x0e\x47\x3e\xf8\x22\x7e\x53\x55\x15\x3e\xf4\x33\x39\x41\x56\x2e\x60\x56\x72\xd9\x22\x40\x53\x81\x9d\x92\x1c\x19\x28\x21\xf1\x3e\x7d\x5a\x00\x20\x93\x5a\xd1\x44\x4c\x22\x99\x7b\xa9\x73\xc5\xfd\xc9\x69\x14\x9f\x81\x6e\xd4\x7f\x35\x47\x9c\x5c\xed\xc5\x9a\x51\x8c\x72\x09\x41\x85\x15\xca\x81\xb4\xb4\x03\x13\x34\xc9\x79\x67\x8a\xad\xeb\x27\x82\x14\xfb\xf3\x52\xf2\x16\xe5\x80\xf1\xd2\xbc\x48\xd6\xef\x9e\xfe\xf9\x84\xbd\xc9\x08\x6c\x7b\x51\x21\x10\x11\xda\x4b\xf4\x37\xa7\x89\x29\xe6\xa2\xdc\x00\x75\x76\xb8\x95\x83\x70\x44\x62\x51\xe0\xd2\xf7\x2d\x47\x14\x4a\x8b\x4a\xf2\xd1\x97\xfe\xec\xda\x17\x84\xe7\xfc\x48\xfb\x47\x98\x0c\xf0\x67\x0e\x8c\xb2\xfa\x1c\x4e\x48\x3c\x45\x03\x4c\x1f\x5b\x63\x3f\x33\xd7\xe2\x8a\x8a\x5d\xae\x08\x60\x9a\xfa\x0d\xf7\xb2\x11\x1a\x87\x3d\x98\xd9\x01\xe2\xd6\x57\x3b\x5b\x85\xde\x36\x5d\x60\x48\x1a\x71\xc3\xbf\xb6\xa9\x20\x9c\x41\x73\xea\xd8\xd2\x1d\x31\xa9\x41\xfa\x1e\xbc\x3e\x2d\x4e\xab\x71\x62\x58\x00\x95\xc7\x15\x94\xd1\x76\x46\x93\xaf\x56\x93\xd9\xae\xd8\xde\x4d\xf0\x02\x86\xc4\xd3\xc2\x00\xdc\x64\xfb\xf7\x2a\xd1\xc5\x23\xd4\xfa\xf4\x4f\x04\x3d\x17\x9f\xb1\x71\x2f\x8c\x2a\x36\x9f\x6c\xe4\x98\xfa\x38\x58\x5d\x7e\xab\xa4\x39\x8e\xc1\x66\xfe\x94\x61\xc4\x53\x67\x91\xb6\x67\x0f\xe5\x85\x65\x5f\x19\xd0\xda\xe7\xe9\xb6\x9d\xcc\x59\x11\x14\x61\x8b\x68\xd0\xfd\xdf\xdd\x3d\x51\xc1\xb4\x09\x40\xbc\xa9\x86\x25\x08\x86\x48\xf2\x95\x86\x97\x05\xca\xf5\x1a\xc2\x62\x78\xe3\xea\xd8\x6f\xc3\x29\x4f\x5e\x1f\x4e\x37\xc7\xf3\xb7\x07\xed\x17\x34\xe0\xf3\xc5\xe9\x27\x5e\xed\x89\xea\x9c\x30\x80\x1d\x3e\xa4\x3f\x99\x78\xf3\x27\x7a\x99\xec\x27\x74\x5e\xf9\x7c\x9a\x1a\xa3\x4d\x7e\xc9\xb9\xd5\xf8\x2c\xc1\xec\x99\x4f\xd8\xaf\xa0\xaf\xea\x53\x6b\x6c\x35\xa1\x8f\x33\x47\xae\xa7\
xa9\xa6\x54\x28\x8e\x91\xc8\xd1\x3c\x09\x32\x89\x42\xb1\x4e\x4b\xd9\x81\x87\x7d\x63\x0b\xaa\x9a\x16\xd4\xea\x27\x21\xbf\xc8\x0d\xcd\x1c\x84\x3d\xa6\x77\xb3\xf6\x1c\x73\x1f\xbe\x35\x5e\x54\xed\x01\xc1\x41\xd2\x9d\x93\x8c\x86\x77\xcc\x3a\xf9\x11\x1b\x9f\xdc\x40\xd8\xc6\xe2\x27\x1a\x0d\xd7\x1a\x30\x30\x8d\xde\x64\x76\x5e\x35\xb7\x84\xbe\x20\x98\x0c\xea\xd9\x95\x28\x52\x0a\xc2\x8e\x1f\x2d\xbd\x0a\x11\x1c\xb3\xc7\x7e\x1e\x90\xf5\xc9\x82\x38\x00\x42\xfa\x55\x03\xe3\xb9\xce\x3e\xaa\x71\xa0\xb6\x34\xf5\x71\x31\xfe\x50\xf4\xfc\xe3\x6b\x78\x1b\x77\x05\x8f\x83\x0c\x6e\xba\x63\x19\xbd\x98\x8a\x96\xb6\x81\x98\x89\xf5\x32\x71\xe2\x42\xd4\x7c\xaa\xe9\xea\xb9\x9e\x44\x7b\x94\xf9\x1b\xa2\xab\xa5\xa8\x31\xab\xcd\x8e\x03\x09\xc4\x98\x6b\x89\xe4\x2d\xc4\x0f\xab\xdf\x41\xa0\x91\x61\x30\x9b\x67\xe7\xe3\x8c\xe0\x58\x3e\x59\x7d\x5b\x31\x42\xc0\xa8\x6e\xb8\x17\x85\x76\x3b\x77\x7a\xeb\x4a\x93\x74\xa9\xd3\x83\xf1\x31\x82\xf7\xdc\x75\x5a\xae\x26\x59\x99\xd9\x74\xba\x07\xda\x9c\xa7\xef\x64\xa6\x7a\x83\xa0\xf2\x15\xaa\xf0\x6b\x04\x55\x49\x4a\x30\xc7\x0b\x0d\x6a\x6b\xa5\xbb\xc6\xb4\xb5\xf5\x69\xab\x98\x1f\xe0\x08\x48\x29\x7b\x4b\x7b\x6e\x1c\xf1\xfe\x7e\x46\xb7\x0b\x19\xfe\xc2\xec\x63\x83\x06\x00\x85\x45\x9b\x9e\x57\x6e\x87\xc3\x8f\x3e\xfb\x0b\x78\x93\x98\xb4\xc2\x45\xe3\x95\xef\x89\x58\x62\x22\x28\x09\x14\xf5\x40\xd9\x24\x4f\xe6\x94\xa9\x4b\x97\x11\xa1\x04\x59\x96\x0e\xe7\xf2\x66\x46\x0d\xce\x2a\xe0\x0b\x9b\x09\x9d\xa4\x8b\xf4\x83\xd5\xb0\x0e\x9b\x76\xf0\x59\x8e\x24\x51\x42\x40\x92\xf5\xe8\x57\x73\x1c\x0f\x35\x70\x4a\x17\xe5\xfc\x37\x4f\xb9\x2f\x09\x67\x23\xdc\x85\x98\xd8\x6e\x78\xd3\x5e\x98\xb6\xae\x13\x6b\xf1\x2e\xc5\x37\x7b\xbb\x11\xf8\xed\x39\x6c\x7a\xe9\x24\x6a\x27\xce\x2b\x84\xa1\xd3\x99\x4b\x03\x69\x78\x13\xe0\x17\x75\x5d\x2a\x46\x01\x21\xf3\x06\xd6\x91\xbe\x77\xf0\x8c\x60\x51\x83\x46\x5d\xdb\x12\xc2\xb3\x24\x8a\x37\xfc\x7a\x1a\xe9\x3f\xf2\xad\x80\x94\xad\xb0\x2d\x16\x10\xe6\xd6\x06\x5e\xbf\x9f\x9b\xd7\x28\x56\x55\x82\xd8\x37\xeb\xef\xfd\xa9\x76\xe5\xb6\x6c\x61\x50\x90\xd9\x82\xc1\x1f\x71\x44\x76\xcc\x9f\xe5\x49\x5c\xdb\x4d\xf9\x2c\x27\xf3\x5e\x71\x44\x1b\x06\x63\x8b\xd1\x43\x13\xe9\x87\xb0\x38\x41\xc8\xf3\x3e\x61\xed\x03\x3c\x3b\xd4\xdd\x7a\x1d\x0e\x0f\x84\x27\xda\x17\xe5\x8e\x2a\xdb\x49\x55\x02\x29\x4a\x52\x6d\xb2\xf6\xbb\x00\xab\x7d\x2c\x69\xad\xc2\x6b\x04\x21\x94\xdc\x76\xea\x0b\x76\xfc\x6d\x92\xc1\x54\x9f\x2a\x47\xd4\x91\x48\x9e\x17\x5c\x1f\xf9\x66\xa4\x3f\x88\x39\xac\xff\x7f\x35\xed\x78\x0e\x19\xd6\xbb\x20\xea\x5e\x09\x5d\x1a\xda\x05\x85\xf0\x5f\xfc\xad\xbf\x80\x8d\x7b\x7d\x6f\xd0\x28\x3f\xbb\xf2\x1b\xc3\xa8\xd6\x46\x07\xb5\xe6\x5b\x6d\xa9\x03\xba\x72\x8c\xcd\x84\x79\xc8\x97\x3f\xe1\x2d\x83\xc9\xc3\x79\xa3\xbe\xf3\x35\x8c\xff\x9e\x0b\x5c\x3f\xe9\xf4\x01\xf8\xb3\xe6\x71\xfa\x91\xc0\xda\xf7\x3a\x53\xc8\x10\x60\xc5\xd8\xf3\x6e\x17\xc9\x33\xba\xe9\xe3\x81\x53\x24\x2f\xf1\x25\xdb\xe7\x9b\xaf\xf1\x94\x74\xd9\xe4\x03\x12\x25\x0e\xb2\x9d\xb2\xc3\x46\x58\x15\x6f\xec\xd8\x03\xc8\x4b\x3c\x13\xdd\x55\x03\x98\x8b\x31\xb0\xbc\x1b\x38\x3a\x16\xfc\xc3\xc3\xd9\x0a\x85\xf9\xaf\xda\x8e\x09\x70\x41\xed\x0e\x63\x3a\xf9\x6b\x41\xc0\x96\xfb\x6a\x00\x83\x62\x7b\xd7\x0d\x3c\x12\xfa\x0c\xc0\x5f\x3e\xa0\xae\x11\x52\xe5\x19\x15\x13\x1d\xe9\xb6\xb4\xb2\x75\x85\xb4\xfc\x3e\x4a\x81\x97\xa8\xce\xc5\xae\xc5\x1d\x40\x78\x4f\x8b\x54\x9c\xd3\x40\x20\xc9\x17\x78\xce\x60\xe1\x9b\xca\xdc\xd4\x49\x71\xdf\x4e\x2f\x40\xd8\x7f\x1f\xed\xd9\x99\x61\x73\xd0\x20\x2f\x28\x65\xb1\x5d\x16\x57\x9a\x4c\x3c\x4c\x7c\x7a\x6b\xec\x43\xc3\x8b\x6a\x82\xb3\x3e\xb5\x0f\x6b\x89\x9a\xec\xe3\xb2\xdd\x36\x3c\x06\x95\x0e\xe3\xd5\x2f\xc0\x72\x61\xaa\x3d\x0c\xe9\xe5\x45\x4b
[obfuscated binary payload omitted: several thousand escaped bytes of PyArmor-protected bytecode, not human-readable]', 2) \ No newline at end of file diff --git a/old/ctc/dist/pytransform/__init__.py b/old/ctc/dist/pytransform/__init__.py new
file mode 100644 index 0000000..d8f7120 --- /dev/null +++ b/old/ctc/dist/pytransform/__init__.py @@ -0,0 +1,483 @@ +# These module alos are used by protection code, so that protection +# code needn't import anything +import os +import platform +import sys +import struct + +# Because ctypes is new from Python 2.5, so pytransform doesn't work +# before Python 2.5 +# +from ctypes import cdll, c_char, c_char_p, c_int, c_void_p, \ + pythonapi, py_object, PYFUNCTYPE, CFUNCTYPE +from fnmatch import fnmatch + +# +# Support Platforms +# +plat_path = 'platforms' + +plat_table = ( + ('windows', ('windows', 'cygwin*')), + ('darwin', ('darwin',)), + ('ios', ('ios',)), + ('linux', ('linux*',)), + ('freebsd', ('freebsd*', 'openbsd*', 'isilon onefs')), + ('poky', ('poky',)), +) + +arch_table = ( + ('x86', ('i?86', )), + ('x86_64', ('x64', 'x86_64', 'amd64', 'intel')), + ('arm', ('armv5',)), + ('armv6', ('armv6l',)), + ('armv7', ('armv7l',)), + ('ppc64', ('ppc64le',)), + ('mips32', ('mips',)), + ('aarch32', ('aarch32',)), + ('aarch64', ('aarch64', 'arm64')) +) + +# +# Hardware type +# +HT_HARDDISK, HT_IFMAC, HT_IPV4, HT_IPV6, HT_DOMAIN = range(5) + +# +# Global +# +_pytransform = None + + +class PytransformError(Exception): + pass + + +def dllmethod(func): + def wrap(*args, **kwargs): + return func(*args, **kwargs) + return wrap + + +@dllmethod +def version_info(): + prototype = PYFUNCTYPE(py_object) + dlfunc = prototype(('version_info', _pytransform)) + return dlfunc() + + +@dllmethod +def init_pytransform(): + major, minor = sys.version_info[0:2] + # Python2.5 no sys.maxsize but sys.maxint + # bitness = 64 if sys.maxsize > 2**32 else 32 + prototype = PYFUNCTYPE(c_int, c_int, c_int, c_void_p) + init_module = prototype(('init_module', _pytransform)) + ret = init_module(major, minor, pythonapi._handle) + if (ret & 0xF000) == 0x1000: + raise PytransformError('Initialize python wrapper failed (%d)' + % (ret & 0xFFF)) + return ret + + +@dllmethod +def init_runtime(): + prototype = PYFUNCTYPE(c_int, c_int, c_int, c_int, c_int) + _init_runtime = prototype(('init_runtime', _pytransform)) + return _init_runtime(0, 0, 0, 0) + + +@dllmethod +def encrypt_code_object(pubkey, co, flags, suffix=''): + _pytransform.set_option(6, suffix.encode()) + prototype = PYFUNCTYPE(py_object, py_object, py_object, c_int) + dlfunc = prototype(('encrypt_code_object', _pytransform)) + return dlfunc(pubkey, co, flags) + + +@dllmethod +def generate_license_key(prikey, keysize, rcode): + prototype = PYFUNCTYPE(py_object, c_char_p, c_int, c_char_p) + dlfunc = prototype(('generate_license_key', _pytransform)) + return dlfunc(prikey, keysize, rcode) if sys.version_info[0] == 2 \ + else dlfunc(prikey, keysize, rcode.encode()) + + +@dllmethod +def get_registration_code(): + prototype = PYFUNCTYPE(py_object) + dlfunc = prototype(('get_registration_code', _pytransform)) + return dlfunc() + + +@dllmethod +def get_expired_days(): + prototype = PYFUNCTYPE(py_object) + dlfunc = prototype(('get_expired_days', _pytransform)) + return dlfunc() + + +@dllmethod +def clean_obj(obj, kind): + prototype = PYFUNCTYPE(c_int, py_object, c_int) + dlfunc = prototype(('clean_obj', _pytransform)) + return dlfunc(obj, kind) + + +def clean_str(*args): + tdict = { + 'str': 0, + 'bytearray': 1, + 'unicode': 2 + } + for obj in args: + k = tdict.get(type(obj).__name__) + if k is None: + raise RuntimeError('Can not clean object: %s' % obj) + clean_obj(obj, k) + + +def get_hd_info(hdtype, name=None): + if hdtype not in range(HT_DOMAIN + 1): + raise RuntimeError('Invalid 
parameter hdtype: %s' % hdtype) + size = 256 + t_buf = c_char * size + buf = t_buf() + cname = c_char_p(0 if name is None + else name.encode('utf-8') if hasattr('name', 'encode') + else name) + if (_pytransform.get_hd_info(hdtype, buf, size, cname) == -1): + raise PytransformError('Get hardware information failed') + return buf.value.decode() + + +def show_hd_info(): + return _pytransform.show_hd_info() + + +def assert_armored(*names): + prototype = PYFUNCTYPE(py_object, py_object) + dlfunc = prototype(('assert_armored', _pytransform)) + + def wrapper(func): + def wrap_execute(*args, **kwargs): + dlfunc(names) + return func(*args, **kwargs) + return wrap_execute + return wrapper + + +def check_armored(*names): + try: + prototype = PYFUNCTYPE(py_object, py_object) + prototype(('assert_armored', _pytransform))(names) + return True + except RuntimeError: + return False + + +def get_license_info(): + info = { + 'ISSUER': None, + 'EXPIRED': None, + 'HARDDISK': None, + 'IFMAC': None, + 'IFIPV4': None, + 'DOMAIN': None, + 'DATA': None, + 'CODE': None, + } + rcode = get_registration_code().decode() + if rcode.startswith('*VERSION:'): + index = rcode.find('\n') + info['ISSUER'] = rcode[9:index].split('.')[0].replace('-sn-1.txt', '') + rcode = rcode[index+1:] + + index = 0 + if rcode.startswith('*TIME:'): + from time import ctime + index = rcode.find('\n') + info['EXPIRED'] = ctime(float(rcode[6:index])) + index += 1 + + if rcode[index:].startswith('*FLAGS:'): + index += len('*FLAGS:') + 1 + info['FLAGS'] = ord(rcode[index - 1]) + + prev = None + start = index + for k in ['HARDDISK', 'IFMAC', 'IFIPV4', 'DOMAIN', 'FIXKEY', 'CODE']: + index = rcode.find('*%s:' % k) + if index > -1: + if prev is not None: + info[prev] = rcode[start:index] + prev = k + start = index + len(k) + 2 + info['CODE'] = rcode[start:] + i = info['CODE'].find(';') + if i > 0: + info['DATA'] = info['CODE'][i+1:] + info['CODE'] = info['CODE'][:i] + return info + + +def get_license_code(): + return get_license_info()['CODE'] + + +def get_user_data(): + return get_license_info()['DATA'] + + +def _match_features(patterns, s): + for pat in patterns: + if fnmatch(s, pat): + return True + + +def _gnu_get_libc_version(): + try: + prototype = CFUNCTYPE(c_char_p) + ver = prototype(('gnu_get_libc_version', cdll.LoadLibrary('')))() + return ver.decode().split('.') + except Exception: + pass + + +def format_platform(platid=None): + if platid: + return os.path.normpath(platid) + + plat = platform.system().lower() + mach = platform.machine().lower() + + for alias, platlist in plat_table: + if _match_features(platlist, plat): + plat = alias + break + + if plat == 'linux': + cname, cver = platform.libc_ver() + if cname == 'musl': + plat = 'musl' + elif cname == 'libc': + plat = 'android' + elif cname == 'glibc': + v = _gnu_get_libc_version() + if v and len(v) >= 2 and (int(v[0]) * 100 + int(v[1])) < 214: + plat = 'centos6' + + for alias, archlist in arch_table: + if _match_features(archlist, mach): + mach = alias + break + + if plat == 'windows' and mach == 'x86_64': + bitness = struct.calcsize('P'.encode()) * 8 + if bitness == 32: + mach = 'x86' + + return os.path.join(plat, mach) + + +# Load _pytransform library +def _load_library(path=None, is_runtime=0, platid=None, suffix='', advanced=0): + path = os.path.dirname(__file__) if path is None \ + else os.path.normpath(path) + + plat = platform.system().lower() + for alias, platlist in plat_table: + if _match_features(platlist, plat): + plat = alias + break + + name = '_pytransform' + suffix + if 
plat == 'linux': + filename = os.path.abspath(os.path.join(path, name + '.so')) + elif plat in ('darwin', 'ios'): + filename = os.path.join(path, name + '.dylib') + elif plat == 'windows': + filename = os.path.join(path, name + '.dll') + elif plat in ('freebsd', 'poky'): + filename = os.path.join(path, name + '.so') + else: + filename = None + + if platid is not None and os.path.isfile(platid): + filename = platid + elif platid is not None or not os.path.exists(filename) or not is_runtime: + libpath = platid if platid is not None and os.path.isabs(platid) else \ + os.path.join(path, plat_path, format_platform(platid)) + filename = os.path.join(libpath, os.path.basename(filename)) + + if filename is None: + raise PytransformError('Platform %s not supported' % plat) + + if not os.path.exists(filename): + raise PytransformError('Could not find "%s"' % filename) + + try: + m = cdll.LoadLibrary(filename) + except Exception as e: + if sys.flags.debug: + print('Load %s failed:\n%s' % (filename, e)) + raise + + # Removed from v4.6.1 + # if plat == 'linux': + # m.set_option(-1, find_library('c').encode()) + + if not os.path.abspath('.') == os.path.abspath(path): + m.set_option(1, path.encode() if sys.version_info[0] == 3 else path) + elif (not is_runtime) and sys.platform.startswith('cygwin'): + path = os.environ['PYARMOR_CYGHOME'] + m.set_option(1, path.encode() if sys.version_info[0] == 3 else path) + + # Required from Python3.6 + m.set_option(2, sys.byteorder.encode()) + + if sys.flags.debug: + m.set_option(3, c_char_p(1)) + m.set_option(4, c_char_p(not is_runtime)) + + # Disable advanced mode by default + m.set_option(5, c_char_p(not advanced)) + + # Set suffix for private package + if suffix: + m.set_option(6, suffix.encode()) + + return m + + +def pyarmor_init(path=None, is_runtime=0, platid=None, suffix='', advanced=0): + global _pytransform + _pytransform = _load_library(path, is_runtime, platid, suffix, advanced) + return init_pytransform() + + +def pyarmor_runtime(path=None, suffix='', advanced=0): + if _pytransform is not None: + return + + try: + pyarmor_init(path, is_runtime=1, suffix=suffix, advanced=advanced) + init_runtime() + except Exception as e: + if sys.flags.debug or hasattr(sys, '_catch_pyarmor'): + raise + sys.stderr.write("%s\n" % str(e)) + sys.exit(1) + + +# ---------------------------------------------------------- +# End of pytransform +# ---------------------------------------------------------- + +# +# Unused +# + + +@dllmethod +def generate_license_file(filename, priname, rcode, start=-1, count=1): + prototype = PYFUNCTYPE(c_int, c_char_p, c_char_p, c_char_p, c_int, c_int) + dlfunc = prototype(('generate_project_license_files', _pytransform)) + return dlfunc(filename.encode(), priname.encode(), rcode.encode(), + start, count) if sys.version_info[0] == 3 \ + else dlfunc(filename, priname, rcode, start, count) + +# +# Not available from v5.6 +# + + +def generate_capsule(licfile): + prikey, pubkey, prolic = _generate_project_capsule() + capkey, newkey = _generate_pytransform_key(licfile, pubkey) + return prikey, pubkey, capkey, newkey, prolic + + +@dllmethod +def _generate_project_capsule(): + prototype = PYFUNCTYPE(py_object) + dlfunc = prototype(('generate_project_capsule', _pytransform)) + return dlfunc() + + +@dllmethod +def _generate_pytransform_key(licfile, pubkey): + prototype = PYFUNCTYPE(py_object, c_char_p, py_object) + dlfunc = prototype(('generate_pytransform_key', _pytransform)) + return dlfunc(licfile.encode() if sys.version_info[0] == 3 else licfile, + 
pubkey) + + +# +# Deprecated functions from v5.1 +# + + +@dllmethod +def encrypt_project_files(proname, filelist, mode=0): + prototype = PYFUNCTYPE(c_int, c_char_p, py_object, c_int) + dlfunc = prototype(('encrypt_project_files', _pytransform)) + return dlfunc(proname.encode(), filelist, mode) + + +def generate_project_capsule(licfile): + prikey, pubkey, prolic = _generate_project_capsule() + capkey = _encode_capsule_key_file(licfile) + return prikey, pubkey, capkey, prolic + + +@dllmethod +def _encode_capsule_key_file(licfile): + prototype = PYFUNCTYPE(py_object, c_char_p, c_char_p) + dlfunc = prototype(('encode_capsule_key_file', _pytransform)) + return dlfunc(licfile.encode(), None) + + +@dllmethod +def encrypt_files(key, filelist, mode=0): + t_key = c_char * 32 + prototype = PYFUNCTYPE(c_int, t_key, py_object, c_int) + dlfunc = prototype(('encrypt_files', _pytransform)) + return dlfunc(t_key(*key), filelist, mode) + + +@dllmethod +def generate_module_key(pubname, key): + t_key = c_char * 32 + prototype = PYFUNCTYPE(py_object, c_char_p, t_key, c_char_p) + dlfunc = prototype(('generate_module_key', _pytransform)) + return dlfunc(pubname.encode(), t_key(*key), None) + +# +# Compatible for PyArmor v3.0 +# + + +@dllmethod +def old_init_runtime(systrace=0, sysprofile=1, threadtrace=0, threadprofile=1): + '''Only for old version, before PyArmor 3''' + pyarmor_init(is_runtime=1) + prototype = PYFUNCTYPE(c_int, c_int, c_int, c_int, c_int) + _init_runtime = prototype(('init_runtime', _pytransform)) + return _init_runtime(systrace, sysprofile, threadtrace, threadprofile) + + +@dllmethod +def import_module(modname, filename): + '''Only for old version, before PyArmor 3''' + prototype = PYFUNCTYPE(py_object, c_char_p, c_char_p) + _import_module = prototype(('import_module', _pytransform)) + return _import_module(modname.encode(), filename.encode()) + + +@dllmethod +def exec_file(filename): + '''Only for old version, before PyArmor 3''' + prototype = PYFUNCTYPE(c_int, c_char_p) + _exec_file = prototype(('exec_file', _pytransform)) + return _exec_file(filename.encode()) diff --git a/old/ctc/dist/pytransform/_pytransform.so b/old/ctc/dist/pytransform/_pytransform.so new file mode 100755 index 0000000..c5892da Binary files /dev/null and b/old/ctc/dist/pytransform/_pytransform.so differ diff --git a/old/ctc/get_infos.py b/old/ctc/get_infos.py new file mode 100644 index 0000000..08eda02 --- /dev/null +++ b/old/ctc/get_infos.py @@ -0,0 +1,603 @@ +#-*- coding:utf-8 -*- +#!/usr/bin/env python3 +import json +import sys +import signal +import readline +import os + + +c_title = '\033[1;4;31;42m' # title color +c_br = '\033[1;31m' # bold red +c_bg = '\033[1;32m' # bold green +c_by = '\033[1;33m' # bold yellow +c_bb = '\033[1;34m' # bold blue +c_bp = '\033[1;35m' # bold purple +c_bc = '\033[1;36m' # bold cyan +c_bir= '\033[1;3;31m' # * bold italic red +c_bib = '\033[1;3;34m' # * bold italic cyan +c_bic = '\033[1;3;36m' # bold italic cyan +c_e = '\033[0m' # reset + + +def get_parent(parent_log, inp_parent_id): + parent_all = {"dyn_first_parent": "动态一层父", "dyn_first_parent_all": "动态一层父所有", "dyn_first_parent_backups": "动态一层备父", "dyn_second_parent": "动态二层父", "first_parent": "一层父", "first_parent_backups": "一层备父", "pre_first_parent": "预部署一层父", "pre_first_parent_backups": "预部署一层备父", "pre_second_parent": "预部署二层父", "pre_second_parent_backups": "预部署二层备父", "second_parent": "二层父", "second_parent_backups": "二层备父"} + parent_related = {} + with open(parent_log) as obj_parent: + parents=json.loads(obj_parent.read()) + for 
parent in parents['result']: + if parent['parent_id'] == inp_parent_id: + parent_name = parent['parent_name'] + print(f"父方案: {parent_name}") + for parent_en, parent_cn in parent_all.items(): + if parent[parent_en] != '': + parent_related[parent[parent_en]] = parent_cn + for parent_en, parent_cn in parent_related.items(): + print(f"{parent_cn}: {parent_en}") + break + + +def get_respool(respool_log, inp_template_id, pool_type): + with open(respool_log) as obj_respool: + respools=json.loads(obj_respool.read()) + for respool in respools['result']: + if int(respool['template_id']) == int(inp_template_id): + # print(f"{pool_type}: {respool['template_name']}") + return (f"{pool_type}: {respool['template_name']}") + + +def domain_info_1(domain_info_log, inp_domain): + with open(domain_info_log) as obj_domain_info: + domain_infos=json.loads(obj_domain_info.read()) + # 判断是否是重叠域名 + multi = len(domain_infos['data']) + if multi == 0: + print(f"{c_br}未找到该域名相关信息,可以登录网页系统查看是否有配置解析组,退出...{c_e}") + sys.exit(205) + overlap = "是" if multi > 1 else "否" + inp_index = 1 + if multi > 1: + print(f"{c_bp}该域名是重叠域名,请确认要查询域名的归属账号: {c_e}") + index = 1 + flag = 0 + # 遍历重叠域名的账号邮箱,需要输入确定的序号 + for domain_info in domain_infos['data']: + print(f"账号{index} - ", end="") + for find_it in domain_info['domains']: + if find_it['domain'] == inp_domain: + pretty_print3(f"账户: {find_it['account_name']}", f"邮箱: {find_it['email']}", f"accid: {find_it['account_id']}") + flag = 0 + break + flag = 1 + + if flag == 1: + print() + flag = 0 + + index += 1 + + print(f"{c_by}请输入要查询域名归属账号的序号(e.g. 1, 2, 3...): {c_e}") + # 验证index是合法输入的逻辑 + inp_index = input() + if inp_index.isdigit() and 1 <= int(inp_index) and int(inp_index) < index: + inp_index = int(inp_index) + else: + print(f"{c_br}请输入正确的序号,{c_e}", end="") + sys.exit(200) + inp_index -= 1 + inp_index = inp_index if inp_index != 0 else 0 + common_cname = len(domain_infos['data'][inp_index]['domains']) + for find_it in range(common_cname): + if domain_infos['data'][inp_index]['domains'][find_it]['domain'] == inp_domain: + break + + common_cname = '是' if common_cname > 1 else '否' + common_cnames = [] + for domain in domain_infos['data'][inp_index]['domains']: + common_cnames.append(domain['domain']) + + account = domain_infos['data'][inp_index]['domains'][find_it]['account_name'] + account_id = domain_infos['data'][inp_index]['domains'][find_it]['account_id'] + access_id = domain_infos['data'][inp_index]['domains'][find_it]['access_id'] + email = domain_infos['data'][inp_index]['domains'][find_it]['email'] + cname = domain_infos['data'][inp_index]['cname'] + cname_vendor = domain_infos['data'][inp_index]['access_vendor_cname'] + parse_group = domain_infos['data'][inp_index]['parse_group_name'] + + with open("info.log", 'w', encoding='utf-8') as obj_info: + obj_info.write(f"1:{account}\n") + obj_info.write(f"2:{email}\n") + obj_info.write(f"3:{account_id}\n") + obj_info.write(f"4:{access_id}\n") + + pretty_print3(f"账户: {account}", f"邮箱: {email}", f"accId: {account_id}") + pretty_print3(f"Map: {parse_group}", f"accessId: {access_id}", f"重叠域名: {overlap}") + pretty_print3(f"合作方: {cname_vendor}", f"CNAME: {cname}", f"是否共享CNAME缓存: {common_cname}") + if common_cname == '是': + print(f"共享CNAME缓存域名列表: {common_cnames}") + if parse_group == '': + sys.exit(201) + + +def domain_info_2(domain_info_log, inp_accid): + with open(domain_info_log) as obj_domain_info: + domain_infos=json.loads(obj_domain_info.read()) + # 遍历账号名称相同的客户,使用request id确定唯一的信息 + for acc_info in domain_infos['results']['items']: + if 
acc_info['accountId'] == inp_accid: + managerArea = acc_info['managerArea'] + platformVipLevel = acc_info['platformVipLevel'] + businessLevel = acc_info['businessLevel'] + ctYunVipLevel = acc_info['ctYunVipLevel'] + clientId = acc_info['clientId'] + accountType = acc_info['accountType'] + clientInsideName = acc_info['clientInsideName'] + maintainAfterName = acc_info['maintainAfterName'] + maintainAfterPhone = acc_info['maintainAfterPhone'] + maintainAfterEmail = acc_info['maintainAfterEmail'] + managerVendor = acc_info['managerVendor'] + + pretty_print3(f"售后姓名: {maintainAfterName}", f"售后电话: {maintainAfterPhone}", f"售后邮箱: {maintainAfterEmail}") + pretty_print3(f"天翼云VIP等级: {ctYunVipLevel}", f"平台VIP等级: {platformVipLevel}", f"客户VIP等级: {businessLevel}") + pretty_print3(f"clientId: {clientId}", f"客户内部名称: {clientInsideName}", f"商务渠道: {managerArea}") + pretty_print2(f"承载平台: {managerVendor}", f"客户类型: {accountType}") + break + + +def domain_info_3(domain_info_log, inp_accid): + with open(domain_info_log) as obj_domain_info: + domain_infos=json.loads(obj_domain_info.read()) + # 遍历重叠域名,使用request id确定唯一的信息 + for domain_info in domain_infos['data']['results']: + if domain_info['accountId'] == inp_accid: + statusName = domain_info['statusName'] + ipv6Switch = domain_info['ipv6Switch'] + productName = domain_info['productName'] + innerTestDomain = domain_info['innerTestDomain'] + ipv6Switch = '是' if ipv6Switch == 1 else '否' + innerTestDomain = '是' if innerTestDomain == 1 else '否' + + pretty_print2(f"域名状态: {statusName}", f"是否开启IPv6: {ipv6Switch}") + pretty_print2(f"是否内部测试域名: {innerTestDomain}", f"产品类型: {productName}") + break + + +def domain_info_4(domain_info_log, inp_accid): + with open(domain_info_log) as obj_domain_info: + domain_infos=json.loads(obj_domain_info.read()) + # 遍历账号名称相同的客户,使用request id确定唯一的信息 + if len(domain_infos['result']) == 0: + sys.exit(204) + else: + for domain_info in domain_infos['result']: + if domain_info['account_id'] == inp_accid: + # 1. 回源地址 + origin = [] + for ori in domain_info['origin']: + origin.append(ori['role'] + ':' + ori['origin']) + # 2. 访问协议 + 端口 + http_visit = domain_info['basic_conf']['http_server_port'] if domain_info['http_status'] == 'on' else 'X' + https_visit = domain_info['basic_conf']['https_server_port'] if domain_info['https_status'] == 'on' else 'X' + url_visit = str(http_visit) + '/' + str(https_visit) + # 3. 回源协议 + 端口 + https_origin = str(domain_info['basic_conf']['https_origin_port']) + http_origin = str(domain_info['basic_conf']['http_origin_port']) + if domain_info['backorigin_protocol'] == 'follow_request': + url_origin = http_origin + '/' + https_origin + elif domain_info['backorigin_protocol'] == 'http': + url_origin = http_origin + '/X' + elif domain_info['backorigin_protocol'] == 'https': + url_origin = 'X/' + https_origin + else: + print("回源协议除了http/https/follow_request之外,还有第四种方式,请补充...") + sys.exit(201) + # 4. 证书备注名 + cert_name = domain_info['cert_name'] + + + # 6. 预部署资源池 + pre_node_list = domain_info['pre_node_list'] + off_pool = get_respool("respool.log", pre_node_list, '预部署资源池') + # 7. 全局资源池 + node_list = domain_info['node_list'] + on_pool = get_respool("respool.log", node_list, '全局资源池') + # 8. 是否热加载 + conf_order_id = domain_info['conf_order_id'] + conf_order_id = '否' if conf_order_id == -1 else '是' + + pretty_print2(f"证书备注名: {cert_name}", f"热加载: {conf_order_id}") + pretty_print2(off_pool, on_pool) + print(f"回源地址: {origin}") + print(f"http/https访问: {url_visit}") + print(f"http/https回源: {url_origin}") + + # 5. 
父方案 parent_id + parent_id = domain_info['parent_id'] + get_parent("parent.log", parent_id) + break + + +def domain_info_5(domain_info_log, inp_accid): + with open(domain_info_log) as obj_domain_info: + domain_infos=json.loads(obj_domain_info.read()) + # 遍历账号名称相同的客户,使用request id确定唯一的信息 + for domain_info in domain_infos['result']: + if domain_info['account_id'] == inp_accid: + with open("info.log", 'w', encoding='utf-8') as obj_info: + obj_info.write(f"4:{domain_info['domain_id']}\n") + break + +# 如下accid没用到 +def domain_info_6(domain_info_log, inp_accid): + with open(domain_info_log) as obj_domain_info: + domain_info=json.loads(obj_domain_info.read())['result'] + + # 推拉流模式 + push_stream_domain = '' + pull_stream_mode = domain_info['base_conf']['pull_stream_mode'] + if pull_stream_mode == 0: + pull_stream_mode = "直播拉流(推拉流)" + push_stream_domain = domain_info['base_conf']['push_stream_domain'] + elif pull_stream_mode == 1: + pull_stream_mode = "直播拉流(回源拉流)" + else: + pull_stream_mode = "直播推流" + + + # 证书备注名 + if domain_info['protocol_control']['https_switch'] == 1: + cert_name = domain_info['protocol_control']['cert_name'] + else: + cert_name = '无绑定证书' + + pretty_print3(f"推拉流模式: {pull_stream_mode}", f"推流域名: {push_stream_domain}", f"证书备注名: {cert_name}") + + # 预部署资源池 + pre_node_list = domain_info['pre_resouce_id'] + off_pool = get_respool("respool.log", pre_node_list, '预部署资源池') + # 全局资源池 + node_list = domain_info['resouce_id'] + on_pool = get_respool("respool.log", node_list, '全局资源池') + + pretty_print2(off_pool, on_pool) + + # 回源模式 + origin_mode = domain_info['base_conf']['origin_mode'] + for mode in origin_mode: + print(f"回源模式: {mode}") + mode_desc = domain_info['base_conf'][f'{mode}_origin'] + for ori in mode_desc: + for k, v in ori.items(): + if v != '': + print(f"{k}: {v}") + # 父方案 parent_id + parent_id = domain_info['parent_id'] + get_parent("parent.log", parent_id) + + +def domain_map_info(domain_map_log, flg): + with open(domain_map_log) as obj_domain_map_log: + map_info=json.loads(obj_domain_map_log.read()) + # 判断是否是重叠域名 + parse_detail=map_info['parse_detail'] + if int(flg) == 0: + print('------------------------------分区域解析------------------------------') + for item in parse_detail: + pretty_print3(item['area_cnname'], item['type'], item['value'], 1) + # write to file here + + print('----------------------------------------------------------------------') + else: + with open('map.log', 'w') as obj_map_log: + for item in parse_detail: + obj_map_log.write(f"{item['value']}\n") + + +def map_info(map_info_log, inp_domain): + with open(map_info_log) as obj_map_info: + map_infos=json.loads(obj_map_info.read()) + # 判断是否是重叠域名 + multi = len(map_infos['data']) + if multi == 0: + print(f"{c_br}未找到该域名相关信息,可以登录网页系统查看是否有配置解析组,退出...{c_e}") + sys.exit(205) + + inp_index = 1 + if multi > 1: + print(f"{c_bp}该域名是重叠域名,请确认要查询域名的归属账号: {c_e}") + index = 1 + flag = 0 + # 遍历重叠域名的账号邮箱,需要输入确定的序号 + for map_info in map_infos['data']: + print(f"账号{index} - ", end="") + for find_it in map_info['domains']: + if find_it['domain'] == inp_domain: + pretty_print3(f"账户: {find_it['account_name']}", f"邮箱: {find_it['email']}", f"accid: {find_it['account_id']}") + flag = 0 + break + flag = 1 + + if flag == 1: + print() + flag = 0 + + index += 1 + print(f"{c_by}请输入要查询域名归属账号的序号(e.g. 
1, 2, 3...): {c_e}") + # 验证index是合法输入的逻辑 + inp_index = input() + if inp_index.isdigit() and 1 <= int(inp_index) and int(inp_index) < index: + inp_index = int(inp_index) + else: + print(f"{c_br}请输入正确的序号,{c_e}", end="") + sys.exit(200) + inp_index -= 1 + inp_index = inp_index if inp_index != 0 else 0 + parse_group = map_infos['data'][inp_index]['parse_group_name'] + common_cname = len(map_infos['data'][inp_index]['domains']) + for find_it in range(common_cname): + if map_infos['data'][inp_index]['domains'][find_it]['domain'] == inp_domain: + break + account_id = map_infos['data'][inp_index]['domains'][find_it]['account_id'] + access_id = map_infos['data'][inp_index]['domains'][find_it]['access_id'] + with open("info.log", 'w', encoding='utf-8') as obj_info: + obj_info.write(f"3:{account_id}\n") + obj_info.write(f"4:{access_id}\n") + if parse_group != '': + with open("map.log", 'w', encoding='utf-8') as obj_map: + obj_map.write(f"{parse_group}\n") + else: + sys.exit(201) + + + +def domain_config_cdn(domain_info_log, inp_accid, domain): + with open(domain_info_log) as obj_domain_info: + domain_infos=json.loads(obj_domain_info.read()) + # 遍历账号名称相同的客户,使用request id确定唯一的信息 + if len(domain_infos['result']) == 0: + sys.exit(204) + else: + for domain_info in domain_infos['result']: + config_json = json.dumps(domain_info) + os.environ['config_json'] = config_json + os.environ['domain_json'] = domain + if domain_info['account_id'] == inp_accid: + os.system("echo $config_json > $domain_json") + break + + +def domain_config_live(domain_info_log, domain): + with open(domain_info_log) as obj_domain_info: + domain_info=json.loads(obj_domain_info.read())['result'] + config_json = json.dumps(domain_info) + os.environ['config_json'] = config_json + os.environ['domain_json'] = domain + os.system("echo $config_json > $domain_json") + + + +def parent_info_4(domain_info_log, inp_accid): + with open(domain_info_log) as obj_domain_info: + domain_infos=json.loads(obj_domain_info.read()) + # 遍历账号名称相同的客户,使用request id确定唯一的信息 + if len(domain_infos['result']) == 0: + sys.exit(204) + else: + for domain_info in domain_infos['result']: + if domain_info['account_id'] == inp_accid: + # 5. 
父方案 parent_id + parent_id = domain_info['parent_id'] + get_parent_info("parent.log", parent_id) + break + + +def parent_info_5(domain_info_log, inp_accid): + with open(domain_info_log) as obj_domain_info: + domain_infos=json.loads(obj_domain_info.read()) + # 遍历账号名称相同的客户,使用request id确定唯一的信息 + for domain_info in domain_infos['result']: + if domain_info['account_id'] == inp_accid: + with open("info.log", 'w', encoding='utf-8') as obj_info: + obj_info.write(f"2:{domain_info['domain_id']}\n") + break + +# 如下accid没用到 +def parent_info_6(domain_info_log, inp_accid): + with open(domain_info_log) as obj_domain_info: + domain_info=json.loads(obj_domain_info.read())['result'] + # 父方案 parent_id + parent_id = domain_info['parent_id'] + get_parent_info("parent.log", parent_id) + + + +def get_parent_info(parent_log, inp_parent_id): + parent_all = ["dyn_first_parent", "dyn_first_parent_all", "dyn_first_parent_backups", "dyn_second_parent", "first_parent", "first_parent_backups", "pre_first_parent", "pre_first_parent_backups", "pre_second_parent", "pre_second_parent_backups", "second_parent", "second_parent_backups"] + parent_related = {} + with open(parent_log) as obj_parent: + parents=json.loads(obj_parent.read()) + for parent in parents['result']: + if parent['parent_id'] == inp_parent_id: + parent_name = parent['parent_name'] + index = 1 + for parent_en in parent_all: + if parent[parent_en] != '': + with open("cmap", 'a', encoding='utf-8') as obj_cmap: + obj_cmap.write(f"{index}. {parent[parent_en]}\n") + index += 1 + break + + +def quit(signum, frame): + print("Bye!") + sys.exit(205) + +def pretty_print2(col_1, col_2): + len_1 = len(col_1) + len_2 = len(col_2) + len_1_utf8 = len(col_1.encode('utf-8')) + len_2_utf8 = len(col_2.encode('utf-8')) + size_1 = 48 - int((len_1_utf8 - len_1) / 2) + size_2 = 40 - int((len_2_utf8 - len_2) / 2) + print(f"%-{size_1}s%-{size_2}s" % (col_1, col_2)) + + +def pretty_print3(col_1, col_2, col_3, col_4=0): + len_1 = len(col_1) + len_2 = len(col_2) + len_3 = len(col_3) + len_1_utf8 = len(col_1.encode('utf-8')) + len_2_utf8 = len(col_2.encode('utf-8')) + len_3_utf8 = len(col_3.encode('utf-8')) + size_1 = 48- int((len_1_utf8 - len_1) / 2) + size_2 = 40 - int((len_2_utf8 - len_2) / 2) + size_3 = 30 - int((len_2_utf8 - len_2) / 2) + if col_4 == 0: + print(f"%-{size_1}s%-{size_2}s%-{size_3}s" % (col_1, col_2, col_3)) + else: + size_1 = 16- int((len_1_utf8 - len_1) / 2) + size_2 = 10 - int((len_2_utf8 - len_2) / 2) + size_3 = 60 - int((len_2_utf8 - len_2) / 2) + print(f"%-{size_1}s%-{size_2}s%-{size_3}s" % (col_1, col_2, col_3)) + +def pretty_print_data(width: list, cols: list): + for i in range(len(cols)): + len_text = len(cols[i]) + len_utf8 = len(cols[i].encode('utf-8')) + len_size = width[i] - int((len_utf8 - len_text) / 2) + if i == 8: + if float(cols[i]) < 10: + print(f"{c_br}%-{len_size}s{c_e}" % (cols[i]), end='') + elif float(cols[i]) < 30: + print(f"{c_by}%-{len_size}s{c_e}" % (cols[i]), end='') + else: + print(f"{c_bg}%-{len_size}s{c_e}" % (cols[i]), end='') + else: + print(f"%-{len_size}s" % (cols[i]), end='') + print() + +def pretty_print_title(width: list, cols: list): + for i in range(len(cols)): + len_text = len(cols[i]) + len_utf8 = len(cols[i].encode('utf-8')) + len_size = width[i] - int((len_utf8 - len_text) / 2) + print(f"{c_title}%-{len_size}s{c_e}" % (cols[i]), end='') + print() + +def fmt_print_global(res_map): + title = ["大区", "省份", "View", "组名", "VIP", "LAKE", "压测带宽", "实时带宽", "冗余带宽", "昨晚高峰", "昨中高峰"] + width = [5, 18, 25, 25, 32, 15, 10, 10, 10, 10, 10] + 
pretty_print_title(width, title) + with open(res_map) as obj_res_map: + lines = obj_res_map.readlines() + count = 1 + for line in lines: + pretty_print_data(width, line.strip().split()) + count += 1 + if count % 25 == 0: + pretty_print_title(width, title) + +def fmt_print_partial(res_map, view, query, domain, domain_map): + + if os.path.getsize(view): + title = ["大区", "省份", "View", "组名", "VIP", "LAKE", "压测带宽", "实时带宽", "冗余带宽", "昨晚高峰", "昨中高峰"] + width = [5, 18, 25, 25, 32, 15, 10, 10, 10, 10, 10] + pretty_print_title(width, title) + + with open(res_map) as obj_res_map, open(view) as obj_view: + views = obj_view.readlines() + lines = obj_res_map.readlines() + count = 1 + for view_s in views: + for line in lines: + c_line = line.strip().split() + if c_line[2] == view_s.strip(): + pretty_print_data(width, c_line) + count += 1 + if count % 25 == 0: + pretty_print_title(width, title) + if count == 1: + print(f"{c_br}域名{domain}的解析组{domain_map}中,不存在{query}地区的覆盖节点,请确认。{c_e}\n") + sys.exit(206) + else: + print(f"{c_br}请按照规则,输入正确的查询条件,退出...{c_e}") + sys.exit(202) + + + + + +def main(): + + option = sys.argv[1] + + if option == '--domain_info_1': + domain_info_log = sys.argv[2] + inp_domain = sys.argv[3] + domain_info_1(domain_info_log, inp_domain) + elif option == '--domain_info_2': + domain_info_log = sys.argv[2] + inp_accid = sys.argv[3] + domain_info_2(domain_info_log, inp_accid) + elif option == '--domain_info_3': + domain_info_log = sys.argv[2] + inp_accid = sys.argv[3] + domain_info_3(domain_info_log, inp_accid) + elif option == '--domain_info_4': + domain_info_log = sys.argv[2] + inp_accid = sys.argv[3] + domain_info_4(domain_info_log, inp_accid) + elif option == '--domain_info_5': + domain_info_log = sys.argv[2] + inp_accid = sys.argv[3] + domain_info_5(domain_info_log, inp_accid) + elif option == '--domain_info_6': + domain_info_log = sys.argv[2] + inp_accid = sys.argv[3] + domain_info_6(domain_info_log, inp_accid) + elif option == '--domain_map_info': + domain_map_log = sys.argv[2] + flg = sys.argv[3] + domain_map_info(domain_map_log, flg) + elif option == '--map_info': + map_info_log = sys.argv[2] + inp_accid = sys.argv[3] + map_info(map_info_log, inp_accid) + elif option == '--format-global': + res_map = sys.argv[2] + fmt_print_global(res_map) + elif option == '--format-partial': + query = sys.argv[2] + view = sys.argv[3] + res_map = sys.argv[4] + domain = sys.argv[5] + domain_map = sys.argv[6] + fmt_print_partial(res_map, view, query, domain, domain_map) + elif option == '--domain_config_cdn': + domain_info_log = sys.argv[2] + inp_accid = sys.argv[3] + domain = sys.argv[4] + domain_config_cdn(domain_info_log, inp_accid, domain) + elif option == '--domain_config_live': + domain_info_log = sys.argv[2] + domain = sys.argv[3] + domain_config_live(domain_info_log, domain) + elif option == '--parent_info_4': + domain_info_log = sys.argv[2] + inp_accid = sys.argv[3] + parent_info_4(domain_info_log, inp_accid) + elif option == '--parent_info_5': + domain_info_log = sys.argv[2] + inp_accid = sys.argv[3] + parent_info_5(domain_info_log, inp_accid) + elif option == '--parent_info_6': + domain_info_log = sys.argv[2] + inp_accid = sys.argv[3] + parent_info_6(domain_info_log, inp_accid) + + +if __name__ == "__main__": + signal.signal(signal.SIGINT, quit) + main() + + diff --git a/old/ctc/group_chatbot_xibei.sh b/old/ctc/group_chatbot_xibei.sh new file mode 100644 index 0000000..d533f9f --- /dev/null +++ b/old/ctc/group_chatbot_xibei.sh @@ -0,0 +1,55 @@ +#!/bin/bash 
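+# Usage sketch (illustrative, not part of the original header): the script takes one
+# positional argument that selects the reminder text (1 = "it's Thursday, please fill in
+# your weekly report", 2 = repeated "remember to write your weekly report"); any other
+# value leaves the reminder text empty. A typical way to drive it would be cron, with a
+# hypothetical schedule and install path, e.g.:
+#   0 10 * * 4 /bin/bash /data/scripts/group_chatbot_xibei.sh 1
+#   0 17 * * 5 /bin/bash /data/scripts/group_chatbot_xibei.sh 2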
+#=================================================================== +# Filename : group_chatbot_xibei.sh +# Function : +# Usage : +# Author : Manford Fan +# Date : 2023-05-12 08:59 +# Version : Version 0.1 +# Disclaimer : The author is NOT responsible for any loss caused +# by the user's own operations. +# And More : If you find there are some bugs in this script +# Or you have better ideas, please do contact me +# via E-mail -- mffan0922@163.com +#=================================================================== + +function sendMsg() { + + # 个人测试 + # curl 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=ddea3f5f-fbfc-4c21-994a-71e9fc50e4ef' \ + # -H 'Content-Type: application/json' \ + # -d ' + # { + # "msgtype": "markdown", + # "markdown": { + # "content": "**'"$alarmTitle"'**\n + # > '"$alarmInfo"'" + # } + # }' > /dev/null 2>&1 + + # 群hook + curl 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=5c5f18f1-8494-4f42-b7f1-9ef7295b0578' \ + -H 'Content-Type: application/json' \ + -d ' + { + "msgtype": "markdown", + "markdown": { + "content": "**'"$alarmTitle"'**\n + > '"$alarmInfo"'" + } + }' > /dev/null 2>&1 +} + + +time_opt=$1 +alarmTitle="周报提醒" + +if [[ $time_opt == '1' ]]; then + alarmInfo='周四了,请各位及时填写周报~~' +elif [[ $time_opt == '2' ]]; then + alarmInfo='记得写周报,记得写周报,记得写周报~~' +else + : +fi + +sendMsg $alarmTitle $alarmInfo diff --git a/old/ctc/ids.sh b/old/ctc/ids.sh new file mode 100644 index 0000000..8e869eb --- /dev/null +++ b/old/ctc/ids.sh @@ -0,0 +1,1151 @@ +#!/bin/bash +# 功能实现:根据requestID以及IP,获得完整的链路日志信息 +# 依赖文件:ips.sh +# 存在问题: + +# 静态-点播-直播 +# 1. 10:00:04 - 10:15:33 - 10:29:01 - 10:30:01 - 10:45:28 - 10:59:55 +# 2. 一个小时前 | 两个小时前 | 三个小时前 | 四个小时前 | 八个小时前 | 两天前 +# 3. 00:00:00前后 + +# 各个层级HIT/MISS情况 +# - MISS MISS | MISS HIT | HIT MISS | HIT HIT + +# 直播特殊验证项 +# 拉流-合并回源 | 推流 | 转推 | 转码 + +#======================================================================================= +# 功能:捕获 Ctrl + C 将后台进程全部终止 +# 入参:bg_pids, progress_pid +# 出参:None +function onCtrlC () { + exec 3>&2 # 3 is now a copy of 2 + exec 2> /dev/null # 2 now points to /dev/null + kill ${bg_pids} ${progress_pid} >/dev/null 2>&1 + sleep 1 # sleep to wait for process to die + exec 2>&3 # restore stderr to saved + exec 3>&- # close saved version + echo -e "${c_bir}IDS!\n${c_e}" + echo -e "${c_bir}[IDS-100] Ctrl+C is captured, exiting...\n${c_e}" + exit 100 +} + +#======================================================================================= +# 功能:捕获 `exit` 退出指令,并计算脚本实际运行时间 +# 入参:TS +# 出参:None +function onExit () { + local te=`date +%s` + echo -e "${c_bib}Start Time: $(date -d@$((ts-0)) +'%Y-%m-%d %H:%M:%S')" + echo -e "${c_bib}End Time : `date +'%Y-%m-%d %H:%M:%S'`" + echo -e "${c_bib}Duration : $((te-ts)) seconds\n${c_e}" +} + +#======================================================================================= +# 功能:检查输入的时间范围是否符合格式要求:14天内,不能是未来时间,10位数字 +# 入参:time_range +# 出参:current, year, month, day, hour, time_range +function time_check() { + # 如果入参 time_range 的值是空,或者说函数没有入参 + if [[ $time_range == '' ]]; then + time_range=`date +%Y%m%d%H` + year=${time_range:0:4} + month=${time_range:4:2} + day=${time_range:6:2} + hour=${time_range:8:2} + current='yes' + return 0 + fi + + # 检查入参是否正确:长度,表示的时间范围等 + [[ ! 
$time_range =~ ^[0-9]{10}$ ]] && { echo -e "${c_br}[IDS-101] 请输入正确的时间格式,退出...\n${c_e}"; exit 101; } + # 验证入参是10天以内的时间范围 + now=`date +%s` + # 准备工作,后续要用 + year=${time_range:0:4} + month=${time_range:4:2} + day=${time_range:6:2} + hour=${time_range:8:2} + # 将入参转换为秒 + previous=`date -d "$year-$month-$day $hour:00:00" +"%s"` + # 计算当前时间 - 入参时间 + let range_s=now-previous + let range_d=range_s/86400 + # 如果是14天以外的入参时间,则不可查 + [[ $range_d -gt 10 ]] && { echo -e "${c_br}[IDS-102] 只能查找最近10天以内的日志记录,退出...\n${c_e}"; exit 102; } + # 判断 time_range 是否是当前时间,并用 current 来标识,默认是当前,即 current = yes + [[ $time_range == `date +%Y%m%d%H` ]] && current='yes' || current='no' +} + +#======================================================================================= +# 功能:等待后台进程结束时,输出进度条 +# 入参:bg_pids, level +# 出参:None +function progress() { + length=75 + ratio=1 + # ps -p pidlist命令的作用是列出pidlist里面所有pid的运行状态,已经结束的pid将不会被列出,每个pid一行 + while [[ "$(ps -p ${bg_pids} | wc -l)" -ne 1 ]]; do + mark='>' + progress_bar='' + # 小于ratio的部分填充'>',大于ratio的部分,填充' ',必须是空格,不然ratio重新变成1的时候,没有变化 + for i in $(seq 1 $length); do + if [[ $i -gt $ratio ]]; then + mark=' ' + fi + progress_bar="${progress_bar}${mark}" + done + echo -ne "${c_bic}Collecting $level Data: ${progress_bar}\r${c_e}" + ratio=$((ratio+1)) + if [[ $ratio -gt $length ]]; then + ratio=1 + fi + sleep 1 + done +} + +#======================================================================================= +# 功能:适用于旧版本的reqID,获取$label_en和$time_range +# 入参:None +# 出参:time_range, label_en, edge +function node_time_old() { + echo -e "${c_bg}请输入request ID所在节点信息,支持边缘RIP/VIP/中英文节点名:${c_e}" + # 60s时间接收输入:要查询的节点 + read -t 60 edge + # 判断60s内无输入,则自动退出 + [[ $? -ne 0 ]] && { echo -e "${c_br}[IDS-103] 60s内无任何输入,退出...\n${c_e}"; exit 103; } + # 判断输入的边缘节点信息是空,则自动退出 + [[ $edge == '' ]] && { echo -e "${c_br}[IDS-104] 请输入正确的边缘节点信息,退出...\n${c_e}"; exit 104; } + + # 判断边缘节点信息是否在天翼平台 + ips $edge > ips.log 2>&1 + [[ $? -ne 0 ]] && { cat ips.log; echo -e "${c_br}[IDS-105]${c_e}"; exit 105; } || cd $trash + label_en=`cat ips.log | grep -Eo '(ct|cu|cm|bgp|ctbgp|cmbgp|cubgp|as|eu|sa|na|cbn|cern)_[a-z]{2,3}_[a-z]{2,20}[0-9]{1,2}_(c|e|n)[0-9]{0,2}' | sort | uniq` + # 输入中文节点名的情况下,可能会得到两个边缘节点,需要手动确认是哪个 + number=`echo $label_en | awk '{print NF}'` + [[ $label_en == '' ]] && { echo -e "${c_br}[IDS-106] 请确认输入的 $edge 是边缘节点,退出...\n${c_e}"; exit 106; } + if [[ $number -gt 1 ]]; then + echo $label_en | awk '{for(i=1;i<=NF;i++) print " -", $i}' + echo -e "${c_bp}输入的 $edge 边缘节点中有两个组,请确认具体是哪个:${c_e}" + read -t 60 label_en_input + # 判断60s内无输入,则自动退出 + [[ $? -ne 0 ]] && { echo -e "${c_br}[IDS-107] 60s内无任何输入,退出...\n${c_e}"; exit 107; } + # 判断输入信息是否是正确的 + echo $label_en | grep -wq $label_en_input + [[ $? -ne 0 ]] && { echo -e "${c_br}[IDS-108] 需要从如上选择正确的边缘节点信息,请重新运行,退出...\n${c_e}"; exit 108; } + label_en=$label_en_input + fi + + # 60s时间接收输入:要查询的时间 + echo -e "${c_bg}请输入要查询的reqID生成时间,格式为yyyymmddHH(默认当前 - $(date +%Y%m%d%H)): ${c_e} " + read -t 60 time_range + [[ $? 
-ne 0 ]] && { echo -e "${c_br}[IDS-109] 60s内无任何输入,退出...\n${c_e}"; exit 109; } +} + + +#======================================================================================= +# 功能:适用于新版本的reqID,获取label_en和time_range +# 入参:None +# 出参:time_range, label_en, edge +function node_time_new() { + ts_hex=`echo $req_id | awk -F '_' '{print $1}'` + ts_hex=`printf "%d" "0x$ts_hex"` + time_range=`date -d @$ts_hex +'%Y%m%d%H'` + ts=`date -d @$ts_hex +'%Y-%m-%d %H:%M:%S'` + + # 根据中间部分的信息找出label_en + edge=`echo $req_id | awk -F '_' '{print $2}'` + edge=`echo $edge | awk -F '-' '{print $1"-"$2"-ca"$3}'` + ping -c 4 -q $edge > ping.log + if [[ $? -eq 0 ]]; then + rip=`cat ping.log | grep -Eo "([0-9]{1,3}\.){3}[0-9]{1,3}"` + # 获取$label_en + ips $rip > ips.log 2>&1 + [[ $? -ne 0 ]] && { cat ips.log; echo -e "${c_br}[IDS-110]${c_e}"; exit 110; } || cd $trash + label_en=`cat ips.log | grep -Eo '(ct|cu|cm|bgp|ctbgp|cmbgp|cubgp|as|eu|sa|na|cbn|cern)_[a-z]{2,3}_[a-z]{2,20}[0-9]{1,2}_(e)[0-9]{0,2}' | sort | uniq` + else + echo -e "${c_br}[IDS-111] 本地到边缘缓存主机$edge的网络无法ping通,需要检查下服务器是否宕机,或者检查下${edge}是否是天翼的节点...${c_e}" + exit 111 + fi + echo -e "${c_bib}将在$rip($label_en)上查找$ts时间点的日志,如果时间靠近${c_bir}整点${c_bib},有可能会因日志切割导致无法正确查询...${c_e}" +} + +#======================================================================================= +# 功能:获取 prod_type - 产品类型 +# 入参:None +# 出参:TS, prod_type +function prod_type_inp() { + # 60s时间接收输入:要查询的类型 + echo -e "1. 静态/下载/点播/全站(default - v03/ov06)" + echo -e "2. 直播(ACC1/2/3)" + echo -e "3. 安全" + echo -e "4. quic" + echo -e "5. L1/L2/L3/L4/L5/L6/L7(e.g. 查询L3,则输入5.3)" + echo -e "${c_bg}请输入要查询的业务类型(目前仅支持1/2/5): ${c_e}" + read -t 60 prod_type + [[ $? -ne 0 ]] && { echo -e "${c_br}[IDS-112] 60s内无任何输入,退出...\n${c_e}"; exit 112; } + [[ $prod_type == '' ]] && prod_type='1' + ts=`date +%s` # 开始计时 +} + +#======================================================================================= +# 功能:CDN的NG访问日志处理,静态/点播/下载/全站/L1-7 +# 入参:rip_list, bg_pids, current, year, month, day, hour, req_id +# 出参:access_$rip +function cdn_log_access() { + # 根据业务类型,指定前缀 + [[ $prod_type == '1' ]] && prefix='' + [[ $prod_type == '5.1' ]] && prefix='L1_' + [[ $prod_type == '5.2' ]] && prefix='L2_' + [[ $prod_type == '5.3' ]] && prefix='L3_' + [[ $prod_type == '5.4' ]] && prefix='L4_' + [[ $prod_type == '5.5' ]] && prefix='L5_' + [[ $prod_type == '5.6' ]] && prefix='L6_' + [[ $prod_type == '5.7' ]] && prefix='L7_' + # 如果time_range是当前时间 + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索core_access.log,设定ssh连接超时时长为CT,*${time_range}*表示包含time_range就要进行搜索 + # 把搜索的结果放进access_$rip文件,所有的ssh命令都后台执行,并将它们的PID存入bg_pids + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log | grep $req_id; + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $req_id" > access_${rip} 2>&1 & + bg_pids=$bg_pids' '$(jobs -p | tail -1) + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $req_id; + zcat $cdn_access_log/$year$month$day/${prefix}core_access.log_*${time_range}* | grep $req_id" > access_${rip} 2>&1 & + bg_pids=$bg_pids' '$(jobs -p | tail -1) + done + fi +} + +#======================================================================================= +# 功能:CDN的ATS回源日志处理,静态/点播/下载/全站/L1-7 +# 入参:rip_list, bg_pids, current, year, month, day, hour, req_id +# 出参:origin_$rip +function cdn_log_origin() { + # 如果time_range是当前时间 + if [[ 
$current == 'yes' ]]; then + # ssh 进每一个rip,搜索origin.log,设定ssh连接超时时长为CT + # 把搜索的结果放进origin_$rip文件,所有的ssh命令都后台执行,并将它们的PID存入bg_pids + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log | grep $req_id; + cat $cdn_origin_log/origin.log_*${time_range}* | grep $req_id" > origin_${rip} 2>&1 & + bg_pids=$bg_pids' '$(jobs -p | tail -1) + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log_*${time_range}* | grep $req_id; + zcat $cdn_origin_log/$year$month$day/origin.log_*${time_range}* | grep $req_id" > origin_${rip} 2>&1 & + bg_pids=$bg_pids' '$(jobs -p | tail -1) + done + fi +} + +#======================================================================================= +# 功能:搜索处理每一层级的CDN日志,静态/点播/下载/全站/L1-7 +# 入参:level, label_en, current, year, month, day, hour +# 出参:upstream, port, upstream_and_port +function cdn_log_proc() { + # ------------------------------------------------------------------------- + # 获取 rip 列表,并初始化 bg_pids 数组 + rip_list=`cat $data/ip.group | grep $label_en | awk '{print $1}' | sort | uniq` + bg_pids='' + # access日志:/home/log/cluster_gateway_log/core_access.log + cdn_log_access "$rip_list" "$bg_pids" "$current" "$year" "$month" "$day" "$hour" "$req_id" + # origin日志:/home/log/trafficserver/origin.log + cdn_log_origin "$rip_list" "$bg_pids" "$current" "$year" "$month" "$day" "$hour" "$req_id" + + # 动态进度条 + progress "${bg_pids}" $level & + progress_pid=$(jobs -p | tail -1) + + # 等待所有 bg_pids 存储的后台进程执行完毕 + wait + echo -ne "${c_bic}$level Data collected.${c_e}" + echo -ne " " + + # ------------------------------------------------------------------------- + # 处理上述生成的rip中拿到的access和origin日志,筛选出包含req_id的行 + # 初始化access.log和origin.log -- 处理单个rip上生成的access/origin日志,匹配req_id + # 如果找到则将对应的rip和日志内容用追加到相应日志尾部,以"为分隔,rip作为日志的最后一个字段 + log_merge $rip_list $req_id + + # ------------------------------------------------------------------------- + # 可以根据当前文件夹下的图片cdnlog_search_logic.jpg来理解 + # 如果找到了access日志,则过滤打印输出,并将access_flg重置 + if [[ $access_flg -eq 1 ]]; then + # v03版本第5个字段是日志打印时间戳 + cat access.log | sort -t'"' -n -k5 > log && mv log access.log + # 按时间排序,打印找到的所有日志内容 + cat access.log | while read line; do + access_ip=`echo $line | awk -F'"' '{print $NF}'` + echo -e "${c_by}\n$level NG Log: searching $label_en ...... $access_ip${c_e}" + # 将匹配"*的右边最短路径删除,也就是将之前追加到最末尾的rip删掉 + out=${line%\"*} + log_format "$out" + done + access_flg=0 + # 如果找到了origin日志,则过滤打印输出,并将origin_flg重置 + if [[ $origin_flg -eq 1 ]]; then + # ov06版本第43个字段是回源开始时间戳 + cat origin.log | sort -t'"' -n -k43 > log && mv log origin.log + cat origin.log | while read line; do + origin_ip=`echo $line | awk -F'"' '{print $NF}'` + echo -e "${c_by}\n$level ATS Log: searching $label_en ...... $origin_ip${c_e}" + # 将匹配"*的右边最短路径删除,也就是将之前追加到最末尾的rip删掉 + out=${line%\"*} + log_format "$out" + done + origin_flg=0 + # 如果origin日志找到了,则从origin日志中获取upstream和port + upstream_and_port=`cat origin.log | awk -F '"' '{print $8":"$9}' | sort | uniq` + uap=$upstream_and_port + for nhi in $upstream_and_port; do + upstream=`echo $nhi | awk -F ':' '{print $1}'` + port=`echo $nhi | awk -F ':' '{print $2}'` + ips $upstream > ips.log 2>&1 + # 如果upstream是天翼的IP,则输出如下 + if [[ $? 
-eq 0 ]]; then + echo -e "${c_bp}\n[ATS-$level]: 从origin日志查询到$req_id的回上层地址是$nhi(CTC IP),继续查询...${c_e}" + cd $trash + # 如果upstream不是天翼的IP,则输出如下 + else + echo -e "${c_br}\n[ATS-$level]: 从origin日志查询到$req_id的回源地址$nhi不是天翼的IP,可能是ATS回源了${c_e}" + echo -e "${c_bp}请确认${c_bc}$nhi${c_bp}是否是源站IP。\n${c_e}" + uap=`echo $uap | sed -n "s/$nhi//p"` + fi + done + [[ $uap == '' ]] && { echo -e "${c_bg}[IDS-113] 所有IP均已处理完毕,退出...${c_e}"; exit 113; } || { upstream_and_port=$uap; cd $trash; } + # 如果没有找到origin日志,则从access日志获取upstream和port,并用ips脚本检测查询upstream是否是天翼的IP + else + # 如果没有找到origin日志,则判断是否缓存了 + # MISS from hb-wuhan13-ca26, MISS from fj-quanzhou6-ca16 -- ct_fj_quanzhou6_e1 + hit=`echo $label_en | awk -F'_' '{print "HIT from", $2"-"$3}'` + cat access.log | grep -iq "$hit" + [[ $? -eq 0 ]] && { echo -e "${c_bp}\n[IDS-114] [Access-$level]: 边缘已经命中缓存,搜索结束...${c_e}\n"; exit 114; } + + # 如果不是缓存住了,从access日志获取回上层信息 + uap_28=`cat access.log | tail -1 | awk -F '"' '{print $28}'` + uap_24=`cat access.log | tail -1 | awk -F '"' '{print $24}'` + if [[ $uap_28 == '' || $uap_28 == '-' ]]; then + if [[ $uap_24 == '' || $uap_24 == '-' ]]; then + echo -e "${c_br}\n[IDS-115] [Access-$level]: 没有找到origin日志,也找不到$req_id的回上层地址,无法继续查询,退出...${c_e}" + exit 115 + else + upstream_and_port=$uap_24 + fi + else + upstream_and_port=$uap_28 + fi + + # WARNING: 此处没有考虑当uap_24有多个后端代理的情况 + # 判断是否是天翼云 IP + upstream_and_port=`echo $upstream_and_port | tr ',' ' '` + uap=$upstream_and_port + for nhi in $upstream_and_port; do + upstream=`echo $nhi | awk -F ':' '{print $1}'` + port=`echo $nhi | awk -F ':' '{print $2}'` + ips $upstream > ips.log 2>&1 + # 如果upstream是天翼的IP,则输出如下 + if [[ $? -eq 0 ]]; then + echo -e "${c_bp}\n[NG-$level]: 从access日志查询到$req_id的回上层地址是$nhi(CTC IP),继续查询...${c_e}" + cd $trash + # 如果upstream不是天翼的IP,则输出如下 + else + echo -ne "${c_br}\n[NG-$level]: 从access日志查询到$req_id的回源地址$nhi不是天翼的IP,可能是NG直接回源了。${c_e}" + echo -e "${c_br}或者查询access主机上的error日志,看是否是ats因自身或源站故障导致无法应答${c_e}" + echo -e "${c_bp}请确认${c_bc}$nhi${c_bp}是否是源站IP。${c_e}" + uap=`echo $uap | sed -n "s/$nhi//p"` + fi + done + [[ $uap == '' ]] && { echo -e "${c_bg}[IDS-116] 所有IP均已处理完毕,退出...${c_e}"; exit 116; } || { upstream_and_port=$uap; cd $trash; } + fi + # 如果没有找到access日志,则要判断当前是在哪个层级,边缘-一层父-二层父 + else + # 如果是一层父或者是二层父,再找一下origin日志是否存在 + if [[ $level == 'Center' || $level == 'Nation' ]]; then + # 如果origin日志有找到记录,则打印出来 + if [[ $origin_flg -eq 1 ]]; then + cat origin.log | sort -t'"' -n -k43 > log && mv log origin.log + cat origin.log | while read line; do + origin_ip=`echo $line | awk -F'"' '{print $NF}'` + echo -e "${c_by}\n$level ATS Log: searching $label_en ...... $origin_ip${c_e}" + out=${line%\"*} + log_format "$out" + done + origin_flg=0 + # 如果origin日志找到了,则从origin日志中获取upstream和port + upstream_and_port=`cat origin.log | awk -F '"' '{print $8":"$9}' | sort | uniq` + uap=$upstream_and_port + for nhi in $upstream_and_port; do + upstream=`echo $nhi | awk -F ':' '{print $1}'` + port=`echo $nhi | awk -F ':' '{print $2}'` + ips $upstream > ips.log 2>&1 + # 如果upstream是天翼的IP,则输出如下 + if [[ $? 
-eq 0 ]]; then + echo -e "${c_bp}\n[ATS-$level]: 从origin日志查询到$req_id的回上层地址是$nhi(CTC IP),继续查询...${c_e}" + cd $trash + # 如果upstream不是天翼的IP,则输出如下 + else + echo -e "${c_br}\n[ATS-$level]: 从origin日志查询到$req_id的回源地址$nhi不是天翼的IP,可能是ATS回源了${c_e}" + echo -e "${c_bp}请确认${c_bc}$nhi${c_bp}是否是源站IP。\n${c_e}" + uap=`echo $uap | sed -n "s/$nhi//p"` + fi + done + [[ $uap == '' ]] && { echo -e "${c_bg}[IDS-117] 所有IP均已处理完毕,退出...${c_e}"; exit 117; } || { upstream_and_port=$uap; cd $trash; } + # 如果origin日志也有找到记录 + else + echo -e "${c_br}\n[IDS-118] [NG-ATS-$level]: 很奇怪,从上层NG/ATS日志查询到$req_id的回源地址是$upstream_and_port(CTC IP),却找不到access/origin日志。可以检查一下父层对应日志是否已经被删除,或者查询的日志时间是否是靠近整点,例如22:59:01,由于日志切割机制,基于本工具的逻辑,这样的日志有可能会漏掉,如是这种情况,请更换访问日志,重新查询。\n${c_e}" + exit 118 + fi + # 如果边缘机器上就找不到 NG 日志 + else + echo -e "\n${c_br}[IDS-119] [NG-$level]: 无法在$label_en($edge)上找到$req_id,请确认输入的request ID,访问时间或者节点信息是准确的。\n${c_e}" + exit 119 + fi + fi +} + +#======================================================================================= +# 功能:分层级查询日志链 +# 入参:level, cdn_log_proc +# 出参:None +function cdn_log_search() { + # 1. 搜索边缘日志 + level="Edge" && cdn_log_proc $level + # 脚本运行到此处,说明边缘access或者origin日志已经找到,upstream_and_port也已经获得且一定是天翼IP + for nhi_e in $upstream_and_port; do + upstream=`echo $nhi_e | awk -F ':' '{print $1}'` + port=`echo $nhi_e | awk -F ':' '{print $2}'` + ips $upstream > ips.log 2>&1 && cd $trash + # 获取下一层级的 label_en,这里依赖ips工具输出的格式 + label_en=`cat ips.log | grep "所属资源池" | awk -F '(' '{print $2}' | awk -F ')' '{print $1}'` + width=`tput cols` && echo + for i in `seq $width`; do + echo -ne "${c_bb}=${c_e}" + done + echo -e "${c_bb}\nNext Hop IP: $nhi_e\n${c_e}" + + # 2. 搜索一层父日志 + level="Center" && cdn_log_proc $level + # 脚本运行到此处,说明边缘access或者origin日志已经找到,upstream_and_port也已经获得且一定是天翼IP + for nhi_c in $upstream_and_port; do + upstream=`echo $nhi_c | awk -F ':' '{print $1}'` + port=`echo $nhi_c | awk -F ':' '{print $2}'` + ips $upstream > ips.log 2>&1 && cd $trash + # 获取下一层级的 label_en,这里依赖ips工具输出的格式 + label_en=`cat ips.log | grep "所属资源池" | awk -F '(' '{print $2}' | awk -F ')' '{print $1}'` + width=`tput cols` && echo + for i in `seq $width`; do + echo -ne "${c_bb}=${c_e}" + done + echo -e "${c_bb}\nNext Hop IP: $nhi_c\n${c_e}" + + # 3. 搜索二层父日志 + level="Nation" && cdn_log_proc $level + # 脚本运行到此处,说明边缘access或者origin日志已经找到,upstream_and_port也已经获得且一定是天翼IP + for nhi_n in $upstream_and_port; do + upstream=`echo $nhi_n | awk -F ':' '{print $1}'` + port=`echo $nhi_n | awk -F ':' '{print $2}'` + ips $upstream > ips.log 2>&1 && cd $trash + # 获取下一层级的 label_en,这里依赖ips工具输出的格式 + label_en=`cat ips.log | grep "所属资源池" | awk -F '(' '{print $2}' | awk -F ')' '{print $1}'` + width=`tput cols` && echo + for i in `seq $width`; do + echo -ne "${c_bb}=${c_e}" + done + echo -e "${c_bb}\nNext Hop IP: $nhi_n\n${c_e}" + + # 4. 
未知层级 -- 容错作用 + level="Unknown" && cdn_log_proc $level + # 脚本运行到此处,说明边缘access或者origin日志已经找到,upstream_and_port也已经获得且一定是天翼IP + for nhi_u in $upstream_and_port; do + upstream=`echo $nhi_u | awk -F ':' '{print $1}'` + port=`echo $nhi_u | awk -F ':' '{print $2}'` + ips $upstream > ips.log 2>&1 && cd $trash + # 获取下一层级的 label_en,这里依赖ips工具输出的格式 + label_en=`cat ips.log | grep "所属资源池" | awk -F '(' '{print $2}' | awk -F ')' '{print $1}'` + width=`tput cols` && echo + for i in `seq $width`; do + echo -ne "${c_bb}=${c_e}" + done + echo -e "${c_bb}\nNext Hop IP: $nhi_u\n${c_e}" + done + done + done + done +} + +#======================================================================================= +# 功能:直播业务访问日志处理 -- all: flv, hls, rtmp +# 入参:rip_list, bg_pids, current, year, month, day, hour, req_id, rbplus +# 出参:access_$rip +function live_log_access() { + # 如果time_range是当前时间,则搜索当前文件夹下的对应日志,以及回滚日志 + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索uni_access.log和uni_access.log_${time_range}*,设定ssh连接超时时长 CT + # 把搜索的结果放进access_$rip文件,所有的ssh命令都后台执行,并将它们的PID存入$bg_pids + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $livelog/uni_access.log* | grep $req_id" > access_${rip} 2>&1 & + bg_pids=$bg_pids' '$(jobs -p | tail -1) + done + # 如果time_range不是当前时间,则搜索当前文件夹下的回滚日志,以及目录下的归档日志 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件,因为归档文件是奇数小时命名,所以过滤 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $livelog/uni_access.log* | grep $req_id; + zcat $livelog/$dirpath/uni_access.log_${rbplus}* | grep $req_id; + zcat $livelog/$dirpath/uni_access.log_${rb}* | grep $req_id; + zcat $livelog/$dirpath/uni_access.log_${rbminus}* | grep $req_id" > access_${rip} 2>&1 & + bg_pids=$bg_pids' '$(jobs -p | tail -1) + done + fi +} + + +#======================================================================================= +# 功能:直播业务回源日志处理 -- flv/hls +# 入参:rip_list, bg_pids, current, year, month, day, hour, req_id, rbplus +# 出参:origin_$rip +function live_log_origin() { + # 如果time_range是当前时间,则搜索当前文件夹下的对应日志,以及回滚日志 + if [[ $current == 'yes' ]]; then + # ssh进每一个 rip,搜索uni_origin.log和uni_origin.log_${time_range}*,设定ssh连接超时时长CT + # 把搜索的结果放进origin_$rip/ats_$rip文件,所有的ssh命令都后台执行,并将它们的PID存入$bg_pids + # 因为hls业务走的是点播逻辑,是从ATS回源的,所以为了后续方便处理,直接两种情况都获取一下,也不会多用多少时间 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $livelog/uni_origin.log* | grep $req_id" > origin_${rip} 2>&1 & + bg_pids=$bg_pids' '$(jobs -p | tail -1) + # for hls + ssh -o ConnectTimeout=$CT $rip " + cat $liveatslog/origin.log* | grep $req_id" > ats_${rip} 2>&1 & + bg_pids=$bg_pids' '$(jobs -p | tail -1) + done + # 如果time_range不是当前时间,则搜索当前文件夹下的回滚日志,以及目录下的归档日志 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件,因为归档文件是奇数小时命名,所以过滤 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $livelog/uni_origin.log* | grep $req_id; + zcat $livelog/$dirpath/uni_origin.log_${rbplus}* | grep $req_id; + zcat $livelog/$dirpath/uni_origin.log_${rb}* | grep $req_id; + zcat $livelog/$dirpath/uni_origin.log_${rbminus}* | grep $req_id" > origin_${rip} 2>&1 & + bg_pids=$bg_pids' '$(jobs -p | tail -1) + # for hls + ssh -o ConnectTimeout=$CT $rip " + cat $liveatslog/origin.log* | grep $req_id; + zcat $liveatslog/$dirpath_ats/origin.log_${rb_ats_minus}* | grep $req_id + zcat $liveatslog/$dirpath_ats/origin.log_${rb_ats}* | grep $req_id + zcat $liveatslog/$dirpath_ats/origin.log_${rb_ats_plus}* | grep $req_id" > ats_${rip} 2>&1 & + bg_pids=$bg_pids' '$(jobs -p | tail -1) + done + fi +} + 
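+# Worked example (illustrative values only) of the archive-file variables the two
+# functions above receive from live_log_proc: with time_range=2023051210 the caller
+# derives dirpath=2023-05-12, rb=2023-05-12-10, rbminus=2023-05-12-09 and
+# rbplus=2023-05-12-11, so the zcat patterns cover the rolled-over files
+# uni_access.log_2023-05-12-09* / -10* / -11* (and the matching uni_origin.log_* and
+# ATS origin.log_20230512* files) under the dated archive directory, per the
+# even-hour-into-odd-hour archiving rule noted in live_log_proc.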
+#======================================================================================= +# 功能:区分直播日志格式,因为不同格式上层IP以及HIT/MISS字段位置不同,内部回源IP&端口位置也不同 +# 功能:判断是否是hls业务 | flv业务的回上层IP&端口 | hls业务的回上层IP&端口 +# 入参:--ishls, --uapflv, --uaphls, access.log, origin.log +# 出参:HOM, log_v, upstream, upstream_and_port, port, ishls +function log_version() { + opt=$1 + log=$2 + log_v=`tail -1 $log | awk -F '"' '{print $1}'` + log_v=${log_v:${#log_v}-1:1} + # 根据不同的日志版本,从uni_access日志种获取回源端口,进而判断是hls业务还是其他 + if [[ $opt == '--ishls' ]]; then + if [[ $log_v == '1' ]]; then + upstream=`tail -1 $log | awk -F '"' '{print $30}' | awk -F':' '{print $1}'` + port=`tail -1 $log | awk -F '"' '{print $30}' | awk -F':' '{print $2}'` + elif [[ $log_v == '2' ]]; then + upstream=`tail -1 $log | awk -F '"' '{print $32}' | awk -F':' '{print $1}'` + port=`tail -1 $log | awk -F '"' '{print $32}' | awk -F':' '{print $2}'` + elif [[ $log_v == '3' ]]; then + upstream=`tail -1 $log | awk -F '"' '{print $7}'` + port=`tail -1 $log | awk -F '"' '{print $8}'` + else + echo -e "\n${c_br}[IDS-120] 未知的日志格式,请联系fanmf11@chinatelecom.cn反馈...${c_e}" + exit 120 + fi + [[ $port == '8080' ]] && ishls=1 || ishls=0 + + # 如果是flv业务,根据不同的日志版本,从uni_origin日志中获取回上层IP以及端口 + elif [[ $opt == '--uapflv' ]]; then + if [[ $log_v == '1' ]]; then + upstream=`tail -1 $log | awk -F '"' '{print $8}' | awk -F':' '{print $1}'` + port=`tail -1 $log | awk -F '"' '{print $8}' | awk -F':' '{print $2}'` + HOM=`tail -1 $log | awk -F '"' '{print $29}'` + elif [[ $log_v == '2' ]]; then + upstream=`tail -1 $log | awk -F '"' '{print $8}'` + port=`tail -1 $log | awk -F '"' '{print $9}'` + HOM=`tail -1 $log | awk -F '"' '{print $31}'` + elif [[ $log_v == '3' ]]; then + upstream=`tail -1 $log | awk -F '"' '{print $10}'` + port=`tail -1 $log | awk -F '"' '{print $11}'` + HOM=`tail -1 $log | awk -F '"' '{print $29}'` + else + echo -e "\n${c_br}[IDS-121] 未知的日志格式,请联系fanmf11@chinatelecom.cn反馈...${c_e}" + exit 121 + fi + if [[ $HOM == 'HIT' ]]; then + echo -e "\n${c_big}[IDS-122] $level $log Log: 请求在该层命中,退出...${c_e}" + exit 122 + fi + # 如果是hls业务,日志是ov06版本的,回上层IP以及端口是固定位置 + elif [[ $opt == '--uaphls' ]]; then + upstream=`cat origin.log | tail -1 | awk -F '"' '{print $8}'` + port=`cat origin.log | tail -1 | awk -F '"' '{print $9}'` + upstream_and_port=${upstream}":"${port} + fi +} + +#======================================================================================= +# 功能:处理CDN,直播的access和origin日志 +# 入参:access_$rip, origin_$rip, rip_list, req_id +# 出参:access.log, origin.log +function log_merge() { + # 处理上述生成的rip中拿到的access和origin日志,筛选出包含req_id的行 + # 初始化access.log和origin.log -- 处理单个rip上生成的access/origin日志,匹配req_id + # 如果找到则将对应的rip和日志内容用追加到相应日志尾部,以"为分隔,rip作为日志的最后一个字段 + > access.log && > origin.log + for rip in $rip_list; do + cat access_$rip | grep -q $req_id + # access和origin日志的处理方式一致,由于一个访问,可能在边缘机器上出现多次 -- 合并回源 + # 所以要检查每一个rip的日志,凡是匹配到的都记录到access.log和origin.log + if [[ $? -eq 0 ]]; then + access_flg=1 + cat access_$rip | grep $req_id > log + cat log | while read line; do + echo -n $line >> access.log + echo '"'$rip >> access.log + done + fi + cat origin_$rip | grep -q $req_id + if [[ $? -eq 0 ]]; then + origin_flg=1 + cat origin_$rip | grep $req_id > log + cat log | while read line; do + echo -n $line >> origin.log + echo '"'$rip >> origin.log + done + fi + + # 检查是否有 ssh 连接失败的情况,有的话,打印出来 + cat access.log | grep -iq 'Connection timed out during banner exchange' + [[ $? 
-eq 0 ]] && { echo -e "${c_br}[NG-$level] SSH Connection Failed:${c_e}"; echo $rip; } + + cat origin.log | grep -iq 'Connection timed out during banner exchange' + [[ $? -eq 0 ]] && { echo -e "${c_br}[ATS-$level] SSH Connection Failed:${c_e}"; echo $rip; } + done + +} + +#======================================================================================= +# 功能:获取直播网关日志,搜索处理每一层级的live日志 +# 入参:level, label_en, current, year, month, day, hour +# 出参:upstream, upstream_port +function live_log_proc() { + # 获取 rip 列表,并初始化 bg_pids 数组 + rip_list=`cat $data/ip.group | grep $label_en | awk '{print $1}' | sort | uniq` + bg_pids='' + dirpath="$year-$month-$day" + dirpath_ats="$year$month$day" + rb="$year-$month-$day-$hour" + rb_ats="$year$month$day$hour" + # 直播归档日志规则:偶数小时的日志归档到相邻较大的奇数小时命名的日志中 + # 所以如果是过去时间,hour 是偶数,则加一,如果是奇数,则保持 + # 这里奇数的时候设置成"KEEP"字符串,为的是简化搜索逻辑,避免更多的判断分支 + let clock=`echo "obase=10; $hour" | bc` + hourplus=`printf "%02d" $((clock+1))` + hourminus=`printf "%02d" $((clock-1))` + # [[ $((clock%2)) -eq 1 ]] && rbplus=$rb || rbplus="$year-$month-$day-$hourplus" + rbplus="$year-$month-$day-$hourplus" + rbminus="$year-$month-$day-$hourminus" + rb_ats_minus="$year$month$day$hourminus" + rb_ats_plus="$year$month$day$hourplus" + + # access日志:/home/log/cluster_live_log/uni_access.log + live_log_access "$rip_list" "$bg_pids" "$current" "$year" "$month" "$day" "$hour" "$req_id" $rbplus + # origin日志:/home/log/cluster_live_log/uni_origin.log + live_log_origin "$rip_list" "$bg_pids" "$current" "$year" "$month" "$day" "$hour" "$req_id" $rbplus + + # 动态进度条 + progress "${bg_pids}" $level & + progress_pid=$(jobs -p | tail -1) + + # 等待所有 bg_pids 存储的后台进程执行完毕 + wait + echo -ne "${c_bic}$level Data collected.${c_e}" + echo -ne " " + + # ------------------------------------------------------------------------- + # 处理上述生成的rip中拿到的access和origin日志,筛选出包含req_id的行 + # 初始化access.log和origin.log -- 处理单个rip上生成的access/origin日志,匹配req_id + # 如果找到则将对应的rip和日志内容用追加到相应日志尾部,以"为分隔,rip作为日志的最后一个字段 + log_merge $rip_list $req_id + + # 判断是否是hls或者ts业务 + if [[ $access_flg -eq 1 ]]; then + cat access.log | sort -t'"' -n -k2 -k6 > log && mv log access.log + log_version --ishls access.log + if [[ $ishls -eq 1 ]]; then + for rip in $rip_list; do + cp ats_$rip origin_$rip + done + # 重新做一次日志整理 + log_merge $rip_list $req_id + fi + fi + + # 如果是hls或者ts业务 + if [[ $ishls -eq 1 ]]; then + # 如果找到了access日志,则过滤打印输出,并将access_flg重置 + if [[ $access_flg -eq 1 ]]; then + cat access.log | sort -t'"' -n -k2 -k6 > log && mv log access.log + # 按时间排序,打印找到的所有日志内容 + cat access.log | while read line; do + access_ip=`echo $line | awk -F'"' '{print $NF}'` + echo -e "${c_big}\n$level Access Log:${c_by} searching $label_en ...... $access_ip${c_e}" + out=${line%\"*} + log_format "$out" + done + access_flg=0 + log_version --uapflv access.log + # 找到了origin日志,则过滤打印输出,并将origin_flg重置 + if [[ $origin_flg -eq 1 ]]; then + cat origin.log | sort -t'"' -n -k43 > log && mv log origin.log + cat origin.log | while read line; do + origin_ip=`echo $line | awk -F'"' '{print $NF}'` + echo -e "${c_big}\n$level Origin Log:${c_by} searching $label_en ...... 
$origin_ip${c_e}" + out=${line%\"*} + log_format "$out" + done + origin_flg=0 + log_version --uaphls origin.log + # 没有找到origin日志, + else + echo -e "\n${c_br}[IDS-123] [Origin-$level]: 很奇怪,Access未命中,也无法在$label_en上找到$req_id,请联系fanmf11@chinatelecom.cn排查未知的场景。\n${c_e}" + exit 123 + fi + # 如果边缘Access没有找到request id对应的日志信息 + else + echo -e "\n${c_br}[IDS-124] [ACCESS-$level]: 无法在$label_en($edge)上找到$req_id,请确认输入的request ID,访问时间或者节点信息是准确的。\n${c_e}" + exit 124 + fi + # 如果不是hls,而是rtmp或者flv业务 + else + # 如果找到了access日志,则过滤打印输出,并将access_flg重置 + if [[ $access_flg -eq 1 ]]; then + cat access.log | sort -t'"' -n -k2 -k6 > log && mv log access.log + # 按时间排序,打印找到的所有日志内容 + cat access.log | while read line; do + access_ip=`echo $line | awk -F'"' '{print $NF}'` + echo -e "${c_big}\n$level Access Log:${c_by} searching $label_en ...... $access_ip${c_e}" + out=${line%\"*} + log_format "$out" + done + access_flg=0 + log_version --uapflv access.log + # 找到了origin日志,则过滤打印输出,并将origin_flg重置 + if [[ $origin_flg -eq 1 ]]; then + cat origin.log | sort -t'"' -n -k2 -k6 > log && mv log origin.log + cat origin.log | while read line; do + origin_ip=`echo $line | awk -F'"' '{print $NF}'` + echo -e "${c_big}\n$level Origin Log:${c_by} searching $label_en ...... $origin_ip${c_e}" + out=${line%\"*} + log_format "$out" + done + origin_flg=0 + # 如果origin日志找到了,则从origin日志中获取upstream和port + log_version --uapflv origin.log + + # 没有找到origin日志,则考虑是不是合并回源导致request id变了,针对合并回源的情况,需要在合并回源的机器上,查找uni_rtmp_monitor.log日志,从中拿到relay session id,在过滤error.log匹配relay session和字符'request_id', + else + echo -e "\n${c_bib}稍等,正在处理其他信息,可能需要一些时间...${c_e}" + # 要取按时间排序的最后一条日志,如果有合并回源,则合并到该节点了 + merge_ip=`cat access.log | tail -1 | awk -F'"' '{print $NF}'` + # echo merge ip = $merge_ip + # 找到relay session id + ssh -o ConnectTimeout=$CT $merge_ip " + cat $livelog/uni_rtmp_monitor.log* | grep $req_id; + zcat $livelog/$dirpath/uni_rtmp_monitor.log_${rbplus}* | grep $req_id; + zcat $livelog/$dirpath/uni_rtmp_monitor.log_${rb}* | grep $req_id; + zcat $livelog/$dirpath/uni_rtmp_monitor.log_${rbminus}* | grep $req_id;" > monitor.log 2>&1 + relay_id=`cat monitor.log | grep $req_id | head -n 1 | awk -F'"' '{print $14}'` + [[ $relay_id == '' ]] && { echo -e "${c_br}[IDS-125] 无法找到relay session,需确认uni_rtmp_monitor是否开启,或者手动查询...${c_e}"; exit 125; } + + # echo relay id = $relay_id + # 找合并回源的request id + ssh -o ConnectTimeout=$CT $merge_ip " + cat $livelog/error.log* | grep $relay_id; + zcat $livelog/$dirpath/error.log_${rbplus}* | grep $relay_id; + zcat $livelog/$dirpath/error.log_${rb}* | grep $relay_id; + zcat $livelog/$dirpath/error.log_${rbminus}* | grep $relay_id" > error.log 2>&1 + cat error.log | grep 'request_id' | head -n 1 > log && mv log error.log + [[ `cat error.log | wc -l` -eq 0 ]] && { echo -e "${c_br}[IDS-126] 无法找到合并回源记录,退出...${c_e}"; exit 126; } + req_id_new=`cat error.log | awk '{for(i=1;i<=NF;i++) print $i}' | grep -E "^request_id" | tr -d '"' | awk -F ':' '{print $2}'` + # echo request id = $req_id_new + if [[ $req_id_new == $req_id ]]; then + upper_inner=`cat error.log | awk '{for(i=1;i<=NF;i++) print $i}' | grep -E "^peer:" | tr -d '"' | awk -F ':' '{print $2}'` + if [[ $upper_inner == 'unix' ]]; then + merger_ip=$merge_ip + flg=1 + else + ips -m $merge_ip > inner_ip.log + merge_ip=`cat inner_ip.log | grep -w "$upper_inner" | awk '{print $1}'` + fi + # echo upper_inner = $upper_inner + # echo merge_ip = $merge_ip + ssh -o ConnectTimeout=$CT $merge_ip " + cat $livelog/error.log* | grep $req_id; + zcat $livelog/$dirpath/error.log_${rbplus}* | grep $req_id; 
+ zcat $livelog/$dirpath/error.log_${rb}* | grep $req_id; + zcat $livelog/$dirpath/error.log_${rbminus}* | grep $req_id;" > error.new 2>&1 + cat error.new | grep -E "found dummy_session:.* is already pulling" | head -n 1 > log && mv log error.new + [[ `cat error.new | wc -l` -eq 0 ]] && { echo -e "${c_br}[IDS-127] 无法找到合并回源记录,退出...${c_e}"; exit 127; } + relay_id=`cat error.new | awk '{for(i=1;i<=NF;i++) print $i}' | grep -E "^dummy_session" | awk -F ':' '{print $2}' | tr -d '"'` + # echo relay id new = $relay_id + + ssh -o ConnectTimeout=$CT $merge_ip " + cat $livelog/error.log* | grep $relay_id; + zcat $livelog/$dirpath/error.log_${rbplus}* | grep $relay_id; + zcat $livelog/$dirpath/error.log_${rb}* | grep $relay_id; + zcat $livelog/$dirpath/error.log_${rbminus}* | grep $relay_id;" > error.relay 2>&1 + cat error.relay | grep -E "ngx_rtmp_pull_create_task.*request_id" | sort | head -n 1 > log && mv log error.relay + [[ `cat error.relay | wc -l` -eq 0 ]] && { echo -e "${c_br}[IDS-128] 无法找到合并回源记录,退出...${c_e}"; exit 128; } + req_id=`cat error.relay | awk '{for(i=1;i<=NF;i++) print $i}' | grep -E "^request_id" | tr -d '"' | awk -F ':' '{print $2}'` + else + req_id=$req_id_new + fi + # echo request id = $req_id + # echo merge_ip = $merge_ip + # 继续在 merge ip 上查找新的 request id 对应的 Origin 日志,其实应该是在rip_list里查询 + live_log_origin "$rip_list" "$bg_pids" "$current" "$year" "$month" "$day" "$hour" "$req_id" $rbplus + wait + log_merge $rip_list $req_id + + cat origin.log | grep -q $req_id + # 判断是否找到新的 request id 相关日志 + if [[ $? -eq 0 ]]; then + echo -e "${c_bib}找到了合并回源新的 request id: $req_id - 后续将使用该 ID 继续搜索${c_e}" + cat origin.log | grep $req_id | sort -t'"' -n -k2 -k6 > log && mv log origin.log + cat origin.log | while read line; do + echo -e "${c_big}\n$level Origin Log:${c_by} searching $label_en ...... $merge_ip${c_e}" + out=${line} + log_format "$out" + done + origin_flg=0 + # 如果 Origin 日志找到了,则从日志中获取 upstream ip 和 port + log_version --uapflv origin.log + # 没找到,则输出提示信息 + else + echo -e "${c_bp}\n[IDS-129] [ORIGIN-$level]: 回上层IP是$upstream:$port,在uni_origin.log中未查询到$req_id,退出...${c_e}" + exit 129 + fi + fi + # 如果边缘Access没有找到request id对应的日志信息 + else + echo -e "\n${c_br}[IDS-130] [ACCESS-$level]: 无法在$label_en($edge)上找到$req_id,请确认输入的request ID,访问时间或者节点信息是准确的。\n${c_e}" + exit 130 + fi + fi +} + +#======================================================================================= +# 功能:分层级查询日志链 +# 入参:level, live_log_proc +# 出参:None +function live_log_search() { + # ---------------------------------------------------------------------------- + # 1. 边缘日志搜索处理 + level="Edge" && live_log_proc $level + # 脚本运行到此处,说明access/origin日志可能都已经找到,upstream和prot也已经获得,需要检查upstream是否是天翼 IP + ips $upstream > ips.log 2>&1 + # 如果不是天翼IP,则表示该IP可能是源站地址 + [[ $? -ne 0 ]] && { echo -e "${c_bp}\n[IDS-131] 请确认${c_bc} $upstream:$port ${c_bp}是否是源站地址。\n${c_e}"; exit 131; } || cd $trash + # 如果是天翼IP则表示下一层级还会有相关日志,所以需要找到下一层级的label_en以及对应的rip + label_en=`cat ips.log | grep "所属资源池" | awk -F '(' '{print $2}' | awk -F ')' '{print $1}'` + width=`tput cols` && echo + for i in `seq $width`; do + echo -ne "${c_bb}=${c_e}" + done + echo -e "\n${c_bb}Next Hop IP: $upstream:$port\n${c_e}" + + # ---------------------------------------------------------------------------- + # 2. 父层日志搜索处理 + level="Center" && live_log_proc $level + # 脚本运行到此处,说明access/origin日志可能都已经找到,upstream和prot也已经获得,需要检查upstream是否是天翼 IP + ips $upstream > ips.log 2>&1 + # 如果不是天翼 IP,则表示该 IP 可能是源站地址 + [[ $? 
-ne 0 ]] && { echo -e "${c_bp}\n[IDS-132] 请确认${c_bc} $upstream:$port ${c_bp}是否是源站地址。\n${c_e}"; exit 132; } || cd $trash + # 如果是天翼IP则表示下一层级还会有相关日志,所以需要找到下一层级的label_en以及对应的rip + label_en=`cat ips.log | grep "所属资源池" | awk -F '(' '{print $2}' | awk -F ')' '{print $1}'` + width=`tput cols` && echo + for i in `seq $width`; do + echo -ne "${c_bb}=${c_e}" + done + echo -e "\n${c_bb}Next Hop IP: $upstream:$port\n${c_e}" + + # ---------------------------------------------------------------------------- + # 3. 中心日志搜索处理 + level="Nation" && live_log_proc $level + # 脚本运行到此处,说明access/origin日志可能都已经找到,upstream和prot也已经获得,需要检查upstream是否是天翼 IP + ips $upstream > ips.log 2>&1 + # 如果不是天翼 IP,则表示该 IP 可能是源站地址 + [[ $? -ne 0 ]] && { echo -e "${c_bp}\n[IDS-133] 请确认${c_bc} $upstream:$port ${c_bp}是否是源站地址。\n${c_e}"; exit 133; } || cd $trash + # 如果是天翼IP则表示下一层级还会有相关日志,所以需要找到下一层级的label_en以及对应的rip + label_en=`cat ips.log | grep "所属资源池" | awk -F '(' '{print $2}' | awk -F ')' '{print $1}'` + width=`tput cols` && echo + for i in `seq $width`; do + echo -ne "${c_bb}=${c_e}" + done + echo -e "\n${c_bb}Next Hop IP: $upstream:$port\n${c_e}" + + # ---------------------------------------------------------------------------- + # 4. 容错日志搜索处理 + level="Unknown" && live_log_proc $level + # 脚本运行到此处,说明access/origin日志可能都已经找到,upstream和prot也已经获得,需要检查upstream是否是天翼 IP + ips $upstream > ips.log 2>&1 + # 如果不是天翼 IP,则表示该 IP 可能是源站地址 + [[ $? -ne 0 ]] && { echo -e "${c_bp}\n[IDS-134] 请确认${c_bc} $upstream:$port ${c_bp}是否是源站地址。\n${c_e}"; exit 134; } || cd $trash + # 如果是天翼IP则表示下一层级还会有相关日志,所以需要找到下一层级的label_en以及对应的rip + label_en=`cat ips.log | grep "所属资源池" | awk -F '(' '{print $2}' | awk -F ')' '{print $1}'` + width=`tput cols` && echo + for i in `seq $width`; do + echo -ne "${c_bb}=${c_e}" + done + echo -e "\n${c_bb}Next Hop IP: $upstream:$port\n${c_e}" +} + +#======================================================================================= +# 功能:格式化输出日志,使其字段更容易看懂,输入第一个参数是日志全文 +# 入参:access.log, origin.log, v03, ov06, OUT_ACC1, OUT_ACC2, OUT_ACC3, OUT_ORI1, OUT_ORI2 +# 出参:格式化输出 +function log_format() { + log=$1 + log_v=`echo $log | awk -F '"' '{print $1}'` + log_fs='v03 ov06 OUT_ACC1 OUT_ACC2 OUT_ACC3 IN_ACC1 IN_ACC2 IN_ACC3 OUT_ORI1 OUT_ORI2 OUT_ORI3 IN_ORI1 IN_ORI2 IN_ORI3' + for log_f in $log_fs; do + if [[ $log_v == $log_f ]]; then + eval log_f=\$$log_f + echo $log_f | awk -F '"' '{for(i=1;i<=NF;i++) printf "%s %s\"", "\033[1;3;36m"i"\033[0m", $i} END{print ""}' + fi + done + width=`tput cols` + echo -n " " + for i in `seq $((width-8))`; do + echo -ne "${c_bip}^${c_e}" + done + echo -n " " + echo $log | awk -F '"' '{for(i=1;i<=NF;i++) printf "%s %s\"", "\033[1;3;36m"i"\033[0m", $i} END{print ""}' +} + +function logfile() { + if [[ -d $trash ]]; then + echo -e "${c_br}[IDS-135] 对于同一个用户,同一时间只能运行一个实例,请重新运行...${c_e}" + exit 135 + else + mkdir -p $trash + cd $trash && cd .. 
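+        # 回到该用户的 trash 根目录后做例行清理:先删除散落的普通文件,
+        # 再按修改时间只保留最近的 29 个运行目录(超出时循环删除最旧的一个),
+        # 防止历史运行留下的临时目录无限堆积;最后进入本次的 $trash 并创建 ids 标记文件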
+ docs=`ls` + for doc in $docs; do + [[ -f $doc ]] && rm -rf $doc + done + folders=`ls -t` + while [[ `echo $folders | awk '{print NF}'` -gt 29 ]]; do + folder=`ls -t | tail -1` + rm -rf $folder + folders=`ls -t` + done + cd $trash && touch ids + fi +} + +# ---------------------------------------------------------------------------- +# 自定义颜色显示 +c_br='\e[1;31m' # bold red +c_bg='\e[1;32m' # bold green +c_by='\e[1;33m' # bold yellow +c_bb='\e[1;34m' # bold blue +c_bp='\e[1;35m' # bold purple +c_bc='\e[1;36m' # bold cyan +c_bir='\e[1;3;31m' # * bold italic red +c_big='\e[1;3;32m' # bold italic cyan +c_bib='\e[1;3;34m' # * bold italic cyan +c_bip='\e[1;3;35m' # bold italic cyan +c_bic='\e[1;3;36m' # bold italic cyan +c_e='\e[0m' # reset +# some initializing action +TS=`date +%s%N` # * +ts=`date +%s` # * +stty erase '^H' # * 允许回退键删除内容 +req_id='' # * 接收入参 +year=0 # * year +month=0 # * month +day=0 # * day +hour=0 # * hour +label_en='' # * 节点英文标签 +number=0 # * label_en 中的行数 +label='' # single label name in English +prod_type=1 # * 产品类型 +flg=0 # signify if get CTC group successfully, 0 - NG and 1 - OK +access_flg=0 # * 是否在access日志中找到reqid,0表示未找到,1表示找到了 +origin_flg=0 # * 是否在origin日志中找到reqid,0表示未找到,1表示找到了 +time_range='' # * input: the specified time to search logs +range_d=0 # * days between now and time_range +range_s=0 # * seconds between now and time_range +current='yes' # * 判断输入的时间是否是当前时间,默认是yes +CT=60 # * ssh connection timeout +bg_pids='' # * pid lists which run in background +cdn_access_log='/home/log/cluster_gateway_log' # * +cdn_origin_log='/home/log/trafficserver' # * +toolbox='/usr/local/script/fanmf11' # * +data='/usr/local/script/fanmf11/data' # * +host=`whoami` # * 判断执行用户 +trash="/usr/local/script/fanmf11/trash/$host/$TS" # * 每个用户的临时文件存放处 +access_ip='0.0.0.0' +origin_ip='0.0.0.0' +upstream='0.0.0.0' +port='0' +upstream_and_port='0.0.0.0:0' + +livelog='/home/log/cluster_live_log' +liveatslog='/home/log/trafficserver' + +v03='$version"$timeLocal"$request_id"$httpCode"$timestamp"$respondTime"$rwt_time"$wwt_time"$firstDur"$finalize_error_code"$serverIp"$destPort"$clientIp"$clientPort"$method"$protocol"$channel"$url"$httpVersion"$requestBytes"$sent_http_content_length"$bodyBytes"$body_sent"$upstreamAddr"$upstream_status"$mesc"$status"$upstreamIp"$upstream_http_ctl_server_code"$httpRange"$sent_http_content_range"$fileType"$referer"$Ua"$proxyIp"$content_type"$fft_time"$via"$real_client_ip"$attack_type"$dysta"$request_body_length"$ssl_time"$extra1"$extra2"$extra3"$extra4"$extra5"$extra6"$extra7"$extra8"$extra9"$extra10"$extra11"$extra12"$extra13"$extra14"$extra15"$extra16"$extra17"$extra18"$extra19"$extra20"$extra21"$extra22"$extra23"$extra24"$extra25"$extra26"$extra27"$extra28"$extra29"$extra30"$extra31"$extra32"$extra33"$extra34"$extra35"$extra36"$extra37"$extra38"$extra39"$extra40"$extra41"$extra42"$extra43"$extra44"$extra45"$extra46"$extra47"$extra48"$extra49"$extra50"$ex1"$ex2"$ex3"$ex4"$ex5"$ex6"$ex7"$ex8"$ex9"$ex10"$ex11"$ex12"$ex13"$ex14"$ex15"$ex16"$ex17"$ex18"$ex19"$ex20"$ex21"$ex22"$ex23"$ex24"$ex25"$ex26"$ex27"$ex28"$ex29"$ex30"$ex31"$ex32"$ex33"$ex34"$ex35"$ex36"$ex37"$ex38"$ex39"$ex40"$ex41"$ex42"$ex43"$ex44"$ex45"$ex46"$ex47"$ex48"$ex49"$ex50' 
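+# 示意说明:上面的 v03 等模板描述了各版本日志以双引号 " 为分隔符的字段顺序,
+# 例如 v03 中 $httpCode 是第 4 个字段,与后文 logcombo.awk / logcommon.awk 中
+# 访问日志状态码取第 4 列相吻合;log_format 也是按同样的分隔符给真实日志逐字段
+# 编号输出,方便与模板逐列对照。下面是一个假设性的最小示例函数(demo_v03_field,
+# 脚本中不会调用),演示按序号取出某个字段,仅作说明:
+#   用法示例:demo_v03_field "$(tail -1 access.log)" 4    # 取最后一行日志的状态码
+function demo_v03_field() {
+    # $1 为一行以 " 分隔的日志,$2 为字段序号
+    echo "$1" | awk -F '"' -v n="$2" '{print $n}'
+}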
+ov06='$version"$cqtn"$request_id"$sct"$firstDur"$mesc_milisecond"$clientIp"$nhi"$nhp"$httpCode"$pssc"$cquup"$bodyBytes"$sscl"$rwtms"$sec"$Range_cqh"$Content-Range_psh"$If-Modified-Since_psh"$If-Range_psh"$Via_psh"$dqrtt"$dqrcd"$dqnst"$Host_pqh"$pqsn"$cwr"$pls"$X-Forwarded-For_pqh"$requestBytes"$cqhl"$pqhl"$cqhm"$httpVersion"$Referer_pqh"$User-Agent_pqh"$Cookie_pqh"$serverIp"$pqsp"$protocol"$Content-Type_psh"$Last-Modified_psh"$timestamp"$channel"$server_ssh"$cqtx"$sent_http_content_length"$oqup"$oquq"$accid"$extra2"$extra3"$extra4"$extra5"$extra6"$extra7"$extra8"$extra9"$extra10"$extra11"$extra12' +OUT_ACC1='$version"$protocol"$connect_type"$request_from"$requestTime"$timestamp"$respondTime"$clientIpPort"$connect_time"$publish_play_time"$firstDur"$tcpinfo_rtt"$tcpinfo_rttvar"$proxyIp"$method"$channel"$uri"$appName"$stream"$httpVersion"$url"$httpCode"$recvBytes"$bodyBytes"$upstream_bytes_sent"$upstream_bytes_received"$destIpPort"$hostname"$status"$upstreamAddr"$playDur"$remote_user"$referer"$Ua"$fileType"$httpRange"$http_cookie"$connTag"$firstTag"$connect_status"$ex1"$ex2"$ex3"$ex4"$ex5"$ex6"$ex7"$ex8"$ex9"$ex10"$ex11"$ex12"$ex13"$ex14"$ex15"$ex16"$ex17"$ex18"$ex19"$ex20"$ex21"$ex22"$ex23"$ex24"$ex25' +OUT_ACC2='$version"$protocol"$connect_type"$request_from"$requestTime"$timestamp"$respondTime"$clientIp"$clientPort"$connect_time"$publish_play_time"$firstDur"$tcpinfo_rtt"$tcpinfo_rttvar"$proxyIp"$method"$channel"$uri"$appName"$stream"$httpVersion"$url"$httpCode"$recvBytes"$bodyBytes"$upstream_bytes_sent"$upstream_bytes_received"$serverIp"$destPort"$hostname"$status"$upstreamAddr"$playDur"$remote_user"$referer"$Ua"$fileType"$httpRange"$http_cookie"$request_id"$firstTag"$connect_status"$time_local"$server_rip"$rwt_time"$wwt_time"$relay_url"$session_id"$finalize_error_code"$body_bytes_sent"$response_header_len"$request_body_len"$request_header_len"$sent_http_content_length"$upstream_response_time"$sdtfrom"$ext1"$ext2"$ext3"$ext4"$ext5"$ext6"$ext7"$ext8"$ext9"$ext10"$ext11"$ext12"$ext13"$ext14"$ext15"$ext16"$ext17"$ext18"$ext19"$ext20"$ext21"$ext22"$ext23"$ext24"$ext25"$ex1"$ex2"$ex3"$ex4"$ex5"$ex6"$ex7"$ex8"$ex9"$ex10"$ex11"$ex12"$ex13"$ex14"$ex15"$ex16"$ex17"$ex18"$ex19"$ex20"$ex21"$ex22"$ex23"$ex24"$ex25' +OUT_ACC3='$version"$timestamp"$protocol"$connect_type"$request_from"$hostname"$serverIp"$destPort"$requestTime"$clientIp"$clientPort"$method"$url"$uri"$channel"$appName"$stream"$httpVersion"$httpCode"$respondTime"$recvBytes"$bodyBytes"$upstream_bytes_sent"$upstream_bytes_received"$response_header_len"$body_bytes_sent"$request_header_len"$request_body_len"$status"$firstDur"$tcpinfo_rtt"$tcpinfo_rttvar"$proxyIp"$referer"$Ua"$fileType"$httpRange"$http_cookie"$firstTag"$finalize_error_code"$request_id"$ext1"$ext2"$ext3"$ext4"$ext5"$ext6"$ext7"$ext8"$ext9"$ext10"$ext11"$ext12"$ext13"$ext14"$ext15"$ext16"$ext17"$ext18"$ext19"$ext20"$ext21"$ext22"$ext23"$ext24"$ext25"$ex1"$ex2"$ex3"$ex4"$ex5"$ex6"$ex7"$ex8"$ex9"$ex10"$ex11"$ex12"$ex13"$ex14"$ex15"$ex16"$ex17"$ex18"$ex19"$ex20"$ex21"$ex22"$ex23"$ex24"$ex25' +IN_ACC1=$OUT_ACC1 && IN_ORI1=$OUT_ACC1 && OUT_ORI1=$OUT_ACC1 +IN_ACC2=$OUT_ACC2 && IN_ORI2=$OUT_ACC2 && OUT_ORI2=$OUT_ACC2 +IN_ACC3=$OUT_ACC3 && IN_ORI3=$OUT_ACC3 && OUT_ORI3=$OUT_ACC3 + + +# ---------------------------------------------------------------------------- +# 正常退出时触发 +trap 'onExit' EXIT + +# 捕获Ctrl+C时触发 +trap 'onCtrlC' INT + + +# ---------------------------------------------------------------------------- +# 只接收一个入参 +if [[ $# -ne 1 ]]; then + echo -e "${c_bc}[IDS-136] 
该脚本工具只接收一个参数——request ID,作为入参,示例如下: ${c_e}" + echo -e "${c_big}1. ids 30b4876065d0be98199db0525530af39 ${c_e}" + echo -e "${c_big}2. ids 63f72079_zj-jiaxing2-13_da2daa84cd2498ae - less than 64bits \n${c_e}" + echo -e "${c_br}注意:对于靠近整点(例如:22:59:01)的日志,查询可能遭遇失败,对应情形可更换日志重新查询\n${c_e}" + exit 136 +fi +req_id=$1 && logfile + +echo $req_id | grep -Eq '_' +# 新款request id +if [[ $? -eq 0 ]]; then + # 获取$label_en, $time_range + node_time_new + # 功能:检查输入的时间范围是否符合格式要求:14天内,不能是未来时间,10位数字 + # 入参:time_range + # 出参:current, year, month, day, hour + time_check $time_range + # 功能:获取 prod_type - 产品类型 + # 入参:None + # 出参:TS, prod_type + prod_type_inp + if [[ $prod_type == '1' ]]; then + # 利用$label_en查找日志链 + cdn_log_search + elif [[ $prod_type == '2' ]]; then + echo -e "${c_br}[IDS-137] 还未支持,退出...${c_e}" + exit 137 + elif [[ $prod_type == '3' ]]; then + echo -e "${c_br}[IDS-138] 还未支持,退出...${c_e}" + exit 138 + elif [[ $prod_type == '4' ]]; then + echo -e "${c_br}[IDS-139] 还未支持,退出...${c_e}" + exit 139 + elif [[ $prod_type == '5.1' ]]; then + echo -e "${c_br}[IDS-140] 还未支持,退出...${c_e}" + exit 140 + elif [[ $prod_type == '5.2' ]]; then + echo -e "${c_br}[IDS-141] 还未支持,退出...${c_e}" + exit 141 + elif [[ $prod_type == '5.3' ]]; then + echo -e "${c_br}[IDS-142] 还未支持,退出...${c_e}" + exit 142 + elif [[ $prod_type == '5.4' ]]; then + echo -e "${c_br}[IDS-143] 还未支持,退出...${c_e}" + exit 143 + elif [[ $prod_type == '5.5' ]]; then + echo -e "${c_br}[IDS-144] 还未支持,退出...${c_e}" + exit 144 + elif [[ $prod_type == '5.6' ]]; then + echo -e "${c_br}[IDS-145] 还未支持,退出...${c_e}" + exit 145 + elif [[ $prod_type == '5.7' ]]; then + echo -e "${c_br}[IDS-146] 还未支持,退出...${c_e}" + exit 146 + else + echo -e "${c_br}[IDS-147] 请按照如上提示,输入正确的产品类型序号,退出...${c_e}" + exit 147 + fi +# 老款request id +else + # 功能:适用于旧版本的reqID,获取$label_en和$time_range + # 入参:None + # 出参:time_range, label_en, edge + node_time_old + # 功能:检查输入的时间范围是否符合格式要求:14天内,不能是未来时间,10位数字 + # 入参:time_range + # 出参:current, year, month, day, hour + time_check $time_range + # 功能:获取 prod_type - 产品类型 + # 入参:None + # 出参:TS, prod_type + prod_type_inp + if [[ $prod_type == '1' ]]; then + # 功能:分层级查询日志链 + # 入参:level, cdn_log_proc + # 出参:None + cdn_log_search + elif [[ $prod_type == '2' ]]; then + live_log_search + elif [[ $prod_type == '3' ]]; then + echo -e "${c_br}[IDS-148] 还未支持,退出...${c_e}" + exit 148 + elif [[ $prod_type == '5.1' ]]; then + cdn_log_search + elif [[ $prod_type == '5.2' ]]; then + cdn_log_search + elif [[ $prod_type == '5.3' ]]; then + cdn_log_search + elif [[ $prod_type == '5.4' ]]; then + cdn_log_search + elif [[ $prod_type == '5.5' ]]; then + cdn_log_search + elif [[ $prod_type == '5.6' ]]; then + cdn_log_search + elif [[ $prod_type == '5.7' ]]; then + cdn_log_search + else + echo -e "${c_br}[IDS-149] 请按照如上提示,输入正确的产品类型序号,退出...${c_e}" + exit 149 + fi +fi + + diff --git a/old/ctc/infos.sh b/old/ctc/infos.sh new file mode 100644 index 0000000..d729d4a --- /dev/null +++ b/old/ctc/infos.sh @@ -0,0 +1,284 @@ +#!/bin/bash + +# 捕获 Ctrl + C 终止整个脚本的运行 +function onCtrlC () { + exec 3>&2 # 3 is now a copy of 2 + exec 2> /dev/null # 2 now points to /dev/null + kill ${bg_pids} ${progress_pid} >/dev/null 2>&1 + sleep 1 # sleep to wait for process to die + exec 2>&3 # restore stderr to saved + exec 3>&- # close saved version + echo + echo -e "${c_bir}Ctrl+C is captured, exiting...\n${c_e}" + exit 100 +} + + +function infos() { + # 判断要查询的域名是否在平台,domain.list文件每小时更新一次 -- task.sh + res=`cat $data/domain.list | grep -w "$domain"` + if [[ $res == '' ]]; then + echo -e 
"${c_br}该域名[$domain]未在天翼平台配置,一个小时内新增的域名无法查询,退出...${c_e}" + echo -e "${c_br}只接收一个参数,请输入要查询的域名\ne.g. infos www.ctyun.cn\n${c_e}" + exit 247 + fi + + # ---------------------------------------------------------------------------------------- + # 获取域名信息 - part 1 + curl "https://dcp.ctcdn.cn/traffic-control-api/v2/access/list?access_version_id=1&domain=$domain" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjYzfQ.XZ_nNtRWoBRcPBM-bHAG_ciohkNh69n4AVHwV755r6Q' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_1.log > domain_info_1.response 2>&1 + + # 判断响应是否200 + cat $trash/domain_info_1.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名-part 1信息失败,退出...${c_e}"; exit 246; } + + python3 /usr/local/script/fanmf11/get_infos.py --domain_info_1 domain_info_1.log $domain + # python3 /home/fanmf11/fanmf11/get_infos.py --domain_info_1 domain_info_1.log $domain + r_code=$? + if [[ $r_code -eq 205 ]]; then + exit 205 + elif [[ $r_code -eq 201 ]]; then + accessid=`cat info.log | awk -F ':' '$1==4 {print $2}'` + curl "https://dcp.ctcdn.cn/traffic-control-api/v2/access/parseDetail?access_id=$accessid" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjYzfQ.XZ_nNtRWoBRcPBM-bHAG_ciohkNh69n4AVHwV755r6Q' -H 'content-type: application/json;charset=UTF-8' -vo domain_map_info.log > domain_map_info.response 2>&1 + # 判断响应是否200 + cat $trash/domain_map_info.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名-map信息失败,退出...${c_e}"; exit 206; } + python3 /usr/local/script/fanmf11/get_infos.py --domain_map_info domain_map_info.log 0 + + elif [[ $r_code -ne 0 ]]; then + echo -e "${c_br}处理域名-part 1信息失败,退出...${c_e}" + exit 242 + fi + + # ---------------------------------------------------------------------------------------- + # 获取域名信息 - part 2 + account=`cat info.log | awk -F ':' '$1==1 {print $2}'` + accid=`cat info.log | awk -F ':' '$1==3 {print $2}'` + + curl 'https://bs.ctcdn.cn/api/v3/clientInfo/searchClientInfo' -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjI1NH0.9Zw66R0R85avd92qzO-3KQ4DJ2zYXL4ght5bH41iTfA' -H 'content-type: application/json;charset=UTF-8' -X POST -d '{"clientInfo":[{"key":"clientCnname", "value": "'$account'"}],"columnList":["openTime", "accountType", "accountResource", "accountEmail"]}' -vo domain_info_2.log > domain_info_2.response 2>&1 + + # 判断响应是否200 + cat $trash/domain_info_2.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 2失败,退出...${c_e}"; exit 245; } + + python3 /usr/local/script/fanmf11/get_infos.py --domain_info_2 domain_info_2.log $accid + # python3 /home/fanmf11/fanmf11/get_infos.py --domain_info_2 domain_info_2.log $accid + [[ $? -ne 0 ]] && { echo -e "${c_br}处理域名-part 2信息失败,退出...${c_e}"; exit 241; } + + # ---------------------------------------------------------------------------------------- + # 获取域名信息 - part 3 + curl "http://bs.ctcdn.cn/api/v3/manageDomain/list?partner=&sales_channel=&status=&productCode=&domain=$domain" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjI1NH0.9Zw66R0R85avd92qzO-3KQ4DJ2zYXL4ght5bH41iTfA' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_3.log > domain_info_3.response 2>&1 + + # 判断响应是否200 + cat $trash/domain_info_3.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? 
-ne 0 ]] && { echo -e "${c_br}获取域名信息-part 3失败,退出...${c_e}"; exit 244; } + + python3 /usr/local/script/fanmf11/get_infos.py --domain_info_3 domain_info_3.log $accid + # python3 /home/fanmf11/fanmf11/get_infos.py --domain_info_3 domain_info_3.log $accid + [[ $? -ne 0 ]] && { echo -e "${c_br}处理域名-part 3信息失败,退出...${c_e}"; exit 240; } + + # ---------------------------------------------------------------------------------------- + # 获取父方案信息 + curl 'https://lasc-new.ctcdn.cn/v1/domain/internal/parent_draft' -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjE4NH0.1Pgfnky-grT0MN2ic04PqMf32BdnF6iN_PMk3kNOzjY' -H 'content-type: application/json;charset=UTF-8' -vo parent.log > parent.response 2>&1 + + # 判断响应是否200 + cat $trash/parent.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取父方案信息失败,退出...${c_e}"; exit 234; } + + # ---------------------------------------------------------------------------------------- + # 获取资源池信息 + curl 'http://rap.ctcdn.cn/v2/rapApi/resourcePoolToResourceGroup' -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjE1N30.IXVEAglOYm8bUInW4uXqDugBnd6POouBK8q4z_HItns' -H 'content-type: application/json;charset=UTF-8' -vo respool.log > respool.response 2>&1 + + # 判断响应是否200 + cat $trash/respool.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取资源池信息失败,退出...${c_e}"; exit 233; } + + # ---------------------------------------------------------------------------------------- + # 获取域名信息 - part 4 + curl "https://confmanager.ctcdn.cn/v1/domain/conf/query_union?precise_query=1&domain=$domain" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjQxNn0.KEFh_yg3J1y8sL7s3X_8jIR8GGq88A89b7J5YhVYVf8' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_4.log > domain_info_4.response 2>&1 + + # 判断响应是否200 + cat $trash/domain_info_4.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 4失败,退出...${c_e}"; exit 243; } + + python3 /usr/local/script/fanmf11/get_infos.py --domain_info_4 domain_info_4.log $accid + # python3 /home/fanmf11/fanmf11/get_infos.py --domain_info_4 domain_info_4.log $accid + r_code=$? + if [[ $r_code -eq 204 ]]; then + # ---------------------------------------------------------------------------------------- + # 获取域名信息 - part 5 + curl "https://lasc-new.ctcdn.cn/v1/domain/conf/op_query_union?page_size=20&page=1&domain=$domain" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjE4NH0.1Pgfnky-grT0MN2ic04PqMf32BdnF6iN_PMk3kNOzjY' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_5.log > domain_info_5.response 2>&1 + + # 判断响应是否200 + cat $trash/domain_info_5.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 5失败,退出...${c_e}"; exit 235; } + + python3 /usr/local/script/fanmf11/get_infos.py --domain_info_5 domain_info_5.log $accid + # python3 /home/fanmf11/fanmf11/get_infos.py --domain_info_5 domain_info_5.log $accid + [[ $? 
-ne 0 ]] && { echo -e "${c_br}处理域名-part 5信息失败,退出...${c_e}"; exit 237; } + + # ---------------------------------------------------------------------------------------- + # 获取域名信息 - part 6 + domain_id=`cat info.log | awk -F ':' '$1==4 {print $2}'` + curl "https://lasc-new.ctcdn.cn/v1/domain/internal/domain_config?domain_id=$domain_id" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjE4NH0.1Pgfnky-grT0MN2ic04PqMf32BdnF6iN_PMk3kNOzjY' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_6.log > domain_info_6.response 2>&1 + + # 判断响应是否200 + cat $trash/domain_info_6.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 6失败,退出...${c_e}"; exit 238; } + + python3 /usr/local/script/fanmf11/get_infos.py --domain_info_6 domain_info_6.log $accid + # python3 /home/fanmf11/fanmf11/get_infos.py --domain_info_6 domain_info_6.log $accid + [[ $? -ne 0 ]] && { echo -e "${c_br}处理域名-part 6信息失败,退出...${c_e}"; exit 236; } + + elif [[ $r_code -ne 0 ]]; then + echo -e "${c_br}处理域名-part 4信息失败,退出...${c_e}" + exit 239 + else + exit 0 + fi +} + +# map.sh用如下函数获取解析组信息 +function map() { + # ---------------------------------------------------------------------------------------- + # 获取域名信息 - part 1 - 其中包括解析组信息,但有可能是重叠域名 + curl "https://dcp.ctcdn.cn/traffic-control-api/v2/access/list?access_version_id=1&domain=$domain" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjYzfQ.XZ_nNtRWoBRcPBM-bHAG_ciohkNh69n4AVHwV755r6Q' -H 'content-type: application/json;charset=UTF-8' -vo map_info.log > map_info.response 2>&1 + + # 判断响应是否200 + cat map_info.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名MAP信息失败,退出...${c_e}"; exit 232; } + + # 处理接口获取的信息,拿到正确的解析组 + python3 /usr/local/script/fanmf11/get_infos.py --map_info map_info.log $domain + # python3 /home/fanmf11/fanmf11/get_infos.py --map_info map_info.log $domain + r_code=$? + if [[ $r_code -eq 205 ]]; then + exit 205 + elif [[ $r_code -eq 201 ]]; then + accessid=`cat info.log | awk -F ':' '$1==4 {print $2}'` + curl "https://dcp.ctcdn.cn/traffic-control-api/v2/access/parseDetail?access_id=$accessid" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjYzfQ.XZ_nNtRWoBRcPBM-bHAG_ciohkNh69n4AVHwV755r6Q' -H 'content-type: application/json;charset=UTF-8' -vo domain_map_info.log > domain_map_info.response 2>&1 + # 判断响应是否200 + cat $trash/domain_map_info.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名-map信息失败,退出...${c_e}"; exit 206; } + python3 /usr/local/script/fanmf11/get_infos.py --domain_map_info domain_map_info.log 1 + + elif [[ $r_code -ne 0 ]]; then + echo -e "${c_br}处理域名MAP信息失败,退出...${c_e}" + exit 231 + fi +} + +function parent() { + # ---------------------------------------------------------------------------------------- + # 获取父方案信息 + curl 'https://lasc-new.ctcdn.cn/v1/domain/internal/parent_draft' -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjE4NH0.1Pgfnky-grT0MN2ic04PqMf32BdnF6iN_PMk3kNOzjY' -H 'content-type: application/json;charset=UTF-8' -vo parent.log > parent.response 2>&1 + + # 判断响应是否200 + cat $trash/parent.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? 
-ne 0 ]] && { echo -e "${c_br}获取父方案信息失败,退出...${c_e}"; exit 234; } + + # ---------------------------------------------------------------------------------------- + # 获取域名信息 - part 4 + accid=`cat ../$parent_dir/info.log | awk -F ':' '$1==3 {print $2}'` + curl "https://confmanager.ctcdn.cn/v1/domain/conf/query_union?precise_query=1&domain=$domain" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjQxNn0.KEFh_yg3J1y8sL7s3X_8jIR8GGq88A89b7J5YhVYVf8' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_4.log > domain_info_4.response 2>&1 + + # 判断响应是否200 + cat $trash/domain_info_4.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 4失败,退出...${c_e}"; exit 243; } + + python3 /usr/local/script/fanmf11/get_infos.py --parent_info_4 domain_info_4.log $accid + # python3 /home/fanmf11/fanmf11/get_infos.py --domain_info_4 domain_info_4.log $accid + r_code=$? + if [[ $r_code -eq 204 ]]; then + # ---------------------------------------------------------------------------------------- + # 获取域名信息 - part 5 + curl "https://lasc-new.ctcdn.cn/v1/domain/conf/op_query_union?page_size=20&page=1&domain=$domain" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjE4NH0.1Pgfnky-grT0MN2ic04PqMf32BdnF6iN_PMk3kNOzjY' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_5.log > domain_info_5.response 2>&1 + + # 判断响应是否200 + cat $trash/domain_info_5.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 5失败,退出...${c_e}"; exit 235; } + + python3 /usr/local/script/fanmf11/get_infos.py --parent_info_5 domain_info_5.log $accid + # python3 /home/fanmf11/fanmf11/get_infos.py --domain_info_5 domain_info_5.log $accid + [[ $? -ne 0 ]] && { echo -e "${c_br}处理域名-part 5信息失败,退出...${c_e}"; exit 237; } + + # ---------------------------------------------------------------------------------------- + # 获取域名信息 - part 6 + domain_id=`cat info.log | awk -F ':' '$1==2 {print $2}'` + curl "https://lasc-new.ctcdn.cn/v1/domain/internal/domain_config?domain_id=$domain_id" -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjE4NH0.1Pgfnky-grT0MN2ic04PqMf32BdnF6iN_PMk3kNOzjY' -H 'content-type: application/json;charset=UTF-8' -vo domain_info_6.log > domain_info_6.response 2>&1 + + # 判断响应是否200 + cat $trash/domain_info_6.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + [[ $? -ne 0 ]] && { echo -e "${c_br}获取域名信息-part 6失败,退出...${c_e}"; exit 238; } + + python3 /usr/local/script/fanmf11/get_infos.py --parent_info_6 domain_info_6.log $accid + # python3 /home/fanmf11/fanmf11/get_infos.py --domain_info_6 domain_info_6.log $accid + [[ $? -ne 0 ]] && { echo -e "${c_br}处理域名-part 6信息失败,退出...${c_e}"; exit 236; } + + elif [[ $r_code -ne 0 ]]; then + echo -e "${c_br}处理域名-part 4信息失败,退出...${c_e}" + exit 239 + else + : + fi +} + +# 自定义颜色显示 +c_br='\e[1;31m' # bold red +c_bg='\e[1;32m' # bold green +c_by='\e[1;33m' # bold yellow +c_bb='\e[1;34m' # bold blue +c_bp='\e[1;35m' # bold purple +c_bc='\e[1;36m' # bold cyan +c_bir='\e[1;3;31m' # * bold italic red +c_bib='\e[1;3;34m' # * bold italic cyan +c_bic='\e[1;3;36m' # bold italic cyan +c_e='\e[0m' # reset + +# 初始化变量 +TS=`date +%s%N` +toolbox='/usr/local/script/fanmf11' # * +data='/usr/local/script/fanmf11/data' # * +host=`whoami` # * 判断执行用户 +trash="/usr/local/script/fanmf11/trash/$host/$TS" # * 每个用户的临时文件存放处 + +if [[ -d $trash ]]; then + echo -e "${c_br}对于同一个用户,同一时间只能运行一个实例,请重新运行...${c_e}" + exit 245 +else + mkdir -p $trash + cd $trash && cd .. 
+ docs=`ls` + for doc in $docs; do + [[ -f $doc ]] && rm -rf $doc + done + folders=`ls -t` + while [[ `echo $folders | awk '{print NF}'` -gt 29 ]]; do + folder=`ls -t | tail -1` + rm -rf $folder + folders=`ls -t` + done + cd $trash && touch infos +fi + +# 捕获Ctrl+C时触发 +trap 'onCtrlC' INT + +# ---------------------------------------------------------------------------------------- +# 判断入参数量是否合法 +if [[ $# -eq 1 ]]; then + domain=$1 + infos +elif [[ $# -eq 3 && $1 == '--map' ]]; then + domain=$2 && map_dir=$3 + map + cp map.log ../$map_dir/ + cp info.log ../$map_dir/ +elif [[ $# -eq 3 && $1 == '--parent' ]]; then + domain=$2 && parent_dir=$3 + parent + cp ./cmap ../$parent_dir/ +else + echo -e "${c_br}只接收一个参数,请输入要查询的域名\ne.g. infos www.ctyun.cn\n${c_e}" + exit 249 +fi + diff --git a/old/ctc/ips.sh b/old/ctc/ips.sh new file mode 100644 index 0000000..653509b --- /dev/null +++ b/old/ctc/ips.sh @@ -0,0 +1,396 @@ +#!/bin/bash +# 功能实现:判定一个IP或者组是否是天翼平台的 +# 依赖文件:ip.group/lakes +# 存在问题: +# 整体逻辑: +# 1. 输入有四种可能:IPv4/IPv6/英文标签/中文标签 +# 2. 以上四种可能,最终使用一个函数来完成——ipvx_check +# 3. ipvx_check的作用就是检查一个IP是否属于天翼 +# 4. 首先根据IP找出所在的节点英文标签,再根据英文标签后缀不同,进行逐一判别 +# 5. ip.group文件: +# 对于缓存服务器(边缘/父层/中心)节点 - 第一列是RIP,第二列是英文标签,第三列是VIP,第八列是LVS(全都一样) +# 对于LVS节点 - 第一列是IP。第二列是英文标签 +# 对于其他节点,同LVS +# 6. lakes文件:文件开头有介绍每一列的含义 +# 7. 英文标签的特点可以通过`cat ip.group | awk '{print $2}' | awk -F '_' '{print $4}' | sort | uniq`来筛选 + +# 自定义颜色显示 +c_br="\e[1;31m" # bold red +c_bg="\e[1;32m" # bold green +c_by="\e[1;33m" # bold yellow +c_bp="\e[1;35m" # bold purple +c_iy="\e[3;33m" # italic yellow +c_bir='\e[1;3;31m' # * bold italic red +c_big='\e[1;3;32m' # bold italic cyan +c_bib='\e[1;3;34m' # * bold italic cyan +c_bip='\e[1;3;35m' # bold italic cyan +c_bic='\e[1;3;36m' # bold italic cyan +c_e="\e[0m" # reset + +# 使用说明 +function usage { + echo -e "${c_bg}1. 查找V(R)IP/集群中文名/集群英文名是否是归属天翼云平台${c_e}" + echo -e "${c_bg}2. 
查询IP对应的内网IP和主机名,只支持IPv4地址${c_e}" + echo -e "${c_iy}实例:${c_e}" + echo -e "${c_iy} ips 59.56.177.149${c_e}" + echo -e "${c_iy} ips ct_fj_fuzhou3_e1${c_e}" + echo -e "${c_iy} ips 福州3${c_e}" + echo -e "${c_iy} ips -m 59.56.177.149${c_e}\n" + echo -e "${c_bp}查询内网IP对应关系功能,因线上IPv6的机器没有加白,暂不支持获取IPv6主机内网IP...${c_e}" + exit 1 +} + +# 如果输入是IP,则判断该IP是否属于天翼 +# 入参-1:IP +# 入参-2:flg -- 0/1/2 +# flg = 0 -- 脚本输入的是IPv4或者IPv6 +# flg = 1 -- 脚本输入的是英文节点名 +# flg = 2 -- 脚本输入的是中文节点名 +function ipvx_check() { + ipvx=$1 + if [[ $flg -eq 0 ]]; then + # 同一个IP可能会过滤出来多个英文节点 + labels=`cat $data/ip.group | fgrep -w $ipvx | awk '{print $2}' | sort | uniq` + elif [[ $flg -eq 1 ]]; then + # 确保过滤出来的就是输入的节点名,排除其他含有相同IP的节点 + labels=`cat $data/ip.group | fgrep -w $ipvx | awk '$2=="'$tbd'" {print $2}' | sort | uniq` + elif [[ $flg -eq 2 ]]; then + # 中文节点名可能对应多个不同节点,全部输出 + labels=`cat $data/ip.group | fgrep -w $ipvx | awk '$2=="'$label_single'" {print $2}' | sort | uniq` + fi + [[ $labels == '' ]] && { echo -e "${c_br}$tbd 不是天翼平台的节点/IP,退出...${c_e}"; exit 44; } + for label in $labels; do + # 根据后缀输出 + # 后缀如果是 -- e/c/n/lvs,则输出对应的资源池,vip,rip,lvs信息,如果一个lvs对应多个边缘节点,则全部输出 + # 其他后缀则只输出对应节点的信息 + postfix=`echo $label | awk -F'_' '{print $4}'` + if [[ $postfix =~ ^c[0-9]{0,2}$ ]]; then + center_name_en=$label + rip=`cat $data/ip.group | awk '$2=="'$center_name_en'"' | awk '{print $1}' | sort` + vip=`cat $data/ip.group | awk '$2=="'$center_name_en'"' | awk '{print $3}' | sort | uniq` + lvs_name=`cat $data/ip.group | awk '$2=="'$center_name_en'"' | awk '{print $8}' | sort | uniq | awk -F ',' '{print $1}'` + lvs=`cat $data/ip.group | awk '$2=="'$lvs_name'"' | awk '{print $1}' | sort` + resource=`cat $data/lakes | grep $center_name_en | awk '{print $6}' | sort | uniq` + center_name_cn=`cat $data/lakes | grep $center_name_en | awk '{print $11}' | sort | uniq` + + echo -e "$c_bp[$center_name_en: ${c_bg}RIP]$c_e" + echo $rip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'"&&"'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + echo + + echo -e "$c_bp[$center_name_en: ${c_bg}VIP]$c_e" + echo $vip | awk -F ',' '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + echo -e "$c_bp[$lvs_name: ${c_bg}IP]$c_e" + echo $lvs | awk '{for(i=1;i<=NF;i++) print " ", $i}' + echo + + echo -e "$c_by[$center_name_cn($center_name_en)所属资源池]$c_e" + echo $resource | awk '{for(i=1;i<=NF;i++) print " ", $i} END{print ""}' + + elif [[ $postfix =~ ^dns[0-9]{0,2}$ ]]; then + dns_label=$label + dnsip=`cat $data/ip.group | awk '$2=="'$dns_label'"' | awk '{print $1}' | sort` + dnsrip=`cat $data/ip.group | awk '$2=="'$dns_label'"' | awk '{print $1}' | sort` + dnsvip=`cat $data/ip.group | awk '$2=="'$dns_label'"' | awk '{print $3}' | sort | uniq` + + echo -e "$c_bp[$dns_label: ${c_bg}RIP]$c_e" + echo $dnsip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + echo -e "$c_bp[$dns_label: ${c_bg}VIP]$c_e" + echo $dnsvip | awk -F ',' '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + elif [[ $postfix =~ ^e[0-9]{0,2}$ ]]; then + edge_name_en=$label + rip=`cat $data/ip.group | awk '$2=="'$edge_name_en'"' | awk '{print $1}' | sort` + vip=`cat $data/ip.group | awk '$2=="'$edge_name_en'"' | awk '{print $3}' | sort | uniq` + lvs_name=`cat $data/ip.group | awk '$2=="'$edge_name_en'"' | awk '{print $8}' | sort | uniq | awk -F ',' '{print $1}'` + lvs=`cat $data/ip.group | awk 
'$2=="'$lvs_name'"' | awk '{print $1}' | sort` + resource=`cat $data/lakes | grep $edge_name_en | awk '{print $6}' | sort | uniq` + edge_name_cn=`cat $data/lakes | grep $edge_name_en | awk '{print $11}' | sort | uniq` + + echo -e "$c_bp[$edge_name_en: ${c_bg}RIP]$c_e" + echo $rip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + echo + + echo -e "$c_bp[$edge_name_en: ${c_bg}VIP]$c_e" + echo $vip | awk -F ',' '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + echo -e "$c_bp[$lvs_name: ${c_bg}IP]$c_e" + echo $lvs | awk '{for(i=1;i<=NF;i++) print " ", $i}' + echo + + echo -e "$c_by[$edge_name_cn($edge_name_en)所属资源池]$c_e" + echo $resource | awk '{for(i=1;i<=NF;i++) print " ", $i} END{print ""}' + + elif [[ $postfix =~ ^lvs[0-9]{0,2}$ ]]; then + lvs_name=$label + lvs=`cat $data/ip.group | awk '$2=="'$lvs_name'"' | awk '{print $1}' | sort` + level_unknown=`cat $data/ip.group | awk '$8 ~ "'$lvs_name'"' | awk '{print $2}' | sort | uniq` + for unknown_en in $level_unknown; do + rip=`cat $data/ip.group | awk '$8 ~ "'$lvs_name'" && $2 == "'$unknown_en'"' | awk '{print $1}' | sort` + vip=`cat $data/ip.group | awk '$8 ~ "'$lvs_name'" && $2 == "'$unknown_en'"' | awk '{print $3}' | sort | uniq` + resource=`cat $data/lakes | grep $unknown_en | awk '{print $6}' | sort | uniq` + unknown_cn=`cat $data/lakes | grep $unknown_en | awk '{print $11}' | sort | uniq` + + echo -e "$c_bp[$unknown_en: ${c_bg}RIP]$c_e" + echo $rip | awk '{for(i=1;i<=NF;i++) print " ", $i}' + echo + + echo -e "$c_bp[$unknown_en: ${c_bg}VIP]$c_e" + echo $vip | awk -F ',' '{for(i=1;i<=NF;i++) print " ", $i}' + + echo -e "$c_by[$unknown_cn($unknown_en)所属资源池]$c_e" + echo $resource | awk '{for(i=1;i<=NF;i++) print " ", $i} END{print ""}' + done + echo -e "$c_bp[$lvs_name: ${c_bg}IP]$c_e" + echo $lvs | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + echo + + elif [[ $postfix =~ ^m[0-9]{0,2}$ ]]; then + mgt_label=$label + mgtip=`cat $data/ip.group | awk '$2=="'$mgt_label'"' | awk '{print $1}' | sort` + + echo -e "$c_bp[$mgt_label: ${c_bg}IP]$c_e" + echo $mgtip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + echo + + elif [[ $postfix =~ ^mysql[0-9]{0,2}$ ]]; then + mysql_label=$label + mysqlip=`cat $data/ip.group | awk '$2=="'$mysql_label'"' | awk '{print $1}' | sort` + + echo -e "$c_bp[$mysql_label: ${c_bg}IP]$c_e" + echo $mysqlip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + elif [[ $postfix =~ ^n[0-9]{0,2}$ ]]; then + nation_name_en=$label + rip=`cat $data/ip.group | awk '$2=="'$nation_name_en'"' | awk '{print $1}' | sort` + vip=`cat $data/ip.group | awk '$2=="'$nation_name_en'"' | awk '{print $3}' | sort | uniq` + lvs_name=`cat $data/ip.group | awk '$2=="'$nation_name_en'"' | awk '{print $8}' | sort | uniq | awk -F ',' '{print $1}'` + lvs=`cat $data/ip.group | awk '$2=="'$lvs_name'"' | awk '{print $1}' | sort` + resource=`cat $data/lakes | grep $nation_name_en | awk '{print $6}' | sort | uniq` + nation_name_cn=`cat $data/lakes | grep $nation_name_en | awk '{print $11}' | sort | uniq` + + echo -e "$c_bp[$nation_name_en: ${c_bg}RIP]$c_e" + echo $rip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + echo + + echo -e 
"$c_bp[$nation_name_en: ${c_bg}VIP]$c_e" + echo $vip | awk -F ',' '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + echo -e "$c_bp[$lvs_name: ${c_bg}IP]$c_e" + echo $lvs | awk '{for(i=1;i<=NF;i++) print " ", $i}' + echo + + echo -e "$c_by[$nation_name_cn($nation_name_en)所属资源池]$c_e" + echo $resource | awk '{for(i=1;i<=NF;i++) print " ", $i} END{print ""}' + + elif [[ $postfix =~ ^prets[0-9]{0,2}$ ]]; then + prets_label=$label + pretsip=`cat $data/ip.group | awk '$2=="'$prets_label'"' | awk '{print $1}' | sort` + + echo -e "$c_bp[$prets_label: ${c_bg}IP]$c_e" + echo $pretsip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + elif [[ $postfix =~ ^pretw[0-9]{0,2}$ ]]; then + pretw_label=$label + pretwip=`cat $data/ip.group | awk '$2=="'$pretw_label'"' | awk '{print $1}' | sort` + + echo -e "$c_bp[$pretw_label: ${c_bg}IP]$c_e" + echo $pretwip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + elif [[ $postfix =~ ^redis[0-9]{0,2}$ ]]; then + redis_label=$label + redisip=`cat $data/ip.group | awk '$2=="'$redis_label'"' | awk '{print $1}' | sort` + + echo -e "$c_bp[$redis_label: ${c_bg}IP]$c_e" + echo $redisip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + elif [[ $postfix =~ ^testts[0-9]{0,2}$ ]]; then + testts_label=$label + testtsip=`cat $data/ip.group | awk '$2=="'$testts_label'"' | awk '{print $1}' | sort` + + echo -e "$c_bp[$testts_label: ${c_bg}IP]$c_e" + echo $testtsip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + elif [[ $postfix =~ ^ts[0-9]{0,2}$ ]]; then + ts_label=$label + tsip=`cat $data/ip.group | awk '$2=="'$ts_label'"' | awk '{print $1}' | sort` + + echo -e "$c_bp[$ts_label: ${c_bg}IP]$c_e" + echo $tsip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + elif [[ $postfix =~ ^tw[0-9]{0,2}$ ]]; then + tw_label=$label + twip=`cat $data/ip.group | awk '$2=="'$tw_label'"' | awk '{print $1}' | sort` + + echo -e "$c_bp[$tw_label: ${c_bg}IP]$c_e" + echo $twip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + elif [[ $postfix =~ ^uatts[0-9]{0,2}$ ]]; then + uatts_label=$label + uattsip=`cat $data/ip.group | awk '$2=="'$uatts_label'"' | awk '{print $1}' | sort` + + echo -e "$c_bp[$uatts_label: ${c_bg}IP]$c_e" + echo $uattsip | awk '{for(i=1;i<=NF;i++) if($i=="'$ipvx'" && "'$flg'"==0) print " \033[1;3;36m", $i, "\033[0m"; else print " ", $i}' + + else + echo -e "${c_br}${ipvx}${c_e}不属于我司节点,如有误判,请联系fanmf11@chinatelecom.cn。\n" + exit 92 + fi + done +} + +function ip_search() { + # 判断如果是IPv4,在判断是否合法 + if [[ $tbd =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then + ip_1234=`echo $tbd | awk -F '.' 
'{print $1, $2, $3, $4}'` + for num in $ip_1234; do + if [[ $num -gt 255 ]]; then + echo -e "${c_br}非法,请输入有效的IPv4地址。${c_e}" + usage + fi + done + + isInner=`echo $ip_1234 | awk '{print $1}'` + if [[ $isInner == '192' ]]; then + echo -e "${c_br}$tbd是内网IP,非法,请输入有效的外网IPv4地址。${c_e}" + usage + fi + flg=0 + ipvx_check $tbd $flg + + # 判断如果是IPv6(粗略的匹配规则,最短11,最长39,包含数字大小写字母以及英文冒号) + elif [[ $tbd =~ ^[0-9a-fA-F:]{11,39}$ ]]; then + flg=0 + ipvx_check $tbd $flg + # 判断如果是节点英文标签格式 + elif [[ $tbd =~ ^(ct|cu|cm|bgp|ctbgp|cmbgp|cubgp|as|eu|sa|na|cbn|cern)_[a-z]{2,3}_[a-z]{2,20}[0-9]{1,2}_(c|dns|e|lvs|m|mysql|n|prets|pretw|redis|testts|ts|tw|uatts1)[0-9]{0,2}$ ]]; then + anyip=`cat $data/ip.group | awk '$2=="'$tbd'"' | head -n 1 | awk '{print $1}'` + if [[ $anyip == '' ]]; then + echo -e "${c_br}${tbd}${c_e}不属于我司节点,如有误判,请联系fanmf11@chinatelecom.cn。" + usage + exit 90 + fi + flg=1 + ipvx_check $anyip $flg + + # 剩余的情况一律归结为中文标签格式 + else + # 一个中文标签可能会对应着多个不同的节点 + label_multi=`cat $data/lakes | awk '$11=="'$tbd'" {print $1}' | sort | uniq` + if [[ $label_multi == '' ]]; then + echo -e "${c_br}${tbd}${c_e}不属于我司节点,如有误判,请联系fanmf11@chinatelecom.cn。" + usage + exit 91 + fi + flg=2 + for label_single in $label_multi; do + anyip=`cat $data/ip.group | awk '$2=="'$label_single'"' | head -n 1 | awk '{print $1}'` + if [[ $anyip != '' ]]; then + ipvx_check $anyip $flg + else + echo -e "${c_br}${label_single}节点存在,但是无法找到其下IP,可使用rip命令尝试再次查询。${c_e}\n" + fi + done + fi +} + +function ip_inner() { + > res.log + let number=`cat ips.log | tr -d ' ' | grep -Ev ']' | sed 's/^$/#/g' | awk 'BEGIN{RS="#"} {print NR}' | tail -1` + cat ips.log | tr -d ' ' | grep -Ev ']' | sed 's/^$/#/g' | awk 'BEGIN{RS="#"} {for(i=1;i<=NR;i++) if(i==NR) print $0 > i}' + for i in `seq $number`; do + cat $i | fgrep -q "$tbd" + [[ $? -ne 0 ]] && continue + cat $i | grep -Eo "[0-9a-fA-F:]{11,39}" > ip$i + cat $i | grep -Eo "([0-9]{1,3}\.){3}[0-9]{1,3}" >> ip$i + # 将每一块的IP重新放回文件i,并将结果追加到res.log + cat ip$i > $i && cat ip$i >> res.log + done + ip_list=`cat res.log | sort | uniq` + for ipy in $ip_list; do + echo $ipy | grep -Eq "[0-9a-fA-F:]{11,39}" + if [[ $? -eq 0 ]]; then + echo "跳板机无IPv6出口,暂不支持获取IPv6主机内网IP..." > inner_$ipy.log + else + ssh -o ConnectTimeout=30 $ipy "hostname; /usr/sbin/ifconfig | grep 'inet 192'" > inner_$ipy.log 2>&1 & + fi + done + wait + + echo '-----------------------------------------------------------------------------------------' + printf "${c_bic}%-25s%-20s%-50s\n${c_e}" "IP" "Inner IP" "Hostname" + for ipy in $ip_list; do + cat inner_$ipy.log |grep -iq 'timed out' + res1=$? + cat inner_$ipy.log |grep -iq 'closed by' + res2=$? + cat inner_$ipy.log |grep -iq 'IPv6' + res3=$? + if [[ $res1 -eq 0 ]]; then + printf "${c_bir}%-25s%-20s%-40s${c_e}\n" $ipy "===========>" "Connection timed out during banner exchange" + continue + elif [[ $res2 -eq 0 ]]; then + printf "${c_bir}%-25s%-20s%-40s${c_e}\n" $ipy "===========>" "ssh_exchange_identification: Connection closed by remote host" + continue + elif [[ $res3 -eq 0 ]]; then + printf "${c_bir}%-25s%-20s%-40s${c_e}\n" $ipy "===========>" "IPv6的机器没有加白,暂不支持获取IPv6主机内网IP..." 
+ continue + else + host=`cat inner_$ipy.log | fgrep 'in.ctcdn.cn'` + [[ $host == '' ]] && host='-' + inner_ip=`cat inner_$ipy.log | grep 'inet 192' | awk '{print $2}'` + [[ $inner_ip == '' ]] && inner_ip='-' + printf "%-25s%-20s%-40s\n" $ipy $inner_ip $host + fi + done + printf "${c_bic}%-25s%-20s%-50s\n${c_e}" "IP" "Inner IP" "Hostname" + echo '-----------------------------------------------------------------------------------------' +} + +toolbox='/usr/local/script/fanmf11/' +data='/usr/local/script/fanmf11/data' +label_single='' +TS=`date +%s%N` +host=`whoami` # * 判断执行用户 +trash="/usr/local/script/fanmf11/trash/$host/$TS" # * 每个用户的临时文件存放处 + +if [[ -d $trash ]]; then + echo -e "${c_br}对于同一个用户,同一时间只能运行一个实例,请重新运行...${c_e}" + exit 245 +else + mkdir -p $trash + cd $trash && cd .. + docs=`ls` + for doc in $docs; do + [[ -f $doc ]] && rm -rf $doc + done + folders=`ls -t` + while [[ `echo $folders | awk '{print NF}'` -gt 29 ]]; do + folder=`ls -t | tail -1` + rm -rf $folder + folders=`ls -t` + done + cd $trash && touch ips +fi + +# 参数个数必须是一个,并把第一个参数赋值给tbd +if [[ $# -eq 1 ]]; then + tbd=$1 + ip_search +elif [[ $# -eq 2 && $1 == '-m' ]]; then + tbd=$2 + ip_search > ips.log 2>&1 + [[ $? -ne 0 ]] && { cat ips.log; exit 211; } + # 判断如果是IPv4,在判断是否合法 + if [[ $tbd =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then + ip_inner + else + echo -e "${c_br}输入只能是IPv4,不接受其他格式的内容。${c_e}\n" + exit 112 + fi +else + usage +fi + + + diff --git a/old/ctc/jaydiff b/old/ctc/jaydiff new file mode 100755 index 0000000..3d8b2f0 Binary files /dev/null and b/old/ctc/jaydiff differ diff --git a/old/ctc/logcombo.awk b/old/ctc/logcombo.awk new file mode 100644 index 0000000..2fb3ad0 --- /dev/null +++ b/old/ctc/logcombo.awk @@ -0,0 +1,58 @@ +#!/usr/bin/awk -f +# lap : 1--overlap 0--non-overlap +# comp: 1--access 0--origin +# pos : 4 | 10 | 11 + +BEGIN { + if(f00=="") { t00=1000; } else { t00=4 } # 状态码 + if(f01=="") { t01=6; f01s=0; f01e=1000; } else { t01=6 } # 边缘响应时间 + if(f02=="") { t02=9; f02s=0; f02e=1000; } else { t02=9 } # 边缘首包 + if(f03=="") { t03=1000; } else { t03=10 } # 边缘错误码 + if(f04=="") { t04=1000; } else { t04=11 } + if(f05=="") { t05=1000; } else { t05=13 } + if(f06=="") { t06=1000; } else { t06=15 } + if(f07=="") { t07=1000; } else { t07=16 } + if(f08=="") { t08=1000; } else { t08=18 } + if(f09=="") { t09=1000; } else { t09=24 } + if(f10=="") { t10=26; f10s=0; f10e=1000; } else { t10=26 } + if(f11=="") { t11=1000; } else { t11=28 } + if(f13=="") { t13=1000; } else { t13=33 } + if(f14=="") { t14=1000; } else { t14=34 } + + if(f28=="") { t28=1000; f28s=0; f28e=1000; } else { t28=5 } + if(f30=="") { t30=1000; } else { t30=7 } + if(f31=="") { t31=1000; } else { t31=8 } + if(f32=="") { t32=1000; } else { t32=10 } + if(f33=="") { t33=1000; } else { t33=11 } + if(f34=="") { t34=1000; } else { t34=46 } + if(f37=="") { t37=1000; f37s=0; f37e=1000; } else { t37=4 } + if(f38=="") { t38=1000; f38s=0; f38e=1000;} else { t38=6 } + + if(comp==1) { idn=56 } else if(comp==0) { idn=50 } + number=0 + +} + +{ + if(comp==0) { + tt28 = $t28 / 1000 + tt37 = $t37 / 1000 + tt38 = $t38 / 1000 + } + if(comp==1 && atype=="combo" && f00==$t00 && f01s<=$t01 && $t01<=f01e && f02s<=$t02 && $t02<=f02e && f03==$t03 && f04==$t04 && f05==$t05 && f06==$t06 && f07==$t07 && f08==$t08 && f09==$t09 && f10s<=$t10 && $t10<=f10e && f11==$t11 && f13==$t13 && f14==$t14 && acc==$idn) { + number++ + } else if(comp==1 && atype=="logs" && f00==$t00 && f01s<=$t01 && $t01<=f01e && f02s<=$t02 && $t02<=f02e && f03==$t03 && f04==$t04 && f05==$t05 && f06==$t06 && f07==$t07 && 
f08==$t08 && f09==$t09 && f10s<=$t10 && $t10<=f10e && f11==$t11 && f13==$t13 && f14==$t14 && acc==$idn) { + print $0 + } else if(comp==0 && atype=="combo" && f28s<=tt28 && tt28<=f28e && f30==$t30 && f31==$t31 && f32==$t32 && f33==$t33 && f34==$t34 && f37s<=tt37 && tt37<=f37e && f38s<=tt38 && tt38<=f38e && acc==$idn) { + number++ + } else if(comp==0 && atype=="logs" && f28s<=tt28 && tt28<=f28e && f30==$t30 && f31==$t31 && f32==$t32 && f33==$t33 && f34==$t34 && f37s<=tt37 && tt37<=f37e && f38s<=tt38 && tt38<=f38e && acc==$idn) { + print $0 + } +} + +END { + if(atype=="combo") + printf "%-18s%-8s%-s\n", "符合上述条件的日志占比 -- ", number, number/NR*100"%" +} + + diff --git a/old/ctc/logcommon.awk b/old/ctc/logcommon.awk new file mode 100644 index 0000000..f714b15 --- /dev/null +++ b/old/ctc/logcommon.awk @@ -0,0 +1,25 @@ +#!/usr/bin/awk -f +# lap : 1--overlap 0--non-overlap +# comp: 1--access 0--origin +# code: +# non-blank -- specify status code +# blank -- not specify status code +BEGIN { + if(code!="" && comp==1) { sc=4 } + if(code!="" && comp==0) { sc=11 } + if(code=="") { sc=1000 } + if(comp==1) { ac=56 } + if(comp==0) { ac=50 } +} + +{ + if(acc==$ac && code==$sc) + res[$pos]++ +} + +END { + for(i in res) + printf "%-12s%-8s%-s\n", res[i]/NR*100"%", res[i], i +} + + diff --git a/old/ctc/logqps.awk b/old/ctc/logqps.awk new file mode 100644 index 0000000..40ae59d --- /dev/null +++ b/old/ctc/logqps.awk @@ -0,0 +1,27 @@ +#!/usr/bin/awk -f +# lap : 1--overlap 0--non-overlap +# comp: 1--access 0--origin +# pos : 4 | 10 | 11 +BEGIN { + number=0 + if(code!="" && comp==1) { sc=4 } + if(code!="" && comp==0) { sc=11 } + if(code=="") { sc=1000 } + if(comp==1) { ac=56 } + if(comp==0) { ac=50 } +} + +{ + if(acc==$ac && code==$sc) + number++ +} + +END { + if(code != "") + # 如果百分比不是100%说明这个节点有重叠域名访问日志 + printf "%-8s%-15s%-s\n", code, number, number/NR*100"%" + else + printf "%-8s%-15s%-s\n", "QPS", number, number/NR*100"%" +} + + diff --git a/old/ctc/logs.sh b/old/ctc/logs.sh new file mode 100644 index 0000000..4a5a69c --- /dev/null +++ b/old/ctc/logs.sh @@ -0,0 +1,1569 @@ +#!/bin/bash + +function showfunc() { + # 支持的功能项 + opt00="|00. Status Code" + opt01="|01. Respond Time" + opt02="|02. First Dur" + opt03="|03. Error Code" + opt04="|04. Remote IP" + opt05="|05. Client IP" + opt06="|06. Method" + opt07="|07. Protocol" + opt08="|08. TOP URL" + opt09="|09. Upstream Addr" + opt10="|10. Upstream RT" + opt11="|11. Origin IP" + opt12="|12. Combo" + opt13="|13. Referer" + opt14="|14. UA" + opt15="|15. Log" + opt16="|16. QPS" + opt17="|17. -" + opt18="|18. -" + opt19="|19. -" + opt20="|20 -" + opt21="|21 -" + opt22="|22 -" + opt23="|23 -" + opt24="|24 -" + opt25="|25 -" + opt26="|26 -" + opt27="|27 -" + opt28="|28. First Dur" + opt29="|29. Combo" + opt30="|30. Client(NG) IP" + opt31="|31. Origin IP" + opt32="|32. Source SC" + opt33="|33. Status Code" + opt34="|34. Origin URL" + opt35="|35. QPS" + opt36="|36. Log" + opt37="|37. Source CT" + opt38="|38. Source RT" + opt39="|39. -" + opt40="|40. -" + opt41="|41. 
-" + optA="ACCESS --> " + optO="ORIGIN --> " + fmt="${c_bib}%-10s${c_e}%-17s%-18s%-19s%-17s%-15s%-17s%-19s\n" + printf $fmt "$optA" "$opt00" "$opt01" "$opt02" "$opt03" "$opt04" "$opt05" "$opt06" + printf $fmt "$optA" "$opt07" "$opt08" "$opt09" "$opt10" "$opt11" "$opt12" "$opt13" + printf $fmt "$optA" "$opt14" "$opt15" "$opt16" "$opt17" "$opt18" "$opt19" "$opt20" + echo '------------------------------------------------------------------------------------------------------------------------------------' + printf $fmt "$optO" "$opt28" "$opt29" "$opt30" "$opt31" "$opt32" "$opt33" "$opt34" + printf $fmt "$optO" "$opt35" "$opt36" "$opt37" "$opt38" "$opt39" "$opt40" "$opt41" +} + + +function usage() { + echo -e "${c_bg}本工具实现指定域名&节点的${c_bc}一小时粒度${c_bg}日志搜索相关功能:${c_e}\n" + showfunc && echo + echo -e "${c_by}调用方法 -- logs domain node/IP [0|1]${c_e}" + echo -e "${c_by} param 1 - 要查询的域名${c_e}" + echo -e "${c_by} param 2 - 必须是边缘/父/中心的节点中英文名称或者IP${c_e}" + echo -e "${c_by} param 3 - 可选,存在域名在平台显示为重叠实际却在通用资源池的情况,日志不会带accid,或者反之,可以使用1或0强制指定带或不带accid${c_e}" + # echo -e "${c_by}易混淆解释:${c_e}" + # echo -e "${c_by} Upstream Addr -- 后端代理服务器IP和端口${c_e}" + # echo -e "${c_by} Source SC ------ 源站返回的状态码${c_e}" + # echo -e "${c_by} Source CT ------ 与源站建立连接的时间${c_e}" + # echo -e "${c_by} Source RT ------ 源站的响应时间${c_e}" + # echo -e "${c_by} Remote IP ------ 过滤节点VIP访问次数,查看负载是否均衡${c_e}" + # echo -e "${c_by} Origin IP ------ 对于ACCESS日志,是回上层IP;对于ORIGIN日志,是回上层或回源IP${c_e}" + # echo -e "${c_by} Log ----------- 从每个RIP随机获取一条日志,可指定状态码${c_e}" + # echo -e "${c_by} QPS ------------ 获取每个RIP节点指定时间内的访问次数,而非真正QPS,可自行手动计算${c_e}" + echo + echo -e "${c_br}对于输出百分比的部分,除了状态码类以及时间类的功能,其他选项都是过滤了TOP10,所以可能会出现比例之和小于1的情况;另外,当前仅支持常规CDN以及L1-7格式的日志搜寻,暂不支持直播/安全/quic等,使用过程中遇到任何问题,可以联系fanmf11@chinatelecom.cn反馈${c_e}" + exit 222 +} + +function logfile() { + if [[ -d $trash ]]; then + echo -e "${c_br}对于同一个用户,同一时间只能运行一个实例,请重新运行...${c_e}" + exit 245 + else + mkdir -p $trash + cd $trash && cd .. + docs=`ls` + for doc in $docs; do + [[ -f $doc ]] && rm -rf $doc + done + folders=`ls -t` + while [[ `echo $folders | awk '{print NF}'` -gt 29 ]]; do + folder=`ls -t | tail -1` + rm -rf $folder + folders=`ls -t` + done + cd $trash && touch logs + fi +} + +function onCtrlC () { + # while capture Ctrl+C, kill all background processes silently and exit + exec 3>&2 # 3 is now a copy of 2 + exec 2> /dev/null # 2 now points to /dev/null + sleep 1 # sleep to wait for process to die + exec 2>&3 # restore stderr to saved + exec 3>&- # close saved version + echo + echo -e "${c_bir}Ctrl+C is captured, exiting...\n${c_e}" + exit 101 +} + +function initial() { + # -------------------------------------------------------------------------------- + # 入参正确性检测 + let numOP=$# # number of parameter + OP="prefix "$@ # do a prefix cause '-' char may damage echo command + domain=`echo $OP | awk '{print $2}'` # get first param domain + label=`echo $OP | awk '{print $3}'` # get second param nodename or ip + accsw=`echo $OP | awk '{print $4}'` # get thied param, forcely acc var, 1 -- acc=reqid, 0 -- acc='-' + [[ $numOP -ne 2 && $numOP -ne 3 ]] && usage || logfile + # -------------------------------------------------------------------------------- + # 检查域名是否在平台注册 + res=`cat $data/domain.list | grep -w "$domain"` + [[ $res == '' ]] && { echo -e "${c_br}该域名未在天翼平台配置,一个小时内新增的域名无法查询,退出...${c_e}"; exit 247; } + + # -------------------------------------------------------------------------------- + # 获取正确的label + ips $label > ips.log 2>&1 + # 如果不是天翼IP,否则重新回到工作目录$trash + [[ $? 
-ne 0 ]] && { cd $trash; cat ips.log; exit 114; } || cd $trash + + # -------------------------------------------------------------------------------- + # 检查是否是重叠域名,并获取正确的解析组 + infos --map $domain $TS + [[ $? -eq 205 || $? -eq 231 ]] && exit 205 + + cd $trash + if [[ `cat map.log | wc -l` -eq 1 ]]; then + map=`cat map.log` + else + maps=`cat map.log | sort | uniq` + count=1 && > remap.log + for map in $maps; do + echo $count": "$map | tee -a remap.log + let count=count+1 + done + echo -ne "${c_bg}存在分区域解析,需确定解析组名称(默认是1):${c_e}\n" + read -t 60 imap + [[ $? -ne 0 ]] && { echo -e "${c_br}60s内无任何输入,退出...${c_e}\n"; exit 102; } + # do a check to see if isp is correct or not + [[ $imap == '' ]] && let imap=1 + map=`cat remap.log | awk -F ':' -v imap=$imap '$1==imap {print $2}'` + [[ $map == '' ]] && { echo -e "${c_br}请输入正确的序号,退出...${c_e}"; exit 165; } + fi + + getlastcover $map > map.log + cat map.log | grep -q 'can not find sys_id' + [[ $? -eq 0 ]] && { echo -e "${c_br}该解析组未在平台配置,退出..."; exit 163; } + + accid=`cat info.log | awk -F ':' '$1==3 {print $2}'` + overlap=`cat $data/domain.list | grep $accid | grep $domain | awk -F ',' '{print $17}'` + [[ $overlap == '"是"' ]] && overlap=1 || overlap=0 + # 因为有些域名系统显示是重叠域名,实际日志并没有带accid,因为用的通用资源池 + if [[ $accsw != "" && $accsw -eq 0 ]]; then + ida='-' + elif [[ $accsw != "" && $accsw -eq 1 ]]; then + ida=$accid + elif [[ $overlap -eq 0 ]]; then + ida='-' + elif [[ $overlap -eq 1 ]]; then + ida=$accid + fi + + # -------------------------------------------------------------------------------- + # 检查是否是英文标签的格式,是的话,保持label原值,否则做进一步操作 + echo $label | grep -Eq "(ct|cu|cm|bgp|ctbgp|cmbgp|cubgp|as|eu|sa|na|cbn|cern)_[a-z]{2,3}_[a-z]{2,20}[0-9]{1,2}_(c|e|n)[0-9]{0,2}" + if [[ $? -eq 0 ]]; then + label=$label + else + # 获取所有相关英文标签 + labels=`cat ips.log | grep -Eo "(ct|cu|cm|bgp|ctbgp|cmbgp|cubgp|as|eu|sa|na|cbn|cern)_[a-z]{2,3}_[a-z]{2,20}[0-9]{1,2}_(c|e|n)[0-9]{0,2}" | sort | uniq` + [[ $labels == "" ]] && { echo -e "${c_bir}请输入正确的边缘/父层/中心节点的IP或者中英文节点名称,退出...${c_e}"; exit 232; } + # 如果有多个匹配,则让用户确认是哪个 + if [[ `echo $labels | awk '{print NF}'` -gt 1 ]]; then + for item in $labels; do + cat map.log | grep -wq $item + [[ $? -eq 0 ]] && echo -e "${c_biy} - $item${c_e}" || echo " - $item" + done + echo -e "${c_bp}输入的 ${c_by}$label${c_bp} 边缘节点中有两个组,请确认具体是哪个:${c_e}" + read -t 60 label_input + # 判断60s内无输入,则自动退出 + [[ $? -ne 0 ]] && { echo -e "${c_br}60s内无任何输入,退出...\n${c_e}"; exit 116; } + # 判断输入信息是否是正确的 + echo $labels | grep -wq $label_input + [[ $? -ne 0 ]] && { echo -e "${c_br}需要从如上选择正确的边缘节点信息,请重新运行,退出...\n${c_e}"; exit 117; } + label=$label_input + # 否则,获取label + else + label=$labels + fi + fi + cat map.log | grep -wq $label + if [[ $? -ne 0 ]]; then + echo -e "${c_by}${label}${c_bc}不在$domain的解析组内,判断是否是父层节点(y/N):${c_e}" + read -t 60 isc + # 判断60s内无输入,则自动退出 + [[ $? -ne 0 ]] && { echo -e "${c_br}60s内无任何输入,退出...\n${c_e}"; exit 116; } + # 判断输入信息是否是正确的 + [[ $isc == 'n' || $isc == 'N' || $isc == 'no' || $isc == 'No' || $isc == 'NO' ]] && exit 234 + fi + + # -------------------------------------------------------------------------------- + # 获取rip_list + ips $label > ips.log 2>&1 && cd $trash + rip_list=`cat ips.log | sed -n '/RIP/, /VIP/p' | grep -Eo "([0-9]{1,3}\.){3}[0-9]{1,3}" | sort | uniq` + + # 60s时间接收输入:要查询的时间 + echo -e "${c_bg}请输入要查询的reqID生成时间,格式为yyyymmddHH(默认当前 - $(date +%Y%m%d%H)): ${c_e} " + read -t 60 time_range + [[ $? 
-ne 0 ]] && { echo -e "${c_br}60s内无任何输入,退出...\n${c_e}"; exit 105; } + time_check + type_inp +} + +#======================================================================================= +# 功能:获取查询指标 +# 入参:None +# 出参:itype +function type_inp() { + # 60s时间接收输入:要查询的类型 + showfunc + echo -e "${c_bg}请输入要查询的指标(默认00): ${c_e}" + read -t 60 itype + [[ $? -ne 0 ]] && { echo -e "${c_br}60s内无任何输入,退出...\n${c_e}"; exit 122; } + [[ $itype == '' ]] && itype='00' + + + # 60s时间接收输入:要查询的类型 + echo -e "1. 静态/下载/点播/全站(default - v03/ov06)" + echo -e "2. 直播(ACC1/2/3)" + echo -e "3. 安全" + echo -e "4. quic" + echo -e "5. L1/L2/L3/L4/L5/L6/L7(e.g. 查询L3,则输入5.3)" + echo -e "${c_bg}请输入要查询的业务类型(默认1,目前仅支持1/5): ${c_e}" + read -t 60 ptype + [[ $? -ne 0 ]] && { echo -e "${c_br}60s内无任何输入,退出...\n${c_e}"; exit 122; } + [[ $ptype == '' ]] && ptype='1' + # 根据业务类型,指定前缀 + [[ $ptype == '1' ]] && prefix='' + [[ $ptype == '5.1' ]] && prefix='L1_' + [[ $ptype == '5.2' ]] && prefix='L2_' + [[ $ptype == '5.3' ]] && prefix='L3_' + [[ $ptype == '5.4' ]] && prefix='L4_' + [[ $ptype == '5.5' ]] && prefix='L5_' + [[ $ptype == '5.6' ]] && prefix='L6_' + [[ $ptype == '5.7' ]] && prefix='L7_' +} + +#======================================================================================= +# 功能:检查输入的时间范围是否符合格式要求:14天内,不能是未来时间,10位数字 +# 入参:time_range +# 出参:current, year, month, day, hour, time_range +function time_check() { + # 如果入参 time_range 的值是空,或者说函数没有入参 + if [[ $time_range == '' ]]; then + time_range=`date +%Y%m%d%H` + year=${time_range:0:4} + month=${time_range:4:2} + day=${time_range:6:2} + hour=${time_range:8:2} + current='yes' + return 0 + fi + + # 检查入参是否正确:长度,表示的时间范围等 + [[ ! $time_range =~ ^[0-9]{10}$ ]] && { echo -e "${c_br}请输入正确的时间格式,退出...\n${c_e}"; exit 106; } + # 验证入参是10天以内的时间范围 + now=`date +%s` + # 准备工作,后续要用 + year=${time_range:0:4} + month=${time_range:4:2} + day=${time_range:6:2} + hour=${time_range:8:2} + # 将入参转换为秒 + previous=`date -d "$year-$month-$day $hour:00:00" +"%s"` + # 计算当前时间 - 入参时间 + let range_s=now-previous + let range_d=range_s/86400 + # 如果是14天以外的入参时间,则不可查 + [[ $range_d -gt 10 ]] && { echo -e "${c_br}只能查找最近10天以内的日志记录,退出...\n${c_e}"; exit 107; } + # 判断 time_range 是否是当前时间,并用 current 来标识,默认是当前,即 current = yes + [[ $time_range == `date +%Y%m%d%H` ]] && current='yes' || current='no' +} + +function is_sc_specified() { + # 是否指定状态码 + echo -e "${c_bb}直接回车默认过滤所有状态码,是否需要指定(000~999):${c_e}" + read -t 60 sc + # 判断60s内无输入,则自动退出 + [[ $? -ne 0 ]] && { echo -e "${c_br}60s内无任何输入,退出...\n${c_e}"; exit 116; } + # 判断输入信息是否是正确的,当输入的状态码是空,则不做过滤,全部输出 + + if [[ $sc != "" ]]; then + echo $sc | grep -Eq "[0-9]{1,3}" + [[ $? 
-ne 0 ]] && { echo -e "${c_br}需要指定正确的状态码值,退出...\n${c_e}"; exit 117; } + fi +} + +function cdn_access_common() { + field=$1 + is_sc_specified + + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索core_access.log,设定ssh连接超时时长为CT + # 把搜索的结果放进access_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log | grep $domain; + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v code="$sc" -v acc="$ida" -v pos="$field" -f $toolbox/logcommon.awk | sort -nk2 | \ + sort -nk2 | tail > access_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain; + zcat $cdn_access_log/$year$month$day/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v code="$sc" -v acc="$ida" -v pos="$field" -f $toolbox/logcommon.awk | sort -nk2 | \ + sort -nk2 | tail > access_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat access_$rip && echo + done +} + +function cdn_access_time() { + field=$1 + is_sc_specified + + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索core_access.log,设定ssh连接超时时长为CT + # 把搜索的结果放进access_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log | grep $domain; + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v code="$sc" -v acc="$ida" -v pos="$field" -f $toolbox/logtime.awk | \ + sort -nk3 > access_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain; + zcat $cdn_access_log/$year$month$day/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v code="$sc" -v acc="$ida" -v pos="$field" -f $toolbox/logtime.awk | \ + sort -nk3 > access_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat access_$rip && echo + done +} + +function cdn_access_sc() { + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索core_access.log,设定ssh连接超时时长为CT + # 把搜索的结果放进access_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log | grep $domain; + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v acc="$ida" -v pos=4 -f $toolbox/logsc.awk | \ + sort -nk1 | column -t > access_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain; + zcat $cdn_access_log/$year$month$day/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v acc="$ida" -v pos=4 -f $toolbox/logsc.awk | \ + sort -nk1 | column -t > 
access_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat access_$rip && echo + done +} + +function cdn_access_qps() { + is_sc_specified + + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索core_access.log,设定ssh连接超时时长为CT + # 把搜索的结果放进access_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log | grep $domain; + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v code="$sc" -v acc="$ida" -f $toolbox/logqps.awk | \ + sort -nk1 | column -t > access_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain; + zcat $cdn_access_log/$year$month$day/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v code="$sc" -v acc="$ida" -f $toolbox/logqps.awk | \ + sort -nk1 | column -t > access_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat access_$rip && echo + done +} + +function is_access_condtion_specified() { + # 是否指定过滤条件 + echo -e "${c_bb}根据如上对应组件的过滤功能,可查询符合特定组合条件的日志占比,格式如下${c_e}" + echo -e "${c_big}00: 404" + echo -e "02: 0-1" + echo -e "03: 2" + echo -e "04: 59.56.177.149" + echo -e "06: Post" + echo -e "08: https://www.ctyun.cn" + echo -e "14: Chrome/54.0 (Windows NT 10.0)${c_e}" + echo -e "${c_bic}Ctrl + D${c_e}" + echo -e "${c_by}每行对应一个条件,Ctrl + D结束输入:${c_e}" + cat > combo.log + echo "" >> combo.log + echo -e "${c_bb}\n正在处理中...${c_e}" + + f00=""; f01=""; f02=""; f03=""; f04=""; f05=""; f06=""; f07=""; f08=""; f09=""; f10=""; f11=""; f13=""; + f14=""; f01s=""; f02s=""; f10s=""; f01e=""; f02e=""; f10e="" + while read line; do + [[ $line == "" ]] && continue + index=`echo $line | awk -F ':' '{print $1}'` + filter=${line:3} + [[ "$index" == '00' ]] && { f00=$filter; continue; } + [[ "$index" == '01' ]] && { f01=$filter; continue; } + [[ "$index" == '02' ]] && { f02=$filter; continue; } + [[ "$index" == '03' ]] && { f03=$filter; continue; } + [[ "$index" == '04' ]] && { f04=$filter; continue; } + [[ "$index" == '05' ]] && { f05=$filter; continue; } + [[ "$index" == '06' ]] && { f06=$filter; continue; } + [[ "$index" == '07' ]] && { f07=$filter; continue; } + [[ "$index" == '08' ]] && { f08=$filter; continue; } + [[ "$index" == '09' ]] && { f09=$filter; continue; } + [[ "$index" == '10' ]] && { f10=$filter; continue; } + [[ "$index" == '11' ]] && { f11=$filter; continue; } + [[ "$index" == '13' ]] && { f13=$filter; continue; } + [[ "$index" == '14' ]] && { f14=$filter; continue; } + echo -e "${c_bir}[Ignored]${c_e} -- $line" + done < combo.log + + # 处理时间类参数,只允许区间参数 + [[ $f01 != "" ]] && { f01s=`echo $f01 | awk -F '-' '{print $1}'`; f01e=`echo $f01 | awk -F '-' '{print $2}'`; } + [[ $f02 != "" ]] && { f02s=`echo $f02 | awk -F '-' '{print $1}'`; f02e=`echo $f02 | awk -F '-' '{print $2}'`; } + [[ $f10 != "" ]] && { f10s=`echo $f10 | awk -F '-' '{print $1}'`; f10e=`echo $f10 | awk -F '-' '{print $2}'`; } + echo +} + +function cdn_access_combo() { + is_access_condtion_specified + + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # 
ssh 进每一个rip,搜索core_access.log,设定ssh连接超时时长为CT + # 把搜索的结果放进access_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log | grep $domain; + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v acc="$ida" -v f00="$f00" -v f01="$f01" -v f02="$f02" -v f03="$f03" \ + -v f04="$f04" -v f05="$f05" -v f06="$f06" -v f07="$f07" -v f08="$f08" -v f09="$f09" -v f10="$f10" -v f11="$f11" \ + -v f13="$f13" -v f14="$f14" -v f01s="$f01s" -v f01e="$f01e" -v f02s="$f02s" -v f02e="$f02e" -v f10s="$f10s" \ + -v f10e="$f10e" -v atype="combo" -f $toolbox/logcombo.awk | \ + sort -nk1 | column -t > access_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain; + zcat $cdn_access_log/$year$month$day/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v acc="$ida" -v f00="$f00" -v f01="$f01" -v f02="$f02" -v f03="$f03" \ + -v f04="$f04" -v f05="$f05" -v f06="$f06" -v f07="$f07" -v f08="$f08" -v f09="$f09" -v f10="$f10" -v f11="$f11" \ + -v f13="$f13" -v f14="$f14" -v f01s="$f01s" -v f01e="$f01e" -v f02s="$f02s" -v f02e="$f02e" -v f10s="$f10s" \ + -v f10e="$f10e" -v atype="combo" -f $toolbox/logcombo.awk | \ + sort -nk1 | column -t > access_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat access_$rip && echo + done +} + +function cdn_access_logs() { + is_access_condtion_specified + + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索core_access.log,设定ssh连接超时时长为CT + # 把搜索的结果放进access_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log | grep $domain; + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v acc="$ida" -v f00="$f00" -v f01="$f01" -v f02="$f02" -v f03="$f03" \ + -v f04="$f04" -v f05="$f05" -v f06="$f06" -v f07="$f07" -v f08="$f08" -v f09="$f09" -v f10="$f10" -v f11="$f11" \ + -v f13="$f13" -v f14="$f14" -v f01s="$f01s" -v f01e="$f01e" -v f02s="$f02s" -v f02e="$f02e" -v f10s="$f10s" \ + -v f10e="$f10e" -v atype="logs" -f $toolbox/logcombo.awk | \ + grep -Ev "$hour:(59|00):[0-5][0-9] \+0800" | head -n $((number % 1000)) | tail -n 1 > access_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_access_log/${prefix}core_access.log_*${time_range}* | grep $domain; + zcat $cdn_access_log/$year$month$day/${prefix}core_access.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=1 -v acc="$ida" -v f00="$f00" -v f01="$f01" -v f02="$f02" -v f03="$f03" \ + -v f04="$f04" -v f05="$f05" -v f06="$f06" -v f07="$f07" -v f08="$f08" -v f09="$f09" -v f10="$f10" -v f11="$f11" \ + -v f13="$f13" -v f14="$f14" -v f01s="$f01s" -v f01e="$f01e" -v f02s="$f02s" -v f02e="$f02e" -v f10s="$f10s" \ + -v f10e="$f10e" -v atype="logs" -f $toolbox/logcombo.awk | \ + grep -Ev "$hour:(59|00):[0-5][0-9] \+0800" | head -n $((number % 1000)) | tail -n 1 > access_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + + for rip in $rip_list; do + echo -e "=============== 
${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat access_$rip && echo + done +} + +function cdn_origin_common() { + field=$1 + is_sc_specified + + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索origin.log,设定ssh连接超时时长为CT + # 把搜索的结果放进origin_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log | grep $domain; + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v code="$sc" -v acc="$ida" -v pos="$field" -f $toolbox/logcommon.awk | \ + sort -nk2 | tail > origin_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain; + zcat $cdn_origin_log/$year$month$day/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v code="$sc" -v acc="$ida" -v pos="$field" -f $toolbox/logcommon.awk | \ + sort -nk2 | tail > origin_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat origin_$rip && echo + done +} + + +function cdn_origin_time() { + field=$1 + is_sc_specified + + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh进每一个rip,搜索origin.log,设定ssh连接超时时长为CT + # 把搜索的结果放进origin_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log | grep $domain; + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v code="$sc" -v acc="$ida" -v pos="$field" -f $toolbox/logtime.awk | \ + sort -nk3 > origin_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain; + zcat $cdn_origin_log/$year$month$day/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v code="$sc" -v acc="$ida" -v pos="$field" -f $toolbox/logtime.awk | \ + sort -nk3 > origin_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat origin_$rip && echo + done +} + +function cdn_origin_sc() { + [[ $1 == 'sc' ]] && field=11 + [[ $1 == 'ssc' ]] && field=10 + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索origin.log,设定ssh连接超时时长为CT + # 把搜索的结果放进origin_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log | grep $domain; + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v acc="$ida" -v pos="$field" -f $toolbox/logsc.awk | \ + sort -nk1 | column -t > origin_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain; + zcat $cdn_origin_log/$year$month$day/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v acc="$ida" -v pos="$field" -f $toolbox/logsc.awk | \ + sort -nk1 | column -t > origin_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: 
${c_bic}$rip${c_e} ===============" + cat origin_$rip && echo + done +} + +function cdn_origin_qps() { + is_sc_specified + + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索origin.log,设定ssh连接超时时长为CT + # 把搜索的结果放进origin_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log | grep $domain; + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v code="$sc" -v acc="$ida" -f $toolbox/logqps.awk | \ + sort -nk1 | column -t > origin_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain; + zcat $cdn_origin_log/$year$month$day/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v code="$sc" -v acc="$ida" -f $toolbox/logqps.awk | \ + sort -nk1 | column -t > origin_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat origin_$rip && echo + done +} + +function is_origin_condtion_specified() { + # 是否指定过滤条件 + echo -e "${c_bb}根据如上对应组件的过滤功能,可查询符合特定组合条件的日志占比,格式如下${c_e}" + echo -e "${c_big}28: 0-1" + echo -e "31: 59.56.177.149" + echo -e "33: 404" + echo -e "34: https://www.ctyun.cn" + echo -e "${c_bic}Ctrl + D${c_e}" + echo -e "${c_by}每行对应一个条件,Ctrl + D结束输入:${c_e}" + cat > combo.log + echo "" >> combo.log + echo -e "${c_bb}\n正在处理中...${c_e}" + + f28=""; f30=""; f31=""; f32=""; f33=""; f34=""; f37=""; f38=""; f28s=""; f28e=""; f37s=""; f37e=""; f38s=""; f38e="" + while read line; do + [[ $line == "" ]] && continue + index=`echo $line | awk -F ':' '{print $1}'` + filter=${line:3} + [[ "$index" == '28' ]] && { f28=$filter; continue; } + [[ "$index" == '30' ]] && { f30=$filter; continue; } + [[ "$index" == '31' ]] && { f31=$filter; continue; } + [[ "$index" == '32' ]] && { f32=$filter; continue; } + [[ "$index" == '33' ]] && { f33=$filter; continue; } + [[ "$index" == '34' ]] && { f34=$filter; continue; } + [[ "$index" == '37' ]] && { f37=$filter; continue; } + [[ "$index" == '38' ]] && { f38=$filter; continue; } + echo -e "${c_bir}[Ignored]${c_e} -- $line" + done < combo.log + + # 处理时间类参数,只允许区间参数 + [[ $f28 != "" ]] && { f28s=`echo $f28 | awk -F '-' '{print $1}'`; f28e=`echo $f28 | awk -F '-' '{print $2}'`; } + [[ $f37 != "" ]] && { f37s=`echo $f37 | awk -F '-' '{print $1}'`; f37e=`echo $f37 | awk -F '-' '{print $2}'`; } + [[ $f38 != "" ]] && { f38s=`echo $f38 | awk -F '-' '{print $1}'`; f38e=`echo $f38 | awk -F '-' '{print $2}'`; } + echo +} + +function cdn_origin_combo() { + is_origin_condtion_specified + + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索origin.log,设定ssh连接超时时长为CT + # 把搜索的结果放进origin_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log | grep $domain; + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v acc="$ida" -v f28="$f28" -v f30="$f30" -v f31="$f31" -v f32="$f32" \ + -v f33="$f33" -v f34="$f34" -v f37="$f37" -v f38="$f38" -v f28s="$f28s" -v f28e="$f28e" -v f37s="$f37s" \ + -v f37e="$f37e" -v f38s="$f38s" -v f38e="$f38e" -v atype="combo" -f $toolbox/logcombo.awk | \ + sort -nk1 | column -t > origin_${rip} & + done + # 如果time_range不是当前时间 + else + # 
简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain; + zcat $cdn_origin_log/$year$month$day/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v acc="$ida" -v f28="$f28" -v f30="$f30" -v f31="$f31" -v f32="$f32" \ + -v f33="$f33" -v f34="$f34" -v f37="$f37" -v f38="$f38" -v f28s="$f28s" -v f28e="$f28e" -v f37s="$f37s" \ + -v f37e="$f37e" -v f38s="$f38s" -v f38e="$f38e" -v atype="combo" -f $toolbox/logcombo.awk | \ + sort -nk1 | column -t > origin_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat origin_$rip && echo + done +} + +function cdn_origin_logs() { + is_origin_condtion_specified + + # 如果time_range是当前时间 + exec 3>&2 && exec 2> /dev/null + if [[ $current == 'yes' ]]; then + # ssh 进每一个rip,搜索origin.log,设定ssh连接超时时长为CT + # 把搜索的结果放进origin_$rip文件,所有的ssh命令都后台执行 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log | grep $domain; + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v acc="$ida" -v f28="$f28" -v f30="$f30" -v f31="$f31" -v f32="$f32" \ + -v f33="$f33" -v f34="$f34" -v f37="$f37" -v f38="$f38" -v f28s="$f28s" -v f28e="$f28e" -v f37s="$f37s" \ + -v f37e="$f37e" -v f38s="$f38s" -v f38e="$f38e" -v atype="logs" -f $toolbox/logcombo.awk | \ + grep -Ev "$hour:(59|00):[0-5][0-9] \+0800" | head -n $((number % 1000)) | tail -n 1 > origin_${rip} & + done + # 如果time_range不是当前时间 + else + # 简单粗暴地,分别过滤回滚文件和未归档两部分日志文件 + for rip in $rip_list; do + ssh -o ConnectTimeout=$CT $rip " + cat $cdn_origin_log/origin.log_*${time_range}* | grep $domain; + zcat $cdn_origin_log/$year$month$day/origin.log_*${time_range}* | grep $domain" | grep $domain | \ + awk -F '"' -v comp=0 -v acc="$ida" -v f28="$f28" -v f30="$f30" -v f31="$f31" -v f32="$f32" \ + -v f33="$f33" -v f34="$f34" -v f37="$f37" -v f38="$f38" -v f28s="$f28s" -v f28e="$f28e" -v f37s="$f37s" \ + -v f37e="$f37e" -v f38s="$f38s" -v f38e="$f38e" -v atype="logs" -f $toolbox/logcombo.awk | \ + grep -Ev "$hour:(59|00):[0-5][0-9] \+0800" | head -n $((number % 1000)) | tail -n 1 > origin_${rip} & + done + fi + exec 2>&3 && exec 3>&- && wait + + for rip in $rip_list; do + echo -e "=============== ${c_biy}$label: ${c_bic}$rip${c_e} ===============" + cat origin_$rip && echo + done +} + + +# ============================================================================================================ +# Part 00: Status Code +function statusCodeA() { + if [[ $ptype == '1' ]]; then + statusCodeA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + statusCodeA1 + else + : + fi +} + +function statusCodeA1() { + cdn_access_sc +} + +# ============================================================================================================ +# Part 01: Respond Time +function respondTimeA() { + if [[ $ptype == '1' ]]; then + respondTimeA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + respondTimeA1 + 
else + : + fi +} + +function respondTimeA1() { + cdn_access_time "6" +} + +# ============================================================================================================ +# Part 02: First Dur +function firstDurA() { + if [[ $ptype == '1' ]]; then + firstDurA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + firstDurA1 + else + : + fi +} + +function firstDurA1() { + cdn_access_time "9" +} + +# ============================================================================================================ +# Part 03: Error Code +function errorCodeA() { + if [[ $ptype == '1' ]]; then + errorCodeA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + errorCodeA1 + else + : + fi +} + +function errorCodeA1() { + cdn_access_common "10" +} + +# ============================================================================================================ +# Part 04: Remote IP +function remoteIPA() { + if [[ $ptype == '1' ]]; then + remoteIPA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + remoteIPA1 + else + : + fi +} + +function remoteIPA1() { + cdn_access_common "11" +} + +# ============================================================================================================ +# Part 05: Client IP +function clientIPA() { + if [[ $ptype == '1' ]]; then + clientIPA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + clientIPA1 + else + : + fi +} + +function clientIPA1() { + cdn_access_common "13" +} + +# ============================================================================================================ +# Part 06: Method +function methodA() { + if [[ $ptype == '1' ]]; then + methodA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + methodA1 + else + : + fi +} + +function methodA1() { + cdn_access_common "15" +} + +# ============================================================================================================ +# Part 07: Protocol +function protocolA() { + if [[ $ptype == '1' ]]; then + protocolA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + protocolA1 + else + : + fi +} + +function protocolA1() { + cdn_access_common "16" +} + +# ============================================================================================================ +# Part 08: Top URL +function topUrlA() { + if [[ $ptype == '1' ]]; then 
+ topUrlA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + topUrlA1 + else + : + fi +} + +function topUrlA1() { + cdn_access_common "18" +} + +# ============================================================================================================ +# Part 09: Upstream Addr +function upstreamAddrA() { + if [[ $ptype == '1' ]]; then + upstreamAddrA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + upstreamAddrA1 + else + : + fi +} + +function upstreamAddrA1() { + cdn_access_common "24" +} + +# ============================================================================================================ +# Part 10: Upstream Respond Time +function upstreamRTimeA() { + if [[ $ptype == '1' ]]; then + upstreamRTimeA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + upstreamRTimeA1 + else + : + fi +} + +function upstreamRTimeA1() { + cdn_access_time "26" +} + +# ============================================================================================================ +# Part 11: Origin IP for NG +function originIPA() { + if [[ $ptype == '1' ]]; then + originIPA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + originIPA1 + else + : + fi +} + +function originIPA1() { + cdn_access_common "28" +} + +# ============================================================================================================ +# Part 12: Combo -- multi condition searching +function comboA() { + if [[ $ptype == '1' ]]; then + comboA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + comboA1 + else + : + fi +} + +function comboA1() { + cdn_access_combo +} + +# ============================================================================================================ +# Part 13: Referer +function refererA() { + if [[ $ptype == '1' ]]; then + refererA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + refererA1 + else + : + fi +} + +function refererA1() { + cdn_access_common "33" +} + +# ============================================================================================================ +# Part 14: UA +function uaA() { + if [[ $ptype == '1' ]]; then + uaA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || 
$ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + uaA1 + else + : + fi +} + +function uaA1() { + cdn_access_common "34" +} + +# ============================================================================================================ +# Part 15: Log +function logA() { + if [[ $ptype == '1' ]]; then + logA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + logA1 + else + : + fi +} + +function logA1() { + cdn_access_logs +} + +# ============================================================================================================ +# Part 16: QPS +function qpsA() { + if [[ $ptype == '1' ]]; then + qpsA1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + qpsA1 + else + : + fi +} + +function qpsA1() { + cdn_access_qps +} + +# ============================================================================================================ +# Part 28: Origin First Dur +function firstDurO() { + if [[ $ptype == '1' ]]; then + firstDurO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + firstDurO1 + else + : + fi +} + +function firstDurO1() { + cdn_origin_time "5" +} + +# ============================================================================================================ +# Part 29: Combo -- multi condition searching +function comboO() { + if [[ $ptype == '1' ]]; then + comboO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + comboO1 + else + : + fi +} + +function comboO1() { + cdn_origin_combo +} + + +# ============================================================================================================ +# Part 30: Origin Client(NG) IP +function clientIPO() { + if [[ $ptype == '1' ]]; then + clientIPO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + clientIPO1 + else + : + fi +} + +function clientIPO1() { + cdn_origin_common "7" +} + +# ============================================================================================================ +# Part 31: Origin IP +function originIPO() { + if [[ $ptype == '1' ]]; then + originIPO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + originIPO1 + else + : + fi +} + +function originIPO1() { + cdn_origin_common "8" +} +# ============================================================================================================ +# Part 32: Source Status Code +function 
sourceSCO() { + if [[ $ptype == '1' ]]; then + sourceSCO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + sourceSCO1 + else + : + fi +} + +function sourceSCO1() { + cdn_origin_sc "ssc" +} + +# ============================================================================================================ +# Part 33: Origin Status Code +function statusCodeO() { + if [[ $ptype == '1' ]]; then + statusCodeO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + statusCodeO1 + else + : + fi +} + +function statusCodeO1() { + cdn_origin_sc "sc" +} + +# ============================================================================================================ +# Part 34: Origin URL +function topUrlO() { + if [[ $ptype == '1' ]]; then + topUrlO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + topUrlO1 + else + : + fi +} + +function topUrlO1() { + cdn_origin_common "46" +} + +# ============================================================================================================ +# Part 35: Origin QPS +function qpsO() { + if [[ $ptype == '1' ]]; then + qpsO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + qpsO1 + else + : + fi +} + +function qpsO1() { + cdn_origin_qps +} + + +# ============================================================================================================ +# Part 36: Origin Random Logs +function logO() { + if [[ $ptype == '1' ]]; then + logO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + logO1 + else + : + fi +} + +function logO1() { + cdn_origin_logs +} + + +# ============================================================================================================ +# Part 37: Source Connection Time +function sourceCTO() { + if [[ $ptype == '1' ]]; then + sourceCTO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == '5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + sourceCTO1 + else + : + fi +} + +function sourceCTO1() { + cdn_origin_time "4" +} + +# ============================================================================================================ +# Part 38: Source Respond Time +function SourceRTO() { + if [[ $ptype == '1' ]]; then + sourceRTO1 + elif [[ $ptype == '2' ]]; then + : + elif [[ $ptype == '3' ]]; then + : + elif [[ $ptype == '4' ]]; then + : + elif [[ $ptype == '5.1' || $ptype == '5.2' || $ptype == '5.3' || $ptype == '5.4' || $ptype == 
'5.5' || $ptype == '5.6' || $ptype == '5.7' ]]; then + sourceRTO1 + else + : + fi +} + +function sourceRTO1() { + cdn_origin_time "6" +} + +# -------------------------------------------------------------------------------- +# set a trap for Ctrl+C +trap 'onCtrlC' INT + +# -------------------------------------------------------------------------------- +# 自定义颜色显示 +c_br='\e[1;31m' # bold red +c_bg='\e[1;32m' # bold green +c_by='\e[1;33m' # bold yellow +c_bb='\e[1;34m' # bold blue +c_bp='\e[1;35m' # bold purple +c_bc='\e[1;36m' # bold cyan +c_bir='\e[1;3;31m' # bold italic red +c_big='\e[1;3;32m' # bold italic green +c_biy='\e[1;3;33m' # bold italic yellow +c_bib='\e[1;3;34m' # bold italic blue +c_bip='\e[1;3;35m' # bold italic purple +c_bic='\e[1;3;36m' # bold italic cyan +c_e='\e[0m' # reset + + +# -------------------------------------------------------------------------------- +# 初始化设定 +number=`date +%N` +CT=45 +stty erase '^H' # allow backspace +data='/usr/local/script/fanmf11/data' # set data directory path +toolbox='/usr/local/script/fanmf11' # set toobbox directory path +TS=`date +%s%N` # document the start time of the script +host=`whoami` # who use this script +trash="/usr/local/script/fanmf11/trash/$host/$TS" # set trash directory path +livelog='/home/log/cluster_live_log' +liveatslog='/home/log/trafficserver' +cdn_access_log='/home/log/cluster_gateway_log' # * +cdn_origin_log='/home/log/trafficserver' # * + +# -------------------------------------------------------------------------------- +# 初始化,获得必要的变量 +initial $@ + +# -------------------------------------------------------------------------------- +# 主程序分支 +if [[ $itype == '00' ]]; then + statusCodeA +elif [[ $itype == '01' ]]; then + respondTimeA +elif [[ $itype == '02' ]]; then + firstDurA +elif [[ $itype == '03' ]]; then + errorCodeA +elif [[ $itype == '04' ]]; then + remoteIPA +elif [[ $itype == '05' ]]; then + clientIPA +elif [[ $itype == '06' ]]; then + methodA +elif [[ $itype == '07' ]]; then + protocolA +elif [[ $itype == '08' ]]; then + topUrlA +elif [[ $itype == '09' ]]; then + upstreamAddrA +elif [[ $itype == '10' ]]; then + upstreamRTimeA +elif [[ $itype == '11' ]]; then + originIPA +elif [[ $itype == '12' ]]; then + comboA +elif [[ $itype == '13' ]]; then + refererA +elif [[ $itype == '14' ]]; then + uaA +elif [[ $itype == '15' ]]; then + logA +elif [[ $itype == '16' ]]; then + qpsA +elif [[ $itype == '17' ]]; then + : +elif [[ $itype == '18' ]]; then + : +elif [[ $itype == '19' ]]; then + : +elif [[ $itype == '20' ]]; then + : +elif [[ $itype == '21' ]]; then + : +elif [[ $itype == '22' ]]; then + : +elif [[ $itype == '23' ]]; then + : +elif [[ $itype == '24' ]]; then + : +elif [[ $itype == '25' ]]; then + : +elif [[ $itype == '26' ]]; then + : +elif [[ $itype == '27' ]]; then + : +elif [[ $itype == '28' ]]; then + firstDurO +elif [[ $itype == '29' ]]; then + comboO +elif [[ $itype == '30' ]]; then + clientIPO +elif [[ $itype == '31' ]]; then + originIPO +elif [[ $itype == '32' ]]; then + sourceSCO +elif [[ $itype == '33' ]]; then + statusCodeO +elif [[ $itype == '34' ]]; then + topUrlO +elif [[ $itype == '35' ]]; then + qpsO +elif [[ $itype == '36' ]]; then + logO +elif [[ $itype == '37' ]]; then + sourceCTO +elif [[ $itype == '38' ]]; then + SourceRTO +elif [[ $itype == '39' ]]; then + : +elif [[ $itype == '40' ]]; then + : +elif [[ $itype == '41' ]]; then + : +else + : +fi + diff --git a/old/ctc/logsc.awk b/old/ctc/logsc.awk new file mode 100644 index 0000000..ee8816a --- /dev/null +++ b/old/ctc/logsc.awk @@ 
-0,0 +1,20 @@ +#!/usr/bin/awk -f +# lap : 1--overlap 0--non-overlap +# comp: 1--access 0--origin +# pos : 4 | 10 | 11 +BEGIN { + if(comp==1) { ac=56 } + if(comp==0) { ac=50 } +} + +{ + if(acc==$ac) + res[$pos]++ +} + +END { + for(i in res) + printf "%-8s%-15s%-s\n", i, res[i], res[i]/NR*100"%" +} + + diff --git a/old/ctc/logtime.awk b/old/ctc/logtime.awk new file mode 100644 index 0000000..c1c7c95 --- /dev/null +++ b/old/ctc/logtime.awk @@ -0,0 +1,66 @@ +#!/usr/bin/awk -f +# lap : 1--overlap 0--non-overlap +# comp: 1--access 0--origin +# code: +# non-blank -- specify status code +# blank -- not specify status code +# index = 1 if time duration < 1 +# index = 2 if time duration < 2 +# index = 3 if time duration < 3 +# index = 4 if time duration < 4 +# index = 5 if time duration < 5 +# index = 6 if time duration < 6 +# index = 7 if time duration < 11 +# index = 8 if time duration < 16 +# index = 9 if time duration < 21 + +function timeproc(dur, trans) { + if((dur / trans)<1) + res[1]++ + else if((dur / trans)<2) + res[2]++ + else if((dur / trans)<3) + res[3]++ + else if((dur / trans)<4) + res[4]++ + else if((dur / trans)<5) + res[5]++ + else if((dur / trans)<6) + res[6]++ + else if((dur / trans)<11) + res[7]++ + else if((dur / trans)<16) + res[8]++ + else if((dur / trans)<21) + res[9]++ + else if((dur / trans)>=21) + res[10]++ +} + +BEGIN { + if(code!="" && comp==1) { sc=4 } + if(code!="" && comp==0) { sc=11 } + if(code=="") { sc=1000 } + if(comp==1) { ac=56; trans=1; } + if(comp==0) { ac=50; trans=1000; } +} + +{ + if(acc==$ac && code==$sc) + timeproc($pos, trans) + +} +END { + for(i in res) + if(i==1||i==2||i==3||i==4||i==5||i==6) + printf "%-15s%-8s%-s\n", "time < "i"s", res[i], res[i]/NR*100"%" + else if(i==7) + printf "%-15s%-8s%-s\n", "time < 11s", res[i], res[i]/NR*100"%" + else if(i==8) + printf "%-15s%-8s%-s\n", "time < 16s", res[i], res[i]/NR*100"%" + else if(i==9) + printf "%-15s%-8s%-s\n", "time < 21s", res[i], res[i]/NR*100"%" + else if(i==10) + printf "%-15s%-8s%-s\n", "time >= 21s", res[i], res[i]/NR*100"%" +} + diff --git a/old/ctc/map.sh b/old/ctc/map.sh new file mode 100644 index 0000000..c262854 --- /dev/null +++ b/old/ctc/map.sh @@ -0,0 +1,349 @@ +#!/bin/bash +# 功能实现:根据指定的域名,查询对于应域名所在解析组的相关信息 +# 依赖文件:dna/ +# 存在问题: +# + +# 自定义控制台颜色显示 +c_br='\e[1;31m' # bold red +c_bg='\e[1;32m' # bold green +c_bc='\e[1;36m' # bold cyan +c_by='\e[1;33m' # bold yellow +c_bp='\e[1;35m' # bold purple +c_bir='\e[1;3;31m' # bold italic red +c_big='\e[1;3;32m' # bold italic green +c_biy='\e[1;3;33m' # bold italic yellow +c_bib='\e[1;3;34m' # bold italic blue +c_bip='\e[1;3;35m' # bold italic purple +c_bic='\e[1;3;36m' # bold italic cyan +c_biw='\e[1;3;30m' # bold italic gray +c_e='\e[0m' # reset + +# 使用说明 +function usage { + echo -e "${c_bib}Usage: ${c_e}" + echo -e "${c_bib} map -d domain vip # 从域名解析组中随机获取一个VIP ${c_e}" + echo -e "${c_bib} map -d domain rip # 从域名解析组中随机获取一个RIP ${c_e}" + echo -e "${c_bib} map -d domain ip # 验证一个IP是否属于域名解析组中的VIP或者RIP ${c_e}" + echo -e "${c_bib} map -d domain label # 验证一个节点中/英文标签名是否包含在域名解析组中 ${c_e}" + echo -e "${c_bib} map -d domain label # 打印域名解析组中所有的节点信息 ${c_e}" + echo -e "${c_bib} map -d domain cover # 输出域名边缘解析组节点资源覆盖情况,可指定区域查询 ${c_e}" + echo -e "${c_bib} map -d domain parent # 输出域名父解析组节点资源覆盖情况,可指定区域查询 ${c_e}\n" + echo -e "${c_bic}[MAP-100] 该脚本工具会根据指定的域名,查询对于应域名所在解析组的相关信息,其中vip/rip/pool/cover/parent均是字符串参数,domain/ip是实际要输入真实值的参数,label既可以是字符串参数,也可以是实际节点中/英文标签名称。${c_e}\n" + exit 100 +} + +function onCtrlC () { + # while capture Ctrl+C, kill all background processes 
silently and exit + exec 3>&2 # 3 is now a copy of 2 + exec 2> /dev/null # 2 now points to /dev/null + sleep 1 # sleep to wait for process to die + exec 2>&3 # restore stderr to saved + exec 3>&- # close saved version + echo + echo -e "${c_bir}[MAP-101] Ctrl+C is captured, exiting...\n${c_e}" + exit 101 +} + +# 随机获取域名对应解析组的一个VIP +function random_vip() { + # 其实支持的不仅如下,只要输入正确,其他网络运营商也是可以查询的 + echo -ne "${c_bg}请输入ISP类型--【ct|cu|cm|bgp|ctbgp|cubgp|cmbgp】:${c_e}" + read -t 60 isp + [[ $? -ne 0 ]] && { echo -e "${c_br}[MAP-102] 60s内无任何输入,退出...${c_e}\n"; exit 102; } + # do a check to see if isp is correct or not + [[ $isp == '' ]] && isp='ct' + # 从解析组VIP列过滤以运营商标识开头的节点和VIP + ip_list=`cat map.log | awk '{print $4, $5}' | grep "^${isp}_" | awk '{print $2}' | sort | uniq` + [[ $ip_list == '' ]] && { echo -e "${c_br}[MAP-103] 在解析组$map中,没有找到$isp的网络运营商,退出...${c_e}"; exit 103; } + # 匹配v6和v4的子集 + v6_list=`echo $ip_list | grep -Eo "[0-9a-fA-F:]{11,39}"` + v4_list=`echo $ip_list | grep -Eo "([0-9]{1,3}\.){3}[0-9]{1,3}"` + # 计算分别有多少个 + num_v6=`echo $v6_list | awk '{print NF}'` + num_v4=`echo $v4_list | awk '{print NF}'` + # 获取随机的IP,注意用$RANDOM取模之后,结果有可能是0,最大值不超过总数,所以需要 +1 + [[ num_v6 -ne 0 ]] && rand6=$(( $RANDOM % $num_v6 )) && vip_v6=`echo $v6_list | awk -v v6=$rand6 '{print $(v6+1)}'` + [[ num_v4 -ne 0 ]] && rand4=$(( $RANDOM % $num_v4 )) && vip_v4=`echo $v4_list | awk -v v4=$rand4 '{print $(v4+1)}'` + echo "vip_v6: $vip_v6" + echo "vip_v4: $vip_v4" + echo -e "${c_bip}MAP: $map\n${c_e}" +} + + +# 随机获取域名对应解析组的一个RIP +function random_rip() { + # 其实支持的不仅如下,只要输入正确,其他网络运营商也是可以查询的 + echo -ne "${c_bg}请输入ISP类型--【ct|cu|cm|bgp|ctbgp|cubgp|cmbgp】:${c_e}" + read -t 60 isp + [[ $? -ne 0 ]] && { echo -e "${c_br}[MAP-104] 60s内无任何输入,退出...${c_e}\n"; exit 104; } + # do a check to see if isp is correct or not + [[ $isp == '' ]] && isp='ct' + # 从解析组VIP列过滤以运营商标识开头的节点和VIP + ip_list=`cat map.log | awk '{print $4, $5}' | grep "^${isp}_" | awk '{print $2}' | sort | uniq` + [[ $ip_list == '' ]] && { echo -e "${c_br}[MAP-106] 在解析组$map中,没有找到$isp的网络运营商,退出...${c_e}"; exit 105; } + # 匹配v6和v4的子集 + v6_list=`echo $ip_list | grep -Eo "[0-9a-fA-F:]{11,39}"` + v4_list=`echo $ip_list | grep -Eo "([0-9]{1,3}\.){3}[0-9]{1,3}"` + # 计算分别有多少个 + num_v6=`echo $v6_list | awk '{print NF}'` + num_v4=`echo $v4_list | awk '{print NF}'` + # 获取随机的IP,注意用$RANDOM取模之后,结果有可能是0,最大值不超过总数,所以需要 +1 + [[ num_v6 -ne 0 ]] && rand6=$(( $RANDOM % $num_v6 )) && vip_v6=`echo $v6_list | awk -v v6=$rand6 '{print $(v6+1)}'` + [[ num_v4 -ne 0 ]] && rand4=$(( $RANDOM % $num_v4 )) && vip_v4=`echo $v4_list | awk -v v4=$rand4 '{print $(v4+1)}'` + # 拿到VPI之后,使用`ips`获取RIP列表 + [[ num_v6 -ne 0 ]] && v6_list=`ips $vip_v6 | sed -n '/RIP/, /VIP/ p' | grep -Eo '([0-9]{1,3}.){3}[0-9]{1,3}'` + [[ num_v4 -ne 0 ]] && v4_list=`ips $vip_v4 | sed -n '/RIP/, /VIP/ p' | grep -Eo '([0-9]{1,3}.){3}[0-9]{1,3}'` + # 计算分别有多少个 + num_v6=`echo $v6_list | awk '{print NF}'` + num_v4=`echo $v4_list | awk '{print NF}'` + # 获取随机的IP,注意用$RANDOM取模之后,结果有可能是0,最大值不超过总数,所以需要 +1 + [[ num_v6 -ne 0 ]] && rand6=$(( $RANDOM % $num_v6 )) && rip_v6=`echo $v6_list | awk -v v6=$rand6 '{print $(v6+1)}'` + [[ num_v4 -ne 0 ]] && rand4=$(( $RANDOM % $num_v4 )) && rip_v4=`echo $v4_list | awk -v v4=$rand4 '{print $(v4+1)}'` + + echo "rip_v6: $rip_v6" + echo "rip_v4: $rip_v4" + echo -e "${c_bip}MAP: $map\n${c_e}" +} + + +# 判断一个IP是否归属域名的解析组,可以是VIP,也可以是RIP +function ip_inmap() { + # 使用`ips`判断IP是否在天翼平台 + ips $item > ips.log 2>&1 + [[ $? 
-ne 0 ]] && { cat ips.log; echo -e "${c_br}[MAP-106]${c_e}"; exit 106; } + # 判断IP是否是RIP + cat $data/ip.group | awk '{print $1}' | grep -wq $item + is_rip=$? + # 如果是RIP + if [[ $is_rip -eq 0 ]]; then + # 获取对应RIP的英文节点标签名,并判断该节点是否在域名的解析组中 + label=`cat ips.log | grep -Eo "(ct|cu|cm|bgp|ctbgp|cmbgp|cubgp|as|eu|sa|na|cbn|cern)_[a-z]{2,3}_[a-z]{2,20}[0-9]{1,2}_(c|e|m|n)[0-9]{0,2}" | head -n 1` + cat map.log | awk '{print $4}' | sort | uniq | grep -wq $label + if [[ $? -eq 0 ]]; then + echo -e "${c_big}$item是域名$domain对应解析组$map中的IP,并且是一个RIP。\n${c_e}" + else + echo -e "${c_bir}[MAP-107] $item${c_biy}不是${c_bir}域名$domain对应解析组中的IP。\n${c_e}" + exit 107 + fi + # 如果不是RIP,那就是VIP + else + # 直接判断该IP是否在域名的解析组 + cat map.log | awk '{print $5}' | sort | uniq | grep -wq $item + if [[ $? -eq 0 ]]; then + echo -e "${c_big}$item是域名$domain对应解析组$map中的IP,并且是一个VIP。\n${c_e}" + else + echo -e "${c_bir}[MAP-108] $item${c_biy}不是${c_bir}域名$domain对应解析组中的IP。\n${c_e}" + exit 108 + fi + fi +} + +# 判断一个标签是否在域名的解析组中 +function label_inmap() { + # 输出改解析组所有的节点中英文对应标签信息 + cat map.log | awk '{print $14, $4}' | sed '1d' | sort | uniq | awk '{if(NR%3==0) print " | "$0" | "; else printf "%s", " | "$0;} END{print ""}' | column -t + # 查询节点中英文标签名称是否在域名的解析组中,在的话并输出相应信息 + cat map.log | awk '{print $14, $4}' | sort | uniq | grep -wq $item + if [[ $? -eq 0 ]]; then + node=`cat map.log | awk '{print $14, $4}' | sort | uniq | grep -w $item` + echo -e "${c_big}$node${c_by} 是域名 $domain 对应解析组 $map 中的节点,如上是该解析组所有节点列表汇总,可参考。\n${c_e}" + else + echo -e "${c_bir}[MAP-109] $item${c_biy} 不是${c_bir}域名 $domain 对应解析组 $map 中的节点,如上是该解析组所有节点列表汇总,可参考。\n${c_e}" + exit 109 + fi +} + + +# 打印域名对应解析组所有节点信息 +function labels_inmap() { + cat map.log | awk '{print $14, $4}' | sed '1d' | sort | uniq | awk '{if(NR%3==0) print " | "$0" | "; else printf "%s", " | "$0;} END{print ""}' | column -t + echo -e "${c_big}如上是域名 $domain 对应解析组 $map 所有节点列表汇总,可参考。\n${c_e}" +} + +# 输出域名对应解析组的覆盖情况,可指定地区 +function cover() { + # 宽度提示 + width=`tput cols` + if [[ $width -lt 170 ]]; then + echo -e "${c_biy}因该选项输出的每行数据比较多,需要终端宽度大于170,当前终端宽度为$width,输出的界面会不整齐,是否继续(Y/n):${c_e}" + read -t 60 YON + [[ $? -ne 0 ]] && { echo -e "${c_br}[MAP-110] 60s内无任何输入,退出...${c_e}\n"; exit 110; } + if [[ $YON != '' && $YON != 'y' && $YON != 'Y' && $YON != 'Yes' && $YON != 'yes' && $YON != 'YES' ]]; then + echo -e "${c_br}[MAP-111] 请调整终端宽度之后,重新运行,退出...${c_e}\n" + exit 111 + fi + fi + + # 将需要保留的的字段过滤出来 + cat map.log | sed '1d' | awk '{print $1, $2, $3, $4, $5, $14, $16, $22, $16-$22, $23, $24}' | sort -k 2 > res.map + echo -e "${c_bib}1. 省份维度:31个省市自治区 + 电信|移动|联通,例如-内蒙古电信${c_e}" + echo -e "${c_bib}2. 大区维度:东北|华北|华东|华南|华中|西北|西南 + 电信|移动|联通|长宽|铁通|广电,例如-东北移动${c_e}" + echo -e "${c_bib}3. 全国维度:中国 + 电信|移动|联通|长宽|铁通|广电|其他/香港/澳门/台湾/教育网,例如-中国移动${c_e}" + echo -e "${c_bib}4. 国际维度:日本/韩国/蒙古国/北朝鲜/澳洲/欧洲/非洲/北美洲/南美洲/中亚/西亚/南亚/东南亚/亚洲其他等${c_e}" + echo -e "${c_bib}5. 其他维度:全球/其他/其他广电/其他电信/其他移动/其他联通/其他铁通/其他长宽${c_e}" + echo -ne "${c_bg}请按照如上规则,输入查询条件:${c_e}\n" + read -t 60 query + [[ $? 
-ne 0 ]] && { echo -e "${c_br}[MAP-112] 60s内无任何输入,退出...${c_e}\n"; exit 112; } + # 无任何输入,则默认打印所有资源 + if [[ ${query} == '' ]]; then + python3 /usr/local/script/fanmf11/get_infos.py --format-global res.map + # python3 /home/fanmf11/fanmf11/get_infos.py --format-global res.map + # 否则打印指定地区的资源覆盖情况 + else + cat $data/area | grep $query| awk '{print $1}' > view + python3 /usr/local/script/fanmf11/get_infos.py --format-partial $query view res.map $domain $map + # python3 /home/fanmf11/fanmf11/get_infos.py --format-partial $query view res.map $domain $map + fi +} + +# 输出域名对应父解析组的覆盖情况,可指定地区 +function parent() { + # 宽度提示 + width=`tput cols` + if [[ $width -lt 170 ]]; then + echo -e "${c_biy}因该选项输出的每行数据比较多,需要终端宽度大于170,当前终端宽度为$width,输出的界面会不整齐,是否继续(Y/n):${c_e}" + read -t 60 YON + [[ $? -ne 0 ]] && { echo -e "${c_br}[MAP-113] 60s内无任何输入,退出...${c_e}\n"; exit 113; } + if [[ $YON != '' && $YON != 'y' && $YON != 'Y' && $YON != 'Yes' && $YON != 'yes' && $YON != 'YES' ]]; then + echo -e "${c_br}[MAP-114] 请调整终端宽度之后,重新运行,退出...${c_e}\n" + exit 114 + fi + fi + + # 获取域名的父解析组 + # python3 /usr/local/script/fanmf11/get_infos.py --domain_config_accid map_info.log $domain + infos --parent $domain $TS + cat cmap && echo -e "${c_bg}请选择要查看的父方案序号(e.g. 1, 2, 3...):${c_e}" + read -t 60 index + [[ $? -ne 0 ]] && { echo -e "${c_br}[MAP-115] 60s内无任何输入,退出...${c_e}\n"; exit 115; } + cat cmap | grep -Eq "^$index\." + [[ $? -ne 0 ]] && { echo -e "${c_br}[MAp-116] 请输入正确的序号,退出...${c_e}\n"; exit 116; } + cmap=`cat cmap | grep -E "^$index\." | awk '{print $2}'` + getlastcover $cmap > cmap.log 2>&1 + + + # 将需要保留的的字段过滤出来 + cat cmap.log | sed '1d' | awk '{print $1, $2, $3, $4, $5, $14, $16, $22, $16-$22, $23, $24}' | sort -k 2 > res.map + echo -e "${c_bib}1. 省份维度:31个省市自治区 + 电信|移动|联通,例如-内蒙古电信${c_e}" + echo -e "${c_bib}2. 大区维度:东北|华北|华东|华南|华中|西北|西南 + 电信|移动|联通|长宽|铁通|广电,例如-东北移动${c_e}" + echo -e "${c_bib}3. 全国维度:中国 + 电信|移动|联通|长宽|铁通|广电|其他/香港/澳门/台湾/教育网,例如-中国移动${c_e}" + echo -e "${c_bib}4. 国际维度:日本/韩国/蒙古国/北朝鲜/澳洲/欧洲/非洲/北美洲/南美洲/中亚/西亚/南亚/东南亚/亚洲其他等${c_e}" + echo -e "${c_bib}5. 其他维度:全球/其他/其他广电/其他电信/其他移动/其他联通/其他铁通/其他长宽${c_e}" + echo -ne "${c_bg}请按照如上规则,输入查询条件:${c_e}\n" + read -t 60 query + [[ $? -ne 0 ]] && { echo -e "${c_br}[MAP-117] 60s内无任何输入,退出...${c_e}\n"; exit 117; } + # 无任何输入,则默认打印所有资源 + if [[ ${query} == '' ]]; then + python3 /usr/local/script/fanmf11/get_infos.py --format-global res.map + # python3 /home/fanmf11/fanmf11/get_infos.py --format-global res.map + # 否则打印指定地区的资源覆盖情况 + else + cat $data/area | grep $query| awk '{print $1}' > view + python3 /usr/local/script/fanmf11/get_infos.py --format-partial $query view res.map $domain $map + # python3 /home/fanmf11/fanmf11/get_infos.py --format-partial $query view res.map $domain $map + fi +} + +function logfile() { + if [[ -d $trash ]]; then + echo -e "${c_br}[MAP-118]对于同一个用户,同一时间只能运行一个实例,请重新运行...${c_e}" + exit 118 + else + mkdir -p $trash + cd $trash && cd .. 
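+        # At this point we are in the per-user trash parent
+        # (/usr/local/script/fanmf11/trash/$host). The loop below deletes any stray
+        # plain files left at this level, then trims the oldest per-run directories
+        # (ordered by mtime via `ls -t`) until at most 29 remain, the freshly created
+        # $trash directory included.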
+ docs=`ls` + for doc in $docs; do + [[ -f $doc ]] && rm -rf $doc + done + folders=`ls -t` + while [[ `echo $folders | awk '{print NF}'` -gt 29 ]]; do + folder=`ls -t | tail -1` + rm -rf $folder + folders=`ls -t` + done + cd $trash && touch map + fi +} + +# -------------------------------------------------------------------------------- +# set a trap for Ctrl+C +trap 'onCtrlC' INT + +# -------------------------------------------------------------------------------- +# 初始化设定 +stty erase '^H' # allow backspace +data='/usr/local/script/fanmf11/data' # set data directory path +toolbox='/usr/local/script/fanmf11/' # set toobbox directory path +map='' +accid='' +TS=`date +%s%N` # document the start time of the script +host=`whoami` # who use this script +trash="/usr/local/script/fanmf11/trash/$host/$TS" # set trash directory path + +# -------------------------------------------------------------------------------- +# 入参正确性检测 +let NumOP=$# # number of parameter +OP="prefix "$@ # do a prefix cause '-' char may damage echo command +dash_d=`echo $OP | awk '{print $2}'` # get first param -d +domain=`echo $OP | awk '{print $3}'` # get second param domain +item=`echo $OP | awk '{print $4}'` # get third param item, can be vip, rip, testip, pool, cover etc. +[[ $NumOP -ne 3 || $dash_d != '-d' ]] && usage || logfile + +# -------------------------------------------------------------------------------- +# 检查域名是否在平台注册 +res=`cat $data/domain.list | grep -w "$domain"` +[[ $res == '' ]] && { echo -e "${c_br}[MAp-119] 该域名未在天翼平台配置,一个小时内新增的域名无法查询,退出...${c_e}"; exit 119; } + +# -------------------------------------------------------------------------------- +# 获取域名解析组信息 +infos --map $domain $TS +[[ $? -eq 205 || $? -eq 231 ]] && { echo -e "${c_br}[MAP-120] exiting...${c_e}"; exit 120; } +# cd $trash && map=`cat map.log` && getlastcover $map > map.log +cd $trash +if [[ `cat map.log | wc -l` -eq 1 ]]; then + map=`cat map.log` +else + maps=`cat map.log | sort | uniq` + count=1 && > remap.log + for map in $maps; do + echo $count": "$map | tee -a remap.log + let count=count+1 + done + echo -ne "${c_bg}存在分区域解析,需确定解析组名称(默认是1):${c_e}\n" + read -t 60 imap + [[ $? -ne 0 ]] && { echo -e "${c_br}[MAP-121] 60s内无任何输入,退出...${c_e}\n"; exit 121; } + # do a check to see if isp is correct or not + [[ $imap == '' ]] && let imap=1 + map=`cat remap.log | awk -F ':' -v imap=$imap '$1==imap {print $2}'` + [[ $map == '' ]] && { echo -e "${c_br}[MAP-122] 请输入正确的序号,退出...${c_e}"; exit 122; } +fi + +getlastcover $map > map.log +cat map.log | grep -q 'can not find sys_id' +[[ $? 
-eq 0 ]] && { echo -e "${c_br}[MAP-123] 该解析组未在平台配置,退出...${c_e}"; exit 123; } + + + +# -------------------------------------------------------------------------------- +# 随机获取VIP +if [[ $item == 'vip' ]]; then + random_vip +elif [[ $item == 'rip' ]]; then + random_rip +elif [[ $item =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ || $item =~ ^[0-9a-fA-F:]{11,39}$ ]]; then + ip_inmap +elif [[ $item =~ ^(ct|cu|cm|bgp|ctbgp|cmbgp|cubgp|as|eu|sa|na|cbn|cern)_[a-z]{2,3}_[a-z]{2,20}[0-9]{1,2}_(c|dns|e|lvs|m|mysql|n|prets|pretw|redis|testts|ts|tw|uatts1)[0-9]{0,2}$ ]]; then + label_inmap +elif [[ $item == 'label' ]]; then + labels_inmap +elif [[ $item == 'cover' ]]; then + cover +elif [[ $item == 'parent' ]]; then + parent +# 兜底是中文节点名的查询 +else + label_inmap +fi + + diff --git a/old/ctc/normalize.jq b/old/ctc/normalize.jq new file mode 100644 index 0000000..a4407c9 --- /dev/null +++ b/old/ctc/normalize.jq @@ -0,0 +1,13 @@ +# Apply f to composite entities recursively using keys[], and to atoms +def sorted_walk(f): + . as $in + | if type == "object" then + reduce keys[] as $key + ( {}; . + { ($key): ($in[$key] | sorted_walk(f)) } ) | f + elif type == "array" then map( sorted_walk(f) ) | f + else f + end; + +def normalize: sorted_walk(if type == "array" then sort else . end); + +normalize diff --git a/old/ctc/reformat.awk b/old/ctc/reformat.awk new file mode 100644 index 0000000..c699f66 --- /dev/null +++ b/old/ctc/reformat.awk @@ -0,0 +1,31 @@ +#!/usr/bin/awk -f + +BEGIN{ + start1="\"-----BEGINCERTIFICATE-----"; + start2="-----BEGINCERTIFICATE-----"; + end1="-----ENDCERTIFICATE-----"; + end2="-----ENDCERTIFICATE-----\","; +} + +{ + if($0~"https_public_content") { + printf "%s", $1; + for(i=2;i<=NF;i++) { + if($i==start1) + printf "%s", "\"-----BEGIN CERTIFICATE-----\\n"; + else if($i==start2) + printf "%s", "-----BEGIN CERTIFICATE-----\\n"; + else if($i==end1) + printf "%s", "-----END CERTIFICATE-----\\n"; + else if($i==end2) + printf "%s", "-----END CERTIFICATE-----\","; + else if($i=="") + continue + else + printf "%s", $i"\\n" + } + } + else print $0 +} + + diff --git a/old/ctc/tasks.sh b/old/ctc/tasks.sh new file mode 100644 index 0000000..e6da64a --- /dev/null +++ b/old/ctc/tasks.sh @@ -0,0 +1,92 @@ +#!/bin/bash + +function isAlarm() +{ + alarmDescrption=$1 + alarmFile=$2 + alarmDate=`date` + curl 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=ddea3f5f-fbfc-4c21-994a-71e9fc50e4ef' \ + -H 'Content-Type: application/json' \ + -d ' + { + "msgtype": "markdown", + "markdown": { + "content": "**'"$alarmDescrption"'**\n + > 错误文件:'"$alarmFile"'生成错误,请立即查看 + > 告警时间:'"$alarmDate"'" + } + } ' > /dev/null 2>&1 +} + +function domain_list() { + # ---------------------------------------------------------------------------------------- + # 获取平台全量域名信息 + let count=0 + while [[ $count -lt 3 ]]; do + curl 'https://bs.ctcdn.cn/api/v3/manageDomain/export' -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyaWQiOjI1NH0.9Zw66R0R85avd92qzO-3KQ4DJ2zYXL4ght5bH41iTfA' -H 'content-type: application/json;charset=UTF-8' -vo $data/domain.list > $data/domain.list.response 2>&1 + + # 判断响应是否200 + line_number=`cat $data/domain.list | wc -l` + cat $data/domain.list.response | grep -Eq 'HTTP/(1.1|2) 200 OK' + if [[ $? 
-ne 0 || $line_number -lt 20000 ]]; then + count=$((count+1)) + else + exit + fi + done + isAlarm '【严重】获取全量域名信息失败' 'domain.list' + exit 248 +} + +function renew_backup() { + cd $data + lakes_bak > $data/lakes + curl -so $data/ip.group "http://150.223.254.77:5044/download/ip.group" + + # backups + cd $toolbox + [[ -d '/home/fanmf11/.backups/' ]] && rm -rf /home/fanmf11/.backups/*.tgz || mkdir '/home/fanmf11/.backups/' + cd $toolbox & bt=$(date +%Y%m%d%H%M%S) + # cp $data/lakes $data/lakes-$(date +%d) + # cp $data/ip.group $data/ip.group-$(date +%d) + tar -czf /home/fanmf11/.backups/toolbox-${bt}.tgz ./* + + [[ ! -s $data/lakes ]] && isAlarm '【严重】基础文件生成错误告警' 'lakes' + [[ ! -s $data/ip.group ]] && isAlarm '【严重】基础文件生成错误告警' 'ip.group' + [[ ! -s $backups/toolbox-${bt}.tgz ]] && isAlarm '备份失败告警' "toolbox-${bt}.tgz" +} + + + +function view_check() { + maps=`cat $data/maps` + > $data/area.new + for map in $maps; do + getlastcover $map > $map + cat $map | awk '{print $3}' | sed '1d' | sort | uniq >> $data/area.new + rm $map + done + news=`cat $data/area.new | sort | uniq` + olds=`cat $data/area | awk '{print $1}' | sort | uniq` + > $data/area.new + > $data/area.diff + for new in $news; do + [[ $new == 'find' ]] && continue + echo $new >> $data/area.new + echo $olds | grep -wq $new + [[ $? -ne 0 ]] && { isAlarm '有新的View需要添加' "$new"; echo $new >> $data/area.diff; sleep 1; } + done +} + +data='/usr/local/script/fanmf11/data' +host=`whoami` +toolbox='/usr/local/script/fanmf11' +backups='/home/fanmf11/.backups' + + +[[ $1 == '--renew_backup' ]] && renew_backup +[[ $1 == '--domain_list' ]] && domain_list +[[ $1 == '--new_area' ]] && view_check + + + diff --git a/old/ctc/utool b/old/ctc/utool new file mode 100644 index 0000000..328f4eb --- /dev/null +++ b/old/ctc/utool @@ -0,0 +1,134 @@ +#!/bin/bash +# User specific aliases and functions + +alias cls='clear && ls' +# alias trash='cd /usr/local/script/fanmf11/trash/fanmf11 && ls' +alias fanmf11='cd /usr/local/script/fanmf11 && ls' +alias ..='cd ../ && ls' +alias ...='cd ../.. && ls' +alias l='ls -alh' +alias common='cat /usr/local/script/fanmf11/data/cmds' + +c_br='\e[1;31m' # bold red +c_bg='\e[1;32m' # bold green +c_by='\e[1;33m' # bold yellow +c_bb='\e[1;34m' # bold blue +c_bp='\e[1;35m' # bold purple +c_bc='\e[1;36m' # bold cyan +c_bir='\e[1;3;31m' # * bold italic red +c_big='\e[1;3;32m' # bold italic cyan +c_bib='\e[1;3;34m' # * bold italic cyan +c_bip='\e[1;3;35m' # bold italic cyan +c_bic='\e[1;3;36m' # bold italic cyan +c_e='\e[0m' # reset + +trash='/usr/local/script/fanmf11/trash' + +function utool() { + + if [[ $1 == '-a' ]]; then + # set -x + [[ $# -lt 1 ]] && { echo -e "${c_bir}[UTOOL-100] Need at least one parameter, exiting...${c_e}"; return 100; } + [[ $# -eq 1 ]] && place='fanmf11' || place="$2" + ls $trash | grep -wq $place + [[ $? 
-ne 0 ]] && { echo -e "${c_br}[UTOOL-101] $place用户不存在,或该用户从未使用过相关工具,退出...${c_e}"; return 101; } + + > $trash/fanmf11/record.log + items="ips ids map infos logs config" + for item in $items; do + date_lists=`find $trash/$place -name "$item" -type f | sort | uniq | awk -F '/' '{print $8}' | sort | uniq` + for date_list in $date_lists; do + let number=${date_list:0:10} + date_fmt=`date -d@$number +'%Y-%m-%d %H:%M:%S'` + echo "$date_fmt $date_list $item" >> $trash/fanmf11/record.log + done + done + cat $trash/fanmf11/record.log | sort -nk3 | awk '{printf "%-11s%-14s%-25s%-s\n", $1, $2, $3, $4}' + # set +x + elif [[ $1 == '-b' ]]; then + cat /usr/local/script/fanmf11/data/cmds + elif [[ $1 == '-c' ]]; then + : + elif [[ $1 == '-d' ]]; then + : + elif [[ $1 == '-e' ]]; then + : + elif [[ $1 == '-f' ]]; then + : + elif [[ $1 == '-g' ]]; then + : + elif [[ $1 == '-h' ]]; then + usage + elif [[ $1 == '-i' ]]; then + : + elif [[ $1 == '-j' ]]; then + : + elif [[ $1 == '-k' ]]; then + : + elif [[ $1 == '-l' ]]; then + : + elif [[ $1 == '-m' ]]; then + : + elif [[ $1 == '-n' ]]; then + : + elif [[ $1 == '-o' ]]; then + : + elif [[ $1 == '-p' ]]; then + : + elif [[ $1 == '-q' ]]; then + : + elif [[ $1 == '-r' ]]; then + : + elif [[ $1 == '-s' ]]; then + : + elif [[ $1 == '-t' ]]; then + : + elif [[ $1 == '-u' ]]; then + : + elif [[ $1 == '-v' ]]; then + : + elif [[ $1 == '-w' ]]; then + : + elif [[ $1 == '-x' ]]; then + : + elif [[ $1 == '-y' ]]; then + : + elif [[ $1 == '-z' ]]; then + echo -e "${c_bg}直播:120.39.248.231" + echo -e "全站:222.187.236.6" + echo -e "全站:222.187.236.7" + echo -e "点播:113.62.113.33${c_e}" + else + : + fi + +} + + +function usage() { + let col=`tput cols` + if [[ $col -lt 120 ]]; then + echo -e "\e[1;3;31mYour screen width is too small to show the usage info neatly. So make the display window maximized.\e[0m" + read -p "Press any key to continue..." + echo '' + fi + + + echo -e "\e[1;32mDESCRIPTION:\e[0m" + echo -e "\e[3;32mutool -- a self-defined command line interface, which is used to facilitate operating the system, supports the following options. In the description part, where there is a leading asterisk signifies that this option must take an argument.\e[0m" + echo -e "\e[1;4m \e[0m" + echo -e "\e[37;40m|\e[0m\e[1;4;37;40mOption| Description |Option| Description \e[0m\e[37;40m|\e[0m" + echo -e "\e[37;40m| -a |*find dirs of specified item in trash | -n | |\e[0m" + echo -e "\e[37;40m| -b | show some often used commands | -o | |\e[0m" + echo -e "\e[37;40m| -c | | -p | |\e[0m" + echo -e "\e[37;40m| -d | | -q | |\e[0m" + echo -e "\e[37;40m| -e | | -r | |\e[0m" + echo -e "\e[37;40m| -f | | -s | |\e[0m" + echo -e "\e[37;40m| -g | | -t | |\e[0m" + echo -e "\e[37;40m| -h | show usage info | -u | |\e[0m" + echo -e "\e[37;40m| -i | | -v | |\e[0m" + echo -e "\e[37;40m| -j | | -w | |\e[0m" + echo -e "\e[37;40m| -k | | -x | |\e[0m" + echo -e "\e[37;40m| -l | | -y | |\e[0m" + echo -e "\e[37;40m|\e[0m\e[4;37;40m -m | | -z | \e[0m\e[37;40m|\e[0m\n" +} diff --git a/old/github_update.sh b/old/github_update.sh new file mode 100644 index 0000000..993808c --- /dev/null +++ b/old/github_update.sh @@ -0,0 +1,41 @@ +#!/bin/bash +#=================================================================== +# Filename : update_github.sh +# Function : +# Usage : +# Author : Manford Fan +# Date : 2022-03-26 18:46 +# Version : Version 0.1 +# Disclaimer : The author is NOT responsible for any loss caused +# by the user's own operations. 
+# And More : If you find there are some bugs in this scripts +# Or you have better ideas, please do contact me +# via E-mail -- mffan0922@163.com +#=================================================================== + +t=`date +%Y%m%d%H%M%S` +echo $t >> /opt/logs/github_update.log + +# first try +echo -n "1-blog: " +cd /opt/source-code/blog && git pull --rebase +echo -n "1-wiki: " +cd /opt/websites/wiki && git pull --rebase +echo -n "1-nav: " +cd /opt/websites/nav && git pull --rebase +echo -n "1-homepage: " +cd /opt/websites/homepage && git pull --rebase + +# check if done +echo -n "2-blog: " +cd /opt/source-code/blog && git pull --rebase +echo -n "2-wiki: " +cd /opt/websites/wiki && git pull --rebase +echo -n "2-nav: " +cd /opt/websites/nav && git pull --rebase +echo -n "2-homepage: " +cd /opt/websites/homepage && git pull --rebase + +echo -e "-----------------------------------------------------------\n" + + diff --git a/old/github_update.sh.homepage b/old/github_update.sh.homepage new file mode 100644 index 0000000..d6c74d1 --- /dev/null +++ b/old/github_update.sh.homepage @@ -0,0 +1,41 @@ +#!/bin/bash +#=================================================================== +# Filename : update_github.sh +# Function : +# Usage : +# Author : Manford Fan +# Date : 2022-03-26 18:46 +# Version : Version 0.1 +# Disclaimer : The author is NOT responsible for any loss caused +# by the user's own operations. +# And More : If you find there are some bugs in this scripts +# Or you have better ideas, please do contact me +# via E-mail -- mffan0922@163.com +#=================================================================== + +t=`date +%Y%m%d%H%M%S` +echo $t >> /opt/logs/github_update.log + +# first try +# echo -n "1-blog: " +# cd /opt/source-code/blog && git pull --rebase +# echo -n "1-wiki: " +# cd /opt/websites/wiki && git pull --rebase +# echo -n "1-nav: " +# cd /opt/websites/nav && git pull --rebase +echo -n "1-homepage: " +cd /opt/websites/homepage && git pull --rebase + +# check if done +# echo -n "2-blog: " +# cd /opt/source-code/blog && git pull --rebase +# echo -n "2-wiki: " +# cd /opt/websites/wiki && git pull --rebase +# echo -n "2-nav: " +# cd /opt/websites/nav && git pull --rebase +echo -n "2-homepage: " +cd /opt/websites/homepage && git pull --rebase + +echo -e "-----------------------------------------------------------\n" + + diff --git a/old/jekyll_blog_update.sh b/old/jekyll_blog_update.sh new file mode 100644 index 0000000..1e9de76 --- /dev/null +++ b/old/jekyll_blog_update.sh @@ -0,0 +1,17 @@ +#!/bin/bash + + +inotifywait -mrq -e create,delete,move,close_write /opt/source-code/blog --exclude '^.*/avatar.jpg|^.*/\.git' | while read directory action filename; do + echo ==================================================== + echo `date` + echo $directory$filename $action + rm -rf /opt/websites/blog + let numOfAvatar=`ls /opt/websites/nav/assets/images/logos/ | wc -l` + let randNumber=$RANDOM%$numOfAvatar + + cp /opt/websites/nav/assets/images/logos/${randNumber}.jpg /opt/websites/homepage/assets/img/logo.jpg -rf + cp /opt/websites/nav/assets/images/logos/${randNumber}.jpg /opt/websites/nav/assets/images/logos/avatar.jpg -rf + cp /opt/websites/nav/assets/images/logos/${randNumber}.jpg /opt/source-code/blog/img/avatar.jpg -rf + jekyll b -s /opt/source-code/blog/ -d /opt/websites/blog/ + echo -e '\n' +done diff --git a/old/jekyll_python_update.sh b/old/jekyll_python_update.sh new file mode 100644 index 0000000..6832bf9 --- /dev/null +++ b/old/jekyll_python_update.sh @@ -0,0 +1,13 @@ 
+#!/bin/bash + + +inotifywait -mrq -e create,delete,move,close_write /opt/source-code/document/python | while read directory action filename; do + echo ==================================================== + echo `date` + echo $directory$filename $action + rm -rf /opt/websites/just-the-docs/python + jekyll b -s /opt/source-code/document/python -d /opt/websites/just-the-docs/python + echo -e '\n' +done + + diff --git a/old/koel_update.sh b/old/koel_update.sh new file mode 100644 index 0000000..6b71c4a --- /dev/null +++ b/old/koel_update.sh @@ -0,0 +1,36 @@ +#!/bin/bash +#=================================================================== +# Filename : koel_update.sh +# Function : +# Usage : +# Author : Manford Fan +# Date : 2022-10-15 23:34 +# Version : Version 0.1 +# Disclaimer : The author is NOT responsible for any loss caused +# by the user's own operations. +# And More : If you find there are some bugs in this script +# Or you have better ideas, please do contact me +# via E-mail -- mffan0922@163.com +#=================================================================== + +[[ ! -e /tmp/files_now ]] && touch /tmp/files_now +[[ ! -e /tmp/files_pre_60s ]] && touch /tmp/files_pre_60s +success_flg=1 +ls -aR /opt/media/Music | grep -E "*.(mp3|flac|opus|aac|ogg|m4a)" | sort > /tmp/files_now +diff /tmp/files_now /tmp/files_pre_60s >> /opt/logs/koel_update.log +if [[ $? -ne 0 ]]; then + chown -R www-data:www-data /opt/media/Music + for i in `seq 10`; do + php /opt/source-code/koel/artisan koel:sync > /dev/null + if [[ $? -eq 0 ]]; then + php /opt/source-code/koel/artisan koel:sync >> /opt/logs/koel_update.log + success_flg=0 + break + fi + sleep 2 + done + [[ success_flg -eq 1 ]] && echo "Happening @ $(date) Failed scanning the media dir, need processing that by hand." >> /opt/logs/koel_update.log + echo -e "Happening @ $(date) Sync koel music successfully." >> /opt/logs/koel_update.log +fi +cp /tmp/files_now /tmp/files_pre_60s + diff --git a/old/nav_jpg.sh b/old/nav_jpg.sh new file mode 100644 index 0000000..58e62dc --- /dev/null +++ b/old/nav_jpg.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +html='/opt/websites/nav/index.html' +jpg_max_num=`cat $html | grep -oE "[0-9]+\.jpg" | awk -F '.' '{print $1}' | sort -n | tail -n 1` +line_num=`cat $html | grep -oE "[0-9]+\.jpg" | awk -F '.' '{print $1}' | sort -n | uniq -c | wc -l` +jpg_all_num=`ls -al /opt/websites/nav/assets/images/logos/ | wc -l` +if [[ $((jpg_max_num+1)) -ne $line_num ]]; then + echo -e "\e[1;31mThere must be duplicated jpg files, plz check!\e[0m" + return 2 +fi +echo "Now: $jpg_max_num | MAX: $jpg_all_num | AVAILABLE: $((jpg_all_num-jpg_max_num)) | NEXT: $((jpg_max_num+1))" + + + diff --git a/old/rclone/rclone_alist_automount.sh b/old/rclone/rclone_alist_automount.sh new file mode 100644 index 0000000..2164186 --- /dev/null +++ b/old/rclone/rclone_alist_automount.sh @@ -0,0 +1,116 @@ +#!/bin/bash +#=================================================================== +# Filename : rclone_alist_automount.sh +# Function : +# Usage : +# Author : Manford Fan +# Date : 2022-10-19 14:05 +# Version : Version 0.1 +# Disclaimer : The author is NOT responsible for any loss caused +# by the user's own operations. 
+# And More : If you find there are some bugs in this script +# Or you have better ideas, please do contact me +# via E-mail -- mffan0922@163.com +#=================================================================== + + +function rclone_alist_reset() { + systemctl restart alist.service + for i in `seq 3`; do + fusermount -uzq /opt/webdav/alist > /dev/null 2>&1 + umount /opt/webdav/alist > /dev/null 2>&1 + sleep 2 + done + ps -ef | grep 'rclone mount Alist' | grep -v grep | awk '{print $2}' > /tmp/rclone/rclone_alist_pids + for rclone_alist_pid in `cat /tmp/rclone/rclone_alist_pids`; do + kill -9 $rclone_alist_pid; + done + nohup /usr/bin/rclone mount Alist:/ /opt/webdav/alist \ + --allow-other --vfs-cache-mode full --vfs-cache-max-size 10G --vfs-read-ahead 100M --dir-cache-time 2m --poll-interval 0 \ + --vfs-cache-max-age 4h --cache-dir /tmp/vfs-cache --bwlimit-file 20M --bwlimit 100M --no-update-modtime --contimeout 30m \ + --log-file /opt/logs/rclone/rclone_alist.log --log-level NOTICE --vfs-read-chunk-size 100M --vfs-read-chunk-size-limit 500M \ + --buffer-size 200M --attr-timeout 5m --transfers=6 --multi-thread-streams=6 --fast-list --timeout 30m \ + --allow-non-empty --no-modtime --max-duration 3h --vfs-read-wait 2s --vfs-write-wait 5s > /dev/null 2>&1 & +} + +alist_log='/opt/logs/rclone/rclone_alist.log' +pid_self=$$ +# get all kinds of states for later decision +num=`cat /proc/mounts | grep /opt/webdav/alist | wc -l` +[[ $num -eq 0 ]] && loaded=0 +[[ $num -eq 1 ]] && loaded=1 +[[ $num -gt 1 ]] && loaded=2 + +ps -ef | grep '/usr/bin/rclone mount Alist' | grep -v grep | awk '{print $2}' > /tmp/rclone/rclone_alist_pids +num=`cat /tmp/rclone/rclone_alist_pids | wc -l` +[[ $num -eq 0 ]] && rclone_running=0 +[[ $num -eq 1 ]] && rclone_running=1 +[[ $num -gt 1 ]] && rclone_running=2 + +sleep 2 +ps -ef | grep '/usr/bin/bash /opt/scripts/rclone/rclone_alist_automount.sh' | grep -v 'grep' > /tmp/rclone/rclone_alist_automount_pids +let num=`cat /tmp/rclone/rclone_alist_automount_pids | sed -e '/^$/d' | wc -l` +[[ $num -eq 1 ]] && script_running=1 +[[ $num -gt 1 ]] && script_running=2 + +# print the states for debug +echo `date` >> $alist_log +echo loaded = $loaded >> $alist_log +echo rclone_running = $rclone_running >> $alist_log +echo script_running = $script_running >> $alist_log +# exit 5 + +# decide if `rclone` command function normally +if [[ $1 == '-f' ]]; then + echo -e "Happening @ $(date) [Alist] Executing BY Hands.\n" >> $alist_log + + if [[ $script_running -eq 1 ]]; then + rclone_alist_reset + elif [[ script_running -eq 2 ]]; then + echo "Happening @ $(date) [Alist] Script rclone_alist_automount.sh -f has already been executing..." | tee -a $alist_log + echo "Happening @ $(date) [Alist] Alist RESET will be done with -f option" | tee -a $alist_log + for rclone_alist_automount_pid in `cat /tmp/rclone/rclone_alist_automount_pids`; do + [[ $rclone_alist_automount_pid != $pid_self ]] && kill -9 $rclone_alist_automount_pid > /dev/null 2>&1 + done + rclone_alist_reset + else + echo "Happening @ $(date) [Alist] In general, this -f case will NOT happen" >> $alist_log + fi +elif [[ $1 == '-c' ]]; then + echo -e "Happening @ $(date) [Alist] Executing BY Cron Service.\n" >> $alist_log + + if [[ $script_running -eq 1 ]]; then + rclone_alist_reset + elif [[ script_running -eq 2 ]]; then + echo "Happening @ $(date) [Alist] Script rclone_alist_automount.sh -c has already been executing..." | tee -a $alist_log + echo "Happening @ $(date) [Alist] Alist RESET will be done on CRON condition." 
| tee -a $alist_log + for rclone_alist_automount_pid in `cat /tmp/rclone/rclone_alist_automount_pids`; do + [[ $rclone_alist_automount_pid != $pid_self ]] && kill -9 $rclone_alist_automount_pid > /dev/null 2>&1 + done + rclone_alist_reset + else + echo "Happening @ $(date) [Alist] In general, this -c case will NOT happen" >> $alist_log + fi +elif [[ $1 == '' ]]; then + sleep 10 + if [[ script_running -eq 1 ]]; then + if [[ $loaded -ne 1 || $rclone_running -ne 1 ]]; then + echo "Happening @ $(date) [Alist] Executing automatically." >> $alist_log + rclone_alist_reset + fi + elif [[ $script_running -eq 2 ]]; then + echo "Happening @ $(date) [Alist] Script rclone_alist_automount.sh auto has already been executing..." | tee -a $alist_log + echo "Happening @ $(date) [Alist] Nothing will be done at this auto-situation" | tee -a $alist_log + # for rclone_alist_automount_pid in `cat /tmp/rclone_alist_automount_pids`; do + # [[ $rclone_alist_automount_pid != $pid_self ]] && kill -9 $rclone_alist_automount_pid > /dev/null 2>&1 + # done + # rclone_alist_reset + else + echo "Happening @ $(date) [Alist] In general, this auto case will NOT happen" >> $alist_log + fi +else + echo "Happening @ $(date) [Alist] Wrong usage of script/tool, only accept -f | -c or nothing as argument." >> $alist_log +fi + + + diff --git a/old/rclone/rclone_bash_completion.sh b/old/rclone/rclone_bash_completion.sh new file mode 100644 index 0000000..a76ef23 --- /dev/null +++ b/old/rclone/rclone_bash_completion.sh @@ -0,0 +1,342 @@ +#!/bin/bash +#=================================================================== +# Filename : rclone_bash_completion.sh +# Function : +# Usage : +# Author : Manford Fan +# Date : 2022-10-27 10:04 +# Version : Version 0.1 +# Disclaimer : The author is NOT responsible for any loss caused +# by the user's own operations. +# And More : If you find there are some bugs in this script +# Or you have better ideas, please do contact me +# via E-mail -- mffan0922@163.com +#=================================================================== + + +# bash completion V2 for rclone -*- shell-script -*- + +__rclone_debug() +{ + if [[ -n ${BASH_COMP_DEBUG_FILE:-} ]]; then + echo "$*" >> "${BASH_COMP_DEBUG_FILE}" + fi +} + +# Macs have bash3 for which the bash-completion package doesn't include +# _init_completion. This is a minimal version of that function. +__rclone_init_completion() +{ + COMPREPLY=() + _get_comp_words_by_ref "$@" cur prev words cword +} + +# This function calls the rclone program to obtain the completion +# results and the directive. It fills the 'out' and 'directive' vars. +__rclone_get_completion_results() { + local requestComp lastParam lastChar args + + # Prepare the command to request completions for the program. + # Calling ${words[0]} instead of directly rclone allows to handle aliases + args=("${words[@]:1}") + requestComp="${words[0]} __complete ${args[*]}" + + lastParam=${words[$((${#words[@]}-1))]} + lastChar=${lastParam:$((${#lastParam}-1)):1} + __rclone_debug "lastParam ${lastParam}, lastChar ${lastChar}" + + if [ -z "${cur}" ] && [ "${lastChar}" != "=" ]; then + # If the last parameter is complete (there is a space following it) + # We add an extra empty parameter so we can indicate this to the go method. 
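+        # (A trailing space means the user has finished the previous word, so
+        # completion is being requested for a new, empty word; appending the literal
+        # '' below passes that empty word through to `rclone __complete` explicitly.)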
+ __rclone_debug "Adding extra empty parameter" + requestComp="${requestComp} ''" + fi + + # When completing a flag with an = (e.g., rclone -n=) + # bash focuses on the part after the =, so we need to remove + # the flag part from $cur + if [[ "${cur}" == -*=* ]]; then + cur="${cur#*=}" + fi + + __rclone_debug "Calling ${requestComp}" + # Use eval to handle any environment variables and such + out=$(eval "${requestComp}" 2>/dev/null) + + # Extract the directive integer at the very end of the output following a colon (:) + directive=${out##*:} + # Remove the directive + out=${out%:*} + if [ "${directive}" = "${out}" ]; then + # There is not directive specified + directive=0 + fi + __rclone_debug "The completion directive is: ${directive}" + __rclone_debug "The completions are: ${out}" +} + +__rclone_process_completion_results() { + local shellCompDirectiveError=1 + local shellCompDirectiveNoSpace=2 + local shellCompDirectiveNoFileComp=4 + local shellCompDirectiveFilterFileExt=8 + local shellCompDirectiveFilterDirs=16 + + if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then + # Error code. No completion. + __rclone_debug "Received error from custom completion go code" + return + else + if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then + if [[ $(type -t compopt) = "builtin" ]]; then + __rclone_debug "Activating no space" + compopt -o nospace + else + __rclone_debug "No space directive not supported in this version of bash" + fi + fi + if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then + if [[ $(type -t compopt) = "builtin" ]]; then + __rclone_debug "Activating no file completion" + compopt +o default + else + __rclone_debug "No file completion directive not supported in this version of bash" + fi + fi + fi + + # Separate activeHelp from normal completions + local completions=() + local activeHelp=() + __rclone_extract_activeHelp + + if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then + # File extension filtering + local fullFilter filter filteringCmd + + # Do not use quotes around the $completions variable or else newline + # characters will be kept. + for filter in ${completions[*]}; do + fullFilter+="$filter|" + done + + filteringCmd="_filedir $fullFilter" + __rclone_debug "File filtering command: $filteringCmd" + $filteringCmd + elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then + # File completion for directories only + + # Use printf to strip any trailing newline + local subdir + subdir=$(printf "%s" "${completions[0]}") + if [ -n "$subdir" ]; then + __rclone_debug "Listing directories in $subdir" + pushd "$subdir" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 || return + else + __rclone_debug "Listing directories in ." + _filedir -d + fi + else + __rclone_handle_completion_types + fi + + __rclone_handle_special_char "$cur" : + __rclone_handle_special_char "$cur" = + + # Print the activeHelp statements before we finish + if [ ${#activeHelp} -ne 0 ]; then + printf "\n"; + printf "%s\n" "${activeHelp[@]}" + printf "\n" + + # The prompt format is only available from bash 4.4. + # We test if it is available before using it. + if (x=${PS1@P}) 2> /dev/null; then + printf "%s" "${PS1@P}${COMP_LINE[@]}" + else + # Can't print the prompt. Just print the + # text the user had typed, it is workable enough. + printf "%s" "${COMP_LINE[@]}" + fi + fi +} + +# Separate activeHelp lines from real completions. +# Fills the $activeHelp and $completions arrays. 
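+# Any line of $out that begins with the "_activeHelp_ " marker is a help message to
+# be printed above the prompt; every other line is a real completion candidate,
+# optionally followed by a tab-separated description. For example, output such as
+#   _activeHelp_ Press TAB again for descriptions
+#   copy	Copy files from source to dest
+# would put the first line into activeHelp[] and the second into completions[].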
+__rclone_extract_activeHelp() { + local activeHelpMarker="_activeHelp_ " + local endIndex=${#activeHelpMarker} + + while IFS='' read -r comp; do + if [ "${comp:0:endIndex}" = "$activeHelpMarker" ]; then + comp=${comp:endIndex} + __rclone_debug "ActiveHelp found: $comp" + if [ -n "$comp" ]; then + activeHelp+=("$comp") + fi + else + # Not an activeHelp line but a normal completion + completions+=("$comp") + fi + done < <(printf "%s\n" "${out}") +} + +__rclone_handle_completion_types() { + __rclone_debug "__rclone_handle_completion_types: COMP_TYPE is $COMP_TYPE" + + case $COMP_TYPE in + 37|42) + # Type: menu-complete/menu-complete-backward and insert-completions + # If the user requested inserting one completion at a time, or all + # completions at once on the command-line we must remove the descriptions. + # https://github.com/spf13/cobra/issues/1508 + local tab=$'\t' comp + while IFS='' read -r comp; do + [[ -z $comp ]] && continue + # Strip any description + comp=${comp%%$tab*} + # Only consider the completions that match + if [[ $comp == "$cur"* ]]; then + COMPREPLY+=("$comp") + fi + done < <(printf "%s\n" "${completions[@]}") + ;; + + *) + # Type: complete (normal completion) + __rclone_handle_standard_completion_case + ;; + esac +} + +__rclone_handle_standard_completion_case() { + local tab=$'\t' comp + + # Short circuit to optimize if we don't have descriptions + if [[ "${completions[*]}" != *$tab* ]]; then + IFS=$'\n' read -ra COMPREPLY -d '' < <(compgen -W "${completions[*]}" -- "$cur") + return 0 + fi + + local longest=0 + local compline + # Look for the longest completion so that we can format things nicely + while IFS='' read -r compline; do + [[ -z $compline ]] && continue + # Strip any description before checking the length + comp=${compline%%$tab*} + # Only consider the completions that match + [[ $comp == "$cur"* ]] || continue + COMPREPLY+=("$compline") + if ((${#comp}>longest)); then + longest=${#comp} + fi + done < <(printf "%s\n" "${completions[@]}") + + # If there is a single completion left, remove the description text + if [ ${#COMPREPLY[*]} -eq 1 ]; then + __rclone_debug "COMPREPLY[0]: ${COMPREPLY[0]}" + comp="${COMPREPLY[0]%%$tab*}" + __rclone_debug "Removed description from single completion, which is now: ${comp}" + COMPREPLY[0]=$comp + else # Format the descriptions + __rclone_format_comp_descriptions $longest + fi +} + +__rclone_handle_special_char() +{ + local comp="$1" + local char=$2 + if [[ "$comp" == *${char}* && "$COMP_WORDBREAKS" == *${char}* ]]; then + local word=${comp%"${comp##*${char}}"} + local idx=${#COMPREPLY[*]} + while [[ $((--idx)) -ge 0 ]]; do + COMPREPLY[$idx]=${COMPREPLY[$idx]#"$word"} + done + fi +} + +__rclone_format_comp_descriptions() +{ + local tab=$'\t' + local comp desc maxdesclength + local longest=$1 + + local i ci + for ci in ${!COMPREPLY[*]}; do + comp=${COMPREPLY[ci]} + # Properly format the description string which follows a tab character if there is one + if [[ "$comp" == *$tab* ]]; then + __rclone_debug "Original comp: $comp" + desc=${comp#*$tab} + comp=${comp%%$tab*} + + # $COLUMNS stores the current shell width. + # Remove an extra 4 because we add 2 spaces and 2 parentheses. + maxdesclength=$(( COLUMNS - longest - 4 )) + + # Make sure we can fit a description of at least 8 characters + # if we are to align the descriptions. 
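+            # (Illustration: with COLUMNS=80 and longest=20, maxdesclength is
+            # 80 - 20 - 4 = 56, so each completion is padded to 20 characters and any
+            # description longer than 56 characters is truncated and suffixed with "…".)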
+ if [[ $maxdesclength -gt 8 ]]; then + # Add the proper number of spaces to align the descriptions + for ((i = ${#comp} ; i < longest ; i++)); do + comp+=" " + done + else + # Don't pad the descriptions so we can fit more text after the completion + maxdesclength=$(( COLUMNS - ${#comp} - 4 )) + fi + + # If there is enough space for any description text, + # truncate the descriptions that are too long for the shell width + if [ $maxdesclength -gt 0 ]; then + if [ ${#desc} -gt $maxdesclength ]; then + desc=${desc:0:$(( maxdesclength - 1 ))} + desc+="…" + fi + comp+=" ($desc)" + fi + COMPREPLY[ci]=$comp + __rclone_debug "Final comp: $comp" + fi + done +} + +__start_rclone() +{ + local cur prev words cword split + + COMPREPLY=() + + # Call _init_completion from the bash-completion package + # to prepare the arguments properly + if declare -F _init_completion >/dev/null 2>&1; then + _init_completion -n "=:" || return + else + __rclone_init_completion -n "=:" || return + fi + + __rclone_debug + __rclone_debug "========= starting completion logic ==========" + __rclone_debug "cur is ${cur}, words[*] is ${words[*]}, #words[@] is ${#words[@]}, cword is $cword" + + # The user could have moved the cursor backwards on the command-line. + # We need to trigger completion from the $cword location, so we need + # to truncate the command-line ($words) up to the $cword location. + words=("${words[@]:0:$cword+1}") + __rclone_debug "Truncated words[*]: ${words[*]}," + + local out directive + __rclone_get_completion_results + __rclone_process_completion_results +} + +if [[ $(type -t compopt) = "builtin" ]]; then + complete -o default -F __start_rclone rclone +else + complete -o default -o nospace -F __start_rclone rclone +fi + +# ex: ts=4 sw=4 et filetype=sh + diff --git a/old/rclone/rclone_cloudreve_automount.sh b/old/rclone/rclone_cloudreve_automount.sh new file mode 100644 index 0000000..b1fbe3e --- /dev/null +++ b/old/rclone/rclone_cloudreve_automount.sh @@ -0,0 +1,119 @@ +#!/bin/bash +#=================================================================== +# Filename : rclone_cloudreve_automount.sh +# Function : +# Usage : +# Author : Manford Fan +# Date : 2022-10-19 14:05 +# Version : Version 0.1 +# Disclaimer : The author is NOT responsible for any loss caused +# by the user's own operations. 
+# And More : If you find there are some bugs in this script +# Or you have better ideas, please do contact me +# via E-mail -- mffan0922@163.com +#=================================================================== + + +function rclone_cloudreve_reset() { + for i in `seq 3`; do + fusermount -uzq /opt/webdav/cloudreve > /dev/null 2>&1 + umount /opt/webdav/cloudreve > /dev/null 2>&1 + sleep 2 + done + ps -ef | grep 'rclone mount Cloudreve' | grep -v grep | awk '{print $2}' > /tmp/rclone/rclone_cloudreve_pids + for rclone_cloudreve_pid in `cat /tmp/rclone/rclone_cloudreve_pids`; do + kill -9 $rclone_cloudreve_pid; + done + nohup /usr/bin/rclone mount Cloudreve:/ /opt/webdav/cloudreve \ + --allow-other --vfs-cache-mode full --vfs-cache-max-size 10G --vfs-read-ahead 100M --dir-cache-time 2m\ + --vfs-cache-max-age 4h --cache-dir /tmp/vfs-cache --bwlimit-file 20M --bwlimit 100M --no-update-modtime \ + --log-file /opt/logs/rclone/rclone_cloudreve.log --log-level NOTICE --vfs-read-chunk-size 100M --vfs-read-chunk-size-limit 500M \ + --buffer-size 200M --attr-timeout 5m --transfers=6 --multi-thread-streams=6 --fast-list \ + --allow-non-empty --no-modtime --max-duration 3h --vfs-read-wait 2s --vfs-write-wait 5s > /dev/null 2>&1 & +} + +cloudreve_log='/opt/logs/rclone/rclone_cloudreve.log' +pid_self=$$ +# get all kinds of states for later decision +num=`cat /proc/mounts | grep /opt/webdav/cloudreve | wc -l` +[[ $num -eq 0 ]] && loaded=0 +[[ $num -eq 1 ]] && loaded=1 +[[ $num -gt 1 ]] && loaded=2 + +ps -ef | grep '/usr/bin/rclone mount Cloudreve' | grep -v grep | awk '{print $2}' > /tmp/rclone/rclone_cloudreve_pids +num=`cat /tmp/rclone/rclone_cloudreve_pids | wc -l` +[[ $num -eq 0 ]] && rclone_running=0 +[[ $num -eq 1 ]] && rclone_running=1 +[[ $num -gt 1 ]] && rclone_running=2 + +sleep 2 +ps -ef | grep '/usr/bin/bash /opt/scripts/rclone/rclone_cloudreve_automount.sh' | grep -v 'grep' > /tmp/rclone/rclone_cloudreve_automount_pids +let num=`cat /tmp/rclone/rclone_cloudreve_automount_pids | sed -e '/^$/d' | wc -l` +[[ $num -eq 1 ]] && script_running=1 +if [[ $num -gt 1 ]]; then + script_running=2 + echo `date` >> /tmp/rclone/rclone_cloudreve_abnormal.log + ps -ef | grep '/usr/bin/bash /opt/scripts/rclone/rclone_cloudreve_automount.sh' | grep -v 'grep' >> /tmp/rclone/rclone_cloudreve_abnormal.log +fi + +# print the states for debug +echo `date` >> $cloudreve_log +echo loaded = $loaded >> $cloudreve_log +echo rclone_running = $rclone_running >> $cloudreve_log +echo script_running = $script_running >> $cloudreve_log +# exit 5 + +# decide if `rclone` command function normally +if [[ $1 == '-f' ]]; then + echo -e "Happening @ $(date) [Cloudreve] Executing BY Hands.\n" >> $cloudreve_log + + if [[ $script_running -eq 1 ]]; then + rclone_cloudreve_reset + elif [[ script_running -eq 2 ]]; then + echo "Happening @ $(date) [Cloudreve] Script rclone_cloudreve_automount.sh -f has already been executing..." 
| tee -a $cloudreve_log + echo "Happening @ $(date) [Cloudreve] Cloudreve RESET will be done with -f option" | tee -a $cloudreve_log + for rclone_cloudreve_automount_pid in `cat /tmp/rclone/rclone_cloudreve_automount_pids`; do + [[ $rclone_cloudreve_automount_pid != $pid_self ]] && kill -9 $rclone_cloudreve_automount_pid > /dev/null 2>&1 + done + rclone_cloudreve_reset + else + echo "Happening @ $(date) [Cloudreve] In general, this case will NOT happen" >> $cloudreve_log + fi +elif [[ $1 == '-c' ]]; then + echo -e "Happening @ $(date) [Cloudreve] Executing BY Cron Service.\n" >> $cloudreve_log + + if [[ $script_running -eq 1 ]]; then + rclone_cloudreve_reset + elif [[ script_running -eq 2 ]]; then + echo "Happening @ $(date) [Cloudreve] Script rclone_cloudreve_automount.sh -c has already been executing..." | tee -a $cloudreve_log + echo "Happening @ $(date) [Cloudreve] Cloudreve RESET will be done on CRON condition." | tee -a $cloudreve_log + for rclone_cloudreve_automount_pid in `cat /tmp/rclone/rclone_cloudreve_automount_pids`; do + [[ $rclone_cloudreve_automount_pid != $pid_self ]] && kill -9 $rclone_cloudreve_automount_pid > /dev/null 2>&1 + done + rclone_cloudreve_reset + else + echo "Happening @ $(date) [Cloudreve] In general, this case will NOT happen" >> $cloudreve_log + fi +elif [[ $1 == '' ]]; then + sleep 10 + if [[ script_running -eq 1 ]]; then + if [[ $loaded -ne 1 || $rclone_running -ne 1 ]]; then + echo "Happening @ $(date) [Cloudreve] Executing automatically." >> $cloudreve_log + rclone_cloudreve_reset + fi + elif [[ $script_running -eq 2 ]]; then + echo "Happening @ $(date) [Cloudreve] Script rclone_cloudreve_automount.sh auto has already been executing..." | tee -a $cloudreve_log + echo "Happening @ $(date) [Cloudreve] Nothing will be done at this auto-situation" | tee -a $cloudreve_log + # for rclone_cloudreve_automount_pid in `cat /tmp/rclone_cloudreve_automount_pids`; do + # [[ $rclone_cloudreve_automount_pid != $pid_self ]] && kill -9 $rclone_cloudreve_automount_pid > /dev/null 2>&1 + # done + # rclone_cloudreve_reset + else + echo "Happening @ $(date) [Cloudreve] In general, this case will NOT happen" >> $cloudreve_log + fi +else + echo "Happening @ $(date) [Cloudreve] Wrong usage of script/tool, only accept -f | -c or nothing as argument." >> $cloudreve_log +fi + + + diff --git a/old/rclone/rclone_onedrive_automount.sh b/old/rclone/rclone_onedrive_automount.sh new file mode 100644 index 0000000..d21f3f3 --- /dev/null +++ b/old/rclone/rclone_onedrive_automount.sh @@ -0,0 +1,116 @@ +#!/bin/bash +#=================================================================== +# Filename : rclone_onedrive_automount.sh +# Function : +# Usage : +# Author : Manford Fan +# Date : 2022-10-19 14:05 +# Version : Version 0.1 +# Disclaimer : The author is NOT responsible for any loss caused +# by the user's own operations. 
+# And More : If you find there are some bugs in this script +# Or you have better ideas, please do contact me +# via E-mail -- mffan0922@163.com +#=================================================================== + + +function rclone_onedrive_reset() { + for i in `seq 3`; do + fusermount -uzq /opt/webdav/onedrive > /dev/null 2>&1 + umount /opt/webdav/onedrive > /dev/null 2>&1 + sleep 2 + done + ps -ef | grep 'rclone mount Onedrive' | grep -v grep | awk '{print $2}' > /tmp/rclone/rclone_onedrive_pids + for rclone_onedrive_pid in `cat /tmp/rclone/rclone_onedrive_pids`; do + kill -9 $rclone_onedrive_pid; + done + nohup /usr/bin/rclone mount Onedrive:/ /opt/webdav/onedrive \ + --allow-other --vfs-cache-mode full --vfs-cache-max-size 10G --vfs-read-ahead 100M --dir-cache-time 2m --poll-interval 0 \ + --vfs-cache-max-age 4h --cache-dir /tmp/vfs-cache --bwlimit-file 20M --bwlimit 100M --no-update-modtime --contimeout 30m \ + --log-file /opt/logs/rclone/rclone_onedrive.log --log-level NOTICE --vfs-read-chunk-size 100M --vfs-read-chunk-size-limit 500M \ + --buffer-size 200M --attr-timeout 5m --transfers=6 --multi-thread-streams=6 --fast-list --timeout 30m \ + --allow-non-empty --no-modtime --max-duration 3h --vfs-read-wait 2s --vfs-write-wait 5s > /dev/null 2>&1 & +} + +onedrive_log='/opt/logs/rclone/rclone_onedrive.log' +pid_self=$$ +# get all kinds of states for later decision +num=`cat /proc/mounts | grep /opt/webdav/onedrive | wc -l` +[[ $num -eq 0 ]] && loaded=0 +[[ $num -eq 1 ]] && loaded=1 +[[ $num -gt 1 ]] && loaded=2 + +ps -ef | grep '/usr/bin/rclone mount Onedrive' | grep -v grep | awk '{print $2}' > /tmp/rclone/rclone_onedrive_pids +num=`cat /tmp/rclone/rclone_onedrive_pids | wc -l` +[[ $num -eq 0 ]] && rclone_running=0 +[[ $num -eq 1 ]] && rclone_running=1 +[[ $num -gt 1 ]] && rclone_running=2 + +sleep 2 +ps -ef | grep '/usr/bin/bash /opt/scripts/rclone/rclone_onedrive_automount.sh' | grep -v 'grep' > /tmp/rclone/rclone_onedrive_automount_pids +let num=`cat /tmp/rclone/rclone_onedrive_automount_pids | sed -e '/^$/d' | wc -l` +[[ $num -eq 1 ]] && script_running=1 +[[ $num -gt 1 ]] && script_running=2 + +# print the states for debug +echo `date` >> $onedrive_log +echo loaded = $loaded >> $onedrive_log +echo rclone_running = $rclone_running >> $onedrive_log +echo script_running = $script_running >> $onedrive_log +# exit 5 + +# decide if `rclone` command function normally +if [[ $1 == '-f' ]]; then + echo -e "Happening @ $(date) [Onedrive] Executing BY Hands.\n" >> $onedrive_log + + if [[ $script_running -eq 1 ]]; then + rclone_onedrive_reset + elif [[ script_running -eq 2 ]]; then + echo "Happening @ $(date) [Onedrive] Script rclone_onedrive_automount.sh -f has already been executing..." | tee -a $onedrive_log + echo "Happening @ $(date) [Onedrive] Onedrive RESET will be done with -f option" | tee -a $onedrive_log + for rclone_onedrive_automount_pid in `cat /tmp/rclone/rclone_onedrive_automount_pids`; do + [[ $rclone_onedrive_automount_pid != $pid_self ]] && kill -9 $rclone_onedrive_automount_pid > /dev/null 2>&1 + done + rclone_onedrive_reset + else + echo "Happening @ $(date) [Onedrive] In general, this case will NOT happen" >> $onedrive_log + fi +elif [[ $1 == '-c' ]]; then + echo -e "Happening @ $(date) [Onedrive] Executing BY Cron Service.\n" >> $onedrive_log + + if [[ $script_running -eq 1 ]]; then + rclone_onedrive_reset + elif [[ script_running -eq 2 ]]; then + echo "Happening @ $(date) [Onedrive] Script rclone_onedrive_automount.sh -c has already been executing..." 
| tee -a $onedrive_log + echo "Happening @ $(date) [Onedrive] Onedrive RESET will be done on CRON condition." | tee -a $onedrive_log + for rclone_onedrive_automount_pid in `cat /tmp/rclone/rclone_onedrive_automount_pids`; do + [[ $rclone_onedrive_automount_pid != $pid_self ]] && kill -9 $rclone_onedrive_automount_pid > /dev/null 2>&1 + done + rclone_onedrive_reset + else + echo "Happening @ $(date) [Onedrive] In general, this case will NOT happen" >> $onedrive_log + fi +elif [[ $1 == '' ]]; then + sleep 10 + if [[ script_running -eq 1 ]]; then + if [[ $loaded -ne 1 || $rclone_running -ne 1 ]]; then + echo "Happening @ $(date) [Onedrive] Executing automatically." >> $onedrive_log + rclone_onedrive_reset + fi + elif [[ $script_running -eq 2 ]]; then + echo "Happening @ $(date) [Onedrive] script rclone_onedrive_automount.sh auto has already been executing..." | tee -a $onedrive_log + echo "Happening @ $(date) [Onedrive] Nothing will be done at this auto-situation" | tee -a $onedrive_log + echo "Nothing will be done at this situation" | tee -a $onedrive_log + # for rclone_onedrive_automount_pid in `cat /tmp/rclone_onedrive_automount_pids`; do + # [[ $rclone_onedrive_automount_pid != $pid_self ]] && kill -9 $rclone_onedrive_automount_pid > /dev/null 2>&1 + # done + # rclone_onedrive_reset + else + echo "Happening @ $(date) [Onedrive] In general, this case will NOT happen" >> $onedrive_log + fi +else + echo "Happening @ $(date) [Onedrive] Wrong usage of script/tool, only accept -f | -c or nothing as argument." >> $onedrive_log +fi + + + diff --git a/old/rclone/rclone_sync.sh b/old/rclone/rclone_sync.sh new file mode 100644 index 0000000..c2d277f --- /dev/null +++ b/old/rclone/rclone_sync.sh @@ -0,0 +1,21 @@ +#!/bin/bash +#=================================================================== +# Filename : auto_start_self.sh +# Function : +# Usage : +# Author : Manford Fan +# Date : 2022-04-12 09:50 +# Version : Version 0.1 +# Disclaimer : The author is NOT responsible for any loss caused +# by the user's own operations. 
+# And More : If you find there are some bugs in this script +# Or you have better ideas, please do contact me +# via E-mail -- mffan0922@163.com +#=================================================================== + +rclone sync -P /opt/media/Kindle/ Onedrive:/A-Book/Kindle/ +rclone sync -P /opt/media/Music/ Onedrive:/B-Media/Music/Koel/ +rclone sync -P Onedrive:/ /opt/webdav/wd/72-Backups/Onedrive/ --exclude=/E-Github/** + + + diff --git a/old/restore.sh b/old/restore.sh new file mode 100644 index 0000000..68cca42 --- /dev/null +++ b/old/restore.sh @@ -0,0 +1,330 @@ +#!/bin/bash + +# =========================================================================== +# This script must be executed by root privilege +if [[ $(id -u) -ne 0 ]]; then + echo -e "\e[1;31mThis script MUST be executed with root privilege.\e[0m\n" + exit 1 +fi + +# =========================================================================== +# Double check if do run this script +echo -e "\e[1;2;31m[VPS USE ONLY] - Are you sure you want to run this script to re-configure you system???\e[0m" +read -p "Yes/No: " YON +[[ $YON != 'Yes' && $YON != 'YES' && $YON != 'yes' && $YON != 'y' && $YON != 'Y' ]] && exit 2 +echo -e "\e[1;2;33m[VPS USE ONLY] - AGAIN, are you sure you want to run this script to re-configure you system???\e[0m" +read -p "Yes/No: " YON +[[ $YON != 'Yes' && $YON != 'YES' && $YON != 'yes' && $YON != 'y' && $YON != 'Y' ]] && exit 3 + +# =========================================================================== +# extract backup files +vps=`find . -name vps*.tar.xz` +if [[ ! -f flg && $vps != '' ]]; then + echo -e "\n\e[1;34mExtracting backups to current dir...\e[0m\n" + tar -I pixz -xmf vps*.xz + touch flg +elif [[ -f flg ]]; then + echo -e "\n\e[1;32mAlready extracted, doing nothing.\e[0m\n" +else + echo -e "\n\e[1;31mThere is no backup file right here, plz check.\e[0m\n" + exit 4 +fi + +# =========================================================================== +# sone prerequisites in aspect of path & content +echo -e "\n\e[1;34mPreparing initial env...\e[0m\n" +rm -rf /opt/* +mkdir -p /opt/logs +mkdir -p /opt/logs/rclone +mkdir -p /opt/temp +mkdir -p /opt/webdav/{alist,onedrive,wd} +mkdir -p /root/.pip +cp -rf configs scripts source-code websites /opt/ +cp /opt/configs/pip.conf /root/.pip + +# =========================================================================== +# set hostame +echo -e "\n\e[1;34mConfig hostname...\e[0m\n" +echo -ne "\e[1;34mPlz specify hostname: \e[0m" +read -t 600 host +hostnamectl set-hostname $host +name=`hostname` +cat /etc/hosts | grep -q $name +[[ $? -ne 0 ]] && sed -i "/^127/ s|$| $name|g" /etc/hosts + +# =========================================================================== +# config self-defined environment variable and function +echo -e "\n\e[1;34mconfig self-defined environment variable and function...\e[0m\n" +cat /root/.bashrc | grep -q 'toolbox' +[[ $? 
-ne 0 ]] && echo 'source /opt/scripts/utool/toolbox.sh' >> /root/.bashrc +source /root/.bashrc +chmod +x /opt/scripts/utool/utool.py +rm -rf /usr/local/bin/utool +ln -s /opt/scripts/utool/utool.py /usr/local/bin/utool + +# =========================================================================== +# set apt sources +echo -e "\n\e[1;34mConfig apt source list...\e[0m\n" +cat > /etc/apt/sources.list << EOF +deb https://mirrors.tuna.tsinghua.edu.cn/debian/ $(lsb_release -sc) main contrib non-free +deb-src https://mirrors.tuna.tsinghua.edu.cn/debian/ $(lsb_release -sc) main contrib non-free +deb https://mirrors.tuna.tsinghua.edu.cn/debian/ $(lsb_release -sc)-updates main contrib non-free +deb-src https://mirrors.tuna.tsinghua.edu.cn/debian/ $(lsb_release -sc)-updates main contrib non-free +deb https://mirrors.tuna.tsinghua.edu.cn/debian/ $(lsb_release -sc)-backports main contrib non-free +deb-src https://mirrors.tuna.tsinghua.edu.cn/debian/ $(lsb_release -sc)-backports main contrib non-free +deb https://mirrors.tuna.tsinghua.edu.cn/debian-security $(lsb_release -sc)-security main contrib non-free +deb-src https://mirrors.tuna.tsinghua.edu.cn/debian-security $(lsb_release -sc)-security main contrib non-free +EOF +echo -e "\n\e[1;34mUpdating system...\e[0m\n" +apt update && apt upgrade -y + +# =========================================================================== +# install some frequently used software +echo -e "\n\e[1;34mInstalling some tools...\e[0m\n" +apt install lrzsz unzip vim gcc g++ make automake curl wget gnupg2 aria2 jq apt-transport-https \ + ca-certificates lsb-release debian-archive-keyring oathtool ufw ruby ruby-dev qbittorrent-nox\ + git shc tmux htop pwgen imagemagick bash-completion dnsutils ghostscript nethogs ffmpeg iftop \ + python3-pip python3-dev golang net-tools ethtool tcpflow lshw rsync parallel rclone pigz pbzip2 \ + pixz neofetch mlocate ncdu dstat fzf tldr nscd inotify-hookable inotify-tools vsftpd mtr bridge-utils -y + +# =========================================================================== +# update pip3 setuptools and install jupyter lab +echo -e "\n\e[1;34mupdate pip3 setuptools and install jupyter lab...\e[0m\n" +pip3 install --upgrade setuptools -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install --upgrade pip -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install ipython -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install jupyterlab -i https://pypi.tuna.tsinghua.edu.cn/simple +# cp /root/.jupyter/jupyter_lab_config.py /root/.jupyter/jupyter_lab_config_origin.py +# cp /opt/configs/jupyter/jupyter_lab_config.py /root/.jupyter/ +# nohup jupyter lab --allow-root > /dev/null 2>&1 & + +# =========================================================================== +# configure vim +echo -e "\n\e[1;34mConfig vim editor...\e[0m\n" +cd /opt/configs/tools/ +[[ -d vim ]] && rm -rf vim +unzip -q vimConfig.zip +cd vim && bash install.sh +cd .. 
&& rm -rf vim + +# =========================================================================== +# config ssh git ufw and aria2 +echo -e "\n\e[1;34mConfig publickey ssh && git && ufw && aria2...\e[0m\n" +cd /opt/configs/rsa/ +cp -f VPS* Github* config /root/.ssh/ +cat VPS.pub > /root/.ssh/authorized_keys +echo '' >> /root/.ssh/authorized_keys +chmod 600 /root/.ssh/* + +git config --global user.name 'mffan0922' +git config --global user.email 'mffan0922@163.com' + +# ufw allow 22 +# ufw allow 80 +# ufw allow 443 +ufw disable + +cp -rf /opt/configs/aria2/ /etc/ +> /etc/aria2/aria2.session + +# =========================================================================== +# install nginx +echo -e "\n\e[1;34mInstalling nginx...\e[0m\n" +apt install libpcre3 libpcre3-dev openssl libssl-dev zlib1g-dev libgeoip-dev -y +cd /opt/source-code/nginx-1.22.0/ +./configure --prefix=/usr/local/nginx \ +--with-select_module \ +--with-poll_module \ +--with-threads \ +--with-file-aio \ +--with-http_ssl_module \ +--with-http_v2_module \ +--with-http_realip_module \ +--with-http_addition_module \ +--with-http_geoip_module \ +--with-http_sub_module \ +--with-http_dav_module \ +--with-http_flv_module \ +--with-http_mp4_module \ +--with-http_gunzip_module \ +--with-http_gzip_static_module \ +--with-http_auth_request_module \ +--with-http_random_index_module \ +--with-http_secure_link_module \ +--with-http_degradation_module \ +--with-http_slice_module \ +--with-http_stub_status_module \ +--with-mail \ +--with-mail_ssl_module \ +--with-stream \ +--with-stream_ssl_module \ +--with-stream_realip_module \ +--with-stream_geoip_module \ +--with-stream_ssl_preread_module \ +--user=www-data \ +--group=www-data \ +--add-module=/opt/source-code/nginx-1.22.0/modules/headers-more-nginx-module +make -j 4 && make install +[[ -f /usr/sbin/nginx ]] && rm -rf /usr/sbin/nginx +ln -s /usr/local/nginx/sbin/nginx /usr/sbin/nginx +cp -rf /opt/configs/nginx/nginx.conf /usr/local/nginx/conf/ +cp -rf /opt/configs/nginx/nginx.service /lib/systemd/system/ +systemctl enable nginx.service +systemctl start nginx.service + +# =========================================================================== +# get https certificates +echo -e "\n\e[1;34mSetting https...\e[0m\n" +cd /root/ && git clone git@github.com:acmesh-official/acme.sh.git +cd acme.sh && ./acme.sh --install -m mffan0922@163.com +alias acme.sh=~/.acme.sh/acme.sh +./acme.sh --issue --dns dns_ali -d rustle.cc -d *.rustle.cc +cp /root/.acme.sh/rustle.cc/fullchain.cer /opt/configs/certs/rustle.cc.cer +cp /root/.acme.sh/rustle.cc/rustle.cc.key /opt/configs/certs/ + +# =========================================================================== +# install jekyll +echo -e "\n\e[1;34mInstall jekyll blog env...\e[0m\n" +gem install jekyll jekyll-paginate + +# =========================================================================== +echo -e "\n\e[1;34mInstalling mysql server...\e[0m\n" +cd /opt/configs/mysql/ && dpkg -i mysql-apt-config_0.8.23-1_all.deb +apt update && apt upgrade -y +apt install mysql-server -y +# cp -f /opt/configs/mysql/mysql.cnf /etc/mysql/conf.d/ +systemctl restart mysql.service + +# =========================================================================== +# install php8.0 for nextcloud +echo -e "\n\e[1;34mInstall php8.0...\e[0m\n" +wget -O /usr/share/keyrings/php-archive-keyring.gpg https://packages.sury.org/php/apt.gpg +echo "deb [signed-by=/usr/share/keyrings/php-archive-keyring.gpg] https://packages.sury.org/php/ $(lsb_release -sc) main" > 
/etc/apt/sources.list.d/php.list +apt update && apt upgrade -y +apt install php8.0-fpm php8.0-cli php8.0-mysql php8.0-curl php8.0-gd \ + php8.0-mbstring php8.0-xml php8.0-zip php8.0-imap php8.0-opcache \ + php8.0-soap php8.0-gmp php8.0-bcmath php8.0-intl php8.0-imagick -y + +# =========================================================================== +# configure nextcloud +echo -e "\n\e[1;34mRestore nextcloud env...\e[0m\n" +apt install php8.0-memcache* memcached php8.0-apcu libmagickcore-6.q16-6-extra -y + +echo -e "\n\e[1;34mbackup origin php data and restore previous php data...\e[0m\n" +cp -rf /etc/php/ /tmp/ +cp -rf /opt/configs/php/8.0/fpm/pool.d/www.conf /etc/php/8.0/fpm/pool.d/www.conf +cp -rf /opt/configs/php/8.0/mods-available/apcu.ini /etc/php/8.0/mods-available/apcu.ini +cp -rf /opt/configs/php/8.0/cli/php.ini /etc/php/8.0/cli/php.ini +cp -rf /opt/configs/php/8.0/fpm/php.ini /etc/php/8.0/fpm/php.ini + +# =========================================================================== +# restore mysql data +echo -e "\n\e[1;34mrestore mysql data...\e[0m\n" +cp /opt/configs/mysql/*.gz /root +cd /root && gzip -d sql-*.gz +mysql -uroot < sql-*.sql +rm sql* + +# =========================================================================== +# configure frpc +echo -e "\n\e[1;34mRestore frpc env...\e[0m\n" +cp /opt/source-code/frpc/frpc.service /lib/systemd/system/ +cp /opt/source-code/frpc/frpc-free.service /lib/systemd/system/ +systemctl enable frpc.service +systemctl enable frpc-free.service +systemctl start frpc.service +systemctl start frpc-free.service + +# =========================================================================== +# configure cloudreve +echo -e "\n\e[1;34mRestore cloudreve env...\e[0m\n" +cp /opt/source-code/cloudreve/cloudreve.service /lib/systemd/system/ +systemctl enable cloudreve.service +systemctl start cloudreve.service + +# =========================================================================== +# configure navidrome +echo -e "\n\e[1;34mRestore navidrome env...\e[0m\n" +cp /opt/source-code/navidrome/navidrome.service /lib/systemd/system/ +systemctl enable navidrome.service +systemctl start navidrome.service + +# =========================================================================== +# configure calibre +echo -e "\n\e[1;34mStarting calibre...\e[0m\n" +nohup /usr/bin/python3 /opt/source-code/calibre-web/cps.py > /dev/null 2>&1 & + +# =========================================================================== +# configure blog +echo -e "\n\e[1;34mStarting blog...\e[0m\n" +nohup /usr/bin/ruby2.7 /usr/local/bin/jekyll b -s /opt/source-code/blog/ -d /opt/websites/blog/ --trace --watch --incremental > /dev/null 2>&1 & + +# =========================================================================== +# configure alist +echo -e "\n\e[1;34mConfig alist...\e[0m\n" +cp /opt/source-code/alist/alist.service /lib/systemd/system/ +systemctl enable alist.service +systemctl start alist.service + +# =========================================================================== +# configure rclone +echo -e "\n\e[1;34mConfig rclone...\e[0m\n" +cp -rf /opt/configs/rclone /root/.config/ + +# =========================================================================== +# install php-8.1 & nodejs 16x +echo -e "\n\e[1;34mInstall php-8.1 & nodejs 16x for monica...\e[0m\n" +apt install -y php8.1 php8.1-bcmath php8.1-curl php8.1-gd php8.1-gmp php8.1-intl \ + php8.1-mbstring php8.1-mysql php8.1-redis php8.1-xml php8.1-zip +curl -sSL https://getcomposer.org/installer | php -- 
--install-dir=/usr/local/bin/ --filename=composer
+curl -fsSL https://deb.nodesource.com/setup_16.x | bash -
+apt-get install -y nodejs
+npm install --global yarn
+cd /opt/source-code/monica/
+composer install --no-interaction --no-dev
+yarn install
+yarn run production
+php artisan key:generate
+php artisan setup:production -v
+
+# ===========================================================================
+echo -e "\n\e[1;34mRemove no longer required packages...\e[0m\n"
+apt autoremove -y
+
+# ===========================================================================
+echo -e "\n\e[1;34mRestart nginx, mysql and cloudreve services...\e[0m\n"
+systemctl restart nginx.service
+systemctl restart mysql.service
+systemctl restart cloudreve.service
+
+# ===========================================================================
+echo -e "\n\e[1;34mImprove nextcloud performance...\e[0m\n"
+chown -R www-data:www-data /opt/websites/
+# cd /opt/websites/nextcloud/
+# sudo -u www-data php8.0 occ config:app:set files max_chunk_size --value 0
+# sudo -u www-data php8.0 occ files:scan --all
+
+# ===========================================================================
+echo -e "\n\e[1;34mConfig crontabs and set correct timezone...\e[0m\n"
+cp -f /opt/configs/crontabs/* /var/spool/cron/crontabs/
+timedatectl set-timezone Asia/Shanghai
+
+# ===========================================================================
+echo -e "\n\e[1;31mThe base environment is installed; the following still needs manual configuration:\e[0m\n"
+echo " 1. Check that the Homepage/Wiki/Nav sites are reachable"
+echo " 2. Check that the blog build log looks normal"
+echo " 3. Visit the nextcloud/cloudreve sites, confirm login works, then tune them manually"
+echo " 4. Configure Jupyter Lab manually"
+echo " 5. Check that Navidrome can play music"
+echo " 6. Run qbittorrent-nox once manually and configure its options"
+echo " 7. Configure koel manually and start it"
+echo " 8. Check that frp is running"
+echo " 9. Visit Calibre Web directly and confirm it is reachable"
+echo " 10. Configure monica manually and start it"
+echo " 11. Install jellyfin manually; the package may fail to download here, so download it on the Tencent Cloud host and copy it over"
+echo " 12. Visit the Alist homepage and confirm it is reachable"
+echo " 13. Visit the uptime status page and confirm it is reachable"
+echo " 14. Install bashit manually"
+echo " 15. Reboot the system"
+
+
 diff --git a/old/sql_backup.sh b/old/sql_backup.sh
 new file mode 100644
 index 0000000..c7f1cdb
 --- /dev/null
 +++ b/old/sql_backup.sh
 @@ -0,0 +1,7 @@
+#!/bin/bash
+
+rm -rf /opt/configs/mysql/sql*.gz
+filename='sql-'`date +%Y%m%d%H%M`'.sql.gz'
+mysql -e "show databases;" -uroot | grep -Ev "Database|information_schema|performance_schema" | xargs mysqldump --skip-lock-tables -uroot --databases | gzip > $filename
+mv sql-*.gz /opt/configs/mysql/
+
 diff --git a/old/v2ray.sh b/old/v2ray.sh
 new file mode 100644
 index 0000000..b747dec
 --- /dev/null
 +++ b/old/v2ray.sh
 @@ -0,0 +1,66 @@
+#!/bin/bash
+
+env_file='/opt/source-code/v2ray-4.34.0/envfile'
+
+if [[ $1 == 'start' ]]; then
+ cat $env_file | grep -q 'https_proxy'
+ if [[ $?
-ne 0 ]]; then
+ echo 'export http_proxy="http://127.0.0.1:10808"' >> $env_file
+ echo 'export https_proxy="http://127.0.0.1:10809"' >> $env_file
+ echo 'export all_proxy="socks://127.0.0.1:10809"' >> $env_file
+ source $env_file
+ else
+ echo -e "\e[1;3;31mv2ray has \e[1;3;32mALREADY\e[1;3;31m been started, doing nothing...\e[0m"
+ exit 11
+ fi
+ /opt/source-code/v2ray-4.34.0/v2ray -config /opt/source-code/v2ray-4.34.0/config.json > /dev/null 2>&1 &
+ echo -e "\e[1;33mNow you can surf around~\e[0m"
+elif [[ $1 == 'stop' ]]; then
+ v2ray_pid=`ps -ef | grep '/opt/source-code/v2ray-4.34.0/v2ray' | grep -v grep | awk '{print $2}'`
+ > $env_file
+ source $env_file
+ if [[ $v2ray_pid != '' ]]; then
+ for vpid in $v2ray_pid; do
+ kill -9 $vpid > /dev/null 2>&1
+ done
+ echo -e "\e[1;33mWelcome to the real world~\e[0m"
+ else
+ echo -e "\e[1;3;31mv2ray has \e[1;3;32mALREADY\e[1;3;31m been stopped, doing nothing...\e[0m"
+ fi
+elif [[ $1 == 'renew' ]]; then
+ read -t 60 -p "Please input valid oversea IP: " ip
+ sed -i '69s/.*/ "address": "'$ip'",/' /opt/source-code/v2ray-4.34.0/config.json > /dev/null 2>&1 &
+ sed -i '/azure/{n;s/.*/ Hostname '$ip'/g}' /root/.ssh/config
+elif [[ $1 == 'status' ]]; then
+ cat $env_file | grep -q 'https_proxy'
+ is_empty=$?
+ v2ray_pid=`ps -ef | grep '/opt/source-code/v2ray-4.34.0/v2ray' | grep -v grep | awk '{print $2}'`
+ if [[ $v2ray_pid == '' && $is_empty -ne 0 ]]; then
+ echo -e "\e[1;36mService is NOT running~\e[0m"
+ elif [[ $v2ray_pid == '' && $is_empty -eq 0 ]]; then
+ echo -e "\e[1;35mService is NOT running, BUT check /opt/source-code/v2ray-4.34.0/envfile content; it should be EMPTY\e[0m"
+ elif [[ $v2ray_pid != '' && $is_empty -eq 0 ]]; then
+ echo -e "\e[1;32mService is running~\e[0m"
+ elif [[ $v2ray_pid != '' && $is_empty -ne 0 ]]; then
+ echo -e "\e[1;35mService is running, BUT check /opt/source-code/v2ray-4.34.0/envfile content; it should NOT be empty~\e[0m"
+ fi
+elif [[ $1 == 'restart' ]]; then
+ > $env_file
+ echo 'export http_proxy="http://127.0.0.1:10808"' >> $env_file
+ echo 'export https_proxy="http://127.0.0.1:10809"' >> $env_file
+ echo 'export all_proxy="socks://127.0.0.1:10809"' >> $env_file
+ v2ray_pid=`ps -ef | grep '/opt/source-code/v2ray-4.34.0/v2ray' | grep -v grep | awk '{print $2}'`
+ if [[ $v2ray_pid == '' ]]; then
+ /opt/source-code/v2ray-4.34.0/v2ray -config /opt/source-code/v2ray-4.34.0/config.json > /dev/null 2>&1 &
+ else
+ :
+ fi
+ source $env_file
+ echo -e "\e[1;35mService restarted, dive deeper~\e[0m"
+else
+ echo -e "\e[1;3;31mOnly start|stop|renew|status|restart are accepted as parameters.\e[0m"
+ exit 1
+fi
+
+
 diff --git a/roll_api/calendar_tips.py b/roll_api/calendar_tips.py
 new file mode 120000
 index 0000000..753f8ca
 --- /dev/null
 +++ b/roll_api/calendar_tips.py
 @@ -0,0 +1 @@
+/opt/scripts/alert/calendar_tips.py
\ No newline at end of file
 diff --git a/roll_api/domain_reg_check.py b/roll_api/domain_reg_check.py
 new file mode 100644
 index 0000000..a047a36
 --- /dev/null
 +++ b/roll_api/domain_reg_check.py
 @@ -0,0 +1,45 @@
+import base64
+import sys
+import time
+import requests
+import json
+
+
+def reg_check(app_id, app_secret, domain):
+ api_url = f'https://www.mxnzp.com/api/beian/search?domain={domain}&app_id={app_id}&app_secret={app_secret}'
+ res = requests.get(api_url)
+
+ res_http_code = res.status_code
+ res_text = json.loads(res.text)
+ res_code = res_text['code']
+ res_msg = res_text['msg']
+ if res_http_code != 200 or res_code == 0:
+ print(res_msg)
+ exit(3)
+ else:
+ # print(res_text)
+ print("*" * 50)
+ print(f"域
名:{res_text['data']['domain']}") + print(f"单 位:{res_text['data']['unit']}") + print(f"类 型:{res_text['data']['type']}") + print(f"备案号:{res_text['data']['icpCode']}") + print(f"名 称:{res_text['data']['name']}") + print(f"审核时间:{res_text['data']['passTime']}\n") + + +def main(): + app_id = "nrsngdkvknqkrwko" + app_secret = "SFFmQWo2dnNBRjdNYkVSclZxa2ZvUT09" + if len(sys.argv) == 1: + print("Must specify one or more domains to check registration.") + exit(2) + + for domain in sys.argv[1:]: + domain = str(base64.b64encode(domain.encode('utf-8')), encoding='utf-8') + reg_check(app_id, app_secret, domain) + time.sleep(1) + + +if __name__ == '__main__': + main() + diff --git a/roll_api/get_ip.py b/roll_api/get_ip.py new file mode 100644 index 0000000..49bc9d9 --- /dev/null +++ b/roll_api/get_ip.py @@ -0,0 +1,45 @@ +import sys +import json +import requests +import time + + +def ip_check(app_id, app_secret, ip): + api_url = f"https://www.mxnzp.com/api/ip/aim_ip?ip={ip}&app_id={app_id}&app_secret={app_secret}" + res = requests.get(api_url) + res_http_code = res.status_code + res_text = json.loads(res.text) + res_code = res_text['code'] + res_msg = res_text['msg'] + if res_http_code != 200 or res_code == 0: + print(f"接口查询失败:{res_msg}") + else: + print('*' * 50) + for key, value in res_text['data'].items(): + print(key.strip().ljust(10), ':', end=' ') + print(str(value).strip()) + print() + + +def main(): + app_id = "nrsngdkvknqkrwko" + app_secret = "SFFmQWo2dnNBRjdNYkVSclZxa2ZvUT09" + + if len(sys.argv) == 1: + print("Must specify at least one IP for information check.") + exit(2) + + # try: + # ip = sys.argv[1] + # except Exception as Err: + # print(f"Error Desc: {Err}. Maybe you need to supply correct ip next time.") + # exit(2) + + for ip in sys.argv[1:]: + ip_check(app_id, app_secret, ip) + time.sleep(1) + + +if __name__ == '__main__': + main() + diff --git a/roll_api/get_self_ip.py b/roll_api/get_self_ip.py new file mode 100644 index 0000000..e25d4f2 --- /dev/null +++ b/roll_api/get_self_ip.py @@ -0,0 +1,28 @@ +import requests +import json + + +def ip_self(app_id, app_secret): + api_url = f'https://www.mxnzp.com/api/ip/self?app_id={app_id}&app_secret={app_secret}' + res = requests.get(api_url) + res_http_code = res.status_code + res_text = json.loads(res.text) + res_code = res_text['code'] + res_msg = res_text['msg'] + if res_http_code != 200 or res_code == 0: + print(f"接口查询失败:{res_msg}\n") + else: + for key, value in res_text['data'].items(): + print(key.strip().ljust(10), ':', end=' ') + print(str(value).strip()) + + +def main(): + app_id = "nrsngdkvknqkrwko" + app_secret = "SFFmQWo2dnNBRjdNYkVSclZxa2ZvUT09" + ip_self(app_id, app_secret) + + +if __name__ == '__main__': + main() + diff --git a/roll_api/love_words.py b/roll_api/love_words.py new file mode 120000 index 0000000..36b6f8f --- /dev/null +++ b/roll_api/love_words.py @@ -0,0 +1 @@ +/opt/scripts/alert/love_words.py \ No newline at end of file diff --git a/roll_api/weather_tips.py b/roll_api/weather_tips.py new file mode 120000 index 0000000..cbe5cb8 --- /dev/null +++ b/roll_api/weather_tips.py @@ -0,0 +1 @@ +/opt/scripts/alert/weather_tips.py \ No newline at end of file diff --git a/todo/todo.cfg b/todo/todo.cfg new file mode 100644 index 0000000..9e55539 --- /dev/null +++ b/todo/todo.cfg @@ -0,0 +1,154 @@ +# === EDIT FILE LOCATIONS BELOW === + +# Your todo.txt directory (this should be an absolute path) +export TODO_DIR="/opt/logs/TODO" +# export TODO_DIR=$(dirname "$0") + +# Your todo/done/report.txt locations +export 
TODO_FILE="$TODO_DIR/todo.txt" +export DONE_FILE="$TODO_DIR/done.txt" +export REPORT_FILE="$TODO_DIR/report.txt" + +# You can customize your actions directory location +#export TODO_ACTIONS_DIR="$HOME/.todo.actions.d" + +# == EDIT FILE LOCATIONS ABOVE === + +# === COLOR MAP === + +## Text coloring and formatting is done by inserting ANSI escape codes. +## If you have re-mapped your color codes, or use the todo.txt +## output in another output system (like Conky), you may need to +## over-ride by uncommenting and editing these defaults. +## If you change any of these here, you also need to uncomment +## the defaults in the COLORS section below. Otherwise, todo.txt +## will still use the defaults! + +#======> normal <======== +export BLACK='\\033[0;30m' +export RED='\\033[0;31m' +export GREEN='\\033[0;32m' +export YELLOW='\\033[0;33m' +export BLUE='\\033[0;34m' +export PURPLE='\\033[0;35m' +export CYAN='\\033[0;36m' +export GREY='\\033[0;37m' +#======> bold <======== +export BOLD_BLACK='\\033[1;30m' +export BOLD_RED='\\033[1;31m' +export BOLD_GREEN='\\033[1;32m' +export BOLD_YELLOW='\\033[1;33m' +export BOLD_BLUE='\\033[1;34m' +export BOLD_PURPLE='\\033[1;35m' +export BOLD_CYAN='\\033[1;36m' +export BOLD_GREY='\\033[1;37m' +#======> italic <======== +export ITALIC_BLACK='\\033[3;30m' +export ITALIC_RED='\\033[3;31m' +export ITALIC_GREEN='\\033[3;32m' +export ITALIC_YELLOW='\\033[3;33m' +export ITALIC_BLUE='\\033[3;34m' +export ITALIC_PURPLE='\\033[3;35m' +export ITALIC_CYAN='\\033[3;36m' +export ITALIC_GREY='\\033[3;37m' +#======> underline <======== +export UNDERLINE_BLACK='\\033[4;30m' +export UNDERLINE_RED='\\033[3;41m' +export UNDERLINE_GREEN='\\033[4;32m' +export UNDERLINE_YELLOW='\\033[4;33m' +export UNDERLINE_BLUE='\\033[4;34m' +export UNDERLINE_PURPLE='\\033[4;35m' +export UNDERLINE_CYAN='\\033[4;36m' +export UNDERLINE_GREY='\\033[4;37m' +#======> bold & italic <======== +export BOLD_ITALIC_BLACK='\\033[1;3;30m' +export BOLD_ITALIC_RED='\\033[1;3;31m' +export BOLD_ITALIC_GREEN='\\033[1;3;32m' +export BOLD_ITALIC_YELLOW='\\033[1;3;33m' +export BOLD_ITALIC_BLUE='\\033[1;3;34m' +export BOLD_ITALIC_PURPLE='\\033[1;3;35m' +export BOLD_ITALIC_CYAN='\\033[1;3;36m' +export BOLD_ITALIC_GREY='\\033[1;3;37m' +#======> bold & underline <======== +export BOLD_UNDERLINE_BLACK='\\033[1;4;30m' +export BOLD_UNDERLINE_RED='\\033[1;4;31m' +export BOLD_UNDERLINE_GREEN='\\033[1;4;32m' +export BOLD_UNDERLINE_YELLOW='\\033[1;4;33m' +export BOLD_UNDERLINE_BLUE='\\033[1;4;34m' +export BOLD_UNDERLINE_PURPLE='\\033[1;4;35m' +export BOLD_UNDERLINE_CYAN='\\033[1;4;36m' +export BOLD_UNDERLINE_GREY='\\033[1;4;37m' +#======> italic & underline <======== +export ITALIC_UNDERLINE_BLACK='\\033[3;4;30m' +export ITALIC_UNDERLINE_RED='\\033[3;4;31m' +export ITALIC_UNDERLINE_GREEN='\\033[3;4;32m' +export ITALIC_UNDERLINE_YELLOW='\\033[3;4;33m' +export ITALIC_UNDERLINE_BLUE='\\033[3;4;34m' +export ITALIC_UNDERLINE_PURPLE='\\033[3;4;35m' +export ITALIC_UNDERLINE_CYAN='\\033[3;4;36m' +export ITALIC_UNDERLINE_GREY='\\033[3;4;37m' +#======> bold & italic & underline <======== +export BOLD_ITALIC_UNDERLINE_BLACK='\\033[1;3;4;30m' +export BOLD_ITALIC_UNDERLINE_RED='\\033[1;3;4;31m' +export BOLD_ITALIC_UNDERLINE_GREEN='\\033[1;3;4;32m' +export BOLD_ITALIC_UNDERLINE_YELLOW='\\033[1;3;4;33m' +export BOLD_ITALIC_UNDERLINE_BLUE='\\033[1;3;4;34m' +export BOLD_ITALIC_UNDERLINE_PURPLE='\\033[1;3;4;35m' +export BOLD_ITALIC_UNDERLINE_CYAN='\\033[1;3;4;36m' +export BOLD_ITALIC_UNDERLINE_GREY='\\033[1;3;4;37m' +#======> REAL PRI COLOR 
<======== +PRI_A_COLOR='\\033[1;4;31;47m' +PRI_B_COLOR='\\033[1;4;31;42m' +PRI_C_COLOR='\\033[1;4;31;43m' +PRI_D_COLOR='\\033[1;4;31;44m' +PRI_E_COLOR='\\033[1;4;31;46m' +#======> restore normal <======== +export DEFAULT='\\033[0m' + +# === COLORS === + +## Uncomment and edit to override these defaults. +## Reference the constants from the color map above, +## or use $NONE to disable highlighting. +# +# Priorities can be any upper-case letter. +# A,B,C are highlighted; you can add coloring for more. + +export PRI_A=$PRI_A_COLOR # color for A priority +export PRI_B=$PRI_B_COLOR # color for B priority +export PRI_C=$PRI_C_COLOR # color for C priority +export PRI_D=$PRI_D_COLOR # color for D priority +export PRI_E=$PRI_E_COLOR # color for E priority + +# There is highlighting for tasks that have been done, +# but haven't been archived yet. +# +export COLOR_DONE=$BOLD_ITALIC_UNDERLINE_GREY + +# There is highlighting for projects, contexts, dates, and item numbers. + +export COLOR_PROJECT=$ITALIC_UNDERLINE_PURPLE +export COLOR_CONTEXT=$ITALIC_UNDERLINE_GREEN +export COLOR_DATE=$BOLD_BLUE +export COLOR_NUMBER=$BOLD_PURPLE + +# There is highlighting for metadata key:value pairs e.g. +# DUE:2006-08-01 or note:MYNOTE + +export COLOR_META=$BOLD_CYAN + +# === BEHAVIOR === + +## customize list output +# +# TODOTXT_SORT_COMMAND will filter after line numbers are +# inserted, but before colorization, and before hiding of +# priority, context, and project. +# +# export TODOTXT_SORT_COMMAND='env LC_COLLATE=C sort -f -k2' + +# TODOTXT_FINAL_FILTER will filter list output after colorization, +# priority hiding, context hiding, and project hiding. That is, +# just before the list output is displayed. +# +# export TODOTXT_FINAL_FILTER='cat' diff --git a/todo/todo.sh b/todo/todo.sh new file mode 100755 index 0000000..7ee161f --- /dev/null +++ b/todo/todo.sh @@ -0,0 +1,1531 @@ +#!/usr/bin/env bash + +bash /opt/scripts/todo/watch_todo.sh +[[ $? -ne 0 ]] && exit 3 + +# === HEAVY LIFTING === +shopt -s extglob extquote + +# NOTE: Todo.sh requires the .todo/config configuration file to run. +# Place the .todo/config file in your home directory or use the -d option for a custom location. + +[ -f VERSION-FILE ] && . VERSION-FILE || VERSION="2.12.0" +version() { + cat <<-EndVersion + TODO.TXT Command Line Interface v$VERSION + + Homepage: http://todotxt.org + Code repository: https://github.com/todotxt/todo.txt-cli/ + Contributors: https://github.com/todotxt/todo.txt-cli/graphs/contributors + License: https://github.com/todotxt/todo.txt-cli/blob/master/LICENSE + EndVersion + exit 1 +} + +# Set script name and full path early. +TODO_SH=$(basename "$0") +TODO_FULL_SH="$0" +export TODO_SH TODO_FULL_SH + +oneline_usage="$TODO_SH [-fhpantvV] [-d todo_config] action [task_number] [task_description]" + +usage() +{ + cat <<-EndUsage + Usage: $oneline_usage + Try '$TODO_SH -h' for more information. + EndUsage + exit 1 +} + +shorthelp() +{ + cat <<-EndHelp + Usage: $oneline_usage + + Actions: + add|a "THING I NEED TO DO +project @context" + addm "THINGS I NEED TO DO + MORE THINGS I NEED TO DO" + addto DEST "TEXT TO ADD" + append|app ITEM# "TEXT TO APPEND" + archive + command [ACTIONS] + deduplicate + del|rm ITEM# [TERM] + depri|dp ITEM#[, ITEM#, ITEM#, ...] + done|do ITEM#[, ITEM#, ITEM#, ...] + help [ACTION...] + list|ls [TERM...] + listall|lsa [TERM...] + listaddons + listcon|lsc [TERM...] + listfile|lf [SRC [TERM...]] + listpri|lsp [PRIORITIES] [TERM...] + listproj|lsprj [TERM...] 
+ move|mv ITEM# DEST [SRC] + prepend|prep ITEM# "TEXT TO PREPEND" + pri|p ITEM# PRIORITY + replace ITEM# "UPDATED TODO" + report + shorthelp + + Actions can be added and overridden using scripts in the actions + directory. + EndHelp + + # Only list the one-line usage from the add-on actions. This assumes that + # add-ons use the same usage indentation structure as todo.sh. + addonHelp | grep -e '^ Add-on Actions:' -e '^ [[:alpha:]]' + + cat <<-EndHelpFooter + + See "help" for more details. + EndHelpFooter +} + +help() +{ + cat <<-EndOptionsHelp + Usage: $oneline_usage + + Options: + -@ + Hide context names in list output. Use twice to show context + names (default). + -+ + Hide project names in list output. Use twice to show project + names (default). + -c + Color mode + -d CONFIG_FILE + Use a configuration file other than the default ~/.todo/config + -f + Forces actions without confirmation or interactive input + -h + Display a short help message; same as action "shorthelp" + -p + Plain mode turns off colors + -P + Hide priority labels in list output. Use twice to show + priority labels (default). + -a + Don't auto-archive tasks automatically on completion + -A + Auto-archive tasks automatically on completion + -n + Don't preserve line numbers; automatically remove blank lines + on task deletion + -N + Preserve line numbers + -t + Prepend the current date to a task automatically + when it's added. + -T + Do not prepend the current date to a task automatically + when it's added. + -v + Verbose mode turns on confirmation messages + -vv + Extra verbose mode prints some debugging information and + additional help text + -V + Displays version, license and credits + -x + Disables TODOTXT_FINAL_FILTER + + + EndOptionsHelp + + [ "$TODOTXT_VERBOSE" -gt 1 ] && cat <<-'EndVerboseHelp' + Environment variables: + TODOTXT_AUTO_ARCHIVE is same as option -a (0)/-A (1) + TODOTXT_CFG_FILE=CONFIG_FILE is same as option -d CONFIG_FILE + TODOTXT_FORCE=1 is same as option -f + TODOTXT_PRESERVE_LINE_NUMBERS is same as option -n (0)/-N (1) + TODOTXT_PLAIN is same as option -p (1)/-c (0) + TODOTXT_DATE_ON_ADD is same as option -t (1)/-T (0) + TODOTXT_PRIORITY_ON_ADD=pri default priority A-Z + TODOTXT_VERBOSE=1 is same as option -v + TODOTXT_DISABLE_FILTER=1 is same as option -x + TODOTXT_DEFAULT_ACTION="" run this when called with no arguments + TODOTXT_SORT_COMMAND="sort ..." customize list output + TODOTXT_FINAL_FILTER="sed ..." customize list after color, P@+ hiding + TODOTXT_SOURCEVAR=\$DONE_FILE use another source for listcon, listproj + TODOTXT_SIGIL_BEFORE_PATTERN="" optionally allow chars preceding +p / @c + TODOTXT_SIGIL_VALID_PATTERN=.* tweak the allowed chars for +p and @c + TODOTXT_SIGIL_AFTER_PATTERN="" optionally allow chars after +p / @c + + + EndVerboseHelp + actionsHelp + addonHelp +} + +actionsHelp() +{ + cat <<-EndActionsHelp + Built-in Actions: + add "THING I NEED TO DO +project @context" + a "THING I NEED TO DO +project @context" + Adds THING I NEED TO DO to your todo.txt file on its own line. + Project and context notation optional. + Quotes optional. + + addm "FIRST THING I NEED TO DO +project1 @context + SECOND THING I NEED TO DO +project2 @context" + Adds FIRST THING I NEED TO DO to your todo.txt on its own line and + Adds SECOND THING I NEED TO DO to you todo.txt on its own line. + Project and context notation optional. + + addto DEST "TEXT TO ADD" + Adds a line of text to any file located in the todo.txt directory. 
+ For example, addto inbox.txt "decide about vacation" + + append ITEM# "TEXT TO APPEND" + app ITEM# "TEXT TO APPEND" + Adds TEXT TO APPEND to the end of the task on line ITEM#. + Quotes optional. + + archive + Moves all done tasks from todo.txt to done.txt and removes blank lines. + + command [ACTIONS] + Runs the remaining arguments using only todo.sh builtins. + Will not call any .todo.actions.d scripts. + + deduplicate + Removes duplicate lines from todo.txt. + + del ITEM# [TERM] + rm ITEM# [TERM] + Deletes the task on line ITEM# in todo.txt. + If TERM specified, deletes only TERM from the task. + + depri ITEM#[, ITEM#, ITEM#, ...] + dp ITEM#[, ITEM#, ITEM#, ...] + Deprioritizes (removes the priority) from the task(s) + on line ITEM# in todo.txt. + + done ITEM#[, ITEM#, ITEM#, ...] + do ITEM#[, ITEM#, ITEM#, ...] + Marks task(s) on line ITEM# as done in todo.txt. + + help [ACTION...] + Display help about usage, options, built-in and add-on actions, + or just the usage help for the passed ACTION(s). + + list [TERM...] + ls [TERM...] + Displays all tasks that contain TERM(s) sorted by priority with line + numbers. Each task must match all TERM(s) (logical AND); to display + tasks that contain any TERM (logical OR), use + "TERM1\|TERM2\|..." (with quotes), or TERM1\\\|TERM2 (unquoted). + Hides all tasks that contain TERM(s) preceded by a + minus sign (i.e. -TERM). If no TERM specified, lists entire todo.txt. + + listall [TERM...] + lsa [TERM...] + Displays all the lines in todo.txt AND done.txt that contain TERM(s) + sorted by priority with line numbers. Hides all tasks that + contain TERM(s) preceded by a minus sign (i.e. -TERM). If no + TERM specified, lists entire todo.txt AND done.txt + concatenated and sorted. + + listaddons + Lists all added and overridden actions in the actions directory. + + listcon [TERM...] + lsc [TERM...] + Lists all the task contexts that start with the @ sign in todo.txt. + If TERM specified, considers only tasks that contain TERM(s). + + listfile [SRC [TERM...]] + lf [SRC [TERM...]] + Displays all the lines in SRC file located in the todo.txt directory, + sorted by priority with line numbers. If TERM specified, lists + all lines that contain TERM(s) in SRC file. Hides all tasks that + contain TERM(s) preceded by a minus sign (i.e. -TERM). + Without any arguments, the names of all text files in the todo.txt + directory are listed. + + listpri [PRIORITIES] [TERM...] + lsp [PRIORITIES] [TERM...] + Displays all tasks prioritized PRIORITIES. + PRIORITIES can be a single one (A) or a range (A-C). + If no PRIORITIES specified, lists all prioritized tasks. + If TERM specified, lists only prioritized tasks that contain TERM(s). + Hides all tasks that contain TERM(s) preceded by a minus sign + (i.e. -TERM). + + listproj [TERM...] + lsprj [TERM...] + Lists all the projects (terms that start with a + sign) in + todo.txt. + If TERM specified, considers only tasks that contain TERM(s). + + move ITEM# DEST [SRC] + mv ITEM# DEST [SRC] + Moves a line from source text file (SRC) to destination text file (DEST). + Both source and destination file must be located in the directory defined + in the configuration directory. When SRC is not defined + it's by default todo.txt. + + prepend ITEM# "TEXT TO PREPEND" + prep ITEM# "TEXT TO PREPEND" + Adds TEXT TO PREPEND to the beginning of the task on line ITEM#. + Quotes optional. + + pri ITEM# PRIORITY + p ITEM# PRIORITY + Adds PRIORITY to task on line ITEM#. 
If the task is already + prioritized, replaces current priority with new PRIORITY. + PRIORITY must be a letter between A and Z. + + replace ITEM# "UPDATED TODO" + Replaces task on line ITEM# with UPDATED TODO. + + report + Adds the number of open tasks and done tasks to report.txt. + + shorthelp + List the one-line usage of all built-in and add-on actions. + + EndActionsHelp +} + +addonHelp() +{ + if [ -d "$TODO_ACTIONS_DIR" ]; then + didPrintAddonActionsHeader= + for action in "$TODO_ACTIONS_DIR"/* + do + if [ -f "$action" ] && [ -x "$action" ]; then + if [ ! "$didPrintAddonActionsHeader" ]; then + cat <<-EndAddonActionsHeader + Add-on Actions: + EndAddonActionsHeader + didPrintAddonActionsHeader=1 + fi + "$action" usage + elif [ -d "$action" ] && [ -x "$action"/"$(basename "$action")" ]; then + if [ ! "$didPrintAddonActionsHeader" ]; then + cat <<-EndAddonActionsHeader + Add-on Actions: + EndAddonActionsHeader + didPrintAddonActionsHeader=1 + fi + "$action"/"$(basename "$action")" usage + fi + done + fi +} + +actionUsage() +{ + for actionName + do + action="${TODO_ACTIONS_DIR}/${actionName}" + if [ -f "$action" ] && [ -x "$action" ]; then + "$action" usage + elif [ -d "$action" ] && [ -x "$action"/"$(basename "$action")" ]; then + "$action"/"$(basename "$action")" usage + else + builtinActionUsage=$(actionsHelp | sed -n -e "/^ ${actionName//\//\\/} /,/^\$/p" -e "/^ ${actionName//\//\\/}$/,/^\$/p") + if [ "$builtinActionUsage" ]; then + echo "$builtinActionUsage" + echo + else + die "TODO: No action \"${actionName}\" exists." + fi + fi + done +} + +dieWithHelp() +{ + case "$1" in + help) help;; + shorthelp) shorthelp;; + esac + shift + + die "$@" +} +die() +{ + echo "$*" + exit 1 +} + +cleaninput() +{ + # Parameters: When $1 = "for sed", performs additional escaping for use + # in sed substitution with "|" separators. + # Precondition: $input contains text to be cleaned. + # Postcondition: Modifies $input. + + # Replace CR and LF with space; tasks always comprise a single line. + input=${input//$'\r'/ } + input=${input//$'\n'/ } + + if [ "$1" = "for sed" ]; then + # This action uses sed with "|" as the substitution separator, and & as + # the matched string; these must be escaped. + # Backslashes must be escaped, too, and before the other stuff. + input=${input//\\/\\\\} + input=${input//|/\\|} + input=${input//&/\\&} + fi +} + +getPrefix() +{ + # Parameters: $1: todo file; empty means $TODO_FILE. + # Returns: Uppercase FILE prefix to be used in place of "TODO:" where + # a different todo file can be specified. + local base + base=$(basename "${1:-$TODO_FILE}") + echo "${base%%.[^.]*}" | tr '[:lower:]' '[:upper:]' +} + +getTodo() +{ + # Parameters: $1: task number + # $2: Optional todo file + # Precondition: $errmsg contains usage message. + # Postcondition: $todo contains task text. + + local item=$1 + [ -z "$item" ] && die "$errmsg" + [ "${item//[0-9]/}" ] && die "$errmsg" + + todo=$(sed "$item!d" "${2:-$TODO_FILE}") + [ -z "$todo" ] && die "$(getPrefix "$2"): No task $item." +} +getNewtodo() +{ + # Parameters: $1: task number + # $2: Optional todo file + # Precondition: None. + # Postcondition: $newtodo contains task text. + + local item=$1 + [ -z "$item" ] && die "Programming error: $item should exist." + [ "${item//[0-9]/}" ] && die "Programming error: $item should be numeric." + + newtodo=$(sed "$item!d" "${2:-$TODO_FILE}") + [ -z "$newtodo" ] && die "$(getPrefix "$2"): No updated task $item." 
+} + +replaceOrPrepend() +{ + action=$1; shift + case "$action" in + replace) + backref= + querytext="Replacement: " + ;; + prepend) + backref=' &' + querytext="Prepend: " + ;; + esac + shift; item=$1; shift + getTodo "$item" + + if [[ -z "$1" && $TODOTXT_FORCE = 0 ]]; then + echo -n "$querytext" + read -r -i "$todo" -e input + else + input=$* + fi + + # Retrieve existing priority and prepended date + local -r priAndDateExpr='^\((.) \)\{0,1\}\([0-9]\{2,4\}-[0-9]\{2\}-[0-9]\{2\} \)\{0,1\}' + priority=$(sed -e "$item!d" -e "${item}s/${priAndDateExpr}.*/\\1/" "$TODO_FILE") + prepdate=$(sed -e "$item!d" -e "${item}s/${priAndDateExpr}.*/\\2/" "$TODO_FILE") + + if [ "$prepdate" ] && [ "$action" = "replace" ] && [ "$(echo "$input"|sed -e "s/${priAndDateExpr}.*/\\1\\2/")" ]; then + # If the replaced text starts with a [priority +] date, it will replace + # the existing date, too. + prepdate= + fi + + # Temporarily remove any existing priority and prepended date, perform the + # change (replace/prepend) and re-insert the existing priority and prepended + # date again. + cleaninput "for sed" + sed -i.bak -e "$item s/^${priority}${prepdate}//" -e "$item s|^.*|${priority}${prepdate}${input}${backref}|" "$TODO_FILE" + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + getNewtodo "$item" + case "$action" in + replace) + echo "$item $todo" + echo "TODO: Replaced task with:" + echo "$item $newtodo" + ;; + prepend) + echo "$item $newtodo" + ;; + esac + fi +} + +fixMissingEndOfLine() +{ + # Parameters: $1: todo file; empty means $TODO_FILE. + sed -i.bak -e '$a\' "${1:-$TODO_FILE}" +} + +uppercasePriority() +{ + # Precondition: $input contains task text for which to uppercase priority. + # Postcondition: Modifies $input. + lower=( {a..z} ) + upper=( {A..Z} ) + for ((i=0; i<26; i++)) + do + upperPriority="${upperPriority};s/^[(]${lower[i]}[)]/(${upper[i]})/" + done + input=$(echo "$input" | sed "$upperPriority") +} + +#Preserving environment variables so they don't get clobbered by the config file +OVR_TODOTXT_AUTO_ARCHIVE="$TODOTXT_AUTO_ARCHIVE" +OVR_TODOTXT_FORCE="$TODOTXT_FORCE" +OVR_TODOTXT_PRESERVE_LINE_NUMBERS="$TODOTXT_PRESERVE_LINE_NUMBERS" +OVR_TODOTXT_PLAIN="$TODOTXT_PLAIN" +OVR_TODOTXT_DATE_ON_ADD="$TODOTXT_DATE_ON_ADD" +OVR_TODOTXT_PRIORITY_ON_ADD="$TODOTXT_PRIORITY_ON_ADD" +OVR_TODOTXT_DISABLE_FILTER="$TODOTXT_DISABLE_FILTER" +OVR_TODOTXT_VERBOSE="$TODOTXT_VERBOSE" +OVR_TODOTXT_DEFAULT_ACTION="$TODOTXT_DEFAULT_ACTION" +OVR_TODOTXT_SORT_COMMAND="$TODOTXT_SORT_COMMAND" +OVR_TODOTXT_FINAL_FILTER="$TODOTXT_FINAL_FILTER" + +# Prevent GREP_OPTIONS from malforming grep's output +export GREP_OPTIONS="" + +# == PROCESS OPTIONS == +while getopts ":fhpcnNaAtTvVx+@Pd:" Option +do + case $Option in + '@') + ## HIDE_CONTEXT_NAMES starts at zero (false); increment it to one + ## (true) the first time this flag is seen. Each time the flag + ## is seen after that, increment it again so that an even + ## number shows context names and an odd number hides context + ## names. + : $(( HIDE_CONTEXT_NAMES++ )) + if [ $(( HIDE_CONTEXT_NAMES % 2 )) -eq 0 ] + then + ## Zero or even value -- show context names + unset HIDE_CONTEXTS_SUBSTITUTION + else + ## One or odd value -- hide context names + export HIDE_CONTEXTS_SUBSTITUTION='[[:space:]]@[[:graph:]]\{1,\}' + fi + ;; + '+') + ## HIDE_PROJECT_NAMES starts at zero (false); increment it to one + ## (true) the first time this flag is seen. 
Each time the flag + ## is seen after that, increment it again so that an even + ## number shows project names and an odd number hides project + ## names. + : $(( HIDE_PROJECT_NAMES++ )) + if [ $(( HIDE_PROJECT_NAMES % 2 )) -eq 0 ] + then + ## Zero or even value -- show project names + unset HIDE_PROJECTS_SUBSTITUTION + else + ## One or odd value -- hide project names + export HIDE_PROJECTS_SUBSTITUTION='[[:space:]][+][[:graph:]]\{1,\}' + fi + ;; + a) + OVR_TODOTXT_AUTO_ARCHIVE=0 + ;; + A) + OVR_TODOTXT_AUTO_ARCHIVE=1 + ;; + c) + OVR_TODOTXT_PLAIN=0 + ;; + d) + TODOTXT_CFG_FILE=$OPTARG + ;; + f) + OVR_TODOTXT_FORCE=1 + ;; + h) + # Short-circuit option parsing and forward to the action. + # Cannot just invoke shorthelp() because we need the configuration + # processed to locate the add-on actions directory. + set -- '-h' 'shorthelp' + OPTIND=2 + ;; + n) + OVR_TODOTXT_PRESERVE_LINE_NUMBERS=0 + ;; + N) + OVR_TODOTXT_PRESERVE_LINE_NUMBERS=1 + ;; + p) + OVR_TODOTXT_PLAIN=1 + ;; + P) + ## HIDE_PRIORITY_LABELS starts at zero (false); increment it to one + ## (true) the first time this flag is seen. Each time the flag + ## is seen after that, increment it again so that an even + ## number shows priority labels and an odd number hides priority + ## labels. + : $(( HIDE_PRIORITY_LABELS++ )) + if [ $(( HIDE_PRIORITY_LABELS % 2 )) -eq 0 ] + then + ## Zero or even value -- show priority labels + unset HIDE_PRIORITY_SUBSTITUTION + else + ## One or odd value -- hide priority labels + export HIDE_PRIORITY_SUBSTITUTION="([A-Z])[[:space:]]" + fi + ;; + t) + OVR_TODOTXT_DATE_ON_ADD=1 + ;; + T) + OVR_TODOTXT_DATE_ON_ADD=0 + ;; + v) + : $(( TODOTXT_VERBOSE++ )) + ;; + V) + version + ;; + x) + OVR_TODOTXT_DISABLE_FILTER=1 + ;; + *) + usage + ;; + esac +done +shift $((OPTIND - 1)) + +# defaults if not yet defined +TODOTXT_VERBOSE=${TODOTXT_VERBOSE:-1} +TODOTXT_PLAIN=${TODOTXT_PLAIN:-0} +TODOTXT_CFG_FILE=${TODOTXT_CFG_FILE:-$HOME/.todo/config} +TODOTXT_FORCE=${TODOTXT_FORCE:-0} +TODOTXT_PRESERVE_LINE_NUMBERS=${TODOTXT_PRESERVE_LINE_NUMBERS:-1} +TODOTXT_AUTO_ARCHIVE=${TODOTXT_AUTO_ARCHIVE:-1} +TODOTXT_DATE_ON_ADD=${TODOTXT_DATE_ON_ADD:-0} +TODOTXT_PRIORITY_ON_ADD=${TODOTXT_PRIORITY_ON_ADD:-} +TODOTXT_DEFAULT_ACTION=${TODOTXT_DEFAULT_ACTION:-} +TODOTXT_SORT_COMMAND=${TODOTXT_SORT_COMMAND:-env LC_COLLATE=C sort -f -k2} +TODOTXT_DISABLE_FILTER=${TODOTXT_DISABLE_FILTER:-} +TODOTXT_FINAL_FILTER=${TODOTXT_FINAL_FILTER:-cat} +TODOTXT_GLOBAL_CFG_FILE=${TODOTXT_GLOBAL_CFG_FILE:-/etc/todo/config} +TODOTXT_SIGIL_BEFORE_PATTERN=${TODOTXT_SIGIL_BEFORE_PATTERN:-} # Allow any other non-whitespace entity before +project and @context; should be an optional match; example: \(w:\)\{0,1\} to allow w:@context. +TODOTXT_SIGIL_VALID_PATTERN=${TODOTXT_SIGIL_VALID_PATTERN:-.*} # Limit the valid characters (from the default any non-whitespace sequence) for +project and @context; example: [a-zA-Z]\{3,\} to only allow alphabetic ones that are at least three characters long. +TODOTXT_SIGIL_AFTER_PATTERN=${TODOTXT_SIGIL_AFTER_PATTERN:-} # Allow any other non-whitespace entity after +project and @context; should be an optional match; example: )\{0,1\} to allow (with the corresponding TODOTXT_SIGIL_BEFORE_PATTERN) enclosing in parentheses. 
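+# (Editor's note) A minimal sketch of how the three TODOTXT_SIGIL_* patterns above combine,
+# built only from the example values given in their comments; these commented-out overrides
+# are illustrative, not part of the stock todo.sh defaults. With them, "w:@office" would
+# still be recognized as the context @office, and only projects/contexts made of three or
+# more letters would match:
+#   export TODOTXT_SIGIL_BEFORE_PATTERN='\(w:\)\{0,1\}'
+#   export TODOTXT_SIGIL_VALID_PATTERN='[a-zA-Z]\{3,\}'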
+ +# Export all TODOTXT_* variables +export "${!TODOTXT_@}" + +# Default color map +export NONE='' +export BLACK='\\033[0;30m' +export RED='\\033[0;31m' +export GREEN='\\033[0;32m' +export BROWN='\\033[0;33m' +export BLUE='\\033[0;34m' +export PURPLE='\\033[0;35m' +export CYAN='\\033[0;36m' +export LIGHT_GREY='\\033[0;37m' +export DARK_GREY='\\033[1;30m' +export LIGHT_RED='\\033[1;31m' +export LIGHT_GREEN='\\033[1;32m' +export YELLOW='\\033[1;33m' +export LIGHT_BLUE='\\033[1;34m' +export LIGHT_PURPLE='\\033[1;35m' +export LIGHT_CYAN='\\033[1;36m' +export WHITE='\\033[1;37m' +export DEFAULT='\\033[0m' + +# Default priority->color map. +export PRI_A=$YELLOW # color for A priority +export PRI_B=$GREEN # color for B priority +export PRI_C=$LIGHT_BLUE # color for C priority +export PRI_X=$WHITE # color unless explicitly defined + +# Default project, context, date, item number, and metadata key:value pairs colors. +export COLOR_PROJECT=$NONE +export COLOR_CONTEXT=$NONE +export COLOR_DATE=$NONE +export COLOR_NUMBER=$NONE +export COLOR_META=$NONE + +# Default highlight colors. +export COLOR_DONE=$LIGHT_GREY # color for done (but not yet archived) tasks + +# Default sentence delimiters for todo.sh append. +# If the text to be appended to the task begins with one of these characters, no +# whitespace is inserted in between. This makes appending to an enumeration +# (todo.sh add 42 ", foo") syntactically correct. +export SENTENCE_DELIMITERS=',.:;' + +[ -e "$TODOTXT_CFG_FILE" ] || { + CFG_FILE_ALT="$HOME/todo.cfg" + + if [ -e "$CFG_FILE_ALT" ] + then + TODOTXT_CFG_FILE="$CFG_FILE_ALT" + fi +} + +[ -e "$TODOTXT_CFG_FILE" ] || { + CFG_FILE_ALT="$HOME/.todo.cfg" + + if [ -e "$CFG_FILE_ALT" ] + then + TODOTXT_CFG_FILE="$CFG_FILE_ALT" + fi +} + +[ -e "$TODOTXT_CFG_FILE" ] || { + CFG_FILE_ALT="${XDG_CONFIG_HOME:-$HOME/.config}/todo/config" + + if [ -e "$CFG_FILE_ALT" ] + then + TODOTXT_CFG_FILE="$CFG_FILE_ALT" + fi +} + +[ -e "$TODOTXT_CFG_FILE" ] || { + CFG_FILE_ALT=$(dirname "$0")"/todo.cfg" + + if [ -e "$CFG_FILE_ALT" ] + then + TODOTXT_CFG_FILE="$CFG_FILE_ALT" + fi +} + +[ -e "$TODOTXT_CFG_FILE" ] || { + CFG_FILE_ALT="$TODOTXT_GLOBAL_CFG_FILE" + + if [ -e "$CFG_FILE_ALT" ] + then + TODOTXT_CFG_FILE="$CFG_FILE_ALT" + fi +} + + +if [ -z "$TODO_ACTIONS_DIR" ] || [ ! -d "$TODO_ACTIONS_DIR" ] +then + TODO_ACTIONS_DIR="$HOME/.todo/actions" + export TODO_ACTIONS_DIR +fi + +[ -d "$TODO_ACTIONS_DIR" ] || { + TODO_ACTIONS_DIR_ALT="$HOME/.todo.actions.d" + + if [ -d "$TODO_ACTIONS_DIR_ALT" ] + then + TODO_ACTIONS_DIR="$TODO_ACTIONS_DIR_ALT" + fi +} + +[ -d "$TODO_ACTIONS_DIR" ] || { + TODO_ACTIONS_DIR_ALT="${XDG_CONFIG_HOME:-$HOME/.config}/todo/actions" + + if [ -d "$TODO_ACTIONS_DIR_ALT" ] + then + TODO_ACTIONS_DIR="$TODO_ACTIONS_DIR_ALT" + fi +} + +# === SANITY CHECKS (thanks Karl!) === +[ -r "$TODOTXT_CFG_FILE" ] || dieWithHelp "$1" "Fatal Error: Cannot read configuration file $TODOTXT_CFG_FILE" + +. 
"$TODOTXT_CFG_FILE" + +# === APPLY OVERRIDES +if [ -n "$OVR_TODOTXT_AUTO_ARCHIVE" ] ; then + TODOTXT_AUTO_ARCHIVE="$OVR_TODOTXT_AUTO_ARCHIVE" +fi +if [ -n "$OVR_TODOTXT_FORCE" ] ; then + TODOTXT_FORCE="$OVR_TODOTXT_FORCE" +fi +if [ -n "$OVR_TODOTXT_PRESERVE_LINE_NUMBERS" ] ; then + TODOTXT_PRESERVE_LINE_NUMBERS="$OVR_TODOTXT_PRESERVE_LINE_NUMBERS" +fi +if [ -n "$OVR_TODOTXT_PLAIN" ] ; then + TODOTXT_PLAIN="$OVR_TODOTXT_PLAIN" +fi +if [ -n "$OVR_TODOTXT_DATE_ON_ADD" ] ; then + TODOTXT_DATE_ON_ADD="$OVR_TODOTXT_DATE_ON_ADD" +fi +if [ -n "$OVR_TODOTXT_PRIORITY_ON_ADD" ] ; then + TODOTXT_PRIORITY_ON_ADD="$OVR_TODOTXT_PRIORITY_ON_ADD" +fi +if [ -n "$OVR_TODOTXT_DISABLE_FILTER" ] ; then + TODOTXT_DISABLE_FILTER="$OVR_TODOTXT_DISABLE_FILTER" +fi +if [ -n "$OVR_TODOTXT_VERBOSE" ] ; then + TODOTXT_VERBOSE="$OVR_TODOTXT_VERBOSE" +fi +if [ -n "$OVR_TODOTXT_DEFAULT_ACTION" ] ; then + TODOTXT_DEFAULT_ACTION="$OVR_TODOTXT_DEFAULT_ACTION" +fi +if [ -n "$OVR_TODOTXT_SORT_COMMAND" ] ; then + TODOTXT_SORT_COMMAND="$OVR_TODOTXT_SORT_COMMAND" +fi +if [ -n "$OVR_TODOTXT_FINAL_FILTER" ] ; then + TODOTXT_FINAL_FILTER="$OVR_TODOTXT_FINAL_FILTER" +fi + +ACTION=${1:-$TODOTXT_DEFAULT_ACTION} + +[ -z "$ACTION" ] && usage +[ -d "$TODO_DIR" ] || mkdir -p "$TODO_DIR" 2> /dev/null || dieWithHelp "$1" "Fatal Error: $TODO_DIR is not a directory" +( cd "$TODO_DIR" ) || dieWithHelp "$1" "Fatal Error: Unable to cd to $TODO_DIR" +[ -z "$TODOTXT_PRIORITY_ON_ADD" ] \ + || echo "$TODOTXT_PRIORITY_ON_ADD" | grep -q "^[A-Z]$" \ + || die "TODOTXT_PRIORITY_ON_ADD should be a capital letter from A to Z (it is now \"$TODOTXT_PRIORITY_ON_ADD\")." + +[ -z "$TODO_FILE" ] && TODO_FILE="$TODO_DIR/todo.txt" +[ -z "$DONE_FILE" ] && DONE_FILE="$TODO_DIR/done.txt" +[ -z "$REPORT_FILE" ] && REPORT_FILE="$TODO_DIR/report.txt" + +[ -f "$TODO_FILE" ] || [ -c "$TODO_FILE" ] || > "$TODO_FILE" +[ -f "$DONE_FILE" ] || [ -c "$DONE_FILE" ] || > "$DONE_FILE" +[ -f "$REPORT_FILE" ] || [ -c "$REPORT_FILE" ] || > "$REPORT_FILE" + +if [ $TODOTXT_PLAIN = 1 ]; then + for clr in ${!PRI_@}; do + export "$clr"=$NONE + done + PRI_X=$NONE + DEFAULT=$NONE + COLOR_DONE=$NONE + COLOR_PROJECT=$NONE + COLOR_CONTEXT=$NONE + COLOR_DATE=$NONE + COLOR_NUMBER=$NONE + COLOR_META=$NONE +fi + +[[ "$HIDE_PROJECTS_SUBSTITUTION" ]] && COLOR_PROJECT="$NONE" +[[ "$HIDE_CONTEXTS_SUBSTITUTION" ]] && COLOR_CONTEXT="$NONE" + +_addto() { + file="$1" + input="$2" + cleaninput + uppercasePriority + + if [[ "$TODOTXT_DATE_ON_ADD" -eq 1 ]]; then + now=$(date '+%Y-%m-%d') + input=$(echo "$input" | sed -e 's/^\(([A-Z]) \)\{0,1\}/\1'"$now /") + fi + if [[ -n "$TODOTXT_PRIORITY_ON_ADD" ]]; then + if ! echo "$input" | grep -q '^([A-Z])'; then + input=$(echo -n "($TODOTXT_PRIORITY_ON_ADD) " ; echo "$input") + fi + fi + fixMissingEndOfLine "$file" + echo "$input" >> "$file" + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + TASKNUM=$(sed -n '$ =' "$file") + echo "$TASKNUM $input" + echo "$(getPrefix "$file"): $TASKNUM added." 
+ fi +} + +shellquote() +{ + typeset -r qq=\'; printf %s\\n "'${1//\'/${qq}\\${qq}${qq}}'"; +} + +filtercommand() +{ + filter=${1:-} + shift + post_filter=${1:-} + shift + + for search_term + do + ## See if the first character of $search_term is a dash + if [ "${search_term:0:1}" != '-' ] + then + ## First character isn't a dash: hide lines that don't match + ## this $search_term + filter="${filter:-}${filter:+ | }grep -i $(shellquote "$search_term")" + else + ## First character is a dash: hide lines that match this + ## $search_term + # + ## Remove the first character (-) before adding to our filter command + filter="${filter:-}${filter:+ | }grep -v -i $(shellquote "${search_term:1}")" + fi + done + + [ -n "$post_filter" ] && { + filter="${filter:-}${filter:+ | }${post_filter:-}" + } + + printf %s "$filter" +} + +_list() { + local FILE="$1" + ## If the file starts with a "/" use absolute path. Otherwise, + ## try to find it in either $TODO_DIR or using a relative path + if [ "${1:0:1}" == / ]; then + ## Absolute path + src="$FILE" + elif [ -f "$TODO_DIR/$FILE" ]; then + ## Path relative to todo.sh directory + src="$TODO_DIR/$FILE" + elif [ -f "$FILE" ]; then + ## Path relative to current working directory + src="$FILE" + elif [ -f "$TODO_DIR/${FILE}.txt" ]; then + ## Path relative to todo.sh directory, missing file extension + src="$TODO_DIR/${FILE}.txt" + else + die "TODO: File $FILE does not exist." + fi + + ## Get our search arguments, if any + shift ## was file name, new $1 is first search term + + _format "$src" '' "$@" + + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + echo "--" + echo "$(getPrefix "$src"): ${NUMTASKS:-0} of ${TOTALTASKS:-0} tasks shown" + fi +} +getPadding() +{ + ## We need one level of padding for each power of 10 $LINES uses. + LINES=$(sed -n '$ =' "${1:-$TODO_FILE}") + printf %s ${#LINES} +} +_format() +{ + # Parameters: $1: todo input file; when empty formats stdin + # $2: ITEM# number width; if empty auto-detects from $1 / $TODO_FILE. + # Precondition: None + # Postcondition: $NUMTASKS and $TOTALTASKS contain statistics (unless $TODOTXT_VERBOSE=0). + + FILE=$1 + shift + + ## Figure out how much padding we need to use, unless this was passed to us. + PADDING=${1:-$(getPadding "$FILE")} + shift + + ## Number the file, then run the filter command, + ## then sort and mangle output some more + if [[ $TODOTXT_DISABLE_FILTER = 1 ]]; then + TODOTXT_FINAL_FILTER="cat" + fi + items=$( + if [ "$FILE" ]; then + sed = "$FILE" + else + sed = + fi \ + | sed -e ''' + N + s/^/ / + s/ *\([ 0-9]\{'"$PADDING"',\}\)\n/\1 / + /^[ 0-9]\{1,\} *$/d + ''' + ) + + ## Build and apply the filter. + filter_command=$(filtercommand "${pre_filter_command:-}" "${post_filter_command:-}" "$@") + if [ "${filter_command}" ]; then + filtered_items=$(echo -n "$items" | eval "${filter_command}") + else + filtered_items=$items + fi + filtered_items=$( + echo -n "$filtered_items" \ + | sed ''' + s/^ /00000/; + s/^ /0000/; + s/^ /000/; + s/^ /00/; + s/^ /0/; + ''' \ + | eval "${TODOTXT_SORT_COMMAND}" \ + | awk ''' + function highlight(colorVar, color) { + color = ENVIRON[colorVar] + gsub(/\\+033/, "\033", color) + return color + } + { + clr = "" + if (match($0, /^[0-9]+ x /)) { + clr = highlight("COLOR_DONE") + } else if (match($0, /^[0-9]+ \([A-Z]\) /)) { + clr = highlight("PRI_" substr($0, RSTART + RLENGTH - 3, 1)) + clr = (clr ? clr : highlight("PRI_X")) + if (ENVIRON["HIDE_PRIORITY_SUBSTITUTION"] != "") { + $0 = substr($0, 1, RLENGTH - 4) substr($0, RSTART + RLENGTH) + } + } + end_clr = (clr ? 
highlight("DEFAULT") : "") + + prj_beg = highlight("COLOR_PROJECT") + prj_end = (prj_beg ? (highlight("DEFAULT") clr) : "") + + ctx_beg = highlight("COLOR_CONTEXT") + ctx_end = (ctx_beg ? (highlight("DEFAULT") clr) : "") + + dat_beg = highlight("COLOR_DATE") + dat_end = (dat_beg ? (highlight("DEFAULT") clr) : "") + + num_beg = highlight("COLOR_NUMBER") + num_end = (num_beg ? (highlight("DEFAULT") clr) : "") + + met_beg = highlight("COLOR_META") + met_end = (met_beg ? (highlight("DEFAULT") clr) : "") + + gsub(/[ \t][ \t]*/, "\n&\n") + len = split($0, words, /\n/) + + printf "%s", clr + for (i = 1; i <= len; ++i) { + if (i == 1 && words[i] ~ /^[0-9]+$/ ) { + printf "%s", num_beg words[i] num_end + } else if (words[i] ~ /^[+].*[A-Za-z0-9_]$/) { + printf "%s", prj_beg words[i] prj_end + } else if (words[i] ~ /^[@].*[A-Za-z0-9_]$/) { + printf "%s", ctx_beg words[i] ctx_end + } else if (words[i] ~ /^(19|20)[0-9]{2}-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])$/) { + printf "%s", dat_beg words[i] dat_end + } else if (words[i] ~ /^[[:alnum:]]+:[^ ]+$/) { + printf "%s", met_beg words[i] met_end + } else { + printf "%s", words[i] + } + } + printf "%s\n", end_clr + } + ''' \ + | sed ''' + s/'"${HIDE_PROJECTS_SUBSTITUTION:-^}"'//g + s/'"${HIDE_CONTEXTS_SUBSTITUTION:-^}"'//g + s/'"${HIDE_CUSTOM_SUBSTITUTION:-^}"'//g + ''' \ + | eval ${TODOTXT_FINAL_FILTER} \ + ) + [ "$filtered_items" ] && echo "$filtered_items" + + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + NUMTASKS=$( echo -n "$filtered_items" | sed -n '$ =' ) + TOTALTASKS=$( echo -n "$items" | sed -n '$ =' ) + fi + if [ "$TODOTXT_VERBOSE" -gt 1 ]; then + echo "TODO DEBUG: Filter Command was: ${filter_command:-cat}" + fi +} + +listWordsWithSigil() +{ + sigil=$1 + shift + + FILE=$TODO_FILE + [ "$TODOTXT_SOURCEVAR" ] && eval "FILE=$TODOTXT_SOURCEVAR" + eval "$(filtercommand 'cat "${FILE[@]}"' '' "$@")" \ + | grep -o "[^ ]*${sigil}[^ ]\\+" \ + | sed -n \ + -e "s#^${TODOTXT_SIGIL_BEFORE_PATTERN//#/\\#}##" \ + -e "s#${TODOTXT_SIGIL_AFTER_PATTERN//#/\\#}\$##" \ + -e "/^${sigil}${TODOTXT_SIGIL_VALID_PATTERN//\//\\/}$/p" \ + | sort -u +} + +export -f cleaninput getPrefix getTodo getNewtodo shellquote filtercommand _list listWordsWithSigil getPadding _format die + +# == HANDLE ACTION == +action=$( printf "%s\n" "$ACTION" | tr '[:upper:]' '[:lower:]' ) + +## If the first argument is "command", run the rest of the arguments +## using todo.sh builtins. +## Else, run a actions script with the name of the command if it exists +## or fallback to using a builtin +if [ "$action" == command ] +then + ## Get rid of "command" from arguments list + shift + ## Reset action to new first argument + action=$( printf "%s\n" "$1" | tr '[:upper:]' '[:lower:]' ) +elif [ -d "$TODO_ACTIONS_DIR/$action" ] && [ -x "$TODO_ACTIONS_DIR/$action/$action" ] +then + "$TODO_ACTIONS_DIR/$action/$action" "$@" + exit $? +elif [ -d "$TODO_ACTIONS_DIR" ] && [ -x "$TODO_ACTIONS_DIR/$action" ] +then + "$TODO_ACTIONS_DIR/$action" "$@" + exit $? 
+fi + +## Only run if $action isn't found in .todo.actions.d +case $action in +"add" | "a") + if [[ -z "$2" && $TODOTXT_FORCE = 0 ]]; then + echo -n "Add: " + read -e -r input + else + [ -z "$2" ] && die "usage: $TODO_SH add \"TODO ITEM\"" + shift + input=$* + fi + _addto "$TODO_FILE" "$input" + ;; + +"addm") + if [[ -z "$2" && $TODOTXT_FORCE = 0 ]]; then + echo -n "Add: " + read -e -r input + else + [ -z "$2" ] && die "usage: $TODO_SH addm \"TODO ITEM\"" + shift + input=$* + fi + + # Set Internal Field Seperator as newline so we can + # loop across multiple lines + SAVEIFS=$IFS + IFS=$'\n' + + # Treat each line seperately + for line in $input ; do + _addto "$TODO_FILE" "$line" + done + IFS=$SAVEIFS + ;; + +"addto" ) + [ -z "$2" ] && die "usage: $TODO_SH addto DEST \"TODO ITEM\"" + dest="$TODO_DIR/$2" + [ -z "$3" ] && die "usage: $TODO_SH addto DEST \"TODO ITEM\"" + shift + shift + input=$* + + if [ -f "$dest" ]; then + _addto "$dest" "$input" + else + die "TODO: Destination file $dest does not exist." + fi + ;; + +"append" | "app" ) + errmsg="usage: $TODO_SH append ITEM# \"TEXT TO APPEND\"" + shift; item=$1; shift + getTodo "$item" + + if [[ -z "$1" && $TODOTXT_FORCE = 0 ]]; then + echo -n "Append: " + read -e -r input + else + input=$* + fi + case "$input" in + [$SENTENCE_DELIMITERS]*) appendspace=;; + *) appendspace=" ";; + esac + cleaninput "for sed" + + if sed -i.bak "${item} s|^.*|&${appendspace}${input}|" "$TODO_FILE"; then + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + getNewtodo "$item" + echo "$item $newtodo" + fi + else + die "TODO: Error appending task $item." + fi + ;; + +"archive" ) + # defragment blank lines + sed -i.bak -e '/./!d' "$TODO_FILE" + [ "$TODOTXT_VERBOSE" -gt 0 ] && grep "^x " "$TODO_FILE" + grep "^x " "$TODO_FILE" >> "$DONE_FILE" + sed -i.bak '/^x /d' "$TODO_FILE" + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + echo "TODO: $TODO_FILE archived." + fi + ;; + +"del" | "rm" ) + # replace deleted line with a blank line when TODOTXT_PRESERVE_LINE_NUMBERS is 1 + errmsg="usage: $TODO_SH del ITEM# [TERM]" + item=$2 + getTodo "$item" + + if [ -z "$3" ]; then + if [ $TODOTXT_FORCE = 0 ]; then + echo "Delete '$todo'? (y/n)" + read -e -r ANSWER + else + ANSWER="y" + fi + if [ "$ANSWER" = "y" ]; then + if [ $TODOTXT_PRESERVE_LINE_NUMBERS = 0 ]; then + # delete line (changes line numbers) + sed -i.bak -e "${item}s/^.*//" -e '/./!d' "$TODO_FILE" + else + # leave blank line behind (preserves line numbers) + sed -i.bak -e "${item}s/^.*//" "$TODO_FILE" + fi + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + echo "$item $todo" + echo "TODO: $item deleted." + fi + else + echo "TODO: No tasks were deleted." + fi + else + sed -i.bak \ + -e "${item}s/^\((.) \)\{0,1\} *$3 */\1/g" \ + -e "${item}s/ *$3 *\$//g" \ + -e "${item}s/ *$3 */ /g" \ + -e "${item}s/ *$3 */ /g" \ + -e "${item}s/$3//g" \ + "$TODO_FILE" + getNewtodo "$item" + if [ "$todo" = "$newtodo" ]; then + [ "$TODOTXT_VERBOSE" -gt 0 ] && echo "$item $todo" + die "TODO: '$3' not found; no removal done." + fi + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + echo "$item $todo" + echo "TODO: Removed '$3' from task." + echo "$item $newtodo" + fi + fi + ;; + +"depri" | "dp" ) + errmsg="usage: $TODO_SH depri ITEM#[, ITEM#, ITEM#, ...]" + shift; + [ $# -eq 0 ] && die "$errmsg" + + # Split multiple depri's, if comma separated change to whitespace separated + # Loop the 'depri' function for each item + for item in ${*//,/ }; do + getTodo "$item" + + if [[ "$todo" = \(?\)\ * ]]; then + sed -i.bak -e "${item}s/^(.) 
//" "$TODO_FILE" + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + getNewtodo "$item" + echo "$item $newtodo" + echo "TODO: $item deprioritized." + fi + else + echo "TODO: $item is not prioritized." + fi + done + ;; + +"do" | "done" ) + errmsg="usage: $TODO_SH do ITEM#[, ITEM#, ITEM#, ...]" + # shift so we get arguments to the do request + shift; + [ "$#" -eq 0 ] && die "$errmsg" + + # Split multiple do's, if comma separated change to whitespace separated + # Loop the 'do' function for each item + for item in ${*//,/ }; do + getTodo "$item" + + # Check if this item has already been done + if [ "${todo:0:2}" != "x " ]; then + now=$(date '+%Y-%m-%d') + # remove priority once item is done + sed -i.bak "${item}s/^(.) //" "$TODO_FILE" + sed -i.bak "${item}s|^|x $now |" "$TODO_FILE" + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + getNewtodo "$item" + echo "$item $newtodo" + echo "TODO: $item marked as done." + fi + else + echo "TODO: $item is already marked done." + fi + done + + if [ $TODOTXT_AUTO_ARCHIVE = 1 ]; then + # Recursively invoke the script to allow overriding of the archive + # action. + "$TODO_FULL_SH" archive + fi + ;; + +"help" ) + shift ## Was help; new $1 is first help topic / action name + if [ $# -gt 0 ]; then + # Don't use PAGER here; we don't expect much usage output from one / few actions. + actionUsage "$@" + else + if [ -t 1 ] ; then # STDOUT is a TTY + if which "${PAGER:-less}" >/dev/null 2>&1; then + # we have a working PAGER (or less as a default) + help | "${PAGER:-less}" && exit 0 + fi + fi + help # just in case something failed above, we go ahead and just spew to STDOUT + fi + ;; + +"shorthelp" ) + if [ -t 1 ] ; then # STDOUT is a TTY + if which "${PAGER:-less}" >/dev/null 2>&1; then + # we have a working PAGER (or less as a default) + shorthelp | "${PAGER:-less}" && exit 0 + fi + fi + shorthelp # just in case something failed above, we go ahead and just spew to STDOUT + ;; + +"list" | "ls" ) + shift ## Was ls; new $1 is first search term + _list "$TODO_FILE" "$@" + ;; + +"listall" | "lsa" ) + shift ## Was lsa; new $1 is first search term + + TOTAL=$( sed -n '$ =' "$TODO_FILE" ) + PADDING=${#TOTAL} + + post_filter_command="${post_filter_command:-}${post_filter_command:+ | }awk -v TOTAL=$TOTAL -v PADDING=$PADDING '{ \$1 = sprintf(\"%\" PADDING \"d\", (\$1 > TOTAL ? 
0 : \$1)); print }' " + cat "$TODO_FILE" "$DONE_FILE" | TODOTXT_VERBOSE=0 _format '' "$PADDING" "$@" + + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + TDONE=$( sed -n '$ =' "$DONE_FILE" ) + TASKNUM=$(TODOTXT_PLAIN=1 TODOTXT_VERBOSE=0 _format "$TODO_FILE" 1 "$@" | sed -n '$ =') + DONENUM=$(TODOTXT_PLAIN=1 TODOTXT_VERBOSE=0 _format "$DONE_FILE" 1 "$@" | sed -n '$ =') + echo "--" + echo "$(getPrefix "$TODO_FILE"): ${TASKNUM:-0} of ${TOTAL:-0} tasks shown" + echo "$(getPrefix "$DONE_FILE"): ${DONENUM:-0} of ${TDONE:-0} tasks shown" + echo "total $((TASKNUM + DONENUM)) of $((TOTAL + TDONE)) tasks shown" + fi + ;; + +"listfile" | "lf" ) + shift ## Was listfile, next $1 is file name + if [ $# -eq 0 ]; then + [ "$TODOTXT_VERBOSE" -gt 0 ] && echo "Files in the todo.txt directory:" + cd "$TODO_DIR" && ls -1 -- *.txt + else + FILE="$1" + shift ## Was filename; next $1 is first search term + + _list "$FILE" "$@" + fi + ;; + +"listcon" | "lsc" ) + shift + listWordsWithSigil '@' "$@" + ;; + +"listproj" | "lsprj" ) + shift + listWordsWithSigil '+' "$@" + ;; + +"listpri" | "lsp" ) + shift ## was "listpri", new $1 is priority to list or first TERM + + pri=$(printf "%s\n" "$1" | tr '[:lower:]' '[:upper:]' | grep -e '^[A-Z]$' -e '^[A-Z]-[A-Z]$') && shift || pri="A-Z" + post_filter_command="${post_filter_command:-}${post_filter_command:+ | }grep '^ *[0-9]\+ ([${pri}]) '" + _list "$TODO_FILE" "$@" + ;; + +"move" | "mv" ) + # replace moved line with a blank line when TODOTXT_PRESERVE_LINE_NUMBERS is 1 + errmsg="usage: $TODO_SH mv ITEM# DEST [SRC]" + item=$2 + dest="$TODO_DIR/$3" + src="$TODO_DIR/$4" + + [ -z "$4" ] && src="$TODO_FILE" + [ -z "$dest" ] && die "$errmsg" + + [ -f "$src" ] || die "TODO: Source file $src does not exist." + [ -f "$dest" ] || die "TODO: Destination file $dest does not exist." + + getTodo "$item" "$src" + [ -z "$todo" ] && die "$item: No such item in $src." + if [ $TODOTXT_FORCE = 0 ]; then + echo "Move '$todo' from $src to $dest? (y/n)" + read -e -r ANSWER + else + ANSWER="y" + fi + if [ "$ANSWER" = "y" ]; then + if [ $TODOTXT_PRESERVE_LINE_NUMBERS = 0 ]; then + # delete line (changes line numbers) + sed -i.bak -e "${item}s/^.*//" -e '/./!d' "$src" + else + # leave blank line behind (preserves line numbers) + sed -i.bak -e "${item}s/^.*//" "$src" + fi + fixMissingEndOfLine "$dest" + echo "$todo" >> "$dest" + + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + echo "$item $todo" + echo "TODO: $item moved from '$src' to '$dest'." + fi + else + echo "TODO: No tasks moved." + fi + ;; + +"prepend" | "prep" ) + errmsg="usage: $TODO_SH prepend ITEM# \"TEXT TO PREPEND\"" + replaceOrPrepend 'prepend' "$@" + ;; + +"pri" | "p" ) + item=$2 + newpri=$( printf "%s\n" "$3" | tr '[:lower:]' '[:upper:]' ) + + errmsg="usage: $TODO_SH pri ITEM# PRIORITY +note: PRIORITY must be anywhere from A to Z." + + [ "$#" -ne 3 ] && die "$errmsg" + [[ "$newpri" = @([A-Z]) ]] || die "$errmsg" + getTodo "$item" + + oldpri= + if [[ "$todo" = \(?\)\ * ]]; then + oldpri=${todo:1:1} + fi + + if [ "$oldpri" != "$newpri" ]; then + sed -i.bak -e "${item}s/^(.) //" -e "${item}s/^/($newpri) /" "$TODO_FILE" + fi + if [ "$TODOTXT_VERBOSE" -gt 0 ]; then + getNewtodo "$item" + echo "$item $newtodo" + if [ "$oldpri" != "$newpri" ]; then + if [ "$oldpri" ]; then + echo "TODO: $item re-prioritized from ($oldpri) to ($newpri)." + else + echo "TODO: $item prioritized ($newpri)." + fi + fi + fi + if [ "$oldpri" = "$newpri" ]; then + echo "TODO: $item already prioritized ($newpri)." 
+ fi + ;; + +"replace" ) + errmsg="usage: $TODO_SH replace ITEM# \"UPDATED ITEM\"" + replaceOrPrepend 'replace' "$@" + ;; + +"report" ) + # archive first + # Recursively invoke the script to allow overriding of the archive + # action. + "$TODO_FULL_SH" archive + + TOTAL=$( sed -n '$ =' "$TODO_FILE" ) + TDONE=$( sed -n '$ =' "$DONE_FILE" ) + NEWDATA="${TOTAL:-0} ${TDONE:-0}" + LASTREPORT=$(sed -ne '$p' "$REPORT_FILE") + LASTDATA=${LASTREPORT#* } # Strip timestamp. + if [ "$LASTDATA" = "$NEWDATA" ]; then + echo "$LASTREPORT" + [ "$TODOTXT_VERBOSE" -gt 0 ] && echo "TODO: Report file is up-to-date." + else + NEWREPORT="$(date +%Y-%m-%dT%T) ${NEWDATA}" + echo "${NEWREPORT}" >> "$REPORT_FILE" + echo "${NEWREPORT}" + [ "$TODOTXT_VERBOSE" -gt 0 ] && echo "TODO: Report file updated." + fi + ;; + +"deduplicate" ) + if [ $TODOTXT_PRESERVE_LINE_NUMBERS = 0 ]; then + deduplicateSedCommand='d' + else + deduplicateSedCommand='s/^.*//; p' + fi + + # To determine the difference when deduplicated lines are preserved, only + # non-empty lines must be counted. + originalTaskNum=$( sed -e '/./!d' "$TODO_FILE" | sed -n '$ =' ) + + # Look for duplicate lines and discard the second occurrence. + # We start with an empty hold space on the first line. For each line: + # G - appends newline + hold space to the pattern space + # s/\n/&&/; - double up the first new line so we catch adjacent dups + # /^\([^\n]*\n\).*\n\1/b dedup + # If the first line of the hold space shows up again later as an + # entire line, it's a duplicate. Jump to the "dedup" label, where + # either of the following is executed, depending on whether empty + # lines should be preserved: + # d - Delete the current pattern space, quit this line and + # move on to the next, or: + # s/^.*//; p - Clear the task text, print this line and move on to + # the next. + # s/\n//; - else (no duplicate), drop the doubled newline + # h; - replace the hold space with the expanded pattern space + # P; - print up to the first newline (that is, the input line) + # b - end processing of the current line + sed -i.bak -n \ + -e 'G; s/\n/&&/; /^\([^\n]*\n\).*\n\1/b dedup' \ + -e 's/\n//; h; P; b' \ + -e ':dedup' \ + -e "$deduplicateSedCommand" \ + "$TODO_FILE" + + newTaskNum=$( sed -e '/./!d' "$TODO_FILE" | sed -n '$ =' ) + deduplicateNum=$(( originalTaskNum - newTaskNum )) + if [ $deduplicateNum -eq 0 ]; then + echo "TODO: No duplicate tasks found" + else + echo "TODO: $deduplicateNum duplicate task(s) removed" + fi + ;; + +"listaddons" ) + if [ -d "$TODO_ACTIONS_DIR" ]; then + cd "$TODO_ACTIONS_DIR" || exit $? 
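+        # Both plain add-ons (an executable file "<name>") and directory-style add-ons ("<name>/<name>") are listed below; the names are illustrative.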
+ for action in * + do + if [ -f "$action" ] && [ -x "$action" ]; then + echo "$action" + elif [ -d "$action" ] && [ -x "$action/$action" ]; then + echo "$action" + fi + done + fi + ;; + +* ) + usage;; +esac + + +bash /opt/scripts/todo/watch_todo.sh diff --git a/todo/todo_completion b/todo/todo_completion new file mode 100755 index 0000000..b403a55 --- /dev/null +++ b/todo/todo_completion @@ -0,0 +1,120 @@ +#!/bin/bash source-this-script +[ "$BASH_VERSION" ] || return + +_todo() +{ + local cur prev opts + COMPREPLY=() + cur="${COMP_WORDS[COMP_CWORD]}" + prev="${COMP_WORDS[COMP_CWORD-1]}" + + local -r OPTS="-@ -@@ -+ -++ -d -f -h -p -P -PP -a -n -t -v -vv -V -x" + local -r COMMANDS="\ + add a addto addm append app archive command del \ + rm depri dp do help list ls listaddons listall lsa listcon \ + lsc listfile lf listpri lsp listproj lsprj move \ + mv prepend prep pri p replace report shorthelp" + local -r MOVE_COMMAND_PATTERN='move|mv' + + local _todo_sh=${_todo_sh:-todo.sh} + local completions + if [ $COMP_CWORD -eq 1 ]; then + completions="$COMMANDS $(eval TODOTXT_VERBOSE=0 $_todo_sh command listaddons 2>/dev/null) $OPTS" + elif [[ $COMP_CWORD -gt 2 && ( \ + "${COMP_WORDS[COMP_CWORD-2]}" =~ ^($MOVE_COMMAND_PATTERN${_todo_file2_actions:+|${_todo_file2_actions}})$ || \ + "${COMP_WORDS[COMP_CWORD-3]}" =~ ^($MOVE_COMMAND_PATTERN${_todo_file3_actions:+|${_todo_file3_actions}})$ ) ]]; then + # "move ITEM# DEST [SRC]" has file arguments on positions 2 and 3. + completions=$(eval TODOTXT_VERBOSE=0 $_todo_sh command listfile 2>/dev/null) + else + case "$prev" in + command) + completions=$COMMANDS;; + help) + completions="$COMMANDS $(eval TODOTXT_VERBOSE=0 $_todo_sh command listaddons 2>/dev/null)";; + -*) completions="$COMMANDS $(eval TODOTXT_VERBOSE=0 $_todo_sh command listaddons 2>/dev/null) $OPTS";; + *) if [[ "$prev" =~ ^(addto|listfile|lf${_todo_file1_actions:+|${_todo_file1_actions}})$ ]]; then + completions=$(eval TODOTXT_VERBOSE=0 $_todo_sh command listfile 2>/dev/null) + else + case "$cur" in + +*) completions=$(eval TODOTXT_VERBOSE=0 $_todo_sh command listproj 2>/dev/null) + COMPREPLY=( $( compgen -W "$completions" -- $cur )) + [ ${#COMPREPLY[@]} -gt 0 ] && return 0 + # Fall back to projects extracted from done tasks. + completions=$(eval 'TODOTXT_VERBOSE=0 TODOTXT_SOURCEVAR=\$DONE_FILE' $_todo_sh command listproj 2>/dev/null) + ;; + @*) completions=$(eval TODOTXT_VERBOSE=0 $_todo_sh command listcon 2>/dev/null) + COMPREPLY=( $( compgen -W "$completions" -- $cur )) + [ ${#COMPREPLY[@]} -gt 0 ] && return 0 + # Fall back to contexts extracted from done tasks. + completions=$(eval 'TODOTXT_VERBOSE=0 TODOTXT_SOURCEVAR=\$DONE_FILE' $_todo_sh command listcon 2>/dev/null) + ;; + *) if [[ "$cur" =~ ^[0-9]+$ ]]; then + declare -a sedTransformations=( + # Remove the (padded) task number; we prepend the + # user-provided $cur instead. + -e 's/^ *[0-9]\{1,\} //' + # Remove the timestamp prepended by the -t option, + # but keep any priority (as it's short and may + # provide useful context). + -e 's/^\((.) \)\{0,1\}[0-9]\{2,4\}-[0-9]\{2\}-[0-9]\{2\} /\1/' + # Remove the done date and (if there) the timestamp. + # Keep the "x" (as it's short and may provide useful + # context) + -e 's/^\([xX] \)\([0-9]\{2,4\}-[0-9]\{2\}-[0-9]\{2\} \)\{1,2\}/\1/' + # Remove any trailing whitespace; the Bash + # completion inserts a trailing space itself. + -e 's/[[:space:]]*$//' + # Finally, limit the output to a single line just as + # a safety check of the ls action output. 
+ -e '1q' + ) + local todo=$( \ + eval TODOTXT_VERBOSE=0 $_todo_sh '-@ -+ -p -x command ls "^ *${cur} "' 2>/dev/null | \ + sed "${sedTransformations[@]}" \ + ) + # Append task text as a shell comment. This + # completion can be a safety check before a + # destructive todo.txt operation. + [ "$todo" ] && COMPREPLY[0]="$cur # $todo" + return 0 + else + return 0 + fi + ;; + esac + fi + ;; + esac + fi + + COMPREPLY=( $( compgen -W "$completions" -- $cur )) + return 0 +} +complete -F _todo todo.sh + +# If you define an alias (e.g. "t") to todo.sh, you need to explicitly enable +# completion for it, too: +#complete -F _todo t +# It is recommended to put this line next to your alias definition in your +# ~/.bashrc (or wherever else you're defining your alias). If you simply +# uncomment it here, you will need to redo this on every todo.txt update! + +# If you have renamed the todo.sh executable, or if it is not accessible through +# PATH, you need to add and use a wrapper completion function, like this: +#_todoElsewhere() +#{ +# local _todo_sh='/path/to/todo2.sh' +# _todo "$@" +#} +#complete -F _todoElsewhere /path/to/todo2.sh + +# If you use aliases to use different configuration(s), you need to add and use +# a wrapper completion function for each configuration if you want to complete +# from the actual configured task locations: +#alias todo2='todo.sh -d "$HOME/todo2.cfg"' +#_todo2() +#{ +# local _todo_sh='todo.sh -d "$HOME/todo2.cfg"' +# _todo "$@" +#} +#complete -F _todo2 todo2 diff --git a/todo/todo_format.py b/todo/todo_format.py new file mode 100644 index 0000000..1998dca --- /dev/null +++ b/todo/todo_format.py @@ -0,0 +1,136 @@ +import os +import time +import re + + +def utf8_length(text): + if text is None: + return 0 + len_text = len(text) + len_text_utf8 = len(text.encode('utf-8')) + # utf-8一个汉字占3个字符,减去原计数就是多出来的2/3,再除以2就是增量。再加回去即可 + size = int((len_text_utf8 - len_text) / 2 + len_text) + return size + + +def tidy_done(): + + with open('/opt/logs/TODO/done.txt', 'r') as donetxt, open('/tmp/tmp_done.txt', 'w') as tmpdone: + lines = donetxt.readlines() + for line in lines: + tmpdone.write(line.strip() + '\n') + + os.system('mv /tmp/tmp_done.txt /opt/logs/TODO/done.txt') + + +def format_col_1(item): + global number_of_days + global item_no + global auto_weekend + + end_time_date = item.strip()[-10:] + try: + end_time_stamp = time.strptime(end_time_date, "%Y-%m-%d") + except Exception as Err: + print('Error Encounted: ', end='') + print(str(Err)) + print(f"Please modify /opt/logs/TODO/todo.txt @ line {item_no} manually, and then run it again.\n") + os._exit(1) + + end_time_stamp = time.mktime(end_time_stamp) + now_time_stamp = int(time.time()) + number_of_days = round((end_time_stamp - now_time_stamp) / 3600 / 24 + 0.75, 2) + # print('number_of_days-1:', number_of_days) + + if auto_weekend and number_of_days < 0: + end_time_stamp += 604800 + end_time_date = time.strftime('%Y-%m-%d', time.localtime(end_time_stamp)) + item = item.strip()[:-10] + end_time_date + number_of_days += 7 + + item_format = re.sub(' +', ' ', item.strip()) + done.write(item_format) + for i in range(16 - len(item_format)): + done.write(' ') + + done.write('|') + + +def format_col_2(item): + done.write(' ') + done.write(item.strip()) + for i in range(12 - 1 - len(item.strip())): + done.write(' ') + + done.write('|') + + +def format_col_3(item): + global number_of_days + # print('number_of_days-3:', number_of_days) + item = 'T:' + str(number_of_days) + + done.write(' ') + done.write(item.strip()) + for i in range(10 - 1 
- len(item.strip())): + done.write(' ') + + done.write('|') + + +def format_col_4(item): + done.write(' ') + try: + with open('/tmp/col.log', 'r') as obj_col: + width = int(obj_col.readline().strip()) + except: + width = 125 + + item_format = re.sub(' +', ' ', item.strip()) + len_of_task = utf8_length(item_format) + # get the width of current terminal + left_white = width - 11 - 12 - 10 - 5 - len_of_task - 8 + done.write(item_format + ' ' * left_white + '\n') + + +if __name__ == "__main__": + + number_of_days = 0 + item_no = 1 + # 0 signifies that task without priority, and 1 vice versa + with open('/opt/logs/TODO/todo.txt', 'r') as todo, open('/tmp/tmp.txt', 'w') as done: + for line in todo.readlines(): + col = 1 + auto_weekend = 0 + + if line.strip() == '': + continue + elif 'Happy weekend~' in line: + auto_weekend = 1 + else: + pass + + for item in line.strip().split('|'): + if col == 1: + format_col_1(item) + elif col == 2: + format_col_2(item) + elif col == 3: + format_col_3(item) + elif col == 4: + format_col_4(item) + else: + break + + col += 1 + item_no += 1 + + + os.system('mv /tmp/tmp.txt /opt/logs/TODO/todo.txt') + tidy_done() + + + + + + diff --git a/todo/watch_todo.sh b/todo/watch_todo.sh new file mode 100644 index 0000000..6dfbc9e --- /dev/null +++ b/todo/watch_todo.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +tput cols > /tmp/col.log +python3 /opt/scripts/todo/todo_format.py +result=$? +rm -rf /tmp/col.log + +exit $result + +# while :; do +# echo $COLUMNS > /tmp/col.log +# python3 /opt/scripts/todo/todo_format.py +# sleep 1 +# done + diff --git a/update/backups.sh b/update/backups.sh new file mode 100644 index 0000000..5f76922 --- /dev/null +++ b/update/backups.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +echo "=========================================================================" +echo Start Time: `date` +set -x + +cp -rf /var/spool/cron/crontabs/ /opt/configs/ +cp -rf /usr/local/nginx/conf/nginx.conf /opt/configs/nginx/nginx.conf +cp -rf /usr/local/nginx/conf/domain_confs/ /opt/configs/nginx/ +cp -rf /etc/docker/daemon.json /opt/configs/conf/ +cp -rf /etc/sysctl.conf /opt/configs/conf/ +cp -rf /etc/pip.conf /opt/configs/conf/ +cp -rf /etc/apt/sources.list /opt/configs/conf/ +cp -rf /root/.acme.sh/*ecc /opt/configs/acme/ + +cd /opt && t=`date +%Y%m%dT%H%M%S` +rsync -av apps configs logs scripts websites wd/72-Backups/CrossChain/VPS/ > /opt/logs/rsync/rsync_${t}.log +cd /opt/logs/rsync/ +let count=`ls | wc -l` +if [[ $count -gt 10 ]]; then + rsync_logs=() + for((i=1;i<=$count;i++)); do + rsync_logs[$i]=`ls | sort -r | head -n $i | tail -1` + done + + for i in `seq 11 $count`; do + rm -rf ${rsync_logs[$i]} + done +fi + +# tar -I pixz -cf $backup_dir/vps-${t}.tar.xz -C /opt configs data logs scripts source-code websites > /dev/null 2>&1 +set +x +echo End Time: `date` +echo "=========================================================================" +echo +echo + diff --git a/update/calibre.sh b/update/calibre.sh new file mode 100644 index 0000000..18bd3e7 --- /dev/null +++ b/update/calibre.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +# get the pid of calibre, and write it into file /tmp/calibre_pids +ps -ef | grep '/opt/apps/calibre/venv/bin/cps' | grep -v grep | awk '{print $2}' > /tmp/calibre_pids + +# if not exist, signifying that there is no calibre process +if [[ ! 
-s /tmp/calibre_pids ]]; then
+    echo -e "\e[1;31mNo Calibre process found to terminate; Calibre will be started automatically below.\e[0m"
+else
+    for calibre_pid in `cat /tmp/calibre_pids`; do
+        kill -9 $calibre_pid > /dev/null 2>&1
+    done
+fi
+
+nohup /opt/apps/calibre/venv/bin/python /opt/apps/calibre/venv/bin/cps > /dev/null 2>&1 &
+echo -e "\e[1;32mCalibre process started successfully\e[0m"
+
diff --git a/update/dash_rand_logo.sh b/update/dash_rand_logo.sh
new file mode 100644
index 0000000..282fa31
--- /dev/null
+++ b/update/dash_rand_logo.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+
+# Count the available logo images and how many are needed
+IMAGE_NUM_ALL=`ls /opt/apps/localcr/A_dashboard/local/icons | wc -l`
+IMAGE_NUM_NEEDED=`cat /root/services.yaml | grep -E '[0-9]{1,3}.jpg' | wc -l`
+if [[ $IMAGE_NUM_NEEDED -gt $IMAGE_NUM_ALL ]]; then
+    alarm='Dashboard:\nThere are NOT enough logos to use, please check ASAP.'
+    bash /opt/scripts/alert/sendmsg.sh "$alarm"
+    exit 233
+fi
+
+# Initialize the array of chosen names
+declare -a arr=()
+
+# Randomly pick image names without repetition
+for ((i=0; i<$IMAGE_NUM_NEEDED; i++)); do
+    while true; do
+        # Generate a random image name
+        RAND=$((RANDOM%IMAGE_NUM_ALL))
+        IMG_NAME=$RAND.jpg
+
+        # Check whether this image name has already been chosen
+        chosen=0
+        for num in "${arr[@]}"; do
+            if [ "$num" == "$IMG_NAME" ]; then
+                chosen=1
+                break
+            fi
+        done
+
+        # If it has not been chosen yet, add it to the array
+        if [ "$chosen" -eq 0 ]; then
+            arr=(${arr[@]} $IMG_NAME)
+            break
+        fi
+    done
+done
+
+# Path and name of the file to modify
+filename_origin="/opt/apps/localcr/A_dashboard/local/config/services.yaml"
+filename_copy="/opt/apps/localcr/A_dashboard/local/config/services_copy.yaml"
+# filename_origin="/root/services.yaml"
+# filename_copy="/root/services_copy.yaml"
+cp $filename_origin $filename_copy
+
+# Process every line that matches the expected pattern
+let count=0
+lineno=1
+while read line; do
+    # Get the image file name from the line
+    oldname=$(echo $line | grep -Eo '[0-9]{1,3}.jpg')
+
+    if [[ $oldname != '' ]]; then
+        newname=${arr[$count]}
+        # Replace the file name
+        sed -i "${lineno}s/\/$oldname/\/$newname/" $filename_origin
+        let count=count+1
+    fi
+    let lineno=lineno+1
+done < $filename_copy
+
+rm $filename_copy
+
+
diff --git a/update/jekyll_content_update.sh b/update/jekyll_content_update.sh
new file mode 100644
index 0000000..79b0fe3
--- /dev/null
+++ b/update/jekyll_content_update.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+function bash_update() {
+    echo $directory$filename $action
+    rm -rf /opt/websites/just-the-docs/bash
+    jekyll b -s /opt/apps/document/bash -d /opt/websites/just-the-docs/bash
+}
+
+function python_update() {
+    echo $directory$filename $action
+    rm -rf /opt/websites/just-the-docs/python
+    jekyll b -s /opt/apps/document/python -d /opt/websites/just-the-docs/python
+}
+
+
+function blog_update() {
+    echo $directory$filename $action
+    rm -rf /opt/websites/blog
+    jekyll b -s /opt/apps/blog/ -d /opt/websites/blog/
+    echo -e '\n'
+}
+
+
+echo -e '\n\n==================================================================' >> /opt/logs/jekyll_update.log
+date >> /opt/logs/jekyll_update.log
+if [[ $1 == 'blog' ]]; then
+    blog_update >> /opt/logs/jekyll_update.log
+elif [[ $1 == 'python' ]]; then
+    python_update >> /opt/logs/jekyll_update.log
+elif [[ $1 == 'bash' ]]; then
+    bash_update >> /opt/logs/jekyll_update.log
+else
+    echo Wrong >> /opt/logs/jekyll_update.log
+    exit 2
+fi
+
+
+
diff --git a/update/jekyll_update.sh b/update/jekyll_update.sh
new file mode 100644
index 0000000..2a59ade
--- /dev/null
+++ b/update/jekyll_update.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+inotify-hookable \
+    --watch-directories /opt/apps/blog \
+    --watch-directories /opt/apps/document/python \
+    --watch-directories /opt/apps/document/bash \
+    --ignore-paths /opt/apps/blog/.git/ \
+    --ignore-paths /opt/apps/blog/img/avatar.jpg \
+    --on-modify-path-command "(^/opt/apps/blog/.*)=(bash /opt/scripts/update/jekyll_content_update.sh blog)" \
+    --on-modify-path-command "(^/opt/apps/document/python/.*)=(bash /opt/scripts/update/jekyll_content_update.sh python)" \
+    --on-modify-path-command "(^/opt/apps/document/bash/.*)=(bash /opt/scripts/update/jekyll_content_update.sh 'bash')"
+
diff --git a/update/nav_rand_logo.sh b/update/nav_rand_logo.sh
new file mode 100644
index 0000000..cc5035c
--- /dev/null
+++ b/update/nav_rand_logo.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# Count the available logo images and how many are needed
+IMAGE_NUM_ALL=`ls /opt/websites/nav/assets/images/logos | wc -l`
+IMAGE_NUM_NEEDED=`cat /opt/websites/nav/index.html | grep -E '[0-9]{1,3}.jpg' | wc -l`
+if [[ $IMAGE_NUM_NEEDED -gt $IMAGE_NUM_ALL ]]; then
+    alarm='Navigation:\nThere are NOT enough logos to use, please check ASAP.'
+    bash /opt/scripts/alert/sendmsg.sh "$alarm"
+    exit 233
+fi
+
+# Initialize the array of chosen names
+declare -a arr=()
+
+# Randomly pick image names without repetition
+for ((i=0; i<$IMAGE_NUM_NEEDED; i++)); do
+    while true; do
+        # Generate a random image name
+        RAND=$((RANDOM%IMAGE_NUM_ALL))
+        IMG_NAME=$RAND.jpg
+
+        # Check whether this image name has already been chosen
+        chosen=0
+        for num in "${arr[@]}"; do
+            if [ "$num" == "$IMG_NAME" ]; then
+                chosen=1
+                break
+            fi
+        done
+
+        # If it has not been chosen yet, add it to the array
+        if [ "$chosen" -eq 0 ]; then
+            arr=(${arr[@]} $IMG_NAME)
+            break
+        fi
+    done
+done
+
+# Path and name of the file to modify
+filename_origin="/opt/websites/nav/index.html"
+filename_copy="/opt/websites/nav/index_copy.html"
+cp $filename_origin $filename_copy
+
+# Process every line that matches the expected pattern
+let count=0
+lineno=1
+while read line; do
+    # Get the image file name from the line
+    oldname=$(echo $line | grep -Eo '[0-9]{1,3}.jpg')
+
+    if [[ $oldname != '' ]]; then
+        newname=${arr[$count]}
+        # Replace the file name
+        sed -i "${lineno}s/\/$oldname/\/$newname/" $filename_origin
+        let count=count+1
+    fi
+    let lineno=lineno+1
+done < $filename_copy
+
+rm $filename_copy
+
+
diff --git a/update/renew.sh b/update/renew.sh
new file mode 100644
index 0000000..13ba735
--- /dev/null
+++ b/update/renew.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+# get the latest hosts file to speed up GitHub access
+curl https://raw.hellogithub.com/hosts >> /opt/logs/hosts
+
+# renew the images.json file for the homepage background picture
+cd /opt/websites/homepage/
+node assets/js/bing.js > /dev/null 2>&1
+
+# random logo for navigation
+bash /opt/scripts/update/nav_rand_logo.sh
+
+# random logo for dash
+bash /opt/scripts/update/dash_rand_logo.sh
+
+# logo renew
+let numOfAvatar=`ls /opt/websites/nav/assets/images/logos/ | wc -l`
+let randNumber=$RANDOM%$numOfAvatar
+cp /opt/websites/nav/assets/images/logos/${randNumber}.jpg /opt/websites/homepage/assets/img/logo.jpg
+cp /opt/websites/nav/assets/images/logos/${randNumber}.jpg /opt/websites/nav/assets/images/logos/avatar.jpg
+cp /opt/websites/nav/assets/images/logos/${randNumber}.jpg /opt/apps/blog/img/avatar.jpg
+
diff --git a/utool/date2n.sh b/utool/date2n.sh
new file mode 100644
index 0000000..252e5d9
--- /dev/null
+++ b/utool/date2n.sh
@@ -0,0 +1,80 @@
+#!/bin/bash
+
+# Custom color definitions
+c_br='\e[1;31m' # bold red
+c_bg='\e[1;32m' # bold green
+c_by='\e[1;33m' # bold yellow
+c_bb='\e[1;34m' # bold blue
+c_bp='\e[1;35m' # bold purple
+c_bc='\e[1;36m' # bold cyan
+c_bir='\e[1;3;31m' # * bold italic red
+c_big='\e[1;3;32m' # bold italic green
+c_bib='\e[1;3;34m' # * bold italic blue
+c_bic='\e[1;3;36m' # bold italic cyan
+c_e='\e[0m' # reset
+
+function usage() {
+    echo -e "${c_bir}将日期转换成十进制和十六进制时间戳,输入日期格式可参考如下,中间空格可替换成[a-zA-Z@#%^*:]中的任意一个单字符:${c_e}"
+    echo -e " 2023/03/09 09:29:02"
+
echo -e " 2023-03-09 09:29:02" + echo -e " 09/03/2023 09:29:02" + echo -e " 09/Mar/2023 09:29:02" + exit 4 +} + + +ts=$@ + +if [[ $ts =~ (([J|j]an)|([F|f]eb)|([M|m]ar)|([A|a]pr)|([M|m]ay)|([J|j]un)|([J|j]ul)|([A|a]ug)|([S|s]ep)|([O|o]ct)|([N|n]ov)|([D|d]ec)) ]]; then + [[ ${#ts} -ne 20 ]] && usage +else + [[ ${#ts} -ne 19 ]] && usage +fi + +# 2023/03/09 09:29:02 +# 2023-03-09 09:29:02 +fmt1="^[0-9]{4}[-/][0-9]{2}[-/][0-9]{2}[a-zA-Z@#%^*:]{0,1}[[:space:]]{0,1}[0-9]{2}:[0-9]{2}:[0-9]{2}$" +# 09/03/2023:09:29:02 +fmt2="^[0-9]{2}/[0-9]{2}/[0-9]{4}[a-zA-Z@#%^*:]{0,1}[[:space:]]{0,1}[0-9]{2}:[0-9]{2}:[0-9]{2}$" +# 09/Mar/2023:09:29:02 +fmt3="^[0-9]{2}/(([J|j]an)|([F|f]eb)|([M|m]ar)|([A|a]pr)|([M|m]ay)|([J|j]un)|([J|j]ul)|([A|a]ug)|([S|s]ep)|([O|o]ct)|([N|n]ov)|([D|d]ec))/[0-9]{4}[a-zA-Z@#%^*:]{0,1}[[:space:]]{0,1}[0-9]{2}:[0-9]{2}:[0-9]{2}$" +if [[ $ts =~ $fmt1 || $ts =~ $fmt2 ]]; then + ts=${ts:0:10}' '${ts:11} + dec=`date -d "$ts" +%s` + hex=`echo "obase=16; $dec" | bc` + echo "十进制的时间戳 - $dec" + echo "十六进制时间戳 - 0x$hex - $hex" + echo "十六进制时间戳 - 0x${hex,,} - ${hex,,}" + +elif [[ $ts =~ $fmt3 ]]; then + day=${ts:0:2} + month=${ts:3:3} + left=${ts:7} + [[ $month =~ ^[J|j]an$ ]] && month='01' + [[ $month =~ ^[F|f]eb$ ]] && month='02' + [[ $month =~ ^[M|m]ar$ ]] && month='03' + [[ $month =~ ^[A|a]pr$ ]] && month='04' + [[ $month =~ ^[M|m]ay$ ]] && month='05' + [[ $month =~ ^[J|j]un$ ]] && month='06' + [[ $month =~ ^[J|j]ul$ ]] && month='07' + [[ $month =~ ^[A|a]ug$ ]] && month='08' + [[ $month =~ ^[S|s]ep$ ]] && month='09' + [[ $month =~ ^[O|o]ct$ ]] && month='10' + [[ $month =~ ^[N|n]ov$ ]] && month='11' + [[ $month =~ ^[D|d]ec$ ]] && month='12' + ts=$month'/'$day'/'$left + ts=${ts:0:10}' '${ts:11} + dec=`date -d "$ts" +%s` + hex=`echo "obase=16; $dec" | bc` + echo "十进制的时间 - $dec" + echo "十六进制时间 - 0x${hex} - $hex" + echo "十六进制时间 - 0x${hex,,} - ${hex,,}" +else + echo -e "${c_br}请检查输入的时间符合正常规则,退出...${c_e}" + usage + exit 10 +fi + + + + diff --git a/utool/genpw.sh b/utool/genpw.sh new file mode 100644 index 0000000..b18189a --- /dev/null +++ b/utool/genpw.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +if [[ $# -ne 1 ]]; then + echo -e "\e[1;3;31mNeed ONE and only ONE digital parameter.\e[0m" + exit 1 +fi + +if ! 
[[ $1 -gt 0 && $1 -lt 80 ]] 2>/dev/null; then
+    echo -e "\e[1;3;31mNeed ONE NUMERIC parameter, which must be greater than 0 and less than 80.\e[0m"
+    exit 2
+fi
+
+# build an 80-character password pool (ten 8-character pwgen outputs)
+pwgen_0=`pwgen -sync`
+pwgen_1=`pwgen -sync`
+pwgen_2=`pwgen -sync`
+pwgen_3=`pwgen -sync`
+pwgen_4=`pwgen -sync`
+pwgen_5=`pwgen -sync`
+pwgen_6=`pwgen -sync`
+pwgen_7=`pwgen -sync`
+pwgen_8=`pwgen -sync`
+pwgen_9=`pwgen -sync`
+
+pwgen_80=${pwgen_0}${pwgen_1}${pwgen_2}${pwgen_3}${pwgen_4}${pwgen_5}${pwgen_6}${pwgen_7}${pwgen_8}${pwgen_9}
+echo ${pwgen_80:0:${1}}
+
+
diff --git a/utool/ipports.sh b/utool/ipports.sh
new file mode 100644
index 0000000..b7c5dfe
--- /dev/null
+++ b/utool/ipports.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+if [[ $1 == 'port' ]]; then
+    find /opt/apps/localcr/ -type f -name "*:*:*" | \
+        awk -F '/' '{print $NF}' | \
+        awk -F':' 'BEGIN{print "Local", "Container", "Application"} {print $1, $2, $3}' | \
+        column -t | \
+        sort -nk 1
+elif [[ $1 == 'ip' ]]; then
+    find /opt/apps/localcr/ -type f -name "10.10.0.*" | awk -F '/' '{print $(NF-1), $NF}' | column -t | sort -k2
+else
+    :
+fi
diff --git a/utool/number2d.sh b/utool/number2d.sh
new file mode 100644
index 0000000..a5364d8
--- /dev/null
+++ b/utool/number2d.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+# Custom color definitions
+c_br='\e[1;31m' # bold red
+c_bg='\e[1;32m' # bold green
+c_by='\e[1;33m' # bold yellow
+c_bb='\e[1;34m' # bold blue
+c_bp='\e[1;35m' # bold purple
+c_bc='\e[1;36m' # bold cyan
+c_bir='\e[1;3;31m' # * bold italic red
+c_big='\e[1;3;32m' # bold italic green
+c_bib='\e[1;3;34m' # * bold italic blue
+c_bic='\e[1;3;36m' # bold italic cyan
+c_e='\e[0m' # reset
+
+
+number=$1
+
+if [[ $number =~ ^[0-9]{1,11}$ ]]; then
+    date -d@$number +'%Y-%m-%d %H:%M:%S'
+elif [[ $number =~ ^0[xX][0-9a-fA-F]{1,9}$ ]]; then
+    hex=`printf "%d" $number`
+    date -d@$hex +'%Y-%m-%d %H:%M:%S'
+elif [[ $number =~ ^[0-9a-fA-F]{1,9}$ ]]; then
+    hex=`printf "%d" '0x'$number`
+    date -d@$hex +'%Y-%m-%d %H:%M:%S'
+else
+    echo -e "${c_br}请输入正确的十进制--11位以内,或者十六进制数字--9位以内:${c_e}"
+    echo -e " 十进制的时间戳 - 1678523385"
+    echo -e " 十六进制时间戳 - 0x640c3bf9/0X640C3BF9/0x640C3bf9/640c3bf9"
+    echo -e "${c_bb}有歧义时,使用'0x'或者'0X'来区分十进制和十六进制${c_e}"
+    exit 233
+fi
+
+
diff --git a/utool/toolbox.sh b/utool/toolbox.sh
new file mode 100644
index 0000000..f5edb46
--- /dev/null
+++ b/utool/toolbox.sh
@@ -0,0 +1,56 @@
+# You may uncomment the following lines if you want `ls' to be colorized:
+export LS_OPTIONS='--color=auto'
+eval "`dircolors`"
+alias ls='ls $LS_OPTIONS'
+alias ll='ls $LS_OPTIONS -ahl'
+alias l='ls $LS_OPTIONS -lA'
+
+PS1='\e[1;32m[ $?
\u@\h \W]\$ \e[0m' +alias cls='clear' +alias ll='ls -alh' +alias l='ll' +alias ..='cd ..; ls' +alias ...='cd ../..; ls' +alias cdblog='cd /opt/source-code/blog/_posts' +alias cdscripts='cd /opt/scripts' +alias cdconfigs='cd /opt/configs' +alias cdfrp='cd /opt/source-code/frpc/' +alias cdalist='cd /opt/webdav/alist/CTC-Client/Manford/ && ls' +alias cdone='cd /opt/webdav/onedrive/ && ls' +alias cdwd='cd /opt/webdav/wd/ && ls' +alias cdman='cd /opt/source-code/manford && ls' +alias conf='cd /usr/local/nginx/conf' +alias so='source ~/.bashrc' +alias python='python3' +alias py='python3' +alias vi='vim' +alias g='vim' +alias txl='tmux ls' +alias txn='tmux new -s' +alias txa='tmux at -t' +alias acme.sh=~/.acme.sh/acme.sh +alias tt='/opt/scripts/todo/todo.sh -d /opt/scripts/todo/todo.cfg' +alias tdate='date +%Y-%m-%d' +alias jtddate='date +"%Y-%m-%d %H:%M:%S"' +complete -F _todo tt +alias cdpython='cd /opt/source-code/document/python/ && ls' +export TIMETAGGER_CREDENTIALS='timetagger:$2a$08$s6ZkrdZGmwNADKY3K9X0jOgGWu4XMSVCGs4qbqMTupYRaUM2n4RKq' + +export Ali_Key='LTAI5tMoM6J3Nzoi6JbT9waY' +export Ali_Secret='N89cC4JpxdBL1Hqr8WhefIPRVcKEAs' + +export CF_Key="f0971f82ebc8d6dfffc1a4871759f6b17fd1a" +export CF_Email="xgdfmf@gmail.com" + +export EDITOR='/usr/bin/vim' + +if [ -f /etc/bash_completion ]; then + . /etc/bash_completion +fi +# [[ ! -d /tmp/rclone ]] && mkdir -p /tmp/rclone + +#. /opt/scripts/rclone/rclone_bash_completion.sh +. /opt/scripts/todo/todo_completion + +source /opt/source-code/v2ray-4.34.0/envfile + diff --git a/utool/usage.sh b/utool/usage.sh new file mode 100644 index 0000000..f3399b5 --- /dev/null +++ b/utool/usage.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +let col=`tput cols` +if [[ $col -lt 120 ]]; then + echo -e "\e[1;3;31mYour screen width is too small to show the usage info neatly. So make the display window maximized.\e[0m" + read -p "Press any key to continue..." + echo '' +fi + + +echo -e "\e[1;32mDESCRIPTION:\e[0m" +echo -e "\e[3;32mutool -- a self-defined command line interface, which is used to facilitate operating the system, supports the following options. 
In the description part, a leading asterisk signifies that the option must take an argument.\e[0m"
+echo -e "\e[1;4m \e[0m"
+echo -e "\e[37;40m|\e[0m\e[1;4;37;40mOption| Description |Option| Description \e[0m\e[37;40m|\e[0m"
+echo -e "\e[37;40m| -a | print all local ports currently in use | -n | |\e[0m"
+echo -e "\e[37;40m| -b | generate two-factor key of GITHUB | -o | |\e[0m"
+echo -e "\e[37;40m| -c | print all intranet IPs used in docker | -p |*generate password of length 1~79 |\e[0m"
+echo -e "\e[37;40m| -d | show external IP of this machine | -q | |\e[0m"
+echo -e "\e[37;40m| -e |*show IP location | -r | |\e[0m"
+echo -e "\e[37;40m| -f |*generate md5 value of the input string | -s | |\e[0m"
+echo -e "\e[37;40m| -g | | -t | |\e[0m"
+echo -e "\e[37;40m| -h | show this help information | -u | |\e[0m"
+echo -e "\e[37;40m| -i |*get domain's registration info | -v | |\e[0m"
+echo -e "\e[37;40m| -j |*convert a (hexa)decimal to specific time format | -w | |\e[0m"
+echo -e "\e[37;40m| -k |*convert multi-formats time to a decimal | -x | |\e[0m"
+echo -e "\e[37;40m| -l | | -y | |\e[0m"
+echo -e "\e[37;40m|\e[0m\e[4;37;40m -m | | -z | \e[0m\e[37;40m|\e[0m\n"
+
+
diff --git a/utool/utool.py b/utool/utool.py
new file mode 100755
index 0000000..5ec7bb0
--- /dev/null
+++ b/utool/utool.py
@@ -0,0 +1,178 @@
+#!/usr/bin/python3
+# coding: utf-8
+import os
+import sys
+import time
+
+c_title = '\033[1;4;31;42m' # title color
+c_br = '\033[1;31m' # bold red
+c_bg = '\033[1;32m' # bold green
+c_by = '\033[1;33m' # bold yellow
+c_bb = '\033[1;34m' # bold blue
+c_bp = '\033[1;35m' # bold purple
+c_bc = '\033[1;36m' # bold cyan
+c_bir = '\033[1;3;31m' # * bold italic red
+c_bib = '\033[1;3;34m' # * bold italic blue
+c_bic = '\033[1;3;36m' # bold italic cyan
+c_e = '\033[0m' # reset
+
+if len(sys.argv) == 2:
+    opt_1 = sys.argv[1]
+    opt_rest = None
+elif len(sys.argv) > 2:
+    opt_1 = sys.argv[1]
+    opt_rest = sys.argv[2:]
+else:
+    opt_1 = None
+    opt_rest = None
+
+opt_index = ['-do_not_use_this', '-a', '-b', '-c', '-d', '-e', '-f', '-g', '-h', '-i', '-j', '-k', '-l', '-m',
+             '-n', '-o', '-p', '-q', '-r', '-s', '-t', '-u', '-v', '-w', '-x', '-y', '-z']
+
+if opt_1 not in opt_index:
+    os.system('/usr/bin/bash /opt/scripts/utool/usage.sh')
+    exit(100)
+
+if opt_1 == '-a':
+    if opt_rest != None:
+        print(f"This option({opt_1}) prints all local ports currently in use, and accepts NO parameter.")
+        exit(opt_index.index(opt_1))
+
+    os.system('/usr/bin/bash /opt/scripts/utool/ipports.sh port')
+
+elif opt_1 == '-b':
+    if opt_rest != None:
+        print(f"This option({opt_1}) generates a two-factor auth code for github login, and accepts NO parameter.")
+        exit(opt_index.index(opt_1))
+
+    os.system("/usr/bin/oathtool -b --totp 'G3NHHFO2L2LZ5W2R'")
+
+elif opt_1 == '-c':
+    if opt_rest != None:
+        print(f"This option({opt_1}) prints all intranet IPs used in docker, and accepts NO parameter.")
+        exit(opt_index.index(opt_1))
+
+    os.system('/usr/bin/bash /opt/scripts/utool/ipports.sh ip')
+
+elif opt_1 == '-d':
+    if opt_rest != None:
+        print(f"This option({opt_1}) returns the current IP of the local machine, and accepts NO parameter.")
+        exit(opt_index.index(opt_1))
+
+    os.system("/usr/bin/python3 /opt/scripts/roll_api/get_self_ip.py")
+
+elif opt_1 == '-e':
+    if opt_rest == None:
+        print(f"This option({opt_1}) returns the IP info, and accepts at least one IP parameter.")
+        exit(opt_index.index(opt_1))
+
+    for opt_2 in opt_rest:
+        os.environ['opt_2'] = opt_2
+        os.system('/usr/bin/python3
/opt/scripts/roll_api/get_ip.py $opt_2') + +elif opt_1 == '-f': + if len(sys.argv) < 3: + print(f"{c_br}需要至少一个字符串作为输入,退出...{c_e}\n") + exit(opt_index.index(opt_1)) + + param='' + for item in sys.argv[2:]: + param += item + + os.environ['param'] = param + print(f"{c_by}Warning: 输入字符串中间的任何空白符将会被删除,要保留的话,需手动将字符串用单引号括起来{c_e}") + print(f"本次计算MD5值的字符串为 - {param}\n本次计算得到的MD5的值为 - ", end='') + # print(f"本次计算得到的MD5的值为 - ", end='') + os.system('echo -n $param | md5sum | cut -d " " -f 1') + +elif opt_1 == '-g': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-h': + os.system('/usr/bin/bash /opt/scripts/utool/usage.sh') + exit(opt_index.index(opt_1)) +elif opt_1 == '-i': + if opt_rest == None: + print(f"This option({opt_1}) will return the domain registration info, and accept at least one domain parameter.") + exit(opt_index.index(opt_1)) + + for opt_2 in opt_rest: + os.environ['opt_2'] = opt_2 + os.system('/usr/bin/python3 /opt/scripts/roll_api/domain_reg_check.py $opt_2') + +elif opt_1 == '-j': + if len(sys.argv) == 2: + print(f"{c_br}-j选项:将十进制或者十六进制数字作为入参,转换成标准时间格式,只接收第一个参数,其他参数将被丢弃,有歧义时,使用'0x'或者'0X'来区分十进制和十六进制,退出...{c_e}\n") + exit(opt_index.index(opt_1)) + + os.environ['param'] = sys.argv[2] + os.system('/usr/bin/bash /opt/scripts/utool/number2d.sh $param') + +elif opt_1 == '-k': + if len(sys.argv) < 3: + print(f"{c_br}-k选项:将输入的时间转换成十进制和十六进制,需要一个字符串格式的时间作为输入,退出...{c_e}\n") + exit(opt_index.index(opt_1)) + + param='' + for item in sys.argv[2:]: + param += item + ' ' + + os.environ['param'] = param + os.system('/usr/bin/bash /opt/scripts/utool/date2n.sh $param') + +elif opt_1 == '-l': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-m': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-n': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-o': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-p': + if opt_rest == None: + opt_rest = ['',] + + opt_2 = opt_rest[0] + os.environ['opt_2'] = opt_2 + os.system('/usr/bin/bash /opt/scripts/utool/genpw.sh $opt_2') + +elif opt_1 == '-q': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-r': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-s': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-t': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-u': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-v': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-w': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-x': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-y': + print(f"This option({opt_1}) is reserved now, nothing is bound on it.") + exit(opt_index.index(opt_1)) +elif opt_1 == '-z': + print(f"This option({opt_1}) is reserved now, nothing 
is bound on it.") + exit(opt_index.index(opt_1)) +else: + print("This line SHOULD NOT be executed, please check carefully.") + exit(255) + + +
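+# Usage sketch (hypothetical invocations; assumes this script is exposed on PATH as a "utool" command):
+#   utool -a                        # list local ports currently in use (ipports.sh port)
+#   utool -e 1.1.1.1                # look up info for one or more IPs (roll_api/get_ip.py)
+#   utool -p 16                     # print a 16-character random password (genpw.sh)
+#   utool -j 0x640c3bf9             # convert a decimal/hex timestamp to a date (number2d.sh)
+#   utool -k 2023-03-09 09:29:02    # convert a date to decimal/hex timestamps (date2n.sh)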