#!/usr/bin/python3
# coding=utf-8
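#
# Post-processing script for the SysOM "loadtask" (system load) diagnosis
# (sysom_server/sysom_diagnosis/service_scripts/loadtask_post). It parses the
# raw log produced by a diagnosis run, whose path is passed as the first
# command-line argument, and prints a JSON summary on stdout: load reason
# flags, running/uninterruptible task counts and weights, D-task call stacks
# and the CPU flame graph sections.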
import collections
import json
import operator
import os
import re
import sys
from functools import reduce


def execute_cmd(command):
    # Run a shell command and return its stdout as a string.
    command = command.replace("\n", "")
    command_fd = os.popen(command, "r")
    ret = command_fd.read()
    command_fd.close()
    return ret


def stack(name, file):
    # Return the "-----"-delimited stack block that mentions the given task
    # name, or "NULL" if none is found. Blocks are assumed to contain kernel
    # frames printed as "[<0>] symbol" (the /proc/<pid>/stack format).
    with open(file, "r") as fd:
        stack_str = fd.read().split("-----")
    for i, block in enumerate(stack_str):
        if i != 0 and name in block:
            return block
    return "NULL"


def cpuflamegraph(file, index):
    # Return the log section at the given index; sections are separated by a
    # long "#" divider line. Returns an error string if the index is missing.
    with open(file, "r") as fd:
        graph = fd.read().split(
            "####################################################################################")
    if len(graph) > index:
        return graph[index]
    return "FlameGraph failed!"
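
# The raw diagnosis log is assumed to be plain text split into sections by a
# long "#" divider: the "global_cpuflamegraph" marker selects section index 2
# (the CPU flame graph) and the "folded" marker selects index 3 (the folded
# stack samples). Per-task records appear as "Task_Name: <comm>" lines, and
# per-task call stacks are delimited by "-----".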


def parse_log(file):
    postprocess_result = {
        "code": 0,
        "err_msg": "",
        "result": {}
    }
    parse_data = collections.OrderedDict()
    new_parse_data = collections.OrderedDict()
    reason = collections.OrderedDict()
    tasks = {}
    count = {}
    task_stack = {}
    d_task_list = []
    r_task_list = []
    cpuflamegraph_str = ""
    cpuflamegraph_folded_str = ""
    # Walk the diagnosis log line by line and collect the load reason flags,
    # per-task hit counts, D-state call stacks and flame graph sections.
    fd = open(file, "r")
    for line in fd.readlines():
        if "global_cpuflamegraph" in line:
            cpuflamegraph_str = cpuflamegraph(file, 2)
            continue
        if "folded" in line:
            cpuflamegraph_folded_str = cpuflamegraph(file, 3)
            continue
        if "load reason" in line:
            if "sys" in line:
                reason["sys"] = "true"
            else:
                reason["sys"] = "false"
            if "irq" in line:
                reason["irq"] = "true"
            else:
                reason["irq"] = "false"
            if "softirq" in line:
                reason["softirq"] = "true"
            else:
                reason["softirq"] = "false"
            if "io" in line:
                reason["io"] = "true"
            else:
                reason["io"] = "false"
            continue
        if "load_proc" in line:
            reason["loadavg"] = line.split()[1]
        if "Task_Name" in line:
            task_name = re.search(r'Task_Name:\s*(\S+)', line, re.I)
            if task_name:
                if task_name.group(1) in tasks:
                    tasks[task_name.group(1)] = tasks[task_name.group(1)] + 1
                else:
                    tasks[task_name.group(1)] = 1
                # Look the call stack up once instead of re-reading the log twice.
                task_stack_str = stack(task_name.group(1), file)
                if "[<0>]" in task_stack_str:
                    task_stack[task_name.group(1)] = task_stack_str
            continue
        if "Time" in line:
            time = re.search(r'Time: (.+)', line, re.I)
            if time:
                time_str = time.group(1)
            continue
        if "uninterrupt_cnt" in line:
            count["uninterrupt tasks"] = line.split()[1]
            continue
        if "running_cnt" in line:
            count["runnig tasks"] = line.split()[1]
            continue
    fd.close()
    task_sort = sorted(tasks.items(), key=lambda kv: kv[1], reverse=True)
    stack_sort = sorted(task_stack.items(), key=lambda kv: kv[1], reverse=True)
    # Tasks that have a captured call stack are reported as uninterruptible
    # (D-state) load; the remaining tasks are reported as running load.
    for i, el in enumerate(task_sort):
        is_dtask = False
        for j, ek in enumerate(stack_sort):
            if el[0] == ek[0]:
                d_task = {}
                d_task["task"] = el[0]
                d_task["weight"] = el[1]
                d_task["stack"] = reduce(
                    operator.add, ek[1].split("[<0>] ")[1:-1], ' ')
                d_task_list.append(d_task)
                is_dtask = True
        if not is_dtask:
            r_task = {}
            r_task["task"] = el[0]
            r_task["weight"] = el[1]
            r_task_list.append(r_task)
    parse_data["result"] = reason
    parse_data["task_count"] = count
    parse_data["uninterrupt load"] = d_task_list
    parse_data["running load"] = r_task_list
    parse_data["flamegraph"] = cpuflamegraph_str
    parse_data["flamegraph_folded"] = cpuflamegraph_folded_str
new_parse_data["dataresult"] = {"data": {}}
new_parse_data["dataresult"]["data"] = [
{"key": "System LoadAvg", "value": reason["loadavg"]},
{"key": "Sys Influences", "value": reason["sys"]},
{"key": "Hardirq Influences", "value": reason["irq"]},
{"key": "Softirq Influences", "value": reason["softirq"]},
{"key": "IO Influences", "value": reason["io"]}
]
sys_summary = "NULL"
hardirq_summary = "NULL"
softirq_summary = "NULL"
io_summary = "NULL"
new_parse_data["summary"] = {}
new_parse_data["summary"]["suggestion"] = "N/A"
if reason["sys"] == "false":
sys_summary = " is normal;"
else:
sys_summary = " is abnormal;"
new_parse_data["summary"]["suggestion"] = "sys utils is high,use nosched tool to deep diagnosis;"
if reason["irq"] == "false":
hardirq_summary = " is normal;"
else:
hardirq_summary = " is abnormal;"
new_parse_data["summary"]["suggestion"] = "irq utils is high,use irqoff tool to deep diagnosis;"
if reason["softirq"] == "false":
softirq_summary = " is normal;"
else:
softirq_summary = " is abnormal;"
new_parse_data["summary"]["suggestion"] = "softirq utils is high,use softirq tool to deep diagnosis;"
if reason["io"] == "false":
io_summary = " is normal;"
else:
io_summary = " is abnormal;"
new_parse_data["summary"]["suggestion"] = "io utils is high,use iosdiag tool to deep diagnosis;"
if int(float(count["uninterrupt tasks"])) >= 5:
new_parse_data["summary"]["suggestion"] = "D task counts is more than 5,please analyse calltrace of tasks by D task pie chart"
if reason["sys"] == "false" and reason["irq"] == "false" and reason["softirq"] == "false" and reason["io"] == "false":
new_parse_data["summary"]["status"] = "normal"
else:
new_parse_data["summary"]["status"] = "warning"
new_parse_data["summary"]["cause"] = "Load Influences Result:"+"sys utils" + sys_summary + \
"hardirq" + hardirq_summary + "softirq" + softirq_summary + "io" + io_summary
new_parse_data["datataskcount"] = {"data": []}
if "uninterrupt tasks" in count.keys() and "runnig tasks" in count.keys():
new_parse_data["datataskcount"]["data"] = [
{"key": "uninterrupt_tasks", "value": int(float(count["uninterrupt tasks"]))},
{"key": "runnig_tasks", "value": int(float(count["runnig tasks"]))},
]
else:
new_parse_data["datataskcount"]["data"] = [
{"key": "uninterrupt_tasks", "value": 0},
{"key": "runnig_tasks", "value": 0},
]
new_parse_data["datataskcount"]["data"]
new_parse_data["datauninterruptload"] = {"data": []}
for i in range(len(d_task_list)):
tmp_d_task = {}
tmp_d_task["key"] = d_task_list[i]["task"]
tmp_d_task["value"] = d_task_list[i]["weight"]
tmp_d_task["stack"] = d_task_list[i]["stack"]
new_parse_data["datauninterruptload"]["data"].append(tmp_d_task)
new_parse_data["datarunningload"] = {"data": []}
for i in range(len(r_task_list)):
tmp_r_task = {}
tmp_r_task["key"] = r_task_list[i]["task"]
tmp_r_task["value"] = r_task_list[i]["weight"]
new_parse_data["datarunningload"]["data"].append(tmp_r_task)
new_parse_data["dataflamegraph"] = {"data": []}
new_parse_data["dataflamegraph"]["data"] = [
{"key": 0, "value": cpuflamegraph_str}]
new_parse_data["dataflamegraph_folded_str"] = {
"data": [{"key": 0, "value": cpuflamegraph_folded_str}]
}
postprocess_result["result"] = new_parse_data
out = json.dumps(postprocess_result, indent=4)
#data = {"catalogue": parse_data["Global lamegraph"]}
#files = {"file": open(parse_data["Global lamegraph"], 'rb')}
#requests.post("http://127.0.0.1:8001/api/v1/host/upload_file/", data=data, files=files)
print(out)
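
# Example (assumed) invocation:
#   ./loadtask_post /path/to/diagnosis_result.log
# The script prints a JSON envelope of the form
#   {"code": 0, "err_msg": "", "result": {...}}
# to stdout.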

if __name__ == '__main__':
    parse_log(sys.argv[1])