'''
Created on 25.06.2018
@author: Bernhard

Reads all log*.log files and outputs, per file:
 - Server
 - Computer
 - Start time
 - End time
 - Duration (according to the file)
 - Finished (yes/no)
 - Mode (recycling or allnew)
 - Products
 - Users
 - Average over all API calls (multiply each call's average by its call
   count, sum these, and divide by the total number of calls)
'''
import codecs
import csv
import datetime
import getopt
import glob
import os
import re
import sys
def args_read(l_search_parameter):
    """Return the value of the command-line option *l_search_parameter*.

    Re-parses sys.argv on every call. Returns the option's argument string,
    or None when the option was not supplied. Exits the process on a
    malformed command line.

    BUG FIX: the short-option string was "d:;", which registered a bogus
    "-;" option; now only "-d <arg>" plus the long options are accepted.
    """
    l_args = sys.argv[1:]
    try:
        opts, args = getopt.getopt(l_args, "d:", ["dir=",
                                                  "mode="
                                                  ])
    except getopt.GetoptError as err_det:
        print("Error in reading parameters:" + str(err_det))
        print("Call with --dir= or without any parameters")
        print("--mode=recycle|allnew|nginx to ignore files <> this mode")
        sys.exit("Abgebrochen wegen Fehler in Aufrufparametern")
    for opt, arg in opts:
        if l_search_parameter == opt:
            return arg
    return None  # explicit: option not present
def f_get_logfiles_from_dir(l_dir):
    """Return the names of all "log*.log" files found in *l_dir*.

    Side effect: changes the process working directory to *l_dir* (the
    export CSV is later written with a relative path, so it lands there).
    """
    os.chdir(l_dir)
    return list(glob.glob("log*.log"))
class read_csv():
    """Parse one logfile and aggregate its statistics into self.attribs.

    Despite the name, the input is a "log*.log" file read through the csv
    module; the comma-split pieces of each line are re-joined with ':'
    (log timestamps use a comma before the milliseconds).
    """

    # Matches the re-joined timestamp prefix "YYYY-MM-DD HH:MM:SS:mmm".
    # BUG FIX: the original compared l_string[0:4] against the hard-coded
    # year "2018", so logfiles from any other year were skipped entirely.
    _TIMESTAMP = re.compile(r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}:\d{3}")

    # Log levels counted per line. Restricting to this set avoids the
    # original's KeyError (or corruption of e.g. attribs["users"]) when a
    # line's first word happens to match some other attribs key.
    _LEVELS = ("info", "warning", "critical", "debug", "error")

    def __init__(self, csv):
        # NOTE: the parameter name shadows the csv module inside __init__
        # only; kept unchanged for caller compatibility.
        self.csv = csv  # full path of the logfile
        self.attribs = {}
        self.attribs["file"] = csv.split("\\")[-1]  # basename (Windows-style path)
        self.attribs["start"] = ""
        self.attribs["end"] = ""
        self.attribs["duration"] = ""
        self.attribs["server"] = ""
        self.attribs["computer"] = ""
        self.attribs["users"] = 0
        self.attribs["products"] = 0
        self.attribs["mode"] = ""
        self.attribs["linecount"] = 0
        self.attribs["info"] = 0
        self.attribs["warning"] = 0
        self.attribs["critical"] = 0
        self.attribs["debug"] = 0
        self.attribs["error"] = 0
        # Running totals: [all calls, total duration, overall average,
        # absolute min (seeded high), absolute max]
        self.attribs["API-AVG"] = [0, 0, 0, 9999, 0]
        self.attribs["API-Calls"] = []
        # Keys are all API names seen; used later to build the variable
        # header columns for the executed API calls.
        self.api_detail_headers = {}

    def load(self):
        """Read the logfile line by line and fill self.attribs."""
        with codecs.open(filename=self.csv, mode='r', encoding="utf-8", errors="ignore") as l_csvfile:
            l_reader = csv.reader(l_csvfile)
            for l_row in l_reader:
                # Re-join the comma-split pieces with ':' so the timestamp
                # reads "YYYY-MM-DD HH:MM:SS:mmm".
                l_string = ":".join(l_row)

                # Only lines starting with a full timestamp are log records.
                if not self._TIMESTAMP.match(l_string):
                    continue

                if len(self.attribs["start"]) == 0:
                    self.attribs["start"] = l_string[0:23]
                # Any record can be the last :)
                self.attribs["end"] = l_string[0:23]
                self.attribs["linecount"] += 1

                l_string = l_string[24:]

                # Count the message type (info, warning, etc.).
                l_msgty = l_string.split(" ")[0].lower()
                if l_msgty in self._LEVELS:
                    self.attribs[l_msgty] += 1

                # Number of products imported.
                if "f_products_import: Products:" in l_string:
                    self.attribs["products"] = int(l_string.split(" ")[-1])

                # Server name (two known message variants).
                if "set_default: Server:" in l_string:
                    self.attribs["server"] = l_string.split(" ")[-1]
                elif "Server: " in l_string:
                    self.attribs["server"] = l_string.split(" ")[-1]

                if "get_l_users: Users: " in l_string:
                    self.attribs["users"] = int(l_string.split(" ")[-1])

                if "f_daily_recycle: Mode Recycling!" in l_string:
                    self.attribs["mode"] = "recycling"
                if "f_nginx: Starting load test on nginx" in l_string:
                    self.attribs["mode"] = "nginx"

                if "printreport: Computer:" in l_string:
                    self.attribs["computer"] = l_string.split(" ")[-1]

                # Any report-table line carries API statistics.
                if "printreport" in l_string:
                    self.__printreport(l_string)

        # Duration only when both timestamps were actually captured.
        if len(self.attribs["end"]) > 13 and len(self.attribs["start"]) > 13:
            self.attribs["duration"] = (
                datetime.datetime.strptime(self.attribs["end"], '%Y-%m-%d %H:%M:%S:%f')
                - datetime.datetime.strptime(self.attribs["start"], '%Y-%m-%d %H:%M:%S:%f'))

        # No explicit mode message means the default "allnew" run.
        if self.attribs["mode"] == "":
            self.attribs["mode"] = "allnew"

    def report(self):
        """Print every collected attribute to stdout (debug helper)."""
        for n, x in self.attribs.items():
            print(n + ":" + str(x))

    def __printreport(self, l_string):
        """Parse one "printreport" table row: API name, call count, then
        Avg | Total | Min | Max | Last, and update the running API totals."""
        l_line = l_string.split("printreport:")[-1]
        # Skip header, separator and summary lines of the report table.
        for l_skip in ("# Avg", "-------", "Server", "Computer", "zzmake"):
            if l_skip in l_line:
                return
        if len(l_line) < 20:
            return
        l_array = l_line.split("|")
        if len(l_array) < 6:
            return  # malformed row; original raised IndexError here
        l_array[0] = l_array[0].strip()
        l_api = l_array[0].split(" ")[0]
        try:
            l_count = int(l_array[0].split(" ")[-1])
            # Avg, Sum, Min, Max, Last
            l_avg = float(l_array[1])
            l_total = float(l_array[2])
            l_min = float(l_array[3])
            l_max = float(l_array[4])
            l_last = float(l_array[5])
        except ValueError:
            return  # non-numeric row; skip instead of aborting the whole load
        self.attribs["API-Calls"].append([l_api, l_count, l_avg, l_total, l_min, l_max, l_last])
        self.api_detail_headers[l_api] = ""
        self.attribs["API-AVG"][0] = self.attribs["API-AVG"][0] + l_count
        self.attribs["API-AVG"][1] = round(self.attribs["API-AVG"][1] + (l_count * l_avg), 2)
        self.attribs["API-AVG"][2] = round(self.attribs["API-AVG"][1] / self.attribs["API-AVG"][0], 4)
        if l_min < self.attribs["API-AVG"][3]:
            self.attribs["API-AVG"][3] = l_min
        if l_max > self.attribs["API-AVG"][4]:
            self.attribs["API-AVG"][4] = l_max

    def get_all_attribs(self):
        """Return the attribs dict (shared reference, not a copy)."""
        return self.attribs

    def get_all_api_names(self):
        """Return the dict whose keys are all API names seen in this file."""
        return self.api_detail_headers
def l_export_csvs(l_csvs, l_export_filename):
    """Write one summary CSV row per parsed logfile.

    l_csvs: dict of filename -> read_csv instance (already .load()-ed).
    l_export_filename: output CSV path (';'-delimited, UTF-8 with BOM).
    Files whose "mode" attrib differs from a supplied --mode option are
    skipped, both for header collection and for the data rows.
    """
    # Hoist: args_read() re-parses sys.argv on every call; one lookup suffices.
    l_mode_filter = args_read(l_search_parameter="--mode")

    with codecs.open(filename=l_export_filename, mode='wb', encoding='utf-8-sig') as l_csv:
        l_csv_writer = csv.writer(l_csv, delimiter=";", quoting=csv.QUOTE_ALL)
        l_header = []
        l_api_header = {}
        l_csv_item = None
        # First pass: collect every API name of the mode-matching files so
        # the header can contain one column group per API.
        for l_csv_item in l_csvs.values():
            if l_mode_filter and l_mode_filter != l_csv_item.get_all_attribs()["mode"]:
                continue
            for key in l_csv_item.get_all_api_names().keys():
                l_api_header[key] = ""
        if l_csv_item is None:
            return  # nothing to export (original crashed on an empty dict)

        # Build the header row from the attrib keys (identical for every
        # read_csv instance, so the last-seen one is representative).
        for l_key in l_csv_item.get_all_attribs().keys():
            if l_key == "API-AVG":
                l_header.append("Total Calls")
                l_header.append("Total Duration")
                l_header.append("Total Average")
                l_header.append("Total Min")
                l_header.append("Total Max")
            elif l_key == "API-Calls":
                # Last column group: remember each API's column position so
                # the data rows can be padded up to it.
                for h_key in l_api_header.keys():
                    l_header.append(h_key + " #")
                    l_api_header[h_key] = len(l_header) - 1
                    l_header.append(h_key + " Avg")
                    l_header.append(h_key + " Tot")
            else:
                l_header.append(l_key)
        l_csv_writer.writerow(l_header)

        # Second pass: one data row per mode-matching file.
        for l_csv_in in l_csvs.values():
            if l_mode_filter and l_mode_filter != l_csv_in.get_all_attribs()["mode"]:
                continue
            l_row_out = []
            l_pos = 0
            for l_row_key, l_row_item in l_csv_in.get_all_attribs().items():
                if l_row_key == "API-AVG":
                    # Decimal comma for the spreadsheet locale.
                    for n in range(0, 5):
                        l_row_out.append(str(l_row_item[n]).replace(".", ","))
                        l_pos += 1
                elif l_row_key == "API-Calls":
                    for l_api_row in l_row_item:
                        # BUG FIX: pad up to THIS api's column (the original
                        # used l_row_item[0][0], i.e. always the first API's
                        # column) and advance l_pos for each filler cell
                        # (the original never counted the padding).
                        l_target = l_api_header[l_api_row[0]]
                        while l_pos < l_target:
                            l_row_out.append(";")  # filler cell, kept as in original
                            l_pos += 1
                        l_row_out.append(l_api_row[1])  # number of calls
                        l_row_out.append(str(l_api_row[2]).replace(".", ","))  # avg / call
                        l_row_out.append(str(l_api_row[3]).replace(".", ","))  # total duration
                        l_pos += 3
                else:
                    l_row_out.append(l_row_item)
                    l_pos += 1
            l_csv_writer.writerow(l_row_out)
-
def _main():
    """Script entry point: resolve the log directory, parse every log*.log
    file in it, and export the summary CSV into that directory."""
    l_dir = args_read("--dir")
    if not l_dir:
        # Author's default log directory when --dir is not given.
        l_dir = "C:\\Users\\Bernhard\\cs_earthsquad\\CloudStation\\pwa\\Testcases\\API\\ES_API_TEST\\pyAPITest\\dist\\log\\"
    else:
        l_dir = l_dir + "\\"
    l_csvs = {}

    for l_logfile in f_get_logfiles_from_dir(l_dir):
        print("File " + l_logfile)
        l_test_csv = read_csv(l_dir + l_logfile)
        l_test_csv.load()
        l_csvs[l_logfile] = l_test_csv

    # Relative path: f_get_logfiles_from_dir chdir'ed into l_dir, so the
    # summary lands next to the logfiles.
    l_export_csvs(l_csvs, "log_summary.csv")
    print("----")
    print("Log_summary.csv created")


# BUG FIX: the original ran all of this unconditionally at import time.
if __name__ == "__main__":
    _main()