#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2023 Huawei Device Co., Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Description: Generate the daily report for the ark FFI workload test suite
"""

import argparse
import datetime
import logging
import os
import sys
import xml.etree.ElementTree as ET
from collections import namedtuple
from openpyxl import Workbook, load_workbook
from openpyxl.styles import PatternFill, Alignment


class Constants:
    # A case counts as degraded when today's average time is at least
    # (1 + DETERIORATION_BOUNDARY_VALUE) times yesterday's average, i.e. 5% slower.
    DETERIORATION_BOUNDARY_VALUE = 0.05
    RET_OK = 0
    RET_ERROR = 1
    logger = None


def get_logger(logger_name, log_file_path, level=logging.INFO):
    """Create a logger that writes to both the given log file and the console."""
    formatter = logging.Formatter(fmt='[%(asctime)s]  [%(levelname)s]   %(message)s',
                                  datefmt='%Y-%b-%d  %H:%M:%S')

    fh = logging.FileHandler(encoding='utf-8', mode='a', filename=log_file_path)
    fh.setFormatter(formatter)
    fh.setLevel(logging.DEBUG)
    ch = logging.StreamHandler()
    ch.setFormatter(formatter)
    ch.setLevel(logging.DEBUG)
    log = logging.getLogger(logger_name)
    log.addHandler(fh)
    log.addHandler(ch)
    log.setLevel(level)

    return log


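# A minimal sketch of the result XML layout that get_today_test_result() below
# assumes (gtest-style output; element and attribute names come from the parsing
# code, while the concrete values here are illustrative only):
#
#   <testsuites>
#     <testsuite name="JSNApiSplTest" tests="2" failures="0" disabled="0"
#                skipped="0" errors="0" time="1.234"
#                timestamp="2023-01-01T00:00:00">
#       <testcase name="SomeCase" result="completed" time="0.617"/>
#       <testcase name="OtherCase" result="completed" time="0.617"/>
#     </testsuite>
#   </testsuites>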
def get_today_test_result(xml_folder_path):
    """Collect per-case results and times from each result XML under xml_folder_path."""
    xml_list = []
    test_result_list = []
    for root, _, files in os.walk(xml_folder_path):
        for file in files:
            file_path = os.path.join(root, file)
            xml_list.append(file_path)

    sorted_xml_list = sorted(xml_list)
    CaseTestDataType = namedtuple('test', ['result', 'exec_time'])
    for xml_file in sorted_xml_list:
        test_result = {}
        tree = ET.parse(xml_file)
        root = tree.getroot()
        for testsuite in root.findall('testsuite'):
            if testsuite.get('name') != "JSNApiSplTest":
                continue
            # iterate over the test cases of the JSNApiSplTest suite
            for testcase in testsuite.findall('testcase'):
                time_str = testcase.get('time')
                time = float(time_str.strip())
                case_test_data = CaseTestDataType(testcase.get('result'), time)
                test_result[testcase.get('name')] = case_test_data
            test_result_list.append(test_result)

    return test_result_list


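# get_yesterday_test_data() below re-reads the per-case rows that this script
# wrote into yesterday's report: column 1 holds the case name, column 2 the
# execution status and column 8 the daily average time.  Illustrative shape of
# the mapping it returns (note that the cell values come back as strings):
#
#   {"SomeCase": CaseTestDataType(result="pass", exec_time="0.617"), ...}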
def get_yesterday_test_data(yesterday_daily_report):
    test_data = {}
    try:
        wb = load_workbook(yesterday_daily_report)
        ws = wb.worksheets[0]
    except FileNotFoundError:
        return test_data
    CaseTestDataType = namedtuple('test', ['result', 'exec_time'])
    for row_num in range(2, ws.max_row + 1):
        js_case_name_tmp = str(ws.cell(row=row_num, column=1).value)
        exec_status = str(ws.cell(row=row_num, column=2).value)
        exec_time = str(ws.cell(row=row_num, column=8).value)
        case_data = CaseTestDataType(exec_status, exec_time)
        test_data[js_case_name_tmp] = case_data

    return test_data


def get_yesterday_average_time(yesterday_data, workload_case_name):
    yesterday_average_cost_time = "0"
    if len(yesterday_data) > 0:
        for key in yesterday_data:
            if workload_case_name in key:
                yesterday_average_cost_time = str(yesterday_data[key].exec_time)
                break

    return float(yesterday_average_cost_time)


def generate_daily_report(daily_report_file, today_data, yesterday_data):
    '''
        daily_report_file: path of today's daily report workbook
        today_data: list of dictionaries, one per test run (five runs in total); each value holds the
            execution status and execution time of a case
        yesterday_data: dictionary whose values hold yesterday's average execution time per workload case
    '''
    Constants.logger.info("begin to generate report.")
    if len(today_data) == 0:
        Constants.logger.error("no test data for today, please check it!")
        return Constants.RET_ERROR
    wb = load_workbook(daily_report_file)
    ws = wb.worksheets[0]
    for workload_case_name in today_data[0].keys():
        notes = ' '
        first_cost_time = today_data[0][workload_case_name].exec_time
        if today_data[0][workload_case_name].result == 'completed':
            execute_status = 'pass'
        else:
            execute_status = today_data[0][workload_case_name].result
        second_cost_time = today_data[1][workload_case_name].exec_time
        third_cost_time = today_data[2][workload_case_name].exec_time
        fourth_cost_time = today_data[3][workload_case_name].exec_time
        fifth_cost_time = today_data[4][workload_case_name].exec_time
        time_list = [first_cost_time, second_cost_time, third_cost_time, fourth_cost_time, fifth_cost_time]
        time_list_len = len(time_list)
        if time_list_len == 0:
            today_average_cost_time = 0
        else:
            today_average_cost_time = sum(time_list) / time_list_len
        yesterday_average_cost_time = get_yesterday_average_time(yesterday_data, workload_case_name)

        try:
            tmp = today_average_cost_time / yesterday_average_cost_time
            if tmp >= (1.0 + Constants.DETERIORATION_BOUNDARY_VALUE):
                is_degraded_str = str(True)
            else:
                is_degraded_str = str(False)
        except ZeroDivisionError:
            is_degraded_str = 'NA'

        new_row = [workload_case_name, execute_status, first_cost_time, second_cost_time, third_cost_time,
                   fourth_cost_time, fifth_cost_time, today_average_cost_time, yesterday_average_cost_time,
                   is_degraded_str, notes]

        ws.append(new_row)
        if is_degraded_str == str(True):
            ws.cell(row=ws.max_row, column=10).fill = PatternFill(start_color='FF0000', end_color='FF0000',
                                                                  fill_type='solid')
    wb.save(daily_report_file)
    Constants.logger.info("generate report successfully with no summary information.")
    return Constants.RET_OK


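# Worked example of the degradation check above, using the default
# DETERIORATION_BOUNDARY_VALUE of 0.05: a case that averaged 1.00 s yesterday
# and 1.06 s today gives a ratio of 1.06 >= 1.05, so column 10 is set to "True"
# and filled red; at 1.04 s the ratio stays below 1.05 and the case is not
# flagged.  When there is no yesterday baseline the division hits zero and the
# cell is marked "NA".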
def get_degraded_num(daily_report_file):
    wb = load_workbook(daily_report_file)
    ws = wb.worksheets[0]
    degraded_num = 0
    for row_num in range(2, ws.max_row + 1):
        is_degraded = str(ws.cell(row=row_num, column=10).value)
        if is_degraded == str(True):
            degraded_num += 1
    return degraded_num


def get_summary_info(xml_folder_path, daily_report_file):
    local_summary_data_dict = {}
    xml_list = []
    for root, _, files in os.walk(xml_folder_path):
        for file in files:
            file_ext = os.path.splitext(file)[1]
            if file_ext == '.xml':
                file_path = os.path.join(root, file)
                xml_list.append(file_path)

    sorted_xml_list = sorted(xml_list)

    # calculate the average execution time over all runs
    execute_time_list = []
    for xml_file in sorted_xml_list:
        tmp_tree = ET.parse(xml_file)
        tmp_root = tmp_tree.getroot()
        for testsuite in tmp_root.findall('testsuite'):
            if testsuite.get('name') != "JSNApiSplTest":
                continue
            time_str = testsuite.get('time')
            time = float(time_str.strip())
            execute_time_list.append(time)
            break
    num = len(execute_time_list)
    if num == 0:
        today_average_execute_time = 0
    else:
        today_average_execute_time = sum(execute_time_list) / num
    tree = ET.parse(sorted_xml_list[0])
    root = tree.getroot()
    for testsuite in root.findall('testsuite'):
        if testsuite.get('name') != "JSNApiSplTest":
            continue

        total_count = int(testsuite.get('tests')) + int(testsuite.get('failures')) + int(testsuite.get('disabled')) + \
            int(testsuite.get('skipped')) + int(testsuite.get('errors'))
        local_summary_data_dict['total_count'] = total_count
        local_summary_data_dict['pass_count'] = testsuite.get('tests')
        local_summary_data_dict['failures_count'] = testsuite.get('failures')
        local_summary_data_dict['disabled_count'] = testsuite.get('disabled')
        local_summary_data_dict['skipped_count'] = testsuite.get('skipped')
        local_summary_data_dict['errors_count'] = testsuite.get('errors')

        local_summary_data_dict['date'] = testsuite.get('timestamp')
        break

    local_summary_data_dict['degraded_num'] = get_degraded_num(daily_report_file)
    local_summary_data_dict["today_average_execute_time"] = today_average_execute_time
    local_summary_data_dict["degraded percentage upper limit"] = Constants.DETERIORATION_BOUNDARY_VALUE
    return local_summary_data_dict


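# Illustrative shape of the dictionary returned by get_summary_info(); the
# count fields other than 'total_count' and 'degraded_num' are kept as the raw
# attribute strings read from the XML:
#
#   {
#       'total_count': 25, 'pass_count': '25', 'failures_count': '0',
#       'disabled_count': '0', 'skipped_count': '0', 'errors_count': '0',
#       'date': '2023-01-01T00:00:00', 'degraded_num': 0,
#       'today_average_execute_time': 1.234,
#       'degraded percentage upper limit': 0.05,
#   }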
def init_percentage_variables_1(summary_data):
    fixed_str = "0%"
    percentage = fixed_str
    percentage1 = fixed_str
    percentage2 = fixed_str
    total_num = int(summary_data['total_count'])
    if total_num > 0:
        percentage = str(round((int(summary_data["pass_count"]) / total_num) * 100, 2)) + '%'
        percentage1 = str(round((int(summary_data["failures_count"]) / total_num) * 100, 2)) + '%'
        percentage2 = str(round((int(summary_data["disabled_count"]) / total_num) * 100, 2)) + '%'

    return percentage, percentage1, percentage2


def init_percentage_variables_2(summary_data):
    fixed_str = "0%"
    percentage3 = fixed_str
    percentage4 = fixed_str
    percentage5 = fixed_str

    total_num = int(summary_data['total_count'])
    if total_num > 0:
        percentage3 = str(round((int(summary_data["skipped_count"]) / total_num) * 100, 2)) + '%'
        percentage4 = str(round((int(summary_data["errors_count"]) / total_num) * 100, 2)) + '%'
        percentage5 = str(round((int(summary_data["degraded_num"]) / total_num) * 100, 2)) + '%'
    return percentage3, percentage4, percentage5


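# For example, with total_count=25 and pass_count='24' the helpers above yield
# the percentage string "96.0%"; with total_count=0 every field stays at "0%".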
def append_summary_info(daily_report_file, summary_data):
    """
        summary info:
            total count:        percentage:
            pass count:         percentage:
            failures count:     percentage:
            disabled count:     percentage:
            skipped count:      percentage:
            errors count:       percentage:
            degraded count:     percentage:
            today average execution time(s):
            degraded percentage upper limit:
            date:
    """
    Constants.logger.info("begin to append summary information to today's report.")
    wb = load_workbook(daily_report_file)
    ws = wb.worksheets[0]
    percentage_str = 'percentage:'
    # append 3 blank rows
    blank_num = 3
    for _ in range(blank_num):
        new_row = [' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']
        ws.append(new_row)

    new_row = ['summary info:', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']
    ws.append(new_row)

    total_num = int(summary_data['total_count'])
    percentage, percentage1, percentage2 = init_percentage_variables_1(summary_data)
    percentage3, percentage4, percentage5 = init_percentage_variables_2(summary_data)
    new_row = ['total count:', total_num, ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']
    ws.append(new_row)

    new_row = ['pass count:', summary_data["pass_count"], percentage_str, percentage, ' ', ' ', ' ',
               ' ', ' ', ' ', ' ']
    ws.append(new_row)

    new_row = ['failures count:', summary_data["failures_count"], percentage_str, percentage1,
               ' ', ' ', ' ', ' ', ' ', ' ', ' ']
    ws.append(new_row)

    new_row = ['disabled count:', summary_data["disabled_count"], percentage_str, percentage2,
               ' ', ' ', ' ', ' ', ' ', ' ', ' ']
    ws.append(new_row)

    new_row = ['skipped count:', summary_data["skipped_count"], percentage_str, percentage3, ' ',
               ' ', ' ', ' ', ' ', ' ', ' ']
    ws.append(new_row)

    new_row = ['errors count:', summary_data["errors_count"], percentage_str, percentage4, ' ', ' ',
               ' ', ' ', ' ', ' ', ' ']
    ws.append(new_row)

    new_row = ['Degraded count:', int(summary_data["degraded_num"]), percentage_str,
               percentage5, ' ', ' ', ' ', ' ', ' ', ' ', ' ']
    ws.append(new_row)

    new_row = ['Today average execution time(s):', float(summary_data["today_average_execute_time"]), ' ', ' ', ' ',
               ' ', ' ', ' ', ' ', ' ', ' ']
    ws.append(new_row)

    new_row = ['Degraded percentage upper limit:', float(summary_data["degraded percentage upper limit"]), ' ', ' ',
               ' ', ' ', ' ', ' ', ' ', ' ', ' ']
    ws.append(new_row)

    new_row = ['Date:', summary_data["date"], ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']
    ws.append(new_row)

    for i in range(2, ws.max_row + 1):
        align = Alignment(horizontal='left', vertical='center', wrap_text=True, indent=0, text_rotation=0)
        ws.cell(row=i, column=2).alignment = align

    wb.save(daily_report_file)


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--work_path",
        "-w",
        required=True,
        help="work folder",
    )

    parser.add_argument(
        "--report_folder_path",
        "-o",
        required=True,
        help="folder in which the daily report is saved",
    )
    args = parser.parse_args()

    if not os.path.exists(args.work_path):
        Constants.logger.error("parameter --work_path does not exist. Please check it! work path: %s", args.work_path)
        raise RuntimeError(f"bad parameter --work_path: {args.work_path}")

    if not os.path.isdir(args.work_path):
        Constants.logger.error("parameter --work_path is not a folder. Please check it! work path: %s", args.work_path)
        raise RuntimeError(f"bad parameter --work_path: {args.work_path}")

    return args


def init_report(daily_report_file):
    Constants.logger.info("begin to initialize today's report")
    try:
        wb = load_workbook(daily_report_file)
        ws = wb.worksheets[0]

    except FileNotFoundError:
        headers_row = ['case name', 'execution status', 'case execution time-1(s)', 'case execution time-2(s)',
                       'case execution time-3(s)', 'case execution time-4(s)', 'case execution time-5(s)',
                       "today's average case time(s)", "yesterday's average case time(s)",
                       'degraded', 'notes']
        wb = Workbook()
        ws = wb.active

        ws.column_dimensions['A'].width = 35.0
        ws.column_dimensions['B'].width = 12.0
        ws.column_dimensions['C'].width = 22.0
        ws.column_dimensions['D'].width = 22.0
        ws.column_dimensions['E'].width = 22.0
        ws.column_dimensions['F'].width = 22.0
        ws.column_dimensions['G'].width = 22.0
        ws.column_dimensions['H'].width = 20.0
        ws.column_dimensions['I'].width = 20.0
        ws.column_dimensions['J'].width = 20.0
        ws.column_dimensions['K'].width = 20.0
        ws.append(headers_row)
        ws.freeze_panes = 'A2'
        wb.save(daily_report_file)
    Constants.logger.info("initialize today's report successfully")


def append_date_label(target_str, date_input):
    formatted_date = date_input.strftime('%Y%m%d')
    new_str = target_str + "_{}".format(formatted_date)

    return new_str


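# Example: append_date_label("ffi_workload_daily_report", datetime.date(2023, 5, 1))
# returns "ffi_workload_daily_report_20230501", so the helpers below name that
# day's report "ffi_workload_daily_report_20230501.xlsx".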
def get_given_date_report_name(date_input):
    report_name_head = "ffi_workload_daily_report"
    report_name_head = append_date_label(report_name_head, date_input)
    return report_name_head + ".xlsx"


def get_given_date_report_path(report_folder_path, date_input):
    report_file_name = get_given_date_report_name(date_input)
    report_file_path = os.path.join(report_folder_path, report_file_name)
    return report_file_path


if __name__ == "__main__":
    """
        command format: python3  get_ffi_workload_report.py  -w work_path -o report_folder_path
    """
    paras = get_args()
    work_path = paras.work_path
    log_path = os.path.join(work_path, "test.log")
    if os.path.exists(log_path):
        os.remove(log_path)

    Constants.logger = get_logger("workloadtest", log_path)
    Constants.logger.info("begin to get ffi workload report.")
    Constants.logger.info("execute arguments: %s", paras)
    xml_path = os.path.join(work_path, "xmls")
    if not os.path.exists(xml_path) or not os.path.isdir(xml_path):
        Constants.logger.error("bad parameter --work_path, xml folder not found. Please check it! path: %s", xml_path)
        sys.exit(Constants.RET_ERROR)
    today = datetime.date.today()
    yesterday = today - datetime.timedelta(days=1)
    today_daily_report_path = get_given_date_report_path(paras.report_folder_path, today)
    yesterday_daily_report_path = get_given_date_report_path(paras.report_folder_path, yesterday)

    if os.path.exists(today_daily_report_path):
        os.remove(today_daily_report_path)

    init_report(today_daily_report_path)
    today_test_data = get_today_test_result(xml_path)
    yesterday_test_data = get_yesterday_test_data(yesterday_daily_report_path)
    ret = generate_daily_report(today_daily_report_path, today_test_data, yesterday_test_data)
    if ret == Constants.RET_OK:
        summary_data_dict = get_summary_info(paras.work_path, today_daily_report_path)
        append_summary_info(today_daily_report_path, summary_data_dict)
        Constants.logger.info("run workload performance use cases finished. Please check details in report.")
    else:
        Constants.logger.error("run workload performance use cases failed. Please check details in log.")