@@ -24,7 +24,7 @@
 import argparse
 import configparser
 import binascii
-
+from enum import Enum
 from lib.logger import logger_init
 from lib import helper

@@ -56,6 +56,25 @@
 pipManager = None


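+# Result values mirror the per-state keys read back from avocado's
+# results.json, plus a synthetic "Total" bucket; Testsuite_status tracks the
+# overall outcome of each suite. The count_* dicts aggregate these per run.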
+class Result(Enum):
+    Testcount = "Total"
+    Pass = "pass"
+    Cancel = "cancel"
+    Error = "errors"
+    Failures = "failures"
+    Skip = "skip"
+    Warn = "warn"
+    Interrupt = "interrupt"
+
+class Testsuite_status(Enum):
+    Total = "Total"
+    Run = "Run"
+    Not_Run = "Not_Run"
+    Cant_Run = "Cant_Run"
+
+count_result = {_.value: 0 for _ in Result}
+count_testsuites_status = {_.value: 0 for _ in Testsuite_status}
+
 class TestSuite():
     """
     Class for Testsuite
@@ -75,7 +94,7 @@ def __init__(self, name, resultdir, vt_type, test=None, mux=None, args=None,
         self.test = test
         self.mux = mux
         self.args = args
-        self.run = "Not_Run"
+        self.run = Testsuite_status.Not_Run.value
         self.runsummary = None
         self.runlink = None
         if use_test_dir:
@@ -382,12 +401,14 @@ def run_test(testsuite, avocado_bin, nrunner):
         status = os.system(cmd)
         status = int(bin(int(status))[2:].zfill(16)[:-8], 2)
         if status >= 2:
-            testsuite.runstatus("Not_Run", "Command execution failed")
+            testsuite.runstatus(Testsuite_status.Not_Run.value, "Command execution failed")
+            count_testsuites_status[Testsuite_status.Not_Run.value] += 1
             return
     except Exception as error:
         logger.error("Running testsuite %s failed with error\n%s",
                      testsuite.name, error)
-        testsuite.runstatus("Not_Run", "Command execution failed")
+        testsuite.runstatus(Testsuite_status.Not_Run.value, "Command execution failed")
+        count_testsuites_status[Testsuite_status.Not_Run.value] += 1
         return
     logger.info('')
     result_link = testsuite.jobdir()
@@ -396,13 +417,17 @@ def run_test(testsuite, avocado_bin, nrunner):
         result_link += "/job.log\n"
         with open(result_json, encoding="utf-8") as filep:
             result_state = json.load(filep)
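+        # Accumulate per-category counts and a running "Total" across suites
+        # while building the per-suite result summary line.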
-        for state in ['pass', 'cancel', 'errors', 'failures', 'skip', 'warn', 'interrupt']:
+        for state in count_result:
             if state in result_state.keys():
+                count_result[Result.Testcount.value] += int(result_state[state])
+                count_result[state] += int(result_state[state])
                 result_link += "| %s %s |" % (state.upper(),
                                               str(result_state[state]))
-        testsuite.runstatus("Run", "Successfully executed", result_link)
+        testsuite.runstatus(Testsuite_status.Run.value, "Successfully executed", result_link)
+        count_testsuites_status[Testsuite_status.Run.value] += 1
     else:
-        testsuite.runstatus("Not_Run", "Unable to find job log file")
+        testsuite.runstatus(Testsuite_status.Not_Run.value, "Unable to find job log file")
+        count_testsuites_status[Testsuite_status.Not_Run.value] += 1
     return


@@ -726,8 +751,10 @@ def parse_test_config(test_config_file, avocado_bin, enable_kvm):
             Testsuites[test_suite] = TestSuite(test_suite, outputdir,
                                                args.vt_type,
                                                use_test_dir=args.testdir)
-            Testsuites[test_suite].runstatus("Cant_Run",
+            Testsuites[test_suite].runstatus(Testsuite_status.Cant_Run.value,
                                              "Config file not present")
+            count_testsuites_status[Testsuite_status.Cant_Run.value] += 1
+            Testsuites_list.append(test_suite)
             continue
         for test in test_list:
             for l_key in ['mux', 'args']:
@@ -748,12 +775,14 @@ def parse_test_config(test_config_file, avocado_bin, enable_kvm):
                                                 use_test_dir=args.testdir)
             Testsuites_list.append(str(test_suite))
             if not Testsuites[test_suite].config():
-                Testsuites[test_suite].runstatus("Cant_Run",
+                Testsuites[test_suite].runstatus(Testsuite_status.Cant_Run.value,
                                                  "Config file not present")
+                count_testsuites_status[Testsuite_status.Cant_Run.value] += 1
                 continue
     # Run Tests
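+    # Record how many suites were parsed in total, runnable or not.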
+    count_testsuites_status[Testsuite_status.Total.value] = len(Testsuites_list)
     for test_suite in Testsuites_list:
-        if not Testsuites[test_suite].run == "Cant_Run":
+        if not Testsuites[test_suite].run == Testsuite_status.Cant_Run.value:
             run_test(Testsuites[test_suite], avocado_bin, args.nrunner)
             if args.interval:
                 time.sleep(int(args.interval))
@@ -782,6 +811,15 @@ def parse_test_config(test_config_file, avocado_bin, enable_kvm):
                                                    10),
                                     Testsuites[test_suite].runsummary))
         summary_output.append(Testsuites[test_suite].runlink)
+
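+    # Append the aggregated suite-status and test-result counters to the
+    # summary logged below.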
+    summary_output.append("\nTest suites status:\n")
+    for k, val in count_testsuites_status.items():
+        summary_output.append('%s %s' % (k.upper().ljust(20), val))
+
+    summary_output.append("\nFinal count summary for tests run:\n")
+    for k, val in count_result.items():
+        summary_output.append('%s %s' % (k.upper().ljust(20), val))
+
     logger.info("\n".join(summary_output))

     if os.path.isdir("/tmp/mux/"):