Index: test/dosep.py
===================================================================
--- test/dosep.py
+++ test/dosep.py
@@ -64,44 +64,49 @@
 eTimedOut, ePassed, eFailed = 124, 0, 1
 
 output_lock = None
-test_counter = None
-total_tests = None
+directory_counter = None
+total_directories = None
+test_name_len = None
 dotest_options = None
 output_on_success = False
 
-def setup_global_variables(lock, counter, total, options):
-    global output_lock, test_counter, total_tests, dotest_options
+def setup_global_variables(lock, counter, total, name_len, options):
+    global output_lock, directory_counter, total_directories, test_name_len
+    global dotest_options
     output_lock = lock
-    test_counter = counter
-    total_tests = total
+    directory_counter = counter
+    total_directories = total
+    test_name_len = name_len
     dotest_options = options
 
 def report_test_failure(name, command, output):
     global output_lock
     with output_lock:
-        print >> sys.stderr, "\n"
+        print >> sys.stderr
         print >> sys.stderr, output
+        print >> sys.stderr, "[%s FAILED]" % name
         print >> sys.stderr, "Command invoked: %s" % ' '.join(command)
-        update_progress("FAILED", name)
+        update_progress(name)
 
 def report_test_pass(name, output):
     global output_lock, output_on_success
     with output_lock:
         if output_on_success:
-            print >> sys.stderr, "\n"
+            print >> sys.stderr
             print >> sys.stderr, output
-        update_progress("PASSED", name)
+            print >> sys.stderr, "[%s PASSED]" % name
+        update_progress(name)
 
-def update_progress(test_name, result):
-    global output_lock, test_counter, total_tests
+def update_progress(test_name=""):
+    global output_lock, directory_counter, total_directories, test_name_len
     with output_lock:
-        if test_name != None:
-            sys.stderr.write("\n[%s %s] - %d out of %d test suites processed" %
-                (result, test_name, test_counter.value, total_tests))
-        else:
-            sys.stderr.write("\n%d out of %d test suites processed" %
-                (test_counter.value, total_tests))
-        test_counter.value += 1
+        counter_len = len(str(total_directories))
+        sys.stderr.write(
+            "\r%*d out of %d directories processed - %-*s" %
+            (counter_len, directory_counter.value, total_directories,
+             test_name_len.value, test_name))
+        if len(test_name) > test_name_len.value:
+            test_name_len.value = len(test_name)
     sys.stdout.flush()
     sys.stderr.flush()
 
@@ -187,6 +192,8 @@
             if eTimedOut == exit_status:
                 timed_out.append(name)
             failed.append(name)
+    with directory_counter.get_lock():
+        directory_counter.value += 1
     return (timed_out, failed, passed, fail_sub_count, pass_sub_count)
 
 in_q = None
@@ -213,19 +220,23 @@
     for root, dirs, files in os.walk(test_subdir, topdown=False):
         test_work_items.append((root, files, test_directory, dotest_argv))
 
-    global output_lock, test_counter, total_tests
+    global output_lock, directory_counter, total_directories, test_name_len
     output_lock = multiprocessing.RLock()
-    total_tests = len(test_work_items)
-    test_counter = multiprocessing.Value('i', 0)
-    print >> sys.stderr, "Testing: %d tests, %d threads" % (total_tests, num_threads)
-    update_progress(None, None)
+    total_directories = len(test_work_items)
+    directory_counter = multiprocessing.Value('i', 0)
+    test_name_len = multiprocessing.Value('i', 0)
+    print >> sys.stderr, "Testing: %d directories, %d threads" % (
+        total_directories, num_threads)
+    update_progress()
 
     # Run the items, either in a pool (for multicore speedup) or
     # calling each individually.
     if num_threads > 1:
-        pool = multiprocessing.Pool(num_threads,
-            initializer = setup_global_variables,
-            initargs = (output_lock, test_counter, total_tests, dotest_options))
+        pool = multiprocessing.Pool(
+            num_threads,
+            initializer=setup_global_variables,
+            initargs=(output_lock, directory_counter, total_directories,
+                      test_name_len, dotest_options))
         test_results = pool.map(process_dir_worker, test_work_items)
     else:
         test_results = []
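
For readers unfamiliar with the multiprocessing pieces this patch leans on, below is a minimal self-contained sketch (not part of the patch) of the same pattern: a Pool whose workers receive a shared RLock and a Value('i') counter through initializer/initargs, increment the counter under get_lock(), and redraw a single progress line in place using a leading "\r" and "%*d" width padding. All names here (init_worker, fake_work, shared_lock, shared_counter, total_items) are hypothetical and do not appear in dosep.py.

# Standalone sketch of the Pool + shared-counter + "\r" progress pattern.
# Hypothetical names; illustrative only.
import multiprocessing
import sys
import time

shared_lock = None
shared_counter = None
total_items = 10


def init_worker(lock, counter):
    # Runs once in each worker process; stashes the shared objects in
    # module globals, mirroring setup_global_variables() in dosep.py.
    global shared_lock, shared_counter
    shared_lock = lock
    shared_counter = counter


def fake_work(item):
    time.sleep(0.1)  # stand-in for running a test directory
    # Value('i', ...) carries its own lock; get_lock() makes += atomic
    # across processes.
    with shared_counter.get_lock():
        shared_counter.value += 1
    with shared_lock:
        # "\r" rewrites the same terminal line; %*d right-pads the counter
        # so the line width stays stable as the number grows.
        width = len(str(total_items))
        sys.stderr.write("\r%*d out of %d items processed" %
                         (width, shared_counter.value, total_items))
        sys.stderr.flush()
    return item * item


if __name__ == "__main__":
    lock = multiprocessing.RLock()
    counter = multiprocessing.Value('i', 0)
    pool = multiprocessing.Pool(4, initializer=init_worker,
                                initargs=(lock, counter))
    results = pool.map(fake_work, range(total_items))
    pool.close()
    pool.join()
    sys.stderr.write("\n")

The initializer/initargs route is used because synchronization primitives and shared Values must be handed to worker processes at creation time rather than pickled through pool.map arguments, which is the same reason the patch passes directory_counter, total_directories, and test_name_len into setup_global_variables.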