[dexter] Don't generate results files by default

Dexter saves various files to a new results directory each time it is run
(including when it is run by lit tests), and there is currently no way to opt
out. This patch makes the behaviour opt-in by removing the default
`--results-directory` location: results are now only saved when
`--results-directory` is specified.
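
For illustration, a minimal standalone sketch of the opt-in pattern the patch
adopts (a hypothetical script, not Dexter's actual code; `summary.csv` merely
stands in for whichever results file a tool writes): the option defaults to
`None` and every filesystem write is guarded on it, so nothing is created
unless the user asks for it.

import argparse
import os

parser = argparse.ArgumentParser()
# Opt-in: no default location, so omitting the flag means no results files.
parser.add_argument('--results-directory',
                    type=str,
                    metavar='<directory>',
                    default=None,
                    help='directory to save results (default: none)')
options = parser.parse_args()

if options.results_directory:
    # Only touch the filesystem when the user has opted in.
    results_dir = os.path.abspath(options.results_directory)
    os.makedirs(results_dir, exist_ok=True)
    with open(os.path.join(results_dir, 'summary.csv'), 'w', newline='') as fp:
        fp.write('Test Case,Score,Error\n')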

Reviewed By: jmorse

Differential Revision: https://reviews.llvm.org/D119545
OCHyams 2022-02-11 15:45:07 +00:00
parent 52fbb786a6
commit 2bd62e0b04
5 changed files with 91 additions and 58 deletions


@@ -46,10 +46,8 @@ class TestToolBase(ToolBase):
             '--results-directory',
             type=str,
             metavar='<directory>',
-            default=os.path.abspath(
-                os.path.join(get_root_directory(), '..', 'results',
-                             datetime.now().strftime('%Y-%m-%d-%H%M-%S'))),
-            help='directory to save results')
+            default=None,
+            help='directory to save results (default: none)')
 
     def handle_options(self, defaults):
         options = self.context.options
@@ -86,14 +84,15 @@ class TestToolBase(ToolBase):
                 '<d>could not find test path</> <r>"{}"</>'.format(
                     options.test_path))
 
-        options.results_directory = os.path.abspath(options.results_directory)
-        if not os.path.isdir(options.results_directory):
-            try:
-                os.makedirs(options.results_directory, exist_ok=True)
-            except OSError as e:
-                raise Error(
-                    '<d>could not create directory</> <r>"{}"</> <y>({})</>'.
-                    format(options.results_directory, e.strerror))
+        if options.results_directory:
+            options.results_directory = os.path.abspath(options.results_directory)
+            if not os.path.isdir(options.results_directory):
+                try:
+                    os.makedirs(options.results_directory, exist_ok=True)
+                except OSError as e:
+                    raise Error(
+                        '<d>could not create directory</> <r>"{}"</> <y>({})</>'.
+                        format(options.results_directory, e.strerror))
 
     def go(self) -> ReturnCode:  # noqa
         options = self.context.options


@@ -192,9 +192,12 @@ class Tool(TestToolBase):
                 steps_changed = steps_str != prev_steps_str
                 prev_steps_str = steps_str
 
-                # If this is the first pass, or something has changed, write a text
-                # file containing verbose information on the current status.
-                if current_limit == 0 or score_difference or steps_changed:
+                # If a results directory has been specified and this is the first
+                # pass or something has changed, write a text file containing
+                # verbose information on the current status.
+                if options.results_directory and (current_limit == 0 or
+                                                  score_difference or
+                                                  steps_changed):
                     file_name = '-'.join(
                         str(s) for s in [
                             'status', test_name, '{{:0>{}}}'.format(
@@ -231,31 +234,33 @@ class Tool(TestToolBase):
                     current_bisect_pass_summary[pass_info[1]].append(
                         score_difference)
 
-            per_pass_score_path = os.path.join(
-                options.results_directory,
-                '{}-per_pass_score.csv'.format(test_name))
+            if options.results_directory:
+                per_pass_score_path = os.path.join(
+                    options.results_directory,
+                    '{}-per_pass_score.csv'.format(test_name))
 
-            with open(per_pass_score_path, mode='w', newline='') as fp:
-                writer = csv.writer(fp, delimiter=',')
-                writer.writerow(['Source File', 'Pass', 'Score'])
+                with open(per_pass_score_path, mode='w', newline='') as fp:
+                    writer = csv.writer(fp, delimiter=',')
+                    writer.writerow(['Source File', 'Pass', 'Score'])
 
-                for path, pass_, score in per_pass_score:
-                    writer.writerow([path, pass_, score])
-            self.context.o.blue('wrote "{}"\n'.format(per_pass_score_path))
+                    for path, pass_, score in per_pass_score:
+                        writer.writerow([path, pass_, score])
+                self.context.o.blue('wrote "{}"\n'.format(per_pass_score_path))
 
-            pass_summary_path = os.path.join(
-                options.results_directory, '{}-pass-summary.csv'.format(test_name))
+                pass_summary_path = os.path.join(
+                    options.results_directory, '{}-pass-summary.csv'.format(test_name))
 
-            self._write_pass_summary(pass_summary_path,
-                                     current_bisect_pass_summary)
+                self._write_pass_summary(pass_summary_path,
+                                         current_bisect_pass_summary)
 
     def _handle_results(self) -> ReturnCode:
         options = self.context.options
-        pass_summary_path = os.path.join(options.results_directory,
-                                         'overall-pass-summary.csv')
+        if options.results_directory:
+            pass_summary_path = os.path.join(options.results_directory,
+                                             'overall-pass-summary.csv')
 
-        self._write_pass_summary(pass_summary_path,
-                                 self._all_bisect_pass_summary)
+            self._write_pass_summary(pass_summary_path,
+                                     self._all_bisect_pass_summary)
 
         return ReturnCode.OK
 
     def _clang_opt_bisect_build(self, opt_bisect_limits):


@@ -176,6 +176,7 @@ class Tool(TestToolBase):
         """Returns the path to the test results directory for the test denoted
         by test_name.
         """
+        assert self.context.options.results_directory != None
         return os.path.join(self.context.options.results_directory,
                             self._get_results_basename(test_name))
 
@@ -193,22 +194,25 @@ class Tool(TestToolBase):
     def _record_steps(self, test_name, steps):
         """Write out the set of steps out to the test's .txt and .json
-        results file.
+        results file if a results directory has been specified.
         """
-        output_text_path = self._get_results_text_path(test_name)
-        with open(output_text_path, 'w') as fp:
-            self.context.o.auto(str(steps), stream=Stream(fp))
+        if self.context.options.results_directory:
+            output_text_path = self._get_results_text_path(test_name)
+            with open(output_text_path, 'w') as fp:
+                self.context.o.auto(str(steps), stream=Stream(fp))
 
-        output_dextIR_path = self._get_results_pickle_path(test_name)
-        with open(output_dextIR_path, 'wb') as fp:
-            pickle.dump(steps, fp, protocol=pickle.HIGHEST_PROTOCOL)
+            output_dextIR_path = self._get_results_pickle_path(test_name)
+            with open(output_dextIR_path, 'wb') as fp:
+                pickle.dump(steps, fp, protocol=pickle.HIGHEST_PROTOCOL)
 
     def _record_score(self, test_name, heuristic):
-        """Write out the test's heuristic score to the results .txt file.
+        """Write out the test's heuristic score to the results .txt file
+        if a results directory has been specified.
         """
-        output_text_path = self._get_results_text_path(test_name)
-        with open(output_text_path, 'a') as fp:
-            self.context.o.auto(heuristic.verbose_output, stream=Stream(fp))
+        if self.context.options.results_directory:
+            output_text_path = self._get_results_text_path(test_name)
+            with open(output_text_path, 'a') as fp:
+                self.context.o.auto(heuristic.verbose_output, stream=Stream(fp))
 
     def _record_test_and_display(self, test_case):
         """Output test case to o stream and record test case internally for
@@ -272,19 +276,20 @@ class Tool(TestToolBase):
         if num_tests != 0:
             print("@avg: ({:.4f})".format(score_sum/num_tests))
 
-        summary_path = os.path.join(options.results_directory, 'summary.csv')
-        with open(summary_path, mode='w', newline='') as fp:
-            writer = csv.writer(fp, delimiter=',')
-            writer.writerow(['Test Case', 'Score', 'Error'])
+        has_failed = lambda test: test.score < options.fail_lt or test.error
+        if any(map(has_failed, self._test_cases)):
+            return_code = ReturnCode.FAIL
 
-            for test_case in self._test_cases:
-                if (test_case.score < options.fail_lt or
-                        test_case.error is not None):
-                    return_code = ReturnCode.FAIL
+        if options.results_directory:
+            summary_path = os.path.join(options.results_directory, 'summary.csv')
+            with open(summary_path, mode='w', newline='') as fp:
+                writer = csv.writer(fp, delimiter=',')
+                writer.writerow(['Test Case', 'Score', 'Error'])
 
-                writer.writerow([
-                    test_case.name, '{:.4f}'.format(test_case.score),
-                    test_case.error
-                ])
+                for test_case in self._test_cases:
+                    writer.writerow([
+                        test_case.name, '{:.4f}'.format(test_case.score),
+                        test_case.error
+                    ])
 
         return return_code


@@ -0,0 +1,27 @@
+// Purpose:
+//      Check the `clang-opt-bisect` tool runs with --results-directory.
+//
+// RUN: true
+// RUN: %dexter_base clang-opt-bisect \
+// RUN:     --debugger %dexter_regression_test_debugger \
+// RUN:     --builder %dexter_regression_test_builder \
+// RUN:     --cflags "%dexter_regression_test_cflags" \
+// RUN:     --ldflags "%dexter_regression_test_ldflags" \
+// RUN:     --results-directory=%t \
+// RUN:     -- %s \
+// RUN:     | FileCheck %s
+//// Clean up those results files.
+// RUN: rm %t/clang-opt-bisect-results.cpp-pass-summary.csv
+// RUN: rm %t/clang-opt-bisect-results.cpp-per_pass_score.csv
+// RUN: rm %t/overall-pass-summary.csv
+// RUN: rm %t/*.dextIR
+// RUN: rm %t/*.txt
+// RUN: rmdir %t
+// CHECK: running pass 0
+// CHECK: wrote{{.*}}per_pass_score
+// CHECK: wrote{{.*}}pass-summary
+// CHECK: wrote{{.*}}overall-pass-summary
+
+int main() {
+    return 0;
+}


@@ -10,9 +10,6 @@
 // RUN:     -- %s \
 // RUN:     | FileCheck %s
 // CHECK: running pass 0
-// CHECK: wrote{{.*}}per_pass_score
-// CHECK: wrote{{.*}}pass-summary
-// CHECK: wrote{{.*}}overall-pass-summary
 
 int main() {
     return 0;