[commit: ghc] wip/perf-testsuite: Correct error causing incorrect differentiation between compiler and regular performance tests (8dc8fa1)

git at git.haskell.org
Tue Aug 29 15:12:16 UTC 2017


Repository : ssh://git@git.haskell.org/ghc

On branch  : wip/perf-testsuite
Link       : http://ghc.haskell.org/trac/ghc/changeset/8dc8fa16031230a267cf3eca2d235b9475c9f31c/ghc

>---------------------------------------------------------------

commit 8dc8fa16031230a267cf3eca2d235b9475c9f31c
Author: Jared Weakly <jweakly at pdx.edu>
Date:   Mon Aug 28 11:36:39 2017 -0700

    Correct error causing incorrect differentiation between compiler and regular performance tests


>---------------------------------------------------------------

8dc8fa16031230a267cf3eca2d235b9475c9f31c
 testsuite/driver/perf_notes.py | 16 +++++++++++++---
 testsuite/driver/testlib.py    | 23 +++++++++++------------
 2 files changed, 24 insertions(+), 15 deletions(-)

diff --git a/testsuite/driver/perf_notes.py b/testsuite/driver/perf_notes.py
index a2bc8dd..d0b229e 100644
--- a/testsuite/driver/perf_notes.py
+++ b/testsuite/driver/perf_notes.py
@@ -154,13 +154,24 @@ def _comparison(name, opts, metric, deviation, is_compiler_test):
 
     if tests == [] or test == []:
         # There are no prior metrics for this test.
-        opts.stats_range_fields[metric] = (0,0)
+        if isinstance(metric, str):
+            if metric == 'all':
+                for field in testing_metrics:
+                    opts.stats_range_fields[field] = (0,0)
+            else:
+                opts.stats_range_fields[metric] = (0,0)
+        if isinstance(metric, list):
+            for field in metric:
+                opts.stats_range_fields[field] = (0,0)
+
         return
 
+    if is_compiler_test:
+        opts.is_compiler_test = True
+
     # Compiler performance numbers change when debugging is on, making the results
     # useless and confusing. Therefore, skip if debugging is on.
     if config.compiler_debugged and is_compiler_test:
-        opts.is_compiler_test = True
         opts.skip = 1
 
     # 'all' is a shorthand to test for bytes allocated, peak megabytes allocated, and max bytes used.
@@ -193,7 +204,6 @@ def evaluate_metric(opts, test, field, deviation, contents, way):
         test_env = config.test_env
         config.accumulate_metrics.append('\t'.join([test_env, test, way, field, str(val)]))
 
-    print("WTF 01")
     if expected == 0:
         return my_passed('no prior metrics for this test')
 
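
For context, the branch added above in _comparison has to cope with the metric argument arriving either as a single string (possibly the shorthand 'all') or as a list of metric names, and it seeds stats_range_fields with a (0,0) placeholder when no prior metrics exist. Below is a minimal standalone sketch of that dispatch; the testing_metrics list and the plain dict standing in for opts.stats_range_fields are illustrative assumptions, not the driver's actual objects.

    # Sketch only: mirrors the metric-normalisation logic added to _comparison.
    # 'testing_metrics' stands in for the driver's list of known metric names.
    testing_metrics = ['bytes allocated', 'peak_megabytes_allocated', 'max_bytes_used']

    def baseline_fields(metric):
        """Fields to seed with a (0,0) range when a test has no prior metrics."""
        fields = {}
        if isinstance(metric, str):
            if metric == 'all':
                # 'all' expands to every known metric.
                for field in testing_metrics:
                    fields[field] = (0, 0)
            else:
                fields[metric] = (0, 0)
        elif isinstance(metric, list):
            for field in metric:
                fields[field] = (0, 0)
        return fields

    # A test declared with metric='all' gets a placeholder range per metric:
    print(baseline_fields('all'))
    print(baseline_fields(['bytes allocated']))
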
diff --git a/testsuite/driver/testlib.py b/testsuite/driver/testlib.py
index b32cbf5..d57dbf9 100644
--- a/testsuite/driver/testlib.py
+++ b/testsuite/driver/testlib.py
@@ -1109,11 +1109,7 @@ def simple_build(name, way, extra_hc_opts, should_fail, top_mod, link, addsuf, b
     else:
         to_do = '-c' # just compile
 
-    print("SANITY CHECK")
-    print(name)
-    print(opts.is_compiler_test)
     stats_file = name + '.comp.stats'
-    print(stats_file)
     if opts.is_compiler_test:
         extra_hc_opts += ' +RTS -V0 -t' + stats_file + ' --machine-readable -RTS'
     if backpack:
@@ -1147,13 +1143,13 @@ def simple_build(name, way, extra_hc_opts, should_fail, top_mod, link, addsuf, b
 
     # ToDo: if the sub-shell was killed by ^C, then exit
 
-    statsResult = checkStats(name, way, stats_file, opts.stats_range_fields)
-    print(stats_file)
-    print(opts.stats_range_fields)
-    print(statsResult)
+    if opts.is_compiler_test:
+        statsResult = checkStats(name, way, stats_file, opts.stats_range_fields)
+    else:
+        statsResult = passed()
 
-    if badResult(statsResult):
-        return statsResult
+    # if badResult(statsResult):
+    #     return statsResult
 
     if should_fail:
         if exit_code == 0:
@@ -1162,7 +1158,7 @@ def simple_build(name, way, extra_hc_opts, should_fail, top_mod, link, addsuf, b
         if exit_code != 0:
             return failBecause('exit code non-0')
 
-    return passed()
+    return statsResult
 
 # -----------------------------------------------------------------------------
 # Run a program and check its output
@@ -1229,7 +1225,10 @@ def simple_run(name, way, prog, extra_run_opts):
     if check_prof and not check_prof_ok(name, way):
         return failBecause('bad profile')
 
-    return checkStats(name, way, stats_file, opts.stats_range_fields)
+    if not opts.is_compiler_test:
+        return checkStats(name, way, stats_file, opts.stats_range_fields)
+    else:
+        return passed()
 
 def rts_flags(way):
     args = config.way_rts_flags.get(way, [])
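
Taken together, the testlib.py hunks route the stats check by test kind: simple_build now consults the compiler's .comp.stats file only for compiler performance tests, while simple_run checks runtime stats only for ordinary performance tests. A rough sketch of that routing follows, with checkStats and passed replaced by trivial stand-ins and the runtime stats filename assumed to be name + '.stats' (the real helpers and filenames live in testlib.py and perf_notes.py).

    # Sketch only: stand-ins for the driver's helpers, to show the routing.
    from types import SimpleNamespace

    def passed():
        return {'passFail': 'pass'}

    def checkStats(name, way, stats_file, range_fields):
        # The real checkStats parses the stats file and compares each field
        # against its expected range; here we only record what was consulted.
        return {'passFail': 'pass', 'checked': stats_file}

    def build_stats(name, way, opts):
        # Compile-time stats only make sense for compiler performance tests.
        if opts.is_compiler_test:
            return checkStats(name, way, name + '.comp.stats', opts.stats_range_fields)
        return passed()

    def run_stats(name, way, opts):
        # Runtime stats are only checked for non-compiler performance tests.
        if not opts.is_compiler_test:
            return checkStats(name, way, name + '.stats', opts.stats_range_fields)
        return passed()

    opts = SimpleNamespace(is_compiler_test=True,
                           stats_range_fields={'bytes allocated': (0, 0)})
    print(build_stats('T1234', 'normal', opts))  # consults T1234.comp.stats
    print(run_stats('T1234', 'normal', opts))    # compiler test: runtime stats skipped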


