# ----------------------------------------------------------------------------------------------------
#
# Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# ----------------------------------------------------------------------------------------------------

from outputparser import OutputParser, ValuesMatcher
import re, mx, mx_graal, os, sys, StringIO, subprocess
from os.path import isfile, join, exists

# The garbage collector enabled (via '-XX:+' + gc) by the sanity checks and benchmarks below
gc = 'UseSerialGC'

dacapoSanityWarmup = {
    'avrora':     [0, 0, 3, 6, 13],
    'batik':      [0, 0, 5, 5, 20],
    'eclipse':    [0, 0, 0, 0, 0],
    'fop':        [4, 8, 10, 20, 30],
    'h2':         [0, 0, 5, 5, 8],
    'jython':     [0, 0, 5, 10, 13],
    'luindex':    [0, 0, 5, 10, 10],
    'lusearch':   [0, 4, 5, 5, 8],
    'pmd':        [0, 0, 5, 10, 13],
    'sunflow':    [0, 2, 5, 10, 15],
    'tomcat':     [0, 0, 5, 10, 15],
    'tradebeans': [0, 0, 5, 10, 13],
    'tradesoap':  [0, 0, 5, 10, 15],
    'xalan':      [0, 0, 5, 10, 18],
}

dacapoScalaSanityWarmup = {
    'actors':     [0, 0, 2, 5, 5],
    'apparat':    [0, 0, 2, 5, 5],
    'factorie':   [0, 0, 2, 5, 5],
    'kiama':      [0, 4, 3, 13, 15],
    'scalac':     [0, 0, 5, 15, 20],
    'scaladoc':   [0, 0, 5, 15, 15],
    'scalap':     [0, 0, 5, 15, 20],
    'scalariform':[0, 0, 6, 15, 20],
    'scalatest':  [0, 0, 2, 10, 12],
    'scalaxb':    [0, 0, 5, 15, 25],
# (gdub) specs sometimes returns a non-zero value even though there is no apparent failure
    'specs':      [0, 0, 0, 0, 0],
    'tmt':        [0, 0, 3, 10, 12]
}

dacapoGateBuildLevels = {
    'avrora':     ['product', 'fastdebug', 'debug'],
    'batik':      ['product', 'fastdebug', 'debug'],
    # (lewurm): does not work with JDK8
    'eclipse':    [],
    'fop':        ['fastdebug', 'debug'],
    'h2':         ['product', 'fastdebug', 'debug'],
    'jython':     ['product', 'fastdebug', 'debug'],
    'luindex':    ['product', 'fastdebug', 'debug'],
    'lusearch':   ['product'],
    'pmd':        ['product', 'fastdebug', 'debug'],
    'sunflow':    ['fastdebug', 'debug'],
    'tomcat':     ['product', 'fastdebug', 'debug'],
    'tradebeans': ['product', 'fastdebug', 'debug'],
    # tradesoap is too unreliable for the gate, often crashing with concurrency problems:
    # http://sourceforge.net/p/dacapobench/bugs/99/
    'tradesoap':  [],
    'xalan':      ['product', 'fastdebug', 'debug'],
}

dacapoScalaGateBuildLevels = {
    'actors':     ['product', 'fastdebug', 'debug'],
    'apparat':    ['product', 'fastdebug', 'debug'],
    'factorie':   ['product', 'fastdebug', 'debug'],
    'kiama':      ['fastdebug', 'debug'],
    'scalac':     ['product', 'fastdebug', 'debug'],
    'scaladoc':   ['product', 'fastdebug', 'debug'],
    'scalap':     ['product', 'fastdebug', 'debug'],
    'scalariform':['product', 'fastdebug', 'debug'],
    'scalatest':  ['product', 'fastdebug', 'debug'],
    'scalaxb':    ['product', 'fastdebug', 'debug'],
    'specs':      ['product', 'fastdebug', 'debug'],
    'tmt':        ['product', 'fastdebug', 'debug'],
}
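
# A benchmark only runs in the gate for the VM build levels listed for it in
# the two tables above; an empty list excludes it from the gate entirely (see
# the gateBuildLevel checks in getDacapos and getScalaDacapos below).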

specjvm2008Names = [
    'startup.helloworld',
    'startup.compiler.compiler',
    'startup.compiler.sunflow',
    'startup.compress',
    'startup.crypto.aes',
    'startup.crypto.rsa',
    'startup.crypto.signverify',
    'startup.mpegaudio',
    'startup.scimark.fft',
    'startup.scimark.lu',
    'startup.scimark.monte_carlo',
    'startup.scimark.sor',
    'startup.scimark.sparse',
    'startup.serial',
    'startup.sunflow',
    'startup.xml.transform',
    'startup.xml.validation',
    'compiler.compiler',
    'compiler.sunflow',
    'compress',
    'crypto.aes',
    'crypto.rsa',
    'crypto.signverify',
    'derby',
    'mpegaudio',
    'scimark.fft.large',
    'scimark.lu.large',
    'scimark.sor.large',
    'scimark.sparse.large',
    'scimark.fft.small',
    'scimark.lu.small',
    'scimark.sor.small',
    'scimark.sparse.small',
    'scimark.monte_carlo',
    'serial',
    'sunflow',
    'xml.transform',
    'xml.validation'
]

def _noneAsEmptyList(a):
    """Returns a unchanged, or an empty list if a is None."""
    if a is None:
        return []
    return a

class SanityCheckLevel:
    Fast, Gate, Normal, Extensive, Benchmark = range(5)
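
# The warmup tables above are indexed by these levels, e.g. (a sketch):
#
#   dacapoSanityWarmup['fop'][SanityCheckLevel.Gate]  # == 8 iterations
#
# A value of 0 means the benchmark is skipped at that level (see getDacapos).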

def getSPECjbb2005(benchArgs=None):
    benchArgs = [] if benchArgs is None else benchArgs

    specjbb2005 = mx.get_env('SPECJBB2005')
    if specjbb2005 is None or not exists(join(specjbb2005, 'jbb.jar')):
        mx.abort('Please set the SPECJBB2005 environment variable to a SPECjbb2005 directory')

    score = re.compile(r"^Valid run, Score is  (?P<score>[0-9]+)$", re.MULTILINE)
    error = re.compile(r"VALIDATION ERROR")
    success = re.compile(r"^Valid run, Score is  [0-9]+$", re.MULTILINE)
    matcher = ValuesMatcher(score, {'group' : 'SPECjbb2005', 'name' : 'score', 'score' : '<score>'})
    classpath = ['jbb.jar', 'check.jar']
    return Test("SPECjbb2005", ['spec.jbb.JBBmain', '-propfile', 'SPECjbb.props'] + benchArgs, [success], [error], [matcher], vmOpts=['-Xms3g', '-XX:+' + gc, '-XX:-UseCompressedOops', '-cp', os.pathsep.join(classpath)], defaultCwd=specjbb2005)

def getSPECjbb2013(benchArgs=None):

    specjbb2013 = mx.get_env('SPECJBB2013')
    if specjbb2013 is None or not exists(join(specjbb2013, 'specjbb2013.jar')):
        mx.abort('Please set the SPECJBB2013 environment variable to a SPECjbb2013 directory')

    jops = re.compile(r"^RUN RESULT: hbIR \(max attempted\) = [0-9]+, hbIR \(settled\) = [0-9]+, max-jOPS = (?P<max>[0-9]+), critical-jOPS = (?P<critical>[0-9]+)$", re.MULTILINE)
    # TODO: no reliable failure pattern is known for SPECjbb2013 output yet, so no error regex is passed below
    success = re.compile(r"org.spec.jbb.controller: Run finished", re.MULTILINE)
    matcherMax = ValuesMatcher(jops, {'group' : 'SPECjbb2013', 'name' : 'max', 'score' : '<max>'})
    matcherCritical = ValuesMatcher(jops, {'group' : 'SPECjbb2013', 'name' : 'critical', 'score' : '<critical>'})
    return Test("SPECjbb2013", ['-jar', 'specjbb2013.jar', '-m', 'composite'] +
                _noneAsEmptyList(benchArgs), [success], [], [matcherCritical, matcherMax],
                vmOpts=['-Xmx6g', '-Xms6g', '-Xmn3g', '-XX:+UseParallelOldGC', '-XX:-UseAdaptiveSizePolicy', '-XX:-UseBiasedLocking', '-XX:-UseCompressedOops'], defaultCwd=specjbb2013)

def getSPECjbb2015(benchArgs=None):

    specjbb2015 = mx.get_env('SPECJBB2015')
    if specjbb2015 is None or not exists(join(specjbb2015, 'specjbb2015.jar')):
        mx.abort('Please set the SPECJBB2015 environment variable to a SPECjbb2015 directory')

    jops = re.compile(r"^RUN RESULT: hbIR \(max attempted\) = [0-9]+, hbIR \(settled\) = [0-9]+, max-jOPS = (?P<max>[0-9]+), critical-jOPS = (?P<critical>[0-9]+)$", re.MULTILINE)
    # TODO: no reliable failure pattern is known for SPECjbb2015 output yet, so no error regex is passed below
    success = re.compile(r"org.spec.jbb.controller: Run finished", re.MULTILINE)
    matcherMax = ValuesMatcher(jops, {'group' : 'SPECjbb2015', 'name' : 'max', 'score' : '<max>'})
    matcherCritical = ValuesMatcher(jops, {'group' : 'SPECjbb2015', 'name' : 'critical', 'score' : '<critical>'})
    return Test("SPECjbb2015", ['-jar', 'specjbb2015.jar', '-m', 'composite'] +
                _noneAsEmptyList(benchArgs), [success], [], [matcherCritical, matcherMax],
                vmOpts=['-Xmx6g', '-Xms6g', '-Xmn3g', '-XX:+UseParallelOldGC', '-XX:-UseAdaptiveSizePolicy', '-XX:-UseBiasedLocking', '-XX:-UseCompressedOops'], defaultCwd=specjbb2015)

def getSPECjvm2008(benchArgs=None):

    specjvm2008 = mx.get_env('SPECJVM2008')
    if specjvm2008 is None or not exists(join(specjvm2008, 'SPECjvm2008.jar')):
        mx.abort('Please set the SPECJVM2008 environment variable to a SPECjvm2008 directory')

    score = re.compile(r"^(Score on|Noncompliant) (?P<benchmark>[a-zA-Z0-9\._]+)( result)?: (?P<score>[0-9]+((,|\.)[0-9]+)?)( SPECjvm2008 Base)? ops/m$", re.MULTILINE)
    error = re.compile(r"^Errors in benchmark: ", re.MULTILINE)
    # The ' ops/m' at the end of the success string is important: it's how valid and invalid runs are told apart
    success = re.compile(r"^(Noncompliant c|C)omposite result: [0-9]+((,|\.)[0-9]+)?( SPECjvm2008 (Base|Peak))? ops/m$", re.MULTILINE)
    matcher = ValuesMatcher(score, {'group' : 'SPECjvm2008', 'name' : '<benchmark>', 'score' : '<score>'})

    return Test("SPECjvm2008", ['-jar', 'SPECjvm2008.jar'] + _noneAsEmptyList(benchArgs), [success], [error], [matcher], vmOpts=['-Xms3g', '-XX:+' + gc, '-XX:-UseCompressedOops'], defaultCwd=specjvm2008)
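
# Illustrative only (not called from this module): a sketch of running the
# SPECjvm2008 sanity check. The harness flags are assumptions about the
# installed kit ('-ikv' skips kit validation, '-wt'/'-it' set warmup and
# iteration seconds); 'jvmci' is assumed to be a VM name known to mx_graal.
def _exampleSpecjvm2008Check():
    t = getSPECjvm2008(['-ikv', '-wt', '120', '-it', '240'])
    if not t.test('jvmci'):
        mx.abort('SPECjvm2008 sanity check failed')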

def getDacapos(level=SanityCheckLevel.Normal, gateBuildLevel=None, dacapoArgs=None, extraVmArguments=None):
    checks = []

    for (bench, ns) in dacapoSanityWarmup.items():
        if ns[level] > 0:
            if gateBuildLevel is None or gateBuildLevel in dacapoGateBuildLevels[bench]:
                checks.append(getDacapo(bench, ['-n', str(ns[level])] + _noneAsEmptyList(dacapoArgs), extraVmArguments=extraVmArguments))

    return checks

def getDacapo(name, dacapoArgs=None, extraVmArguments=None):
    dacapo = mx.get_env('DACAPO_CP')
    if dacapo is None:
        l = mx.library('DACAPO', False)
        if l is not None:
            dacapo = l.get_path(True)
        else:
            mx.abort('DaCapo 9.12 jar file must be specified with DACAPO_CP environment variable or as DACAPO library')

    if not isfile(dacapo) or not dacapo.endswith('.jar'):
        mx.abort('Specified DaCapo jar file does not exist or is not a jar file: ' + dacapo)

    dacapoSuccess = re.compile(r"^===== DaCapo 9\.12 ([a-zA-Z0-9_]+) PASSED in ([0-9]+) msec =====", re.MULTILINE)
    dacapoFail = re.compile(r"^===== DaCapo 9\.12 ([a-zA-Z0-9_]+) FAILED (warmup|) =====", re.MULTILINE)
    dacapoTime = re.compile(r"===== DaCapo 9\.12 (?P<benchmark>[a-zA-Z0-9_]+) PASSED in (?P<time>[0-9]+) msec =====")
    dacapoTime1 = re.compile(r"===== DaCapo 9\.12 (?P<benchmark>[a-zA-Z0-9_]+) completed warmup 1 in (?P<time>[0-9]+) msec =====")

    dacapoMatcher = ValuesMatcher(dacapoTime, {'group' : 'DaCapo', 'name' : '<benchmark>', 'score' : '<time>'})
    dacapoMatcher1 = ValuesMatcher(dacapoTime1, {'group' : 'DaCapo-1stRun', 'name' : '<benchmark>', 'score' : '<time>'})

    # Use ipv4 stack for dacapos; tomcat+solaris+ipv6_interface fails (see also: JDK-8072384)
    return Test("DaCapo-" + name, ['-jar', mx._cygpathU2W(dacapo), name] + _noneAsEmptyList(dacapoArgs), [dacapoSuccess], [dacapoFail],
                [dacapoMatcher, dacapoMatcher1],
                ['-Xms2g', '-XX:+' + gc, '-XX:-UseCompressedOops', "-Djava.net.preferIPv4Stack=true", '-G:+ExitVMOnException'] +
                _noneAsEmptyList(extraVmArguments))
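
# Illustrative only: a sketch of a single DaCapo sanity run, assuming the
# DACAPO library (or DACAPO_CP) is configured and 'jvmci' is a known VM.
def _exampleDacapoCheck():
    if not getDacapo('fop', ['-n', '5']).test('jvmci'):
        mx.abort('DaCapo fop sanity check failed')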

def getScalaDacapos(level=SanityCheckLevel.Normal, gateBuildLevel=None, dacapoArgs=None, extraVmArguments=None):
    checks = []

    for (bench, ns) in dacapoScalaSanityWarmup.items():
        if ns[level] > 0:
            if gateBuildLevel is None or gateBuildLevel in dacapoScalaGateBuildLevels[bench]:
                checks.append(getScalaDacapo(bench, ['-n', str(ns[level])] + _noneAsEmptyList(dacapoArgs), extraVmArguments=extraVmArguments))

    return checks

def getScalaDacapo(name, dacapoArgs=None, extraVmArguments=None):
    dacapo = mx.get_env('DACAPO_SCALA_CP')
    if dacapo is None:
        l = mx.library('DACAPO_SCALA', False)
        if l is not None:
            dacapo = l.get_path(True)
        else:
            mx.abort('Scala DaCapo 0.1.0 jar file must be specified with DACAPO_SCALA_CP environment variable or as DACAPO_SCALA library')

    if not isfile(dacapo) or not dacapo.endswith('.jar'):
        mx.abort('Specified Scala DaCapo jar file does not exist or is not a jar file: ' + dacapo)

    dacapoSuccess = re.compile(r"^===== DaCapo 0\.1\.0(-SNAPSHOT)? ([a-zA-Z0-9_]+) PASSED in ([0-9]+) msec =====", re.MULTILINE)
    dacapoFail = re.compile(r"^===== DaCapo 0\.1\.0(-SNAPSHOT)? ([a-zA-Z0-9_]+) FAILED (warmup|) =====", re.MULTILINE)
    dacapoTime = re.compile(r"===== DaCapo 0\.1\.0(-SNAPSHOT)? (?P<benchmark>[a-zA-Z0-9_]+) PASSED in (?P<time>[0-9]+) msec =====")

    dacapoMatcher = ValuesMatcher(dacapoTime, {'group' : "Scala-DaCapo", 'name' : '<benchmark>', 'score' : '<time>'})

    return Test("Scala-DaCapo-" + name, ['-jar', mx._cygpathU2W(dacapo), name] + _noneAsEmptyList(dacapoArgs), [dacapoSuccess], [dacapoFail], [dacapoMatcher], ['-Xms2g', '-XX:+' + gc, '-XX:-UseCompressedOops'] + _noneAsEmptyList(extraVmArguments))

def getBootstraps():
    time = re.compile(r"Bootstrapping Graal\.+ in (?P<time>[0-9]+) ms( \(compiled (?P<methods>[0-9]+) methods\))?")
    scoreMatcher = ValuesMatcher(time, {'group' : 'Bootstrap', 'name' : 'BootstrapTime', 'score' : '<time>'})
    methodMatcher = ValuesMatcher(time, {'group' : 'Bootstrap', 'name' : 'BootstrapMethods', 'score' : '<methods>'})
    scoreMatcherBig = ValuesMatcher(time, {'group' : 'Bootstrap-bigHeap', 'name' : 'BootstrapTime', 'score' : '<time>'})
    methodMatcherBig = ValuesMatcher(time, {'group' : 'Bootstrap-bigHeap', 'name' : 'BootstrapMethods', 'score' : '<methods>'})

    tests = []
    tests.append(Test("Bootstrap", ['-version'], successREs=[time], scoreMatchers=[scoreMatcher, methodMatcher], ignoredVMs=['client', 'server'], benchmarkCompilationRate=False))
    tests.append(Test("Bootstrap-bigHeap", ['-version'], successREs=[time], scoreMatchers=[scoreMatcherBig, methodMatcherBig], vmOpts=['-Xms2g'], ignoredVMs=['client', 'server'], benchmarkCompilationRate=False))
    return tests
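
# Illustrative only: bootstrap timings are consumed as benchmarks rather than
# pass/fail tests ('jvmci' assumed to be a known VM).
def _exampleBootstrapTimings():
    for t in getBootstraps():
        mx.log(str(t.bench('jvmci')))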

class CTWMode:
    Full, NoInline = range(2)

def getCTW(vm, mode):
    time = re.compile(r"CompileTheWorld : Done \([0-9]+ classes, [0-9]+ methods, (?P<time>[0-9]+) ms\)")
    scoreMatcher = ValuesMatcher(time, {'group' : 'CompileTheWorld', 'name' : 'CompileTime', 'score' : '<time>'})

    jre = os.environ.get('JAVA_HOME')
    if exists(join(jre, 'jre')):
        jre = join(jre, 'jre')
    rtjar = join(jre, 'lib', 'rt.jar')

    args = ['-XX:+CompileTheWorld', '-Xbootclasspath/p:' + rtjar]
    if vm == 'jvmci':
        args += ['-XX:+BootstrapGraal']
    if mode >= CTWMode.NoInline:
        if not mx_graal.isJVMCIEnabled(vm):
            args.append('-XX:-Inline')
        else:
            args.append('-G:CompileTheWorldConfig=-Inline')

    return Test("CompileTheWorld", args, successREs=[time], scoreMatchers=[scoreMatcher], benchmarkCompilationRate=False)
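
# Illustrative only: collecting the CompileTheWorld score. Assumes JAVA_HOME
# points at a JDK that ships jre/lib/rt.jar (i.e. JDK 8 or older), as
# required by getCTW above.
def _exampleCtwScore():
    groups = getCTW('jvmci', CTWMode.NoInline).bench('jvmci')
    # e.g. {'CompileTheWorld': {'CompileTime': '123456'}} (scores are strings)
    return groups.get('CompileTheWorld', {}).get('CompileTime')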


class Tee:
    """Writes each line it is fed both to an in-memory buffer and to stdout."""
    def __init__(self):
        self.output = StringIO.StringIO()
    def eat(self, line):
        self.output.write(line)
        sys.stdout.write(line)

class Test:
    """
    Encapsulates a single program that is a sanity test and/or a benchmark.
    """
    def __init__(self, name, cmd, successREs=None, failureREs=None, scoreMatchers=None, vmOpts=None, defaultCwd=None, ignoredVMs=None, benchmarkCompilationRate=False):
        self.name = name
        self.successREs = _noneAsEmptyList(successREs)
        self.failureREs = _noneAsEmptyList(failureREs) + [re.compile(r"Exception occurred in scope: ")]
        self.scoreMatchers = _noneAsEmptyList(scoreMatchers)
        self.vmOpts = _noneAsEmptyList(vmOpts)
        self.cmd = cmd
        self.defaultCwd = defaultCwd
        self.ignoredVMs = _noneAsEmptyList(ignoredVMs)
        self.benchmarkCompilationRate = benchmarkCompilationRate
        if benchmarkCompilationRate:
            self.vmOpts = self.vmOpts + ['-XX:+CITime']

    def __str__(self):
        return self.name

    def test(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a sanity test.
        """
        if vm in self.ignoredVMs:
            return True
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()
        jvmError = re.compile(r"(?P<jvmerror>([A-Z]:|/).*[/\\]hs_err_pid[0-9]+\.log)")
        parser.addMatcher(ValuesMatcher(jvmError, {'jvmError' : '<jvmerror>'}))

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed' : '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed' : '1'}))

        tee = Tee()
        retcode = mx_graal.run_vm(self.vmOpts + _noneAsEmptyList(extraVmOpts) + self.cmd, vm, nonZeroIsFatal=False, out=tee.eat, err=subprocess.STDOUT, cwd=cwd, debugLevel=vmbuild)
        output = tee.output.getvalue()
        valueMaps = parser.parse(output)

        if len(valueMaps) == 0:
            return False

        record = {}
        for valueMap in valueMaps:
            for key, value in valueMap.items():
                if key in record and record[key] != value:
                    mx.abort('Inconsistent values returned by test matchers: ' + str(valueMaps))
                record[key] = value

        jvmErrorFile = record.get('jvmError')
        if jvmErrorFile:
            mx.log('/!\\ JVM error: dumping error log...')
            with open(jvmErrorFile, 'rb') as fp:
                mx.log(fp.read())
            os.unlink(jvmErrorFile)
            return False

        if record.get('failed') == '1':
            return False

        return retcode == 0 and record.get('passed') == '1'

    def bench(self, vm, cwd=None, extraVmOpts=None, vmbuild=None):
        """
        Run this program as a benchmark.
        """
        if vm in self.ignoredVMs:
            return {}
        if cwd is None:
            cwd = self.defaultCwd
        parser = OutputParser()

        for successRE in self.successREs:
            parser.addMatcher(ValuesMatcher(successRE, {'passed' : '1'}))
        for failureRE in self.failureREs:
            parser.addMatcher(ValuesMatcher(failureRE, {'failed' : '1'}))
        for scoreMatcher in self.scoreMatchers:
            parser.addMatcher(scoreMatcher)

        if self.benchmarkCompilationRate:
            if vm == 'jvmci':
                bps = re.compile(r"ParsedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                ibps = re.compile(r"InlinedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
                parser.addMatcher(ValuesMatcher(bps, {'group' : 'ParsedBytecodesPerSecond', 'name' : self.name, 'score' : '<rate>'}))
                parser.addMatcher(ValuesMatcher(ibps, {'group' : 'InlinedBytecodesPerSecond', 'name' : self.name, 'score' : '<rate>'}))
            else:
                ibps = re.compile(r"(?P<compiler>[\w]+) compilation speed: +(?P<rate>[0-9]+) bytes/s {standard")
                parser.addMatcher(ValuesMatcher(ibps, {'group' : 'InlinedBytecodesPerSecond', 'name' : '<compiler>:' + self.name, 'score' : '<rate>'}))
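            # Shapes of the lines matched above (inferred from the regexes;
            # illustrative, not verbatim VM output):
            #   ParsedBytecodesPerSecond@final: 1500000             (jvmci)
            #   C1 compilation speed:  123456 bytes/s {standard     (-XX:+CITime)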

        startDelim = 'START: ' + self.name
        endDelim = 'END: ' + self.name

        outputfile = os.environ.get('BENCH_OUTPUT', None)
        if outputfile:
            # Used only to debug output parsing
            with open(outputfile) as fp:
                output = fp.read()
                start = output.find(startDelim)
                end = output.find(endDelim, start)
                # A half-delimited region cannot be parsed, so bail out if either delimiter is missing
                if start == -1 or end == -1:
                    return {}
                output = output[start + len(startDelim + os.linesep): end]
                mx.log(startDelim)
                mx.log(output)
                mx.log(endDelim)
        else:
            tee = Tee()
            mx.log(startDelim)
            if mx_graal.run_vm(self.vmOpts + _noneAsEmptyList(extraVmOpts) + self.cmd, vm, nonZeroIsFatal=False, out=tee.eat, err=subprocess.STDOUT, cwd=cwd, debugLevel=vmbuild) != 0:
                mx.abort("Benchmark failed (non-zero retcode)")
            mx.log(endDelim)
            output = tee.output.getvalue()

        groups = {}
        passed = False
        for valueMap in parser.parse(output):
            assert ('name' in valueMap and 'score' in valueMap and 'group' in valueMap) or 'passed' in valueMap or 'failed' in valueMap, valueMap
            if valueMap.get('failed') == '1':
                mx.abort("Benchmark failed")
            if valueMap.get('passed') == '1':
                passed = True
            groupName = valueMap.get('group')
            if groupName:
                group = groups.setdefault(groupName, {})
                name = valueMap.get('name')
                score = valueMap.get('score')
                if name and score:
                    group[name] = score

        if not passed:
            mx.abort("Benchmark failed (not passed)")

        return groups
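
# Illustrative only: a sketch of how these helpers compose into a gate run.
# The function name is hypothetical; the real driver lives in mx_graal.
def _exampleGateChecks(vm, vmbuild):
    for t in getDacapos(level=SanityCheckLevel.Gate, gateBuildLevel=vmbuild):
        if not t.test(vm, vmbuild=vmbuild):
            mx.abort('Gate check failed: ' + str(t))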