1 /* 2 * Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "code/codeCache.hpp" 27 #include "runtime/globals.hpp" 28 #include "compiler/compilerDefinitions.hpp" 29 #include "gc/shared/gcConfig.hpp" 30 #include "utilities/defaultStream.hpp" 31 32 const char* compilertype2name_tab[compiler_number_of_types] = { 33 "", 34 "c1", 35 "c2", 36 "jvmci" 37 }; 38 39 #ifdef TIERED 40 bool CompilationModeFlag::_quick_only = false; 41 bool CompilationModeFlag::_high_only = false; 42 bool CompilationModeFlag::_high_only_quick_internal = false; 43 44 45 bool CompilationModeFlag::initialize() { 46 if (CompilationMode != NULL) { 47 if (strcmp(CompilationMode, "default") == 0) { 48 // Do nothing, just support the "default" keyword. 49 } else if (strcmp(CompilationMode, "quick-only") == 0) { 50 _quick_only = true; 51 } else if (strcmp(CompilationMode, "high-only") == 0) { 52 _high_only = true; 53 } else if (strcmp(CompilationMode, "high-only-quick-internal") == 0) { 54 _high_only_quick_internal = true; 55 } else { 56 jio_fprintf(defaultStream::error_stream(), "Unsupported compilation mode '%s', supported modes are: quick-only, high-only, high-only-quick-internal\n", CompilationMode); 57 return false; 58 } 59 } 60 return true; 61 } 62 63 #endif 64 65 #if defined(COMPILER2) 66 CompLevel CompLevel_highest_tier = CompLevel_full_optimization; // pure C2 and tiered or JVMCI and tiered 67 #elif defined(COMPILER1) 68 CompLevel CompLevel_highest_tier = CompLevel_simple; // pure C1 or JVMCI 69 #else 70 CompLevel CompLevel_highest_tier = CompLevel_none; 71 #endif 72 73 #if defined(COMPILER2) 74 CompMode Compilation_mode = CompMode_server; 75 #elif defined(COMPILER1) 76 CompMode Compilation_mode = CompMode_client; 77 #else 78 CompMode Compilation_mode = CompMode_none; 79 #endif 80 81 // Returns threshold scaled with CompileThresholdScaling 82 intx CompilerConfig::scaled_compile_threshold(intx threshold) { 83 return scaled_compile_threshold(threshold, CompileThresholdScaling); 84 } 85 86 // Returns freq_log scaled with CompileThresholdScaling 87 intx CompilerConfig::scaled_freq_log(intx freq_log) { 88 return scaled_freq_log(freq_log, CompileThresholdScaling); 89 } 90 91 // Returns threshold scaled with the value of scale. 92 // If scale < 0.0, threshold is returned without scaling. 93 intx CompilerConfig::scaled_compile_threshold(intx threshold, double scale) { 94 if (scale == 1.0 || scale < 0.0) { 95 return threshold; 96 } else { 97 return (intx)(threshold * scale); 98 } 99 } 100 101 // Returns freq_log scaled with the value of scale. 102 // Returned values are in the range of [0, InvocationCounter::number_of_count_bits + 1]. 103 // If scale < 0.0, freq_log is returned without scaling. 104 intx CompilerConfig::scaled_freq_log(intx freq_log, double scale) { 105 // Check if scaling is necessary or if negative value was specified. 106 if (scale == 1.0 || scale < 0.0) { 107 return freq_log; 108 } 109 // Check values to avoid calculating log2 of 0. 110 if (scale == 0.0 || freq_log == 0) { 111 return 0; 112 } 113 // Determine the maximum notification frequency value currently supported. 114 // The largest mask value that the interpreter/C1 can handle is 115 // of length InvocationCounter::number_of_count_bits. Mask values are always 116 // one bit shorter then the value of the notification frequency. Set 117 // max_freq_bits accordingly. 118 intx max_freq_bits = InvocationCounter::number_of_count_bits + 1; 119 intx scaled_freq = scaled_compile_threshold((intx)1 << freq_log, scale); 120 if (scaled_freq == 0) { 121 // Return 0 right away to avoid calculating log2 of 0. 122 return 0; 123 } else if (scaled_freq > nth_bit(max_freq_bits)) { 124 return max_freq_bits; 125 } else { 126 return log2_intptr(scaled_freq); 127 } 128 } 129 130 #ifdef TIERED 131 void set_client_compilation_mode() { 132 Compilation_mode = CompMode_client; 133 CompLevel_highest_tier = CompLevel_simple; 134 FLAG_SET_ERGO(TieredCompilation, false); 135 FLAG_SET_ERGO(ProfileInterpreter, false); 136 #if INCLUDE_JVMCI 137 FLAG_SET_ERGO(EnableJVMCI, false); 138 FLAG_SET_ERGO(UseJVMCICompiler, false); 139 #endif 140 #if INCLUDE_AOT 141 FLAG_SET_ERGO(UseAOT, false); 142 #endif 143 if (FLAG_IS_DEFAULT(NeverActAsServerClassMachine)) { 144 FLAG_SET_ERGO(NeverActAsServerClassMachine, true); 145 } 146 if (FLAG_IS_DEFAULT(InitialCodeCacheSize)) { 147 FLAG_SET_ERGO(InitialCodeCacheSize, 160*K); 148 } 149 if (FLAG_IS_DEFAULT(ReservedCodeCacheSize)) { 150 FLAG_SET_ERGO(ReservedCodeCacheSize, 32*M); 151 } 152 if (FLAG_IS_DEFAULT(NonProfiledCodeHeapSize)) { 153 FLAG_SET_ERGO(NonProfiledCodeHeapSize, 27*M); 154 } 155 if (FLAG_IS_DEFAULT(ProfiledCodeHeapSize)) { 156 FLAG_SET_ERGO(ProfiledCodeHeapSize, 0); 157 } 158 if (FLAG_IS_DEFAULT(NonNMethodCodeHeapSize)) { 159 FLAG_SET_ERGO(NonNMethodCodeHeapSize, 5*M); 160 } 161 if (FLAG_IS_DEFAULT(CodeCacheExpansionSize)) { 162 FLAG_SET_ERGO(CodeCacheExpansionSize, 32*K); 163 } 164 if (FLAG_IS_DEFAULT(MetaspaceSize)) { 165 FLAG_SET_ERGO(MetaspaceSize, MIN2(12*M, MaxMetaspaceSize)); 166 } 167 if (FLAG_IS_DEFAULT(MaxRAM)) { 168 // Do not use FLAG_SET_ERGO to update MaxRAM, as this will impact 169 // heap setting done based on available phys_mem (see Arguments::set_heap_size). 170 FLAG_SET_DEFAULT(MaxRAM, 1ULL*G); 171 } 172 if (FLAG_IS_DEFAULT(CompileThreshold)) { 173 FLAG_SET_ERGO(CompileThreshold, 1500); 174 } 175 if (FLAG_IS_DEFAULT(OnStackReplacePercentage)) { 176 FLAG_SET_ERGO(OnStackReplacePercentage, 933); 177 } 178 if (FLAG_IS_DEFAULT(CICompilerCount)) { 179 FLAG_SET_ERGO(CICompilerCount, 1); 180 } 181 } 182 183 bool compilation_mode_selected() { 184 return !FLAG_IS_DEFAULT(TieredCompilation) || 185 !FLAG_IS_DEFAULT(TieredStopAtLevel) || 186 !FLAG_IS_DEFAULT(UseAOT) 187 JVMCI_ONLY(|| !FLAG_IS_DEFAULT(EnableJVMCI) 188 || !FLAG_IS_DEFAULT(UseJVMCICompiler)); 189 } 190 191 void select_compilation_mode_ergonomically() { 192 #if defined(_WINDOWS) && !defined(_LP64) 193 if (FLAG_IS_DEFAULT(NeverActAsServerClassMachine)) { 194 FLAG_SET_ERGO(NeverActAsServerClassMachine, true); 195 } 196 #endif 197 if (NeverActAsServerClassMachine) { 198 set_client_compilation_mode(); 199 } 200 } 201 202 203 void CompilerConfig::set_tiered_flags() { 204 // Increase the code cache size - tiered compiles a lot more. 205 if (FLAG_IS_DEFAULT(ReservedCodeCacheSize)) { 206 FLAG_SET_ERGO(ReservedCodeCacheSize, 207 MIN2(CODE_CACHE_DEFAULT_LIMIT, (size_t)ReservedCodeCacheSize * 5)); 208 } 209 // Enable SegmentedCodeCache if TieredCompilation is enabled, ReservedCodeCacheSize >= 240M 210 // and the code cache contains at least 8 pages (segmentation disables advantage of huge pages). 211 if (FLAG_IS_DEFAULT(SegmentedCodeCache) && ReservedCodeCacheSize >= 240*M && 212 8 * CodeCache::page_size() <= ReservedCodeCacheSize) { 213 FLAG_SET_ERGO(SegmentedCodeCache, true); 214 } 215 if (!UseInterpreter) { // -Xcomp 216 Tier3InvokeNotifyFreqLog = 0; 217 Tier4InvocationThreshold = 0; 218 } 219 220 if (CompileThresholdScaling < 0) { 221 vm_exit_during_initialization("Negative value specified for CompileThresholdScaling", NULL); 222 } 223 224 if (CompilationModeFlag::disable_intermediate()) { 225 if (FLAG_IS_DEFAULT(Tier0ProfilingStartPercentage)) { 226 FLAG_SET_DEFAULT(Tier0ProfilingStartPercentage, 33); 227 } 228 } 229 230 // Scale tiered compilation thresholds. 231 // CompileThresholdScaling == 0.0 is equivalent to -Xint and leaves compilation thresholds unchanged. 232 if (!FLAG_IS_DEFAULT(CompileThresholdScaling) && CompileThresholdScaling > 0.0) { 233 FLAG_SET_ERGO(Tier0InvokeNotifyFreqLog, scaled_freq_log(Tier0InvokeNotifyFreqLog)); 234 FLAG_SET_ERGO(Tier0BackedgeNotifyFreqLog, scaled_freq_log(Tier0BackedgeNotifyFreqLog)); 235 236 FLAG_SET_ERGO(Tier3InvocationThreshold, scaled_compile_threshold(Tier3InvocationThreshold)); 237 FLAG_SET_ERGO(Tier3MinInvocationThreshold, scaled_compile_threshold(Tier3MinInvocationThreshold)); 238 FLAG_SET_ERGO(Tier3CompileThreshold, scaled_compile_threshold(Tier3CompileThreshold)); 239 FLAG_SET_ERGO(Tier3BackEdgeThreshold, scaled_compile_threshold(Tier3BackEdgeThreshold)); 240 241 // Tier2{Invocation,MinInvocation,Compile,Backedge}Threshold should be scaled here 242 // once these thresholds become supported. 243 244 FLAG_SET_ERGO(Tier2InvokeNotifyFreqLog, scaled_freq_log(Tier2InvokeNotifyFreqLog)); 245 FLAG_SET_ERGO(Tier2BackedgeNotifyFreqLog, scaled_freq_log(Tier2BackedgeNotifyFreqLog)); 246 247 FLAG_SET_ERGO(Tier3InvokeNotifyFreqLog, scaled_freq_log(Tier3InvokeNotifyFreqLog)); 248 FLAG_SET_ERGO(Tier3BackedgeNotifyFreqLog, scaled_freq_log(Tier3BackedgeNotifyFreqLog)); 249 250 FLAG_SET_ERGO(Tier23InlineeNotifyFreqLog, scaled_freq_log(Tier23InlineeNotifyFreqLog)); 251 252 FLAG_SET_ERGO(Tier4InvocationThreshold, scaled_compile_threshold(Tier4InvocationThreshold)); 253 FLAG_SET_ERGO(Tier4MinInvocationThreshold, scaled_compile_threshold(Tier4MinInvocationThreshold)); 254 FLAG_SET_ERGO(Tier4CompileThreshold, scaled_compile_threshold(Tier4CompileThreshold)); 255 FLAG_SET_ERGO(Tier4BackEdgeThreshold, scaled_compile_threshold(Tier4BackEdgeThreshold)); 256 257 if (CompilationModeFlag::disable_intermediate()) { 258 FLAG_SET_ERGO(Tier40InvocationThreshold, scaled_compile_threshold(Tier40InvocationThreshold)); 259 FLAG_SET_ERGO(Tier40MinInvocationThreshold, scaled_compile_threshold(Tier40MinInvocationThreshold)); 260 FLAG_SET_ERGO(Tier40CompileThreshold, scaled_compile_threshold(Tier40CompileThreshold)); 261 FLAG_SET_ERGO(Tier40BackEdgeThreshold, scaled_compile_threshold(Tier40BackEdgeThreshold)); 262 } 263 264 #if INCLUDE_AOT 265 if (UseAOT) { 266 FLAG_SET_ERGO(Tier3AOTInvocationThreshold, scaled_compile_threshold(Tier3AOTInvocationThreshold)); 267 FLAG_SET_ERGO(Tier3AOTMinInvocationThreshold, scaled_compile_threshold(Tier3AOTMinInvocationThreshold)); 268 FLAG_SET_ERGO(Tier3AOTCompileThreshold, scaled_compile_threshold(Tier3AOTCompileThreshold)); 269 FLAG_SET_ERGO(Tier3AOTBackEdgeThreshold, scaled_compile_threshold(Tier3AOTBackEdgeThreshold)); 270 271 if (CompilationModeFlag::disable_intermediate()) { 272 FLAG_SET_ERGO(Tier0AOTInvocationThreshold, scaled_compile_threshold(Tier0AOTInvocationThreshold)); 273 FLAG_SET_ERGO(Tier0AOTMinInvocationThreshold, scaled_compile_threshold(Tier0AOTMinInvocationThreshold)); 274 FLAG_SET_ERGO(Tier0AOTCompileThreshold, scaled_compile_threshold(Tier0AOTCompileThreshold)); 275 FLAG_SET_ERGO(Tier0AOTBackEdgeThreshold, scaled_compile_threshold(Tier0AOTBackEdgeThreshold)); 276 } 277 } 278 #endif // INCLUDE_AOT 279 } 280 } 281 282 #endif // TIERED 283 284 #if INCLUDE_JVMCI 285 void set_jvmci_specific_flags() { 286 if (UseJVMCICompiler) { 287 Compilation_mode = CompMode_server; 288 289 if (FLAG_IS_DEFAULT(TypeProfileWidth)) { 290 FLAG_SET_DEFAULT(TypeProfileWidth, 8); 291 } 292 if (FLAG_IS_DEFAULT(TypeProfileLevel)) { 293 FLAG_SET_DEFAULT(TypeProfileLevel, 0); 294 } 295 296 if (UseJVMCINativeLibrary) { 297 // SVM compiled code requires more stack space 298 if (FLAG_IS_DEFAULT(CompilerThreadStackSize)) { 299 // Duplicate logic in the implementations of os::create_thread 300 // so that we can then double the computed stack size. Once 301 // the stack size requirements of SVM are better understood, 302 // this logic can be pushed down into os::create_thread. 303 int stack_size = CompilerThreadStackSize; 304 if (stack_size == 0) { 305 stack_size = VMThreadStackSize; 306 } 307 if (stack_size != 0) { 308 FLAG_SET_DEFAULT(CompilerThreadStackSize, stack_size * 2); 309 } 310 } 311 } else { 312 #ifdef TIERED 313 if (!TieredCompilation) { 314 warning("Disabling tiered compilation with non-native JVMCI compiler is not recommended. " 315 "Turning on tiered compilation and disabling intermediate compilation levels instead. "); 316 FLAG_SET_ERGO(TieredCompilation, true); 317 if (CompilationModeFlag::normal()) { 318 CompilationModeFlag::set_high_only_quick_internal(true); 319 } 320 if (CICompilerCount < 2 && CompilationModeFlag::quick_internal()) { 321 warning("Increasing number of compiler threads for JVMCI compiler."); 322 FLAG_SET_ERGO(CICompilerCount, 2); 323 } 324 } 325 #else // TIERED 326 // Adjust the on stack replacement percentage to avoid early 327 // OSR compilations while JVMCI itself is warming up 328 if (FLAG_IS_DEFAULT(OnStackReplacePercentage)) { 329 FLAG_SET_DEFAULT(OnStackReplacePercentage, 933); 330 } 331 #endif // !TIERED 332 // JVMCI needs values not less than defaults 333 if (FLAG_IS_DEFAULT(ReservedCodeCacheSize)) { 334 FLAG_SET_DEFAULT(ReservedCodeCacheSize, MAX2(64*M, ReservedCodeCacheSize)); 335 } 336 if (FLAG_IS_DEFAULT(InitialCodeCacheSize)) { 337 FLAG_SET_DEFAULT(InitialCodeCacheSize, MAX2(16*M, InitialCodeCacheSize)); 338 } 339 if (FLAG_IS_DEFAULT(MetaspaceSize)) { 340 FLAG_SET_DEFAULT(MetaspaceSize, MIN2(MAX2(12*M, MetaspaceSize), MaxMetaspaceSize)); 341 } 342 if (FLAG_IS_DEFAULT(NewSizeThreadIncrease)) { 343 FLAG_SET_DEFAULT(NewSizeThreadIncrease, MAX2(4*K, NewSizeThreadIncrease)); 344 } 345 } // !UseJVMCINativeLibrary 346 } // UseJVMCICompiler 347 } 348 #endif // INCLUDE_JVMCI 349 350 bool CompilerConfig::check_args_consistency(bool status) { 351 // Check lower bounds of the code cache 352 // Template Interpreter code is approximately 3X larger in debug builds. 353 uint min_code_cache_size = CodeCacheMinimumUseSpace DEBUG_ONLY(* 3); 354 if (ReservedCodeCacheSize < InitialCodeCacheSize) { 355 jio_fprintf(defaultStream::error_stream(), 356 "Invalid ReservedCodeCacheSize: %dK. Must be at least InitialCodeCacheSize=%dK.\n", 357 ReservedCodeCacheSize/K, InitialCodeCacheSize/K); 358 status = false; 359 } else if (ReservedCodeCacheSize < min_code_cache_size) { 360 jio_fprintf(defaultStream::error_stream(), 361 "Invalid ReservedCodeCacheSize=%dK. Must be at least %uK.\n", ReservedCodeCacheSize/K, 362 min_code_cache_size/K); 363 status = false; 364 } else if (ReservedCodeCacheSize > CODE_CACHE_SIZE_LIMIT) { 365 // Code cache size larger than CODE_CACHE_SIZE_LIMIT is not supported. 366 jio_fprintf(defaultStream::error_stream(), 367 "Invalid ReservedCodeCacheSize=%dM. Must be at most %uM.\n", ReservedCodeCacheSize/M, 368 CODE_CACHE_SIZE_LIMIT/M); 369 status = false; 370 } else if (NonNMethodCodeHeapSize < min_code_cache_size) { 371 jio_fprintf(defaultStream::error_stream(), 372 "Invalid NonNMethodCodeHeapSize=%dK. Must be at least %uK.\n", NonNMethodCodeHeapSize/K, 373 min_code_cache_size/K); 374 status = false; 375 } 376 377 #ifdef _LP64 378 if (!FLAG_IS_DEFAULT(CICompilerCount) && !FLAG_IS_DEFAULT(CICompilerCountPerCPU) && CICompilerCountPerCPU) { 379 warning("The VM option CICompilerCountPerCPU overrides CICompilerCount."); 380 } 381 #endif 382 383 if (BackgroundCompilation && ReplayCompiles) { 384 if (!FLAG_IS_DEFAULT(BackgroundCompilation)) { 385 warning("BackgroundCompilation disabled due to ReplayCompiles option."); 386 } 387 FLAG_SET_CMDLINE(BackgroundCompilation, false); 388 } 389 390 #ifdef COMPILER2 391 if (PostLoopMultiversioning && !RangeCheckElimination) { 392 if (!FLAG_IS_DEFAULT(PostLoopMultiversioning)) { 393 warning("PostLoopMultiversioning disabled because RangeCheckElimination is disabled."); 394 } 395 FLAG_SET_CMDLINE(PostLoopMultiversioning, false); 396 } 397 if (UseCountedLoopSafepoints && LoopStripMiningIter == 0) { 398 if (!FLAG_IS_DEFAULT(UseCountedLoopSafepoints) || !FLAG_IS_DEFAULT(LoopStripMiningIter)) { 399 warning("When counted loop safepoints are enabled, LoopStripMiningIter must be at least 1 (a safepoint every 1 iteration): setting it to 1"); 400 } 401 LoopStripMiningIter = 1; 402 } else if (!UseCountedLoopSafepoints && LoopStripMiningIter > 0) { 403 if (!FLAG_IS_DEFAULT(UseCountedLoopSafepoints) || !FLAG_IS_DEFAULT(LoopStripMiningIter)) { 404 warning("Disabling counted safepoints implies no loop strip mining: setting LoopStripMiningIter to 0"); 405 } 406 LoopStripMiningIter = 0; 407 } 408 #endif // COMPILER2 409 410 if (Arguments::is_interpreter_only()) { 411 if (UseCompiler) { 412 if (!FLAG_IS_DEFAULT(UseCompiler)) { 413 warning("UseCompiler disabled due to -Xint."); 414 } 415 FLAG_SET_CMDLINE(UseCompiler, false); 416 } 417 if (ProfileInterpreter) { 418 if (!FLAG_IS_DEFAULT(ProfileInterpreter)) { 419 warning("ProfileInterpreter disabled due to -Xint."); 420 } 421 FLAG_SET_CMDLINE(ProfileInterpreter, false); 422 } 423 if (TieredCompilation) { 424 if (!FLAG_IS_DEFAULT(TieredCompilation)) { 425 warning("TieredCompilation disabled due to -Xint."); 426 } 427 FLAG_SET_CMDLINE(TieredCompilation, false); 428 } 429 #if INCLUDE_JVMCI 430 if (EnableJVMCI) { 431 if (!FLAG_IS_DEFAULT(EnableJVMCI) || !FLAG_IS_DEFAULT(UseJVMCICompiler)) { 432 warning("JVMCI Compiler disabled due to -Xint."); 433 } 434 FLAG_SET_CMDLINE(EnableJVMCI, false); 435 FLAG_SET_CMDLINE(UseJVMCICompiler, false); 436 } 437 #endif 438 } else { 439 #if INCLUDE_JVMCI 440 status = status && JVMCIGlobals::check_jvmci_flags_are_consistent(); 441 #endif 442 } 443 return status; 444 } 445 446 void CompilerConfig::ergo_initialize() { 447 if (Arguments::is_interpreter_only()) { 448 return; // Nothing to do. 449 } 450 451 #ifdef TIERED 452 if (!compilation_mode_selected()) { 453 select_compilation_mode_ergonomically(); 454 } 455 #endif 456 457 #if INCLUDE_JVMCI 458 // Check that JVMCI compiler supports selested GC. 459 // Should be done after GCConfig::initialize() was called. 460 JVMCIGlobals::check_jvmci_supported_gc(); 461 462 // Do JVMCI specific settings 463 set_jvmci_specific_flags(); 464 #endif 465 466 #ifdef TIERED 467 if (TieredCompilation) { 468 set_tiered_flags(); 469 } else 470 #endif 471 { 472 // Scale CompileThreshold 473 // CompileThresholdScaling == 0.0 is equivalent to -Xint and leaves CompileThreshold unchanged. 474 if (!FLAG_IS_DEFAULT(CompileThresholdScaling) && CompileThresholdScaling > 0.0) { 475 FLAG_SET_ERGO(CompileThreshold, scaled_compile_threshold(CompileThreshold)); 476 } 477 } 478 479 if (UseOnStackReplacement && !UseLoopCounter) { 480 warning("On-stack-replacement requires loop counters; enabling loop counters"); 481 FLAG_SET_DEFAULT(UseLoopCounter, true); 482 } 483 484 #ifdef COMPILER2 485 if (!EliminateLocks) { 486 EliminateNestedLocks = false; 487 } 488 if (!Inline) { 489 IncrementalInline = false; 490 } 491 #ifndef PRODUCT 492 if (!IncrementalInline) { 493 AlwaysIncrementalInline = false; 494 } 495 if (PrintIdealGraphLevel > 0) { 496 FLAG_SET_ERGO(PrintIdealGraph, true); 497 } 498 #endif 499 if (!UseTypeSpeculation && FLAG_IS_DEFAULT(TypeProfileLevel)) { 500 // nothing to use the profiling, turn if off 501 FLAG_SET_DEFAULT(TypeProfileLevel, 0); 502 } 503 if (!FLAG_IS_DEFAULT(OptoLoopAlignment) && FLAG_IS_DEFAULT(MaxLoopPad)) { 504 FLAG_SET_DEFAULT(MaxLoopPad, OptoLoopAlignment-1); 505 } 506 if (FLAG_IS_DEFAULT(LoopStripMiningIterShortLoop)) { 507 // blind guess 508 LoopStripMiningIterShortLoop = LoopStripMiningIter / 10; 509 } 510 #endif // COMPILER2 511 }