/*
 * Copyright (c) 1999, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "opto/c2compiler.hpp" 27 #include "opto/compile.hpp" 28 #include "opto/optoreg.hpp" 29 #include "opto/output.hpp" 30 #include "opto/runtime.hpp" 31 32 // register information defined by ADLC 33 extern const char register_save_policy[]; 34 extern const int register_save_type[]; 35 36 const char* C2Compiler::retry_no_subsuming_loads() { 37 return "retry without subsuming loads"; 38 } 39 const char* C2Compiler::retry_no_escape_analysis() { 40 return "retry without escape analysis"; 41 } 42 const char* C2Compiler::retry_class_loading_during_parsing() { 43 return "retry class loading during parsing"; 44 } 45 bool C2Compiler::init_c2_runtime() { 46 47 // Check assumptions used while running ADLC 48 Compile::adlc_verification(); 49 assert(REG_COUNT <= ConcreteRegisterImpl::number_of_registers, "incompatible register counts"); 50 51 for (int i = 0; i < ConcreteRegisterImpl::number_of_registers ; i++ ) { 52 OptoReg::vm2opto[i] = OptoReg::Bad; 53 } 54 55 for( OptoReg::Name i=OptoReg::Name(0); i<OptoReg::Name(REG_COUNT); i = OptoReg::add(i,1) ) { 56 VMReg r = OptoReg::as_VMReg(i); 57 if (r->is_valid()) { 58 OptoReg::vm2opto[r->value()] = i; 59 } 60 } 61 62 // Check that runtime and architecture description agree on callee-saved-floats 63 bool callee_saved_floats = false; 64 for( OptoReg::Name i=OptoReg::Name(0); i<OptoReg::Name(_last_Mach_Reg); i = OptoReg::add(i,1) ) { 65 // Is there a callee-saved float or double? 
66 if( register_save_policy[i] == 'E' /* callee-saved */ && 67 (register_save_type[i] == Op_RegF || register_save_type[i] == Op_RegD) ) { 68 callee_saved_floats = true; 69 } 70 } 71 72 DEBUG_ONLY( Node::init_NodeProperty(); ) 73 74 Compile::pd_compiler2_init(); 75 76 CompilerThread* thread = CompilerThread::current(); 77 78 HandleMark handle_mark(thread); 79 return OptoRuntime::generate(thread->env()); 80 } 81 82 void C2Compiler::initialize() { 83 // The first compiler thread that gets here will initialize the 84 // small amount of global state (and runtime stubs) that C2 needs. 85 86 // There is a race possible once at startup and then we're fine 87 88 // Note that this is being called from a compiler thread not the 89 // main startup thread. 90 if (should_perform_init()) { 91 bool successful = C2Compiler::init_c2_runtime(); 92 int new_state = (successful) ? initialized : failed; 93 set_state(new_state); 94 } 95 } 96 97 void C2Compiler::compile_method(ciEnv* env, ciMethod* target, int entry_bci, DirectiveSet* directive) { 98 assert(is_initialized(), "Compiler thread must be initialized"); 99 100 bool subsume_loads = SubsumeLoads; 101 bool do_escape_analysis = DoEscapeAnalysis && !env->should_retain_local_variables(); 102 bool eliminate_boxing = EliminateAutoBox; 103 104 while (!env->failing()) { 105 // Attempt to compile while subsuming loads into machine instructions. 106 Compile C(env, this, target, entry_bci, subsume_loads, do_escape_analysis, eliminate_boxing, directive); 107 108 // Check result and retry if appropriate. 
109 if (C.failure_reason() != NULL) { 110 if (C.failure_reason_is(retry_class_loading_during_parsing())) { 111 env->report_failure(C.failure_reason()); 112 continue; // retry 113 } 114 if (C.failure_reason_is(retry_no_subsuming_loads())) { 115 assert(subsume_loads, "must make progress"); 116 subsume_loads = false; 117 env->report_failure(C.failure_reason()); 118 continue; // retry 119 } 120 if (C.failure_reason_is(retry_no_escape_analysis())) { 121 assert(do_escape_analysis, "must make progress"); 122 do_escape_analysis = false; 123 env->report_failure(C.failure_reason()); 124 continue; // retry 125 } 126 if (C.has_boxed_value()) { 127 // Recompile without boxing elimination regardless failure reason. 128 assert(eliminate_boxing, "must make progress"); 129 eliminate_boxing = false; 130 env->report_failure(C.failure_reason()); 131 continue; // retry 132 } 133 // Pass any other failure reason up to the ciEnv. 134 // Note that serious, irreversible failures are already logged 135 // on the ciEnv via env->record_method_not_compilable(). 136 env->record_failure(C.failure_reason()); 137 } 138 if (StressRecompilation) { 139 if (subsume_loads) { 140 subsume_loads = false; 141 continue; // retry 142 } 143 if (do_escape_analysis) { 144 do_escape_analysis = false; 145 continue; // retry 146 } 147 } 148 149 // print inlining for last compilation only 150 C.dump_print_inlining(); 151 152 // No retry; just break the loop. 
153 break; 154 } 155 } 156 157 void C2Compiler::print_timers() { 158 Compile::print_timers(); 159 } 160 161 bool C2Compiler::is_intrinsic_supported(methodHandle method, bool is_virtual) { 162 vmIntrinsics::ID id = method->intrinsic_id(); 163 assert(id != vmIntrinsics::_none, "must be a VM intrinsic"); 164 165 if (id < vmIntrinsics::FIRST_ID || id > vmIntrinsics::LAST_COMPILER_INLINE) { 166 return false; 167 } 168 169 // Only Object.hashCode and Object.clone intrinsics implement also a virtual 170 // dispatch because calling both methods is expensive but both methods are 171 // frequently overridden. All other intrinsics implement only a non-virtual 172 // dispatch. 173 if (is_virtual) { 174 switch (id) { 175 case vmIntrinsics::_hashCode: 176 case vmIntrinsics::_clone: 177 break; 178 default: 179 return false; 180 } 181 } 182 183 switch (id) { 184 case vmIntrinsics::_compareTo: 185 if (!Matcher::match_rule_supported(Op_StrComp)) return false; 186 break; 187 case vmIntrinsics::_equals: 188 if (!Matcher::match_rule_supported(Op_StrEquals)) return false; 189 break; 190 case vmIntrinsics::_equalsC: 191 if (!Matcher::match_rule_supported(Op_AryEq)) return false; 192 break; 193 case vmIntrinsics::_copyMemory: 194 if (StubRoutines::unsafe_arraycopy() == NULL) return false; 195 break; 196 case vmIntrinsics::_encodeISOArray: 197 if (!Matcher::match_rule_supported(Op_EncodeISOArray)) return false; 198 break; 199 case vmIntrinsics::_bitCount_i: 200 if (!Matcher::match_rule_supported(Op_PopCountI)) return false; 201 break; 202 case vmIntrinsics::_bitCount_l: 203 if (!Matcher::match_rule_supported(Op_PopCountL)) return false; 204 break; 205 case vmIntrinsics::_numberOfLeadingZeros_i: 206 if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return false; 207 break; 208 case vmIntrinsics::_numberOfLeadingZeros_l: 209 if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return false; 210 break; 211 case vmIntrinsics::_numberOfTrailingZeros_i: 212 if 
(!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return false; 213 break; 214 case vmIntrinsics::_numberOfTrailingZeros_l: 215 if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return false; 216 break; 217 case vmIntrinsics::_reverseBytes_c: 218 if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return false; 219 break; 220 case vmIntrinsics::_reverseBytes_s: 221 if (!Matcher::match_rule_supported(Op_ReverseBytesS)) return false; 222 break; 223 case vmIntrinsics::_reverseBytes_i: 224 if (!Matcher::match_rule_supported(Op_ReverseBytesI)) return false; 225 break; 226 case vmIntrinsics::_reverseBytes_l: 227 if (!Matcher::match_rule_supported(Op_ReverseBytesL)) return false; 228 break; 229 case vmIntrinsics::_compareAndSwapObject: 230 #ifdef _LP64 231 if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return false; 232 #endif 233 break; 234 case vmIntrinsics::_compareAndSwapLong: 235 if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return false; 236 break; 237 case vmIntrinsics::_getAndAddInt: 238 if (!Matcher::match_rule_supported(Op_GetAndAddI)) return false; 239 break; 240 case vmIntrinsics::_getAndAddLong: 241 if (!Matcher::match_rule_supported(Op_GetAndAddL)) return false; 242 break; 243 case vmIntrinsics::_getAndSetInt: 244 if (!Matcher::match_rule_supported(Op_GetAndSetI)) return false; 245 break; 246 case vmIntrinsics::_getAndSetLong: 247 if (!Matcher::match_rule_supported(Op_GetAndSetL)) return false; 248 break; 249 case vmIntrinsics::_getAndSetObject: 250 #ifdef _LP64 251 if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return false; 252 if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return false; 253 break; 254 #else 255 if (!Matcher::match_rule_supported(Op_GetAndSetP)) return false; 256 break; 257 #endif 258 case vmIntrinsics::_incrementExactI: 259 case vmIntrinsics::_addExactI: 260 if (!Matcher::match_rule_supported(Op_OverflowAddI)) return false; 261 
break; 262 case vmIntrinsics::_incrementExactL: 263 case vmIntrinsics::_addExactL: 264 if (!Matcher::match_rule_supported(Op_OverflowAddL)) return false; 265 break; 266 case vmIntrinsics::_decrementExactI: 267 case vmIntrinsics::_subtractExactI: 268 if (!Matcher::match_rule_supported(Op_OverflowSubI)) return false; 269 break; 270 case vmIntrinsics::_decrementExactL: 271 case vmIntrinsics::_subtractExactL: 272 if (!Matcher::match_rule_supported(Op_OverflowSubL)) return false; 273 break; 274 case vmIntrinsics::_negateExactI: 275 if (!Matcher::match_rule_supported(Op_OverflowSubI)) return false; 276 break; 277 case vmIntrinsics::_negateExactL: 278 if (!Matcher::match_rule_supported(Op_OverflowSubL)) return false; 279 break; 280 case vmIntrinsics::_multiplyExactI: 281 if (!Matcher::match_rule_supported(Op_OverflowMulI)) return false; 282 break; 283 case vmIntrinsics::_multiplyExactL: 284 if (!Matcher::match_rule_supported(Op_OverflowMulL)) return false; 285 break; 286 case vmIntrinsics::_getCallerClass: 287 if (SystemDictionary::reflect_CallerSensitive_klass() == NULL) return false; 288 break; 289 case vmIntrinsics::_hashCode: 290 case vmIntrinsics::_identityHashCode: 291 case vmIntrinsics::_getClass: 292 case vmIntrinsics::_dsin: 293 case vmIntrinsics::_dcos: 294 case vmIntrinsics::_dtan: 295 case vmIntrinsics::_dabs: 296 case vmIntrinsics::_datan2: 297 case vmIntrinsics::_dsqrt: 298 case vmIntrinsics::_dexp: 299 case vmIntrinsics::_dlog: 300 case vmIntrinsics::_dlog10: 301 case vmIntrinsics::_dpow: 302 case vmIntrinsics::_min: 303 case vmIntrinsics::_max: 304 case vmIntrinsics::_arraycopy: 305 case vmIntrinsics::_indexOf: 306 case vmIntrinsics::_getObject: 307 case vmIntrinsics::_getBoolean: 308 case vmIntrinsics::_getByte: 309 case vmIntrinsics::_getShort: 310 case vmIntrinsics::_getChar: 311 case vmIntrinsics::_getInt: 312 case vmIntrinsics::_getLong: 313 case vmIntrinsics::_getFloat: 314 case vmIntrinsics::_getDouble: 315 case vmIntrinsics::_putObject: 316 case 
vmIntrinsics::_putBoolean: 317 case vmIntrinsics::_putByte: 318 case vmIntrinsics::_putShort: 319 case vmIntrinsics::_putChar: 320 case vmIntrinsics::_putInt: 321 case vmIntrinsics::_putLong: 322 case vmIntrinsics::_putFloat: 323 case vmIntrinsics::_putDouble: 324 case vmIntrinsics::_getByte_raw: 325 case vmIntrinsics::_getShort_raw: 326 case vmIntrinsics::_getChar_raw: 327 case vmIntrinsics::_getInt_raw: 328 case vmIntrinsics::_getLong_raw: 329 case vmIntrinsics::_getFloat_raw: 330 case vmIntrinsics::_getDouble_raw: 331 case vmIntrinsics::_getAddress_raw: 332 case vmIntrinsics::_putByte_raw: 333 case vmIntrinsics::_putShort_raw: 334 case vmIntrinsics::_putChar_raw: 335 case vmIntrinsics::_putInt_raw: 336 case vmIntrinsics::_putLong_raw: 337 case vmIntrinsics::_putFloat_raw: 338 case vmIntrinsics::_putDouble_raw: 339 case vmIntrinsics::_putAddress_raw: 340 case vmIntrinsics::_getObjectVolatile: 341 case vmIntrinsics::_getBooleanVolatile: 342 case vmIntrinsics::_getByteVolatile: 343 case vmIntrinsics::_getShortVolatile: 344 case vmIntrinsics::_getCharVolatile: 345 case vmIntrinsics::_getIntVolatile: 346 case vmIntrinsics::_getLongVolatile: 347 case vmIntrinsics::_getFloatVolatile: 348 case vmIntrinsics::_getDoubleVolatile: 349 case vmIntrinsics::_putObjectVolatile: 350 case vmIntrinsics::_putBooleanVolatile: 351 case vmIntrinsics::_putByteVolatile: 352 case vmIntrinsics::_putShortVolatile: 353 case vmIntrinsics::_putCharVolatile: 354 case vmIntrinsics::_putIntVolatile: 355 case vmIntrinsics::_putLongVolatile: 356 case vmIntrinsics::_putFloatVolatile: 357 case vmIntrinsics::_putDoubleVolatile: 358 case vmIntrinsics::_getShortUnaligned: 359 case vmIntrinsics::_getCharUnaligned: 360 case vmIntrinsics::_getIntUnaligned: 361 case vmIntrinsics::_getLongUnaligned: 362 case vmIntrinsics::_putShortUnaligned: 363 case vmIntrinsics::_putCharUnaligned: 364 case vmIntrinsics::_putIntUnaligned: 365 case vmIntrinsics::_putLongUnaligned: 366 case vmIntrinsics::_compareAndSwapInt: 
367 case vmIntrinsics::_putOrderedObject: 368 case vmIntrinsics::_putOrderedInt: 369 case vmIntrinsics::_putOrderedLong: 370 case vmIntrinsics::_loadFence: 371 case vmIntrinsics::_storeFence: 372 case vmIntrinsics::_fullFence: 373 case vmIntrinsics::_currentThread: 374 case vmIntrinsics::_isInterrupted: 375 #ifdef TRACE_HAVE_INTRINSICS 376 case vmIntrinsics::_classID: 377 case vmIntrinsics::_threadID: 378 case vmIntrinsics::_counterTime: 379 #endif 380 case vmIntrinsics::_currentTimeMillis: 381 case vmIntrinsics::_nanoTime: 382 case vmIntrinsics::_allocateInstance: 383 case vmIntrinsics::_newArray: 384 case vmIntrinsics::_getLength: 385 case vmIntrinsics::_copyOf: 386 case vmIntrinsics::_copyOfRange: 387 case vmIntrinsics::_clone: 388 case vmIntrinsics::_isAssignableFrom: 389 case vmIntrinsics::_isInstance: 390 case vmIntrinsics::_getModifiers: 391 case vmIntrinsics::_isInterface: 392 case vmIntrinsics::_isArray: 393 case vmIntrinsics::_isPrimitive: 394 case vmIntrinsics::_getSuperclass: 395 case vmIntrinsics::_getClassAccessFlags: 396 case vmIntrinsics::_floatToRawIntBits: 397 case vmIntrinsics::_floatToIntBits: 398 case vmIntrinsics::_intBitsToFloat: 399 case vmIntrinsics::_doubleToRawLongBits: 400 case vmIntrinsics::_doubleToLongBits: 401 case vmIntrinsics::_longBitsToDouble: 402 case vmIntrinsics::_Reference_get: 403 case vmIntrinsics::_Class_cast: 404 case vmIntrinsics::_aescrypt_encryptBlock: 405 case vmIntrinsics::_aescrypt_decryptBlock: 406 case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt: 407 case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt: 408 case vmIntrinsics::_sha_implCompress: 409 case vmIntrinsics::_sha2_implCompress: 410 case vmIntrinsics::_sha5_implCompress: 411 case vmIntrinsics::_digestBase_implCompressMB: 412 case vmIntrinsics::_multiplyToLen: 413 case vmIntrinsics::_squareToLen: 414 case vmIntrinsics::_mulAdd: 415 case vmIntrinsics::_montgomeryMultiply: 416 case vmIntrinsics::_montgomerySquare: 417 case 
vmIntrinsics::_ghash_processBlocks: 418 case vmIntrinsics::_updateCRC32: 419 case vmIntrinsics::_updateBytesCRC32: 420 case vmIntrinsics::_updateByteBufferCRC32: 421 case vmIntrinsics::_updateBytesCRC32C: 422 case vmIntrinsics::_updateDirectByteBufferCRC32C: 423 case vmIntrinsics::_updateBytesAdler32: 424 case vmIntrinsics::_updateByteBufferAdler32: 425 case vmIntrinsics::_profileBoolean: 426 case vmIntrinsics::_isCompileConstant: 427 break; 428 default: 429 return false; 430 } 431 return true; 432 } 433 434 int C2Compiler::initial_code_buffer_size() { 435 assert(SegmentedCodeCache, "Should be only used with a segmented code cache"); 436 return Compile::MAX_inst_size + Compile::MAX_locs_size + initial_const_capacity; 437 }