src/share/vm/opto/library_call.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File 8130832 Sdiff src/share/vm/opto

src/share/vm/opto/library_call.cpp

Print this page




 287   Node* get_state_from_sha_object(Node *sha_object);
 288   Node* get_state_from_sha5_object(Node *sha_object);
 289   Node* inline_digestBase_implCompressMB_predicate(int predicate);
 290   bool inline_encodeISOArray();
 291   bool inline_updateCRC32();
 292   bool inline_updateBytesCRC32();
 293   bool inline_updateByteBufferCRC32();
 294   Node* get_table_from_crc32c_class(ciInstanceKlass *crc32c_class);
 295   bool inline_updateBytesCRC32C();
 296   bool inline_updateDirectByteBufferCRC32C();
 297   bool inline_multiplyToLen();
 298   bool inline_squareToLen();
 299   bool inline_mulAdd();
 300   bool inline_montgomeryMultiply();
 301   bool inline_montgomerySquare();
 302 
 303   bool inline_profileBoolean();
 304   bool inline_isCompileConstant();
 305 };
 306 
 307 
//---------------------------make_vm_intrinsic----------------------------
// Returns a CallGenerator for the intrinsic implementation of method 'm',
// or NULL if no intrinsic can be used: the intrinsic was disabled on the
// command line (globally via -XX:DisableIntrinsic or per method via a
// CompileCommand option), the method is not loaded, a virtual call was
// requested for an intrinsic that has no virtual flavor, the controlling
// Inline*/Use* flag is off, or the Matcher does not support the required
// platform rule.
CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
  vmIntrinsics::ID id = m->intrinsic_id();
  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");

  ccstr disable_intr = NULL;

  // NOTE: strstr performs a substring match, so an intrinsic name that is
  // a prefix of another (e.g. "_equals" vs. "_equalsC") disables both.
  if ((DisableIntrinsic[0] != '\0'
       && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
      (method_has_option_value("DisableIntrinsic", disable_intr)
       && strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)) {
    // disabled by a user request on the command line:
    // example: -XX:DisableIntrinsic=_hashCode,_getClass
    return NULL;
  }

  if (!m->is_loaded()) {
    // do not attempt to inline unloaded methods
    return NULL;
  }

  // Only a few intrinsics implement a virtual dispatch.
  // They are expensive calls which are also frequently overridden.
  if (is_virtual) {
    switch (id) {
    case vmIntrinsics::_hashCode:
    case vmIntrinsics::_clone:
      // OK, Object.hashCode and Object.clone intrinsics come in both flavors
      break;
    default:
      return NULL;
    }
  }

  // -XX:-InlineNatives disables nearly all intrinsics:
  if (!InlineNatives) {
    switch (id) {
    case vmIntrinsics::_indexOf:
    case vmIntrinsics::_compareTo:
    case vmIntrinsics::_equals:
    case vmIntrinsics::_equalsC:
    case vmIntrinsics::_getAndAddInt:
    case vmIntrinsics::_getAndAddLong:
    case vmIntrinsics::_getAndSetInt:
    case vmIntrinsics::_getAndSetLong:
    case vmIntrinsics::_getAndSetObject:
    case vmIntrinsics::_loadFence:
    case vmIntrinsics::_storeFence:
    case vmIntrinsics::_fullFence:
      break;  // InlineNatives does not control String.compareTo
    case vmIntrinsics::_Reference_get:
      break;  // InlineNatives does not control Reference.get
    default:
      return NULL;
    }
  }

  // Number of predicated fast paths the intrinsic needs (0 = none) and
  // whether its call generator must perform a virtual dispatch.
  int predicates = 0;
  bool does_virtual_dispatch = false;

  switch (id) {
  case vmIntrinsics::_compareTo:
    if (!SpecialStringCompareTo)  return NULL;
    if (!Matcher::match_rule_supported(Op_StrComp))  return NULL;
    break;
  case vmIntrinsics::_indexOf:
    if (!SpecialStringIndexOf)  return NULL;
    break;
  case vmIntrinsics::_equals:
    if (!SpecialStringEquals)  return NULL;
    if (!Matcher::match_rule_supported(Op_StrEquals))  return NULL;
    break;
  case vmIntrinsics::_equalsC:
    if (!SpecialArraysEquals)  return NULL;
    if (!Matcher::match_rule_supported(Op_AryEq))  return NULL;
    break;
  case vmIntrinsics::_arraycopy:
    if (!InlineArrayCopy)  return NULL;
    break;
  case vmIntrinsics::_copyMemory:
    if (StubRoutines::unsafe_arraycopy() == NULL)  return NULL;
    if (!InlineArrayCopy)  return NULL;
    break;
  case vmIntrinsics::_hashCode:
    if (!InlineObjectHash)  return NULL;
    does_virtual_dispatch = true;
    break;
  case vmIntrinsics::_clone:
    does_virtual_dispatch = true;
    // fall through: Object.clone shares the copy checks below with _copyOf
  case vmIntrinsics::_copyOf:
  case vmIntrinsics::_copyOfRange:
    if (!InlineObjectCopy)  return NULL;
    // These also use the arraycopy intrinsic mechanism:
    if (!InlineArrayCopy)  return NULL;
    break;
  case vmIntrinsics::_encodeISOArray:
    if (!SpecialEncodeISOArray)  return NULL;
    if (!Matcher::match_rule_supported(Op_EncodeISOArray))  return NULL;
    break;
  case vmIntrinsics::_checkIndex:
    // We do not intrinsify this.  The optimizer does fine with it.
    return NULL;

  case vmIntrinsics::_getCallerClass:
    if (!InlineReflectionGetCallerClass)  return NULL;
    if (SystemDictionary::reflect_CallerSensitive_klass() == NULL)  return NULL;
    break;

  case vmIntrinsics::_bitCount_i:
    if (!Matcher::match_rule_supported(Op_PopCountI)) return NULL;
    break;

  case vmIntrinsics::_bitCount_l:
    if (!Matcher::match_rule_supported(Op_PopCountL)) return NULL;
    break;

  case vmIntrinsics::_numberOfLeadingZeros_i:
    if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return NULL;
    break;

  case vmIntrinsics::_numberOfLeadingZeros_l:
    if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return NULL;
    break;

  case vmIntrinsics::_numberOfTrailingZeros_i:
    if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return NULL;
    break;

  case vmIntrinsics::_numberOfTrailingZeros_l:
    if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return NULL;
    break;

  case vmIntrinsics::_reverseBytes_c:
    if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return NULL;
    break;
  case vmIntrinsics::_reverseBytes_s:
    if (!Matcher::match_rule_supported(Op_ReverseBytesS))  return NULL;
    break;
  case vmIntrinsics::_reverseBytes_i:
    if (!Matcher::match_rule_supported(Op_ReverseBytesI))  return NULL;
    break;
  case vmIntrinsics::_reverseBytes_l:
    if (!Matcher::match_rule_supported(Op_ReverseBytesL))  return NULL;
    break;

  case vmIntrinsics::_Reference_get:
    // Use the intrinsic version of Reference.get() so that the value in
    // the referent field can be registered by the G1 pre-barrier code.
    // Also add memory barrier to prevent commoning reads from this field
    // across safepoint since GC can change it value.
    break;

  case vmIntrinsics::_compareAndSwapObject:
#ifdef _LP64
    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return NULL;
#endif
    break;

  case vmIntrinsics::_compareAndSwapLong:
    if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return NULL;
    break;

  case vmIntrinsics::_getAndAddInt:
    if (!Matcher::match_rule_supported(Op_GetAndAddI)) return NULL;
    break;

  case vmIntrinsics::_getAndAddLong:
    if (!Matcher::match_rule_supported(Op_GetAndAddL)) return NULL;
    break;

  case vmIntrinsics::_getAndSetInt:
    if (!Matcher::match_rule_supported(Op_GetAndSetI)) return NULL;
    break;

  case vmIntrinsics::_getAndSetLong:
    if (!Matcher::match_rule_supported(Op_GetAndSetL)) return NULL;
    break;

  case vmIntrinsics::_getAndSetObject:
#ifdef _LP64
    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return NULL;
    if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return NULL;
    break;
#else
    if (!Matcher::match_rule_supported(Op_GetAndSetP)) return NULL;
    break;
#endif

  case vmIntrinsics::_aescrypt_encryptBlock:
  case vmIntrinsics::_aescrypt_decryptBlock:
    if (!UseAESIntrinsics) return NULL;
    break;

  case vmIntrinsics::_multiplyToLen:
    if (!UseMultiplyToLenIntrinsic) return NULL;
    break;

  case vmIntrinsics::_squareToLen:
    if (!UseSquareToLenIntrinsic) return NULL;
    break;

  case vmIntrinsics::_mulAdd:
    if (!UseMulAddIntrinsic) return NULL;
    break;

  case vmIntrinsics::_montgomeryMultiply:
     if (!UseMontgomeryMultiplyIntrinsic) return NULL;
    break;
  case vmIntrinsics::_montgomerySquare:
     if (!UseMontgomerySquareIntrinsic) return NULL;
    break;

  case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
  case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
    if (!UseAESIntrinsics) return NULL;
    // these two require the predicated logic
    predicates = 1;
    break;

  case vmIntrinsics::_sha_implCompress:
    if (!UseSHA1Intrinsics) return NULL;
    break;

  case vmIntrinsics::_sha2_implCompress:
    if (!UseSHA256Intrinsics) return NULL;
    break;

  case vmIntrinsics::_sha5_implCompress:
    if (!UseSHA512Intrinsics) return NULL;
    break;

  case vmIntrinsics::_digestBase_implCompressMB:
    if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return NULL;
    // one predicate per SHA flavor that may be intrinsified (SHA-1/256/512)
    predicates = 3;
    break;

  case vmIntrinsics::_ghash_processBlocks:
    if (!UseGHASHIntrinsics) return NULL;
    break;

  case vmIntrinsics::_updateCRC32:
  case vmIntrinsics::_updateBytesCRC32:
  case vmIntrinsics::_updateByteBufferCRC32:
    if (!UseCRC32Intrinsics) return NULL;
    break;

  case vmIntrinsics::_updateBytesCRC32C:
  case vmIntrinsics::_updateDirectByteBufferCRC32C:
    if (!UseCRC32CIntrinsics) return NULL;
    break;

  case vmIntrinsics::_incrementExactI:
  case vmIntrinsics::_addExactI:
    if (!Matcher::match_rule_supported(Op_OverflowAddI) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_incrementExactL:
  case vmIntrinsics::_addExactL:
    if (!Matcher::match_rule_supported(Op_OverflowAddL) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_decrementExactI:
  case vmIntrinsics::_subtractExactI:
    if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_decrementExactL:
  case vmIntrinsics::_subtractExactL:
    if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_negateExactI:
    if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_negateExactL:
    if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_multiplyExactI:
    if (!Matcher::match_rule_supported(Op_OverflowMulI) || !UseMathExactIntrinsics) return NULL;
    break;
  case vmIntrinsics::_multiplyExactL:
    if (!Matcher::match_rule_supported(Op_OverflowMulL) || !UseMathExactIntrinsics) return NULL;
    break;

  case vmIntrinsics::_getShortUnaligned:
  case vmIntrinsics::_getCharUnaligned:
  case vmIntrinsics::_getIntUnaligned:
  case vmIntrinsics::_getLongUnaligned:
  case vmIntrinsics::_putShortUnaligned:
  case vmIntrinsics::_putCharUnaligned:
  case vmIntrinsics::_putIntUnaligned:
  case vmIntrinsics::_putLongUnaligned:
    if (!UseUnalignedAccesses) return NULL;
    break;

 default:
    assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
    assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
    break;
  }

  // -XX:-InlineClassNatives disables natives from the Class class.
  // The flag applies to all reflective calls, notably Array.newArray
  // (visible to Java programmers as Array.newInstance).
  if (m->holder()->name() == ciSymbol::java_lang_Class() ||
      m->holder()->name() == ciSymbol::java_lang_reflect_Array()) {
    if (!InlineClassNatives)  return NULL;
  }

  // -XX:-InlineThreadNatives disables natives from the Thread class.
  if (m->holder()->name() == ciSymbol::java_lang_Thread()) {
    if (!InlineThreadNatives)  return NULL;
  }

  // -XX:-InlineMathNatives disables natives from the Math,Float and Double classes.
  if (m->holder()->name() == ciSymbol::java_lang_Math() ||
      m->holder()->name() == ciSymbol::java_lang_Float() ||
      m->holder()->name() == ciSymbol::java_lang_Double()) {
    if (!InlineMathNatives)  return NULL;
  }

  // -XX:-InlineUnsafeOps disables natives from the Unsafe class.
  if (m->holder()->name() == ciSymbol::sun_misc_Unsafe()) {
    if (!InlineUnsafeOps)  return NULL;
  }

  return new LibraryIntrinsic(m, is_virtual, predicates, does_virtual_dispatch, (vmIntrinsics::ID) id);
}
 632 
//----------------------register_library_intrinsics-----------------------
// Initialize this file's data structures, for each Compile instance.
// Intentionally empty: LibraryIntrinsic call generators are created lazily
// by make_vm_intrinsic(), so there is no per-Compile registry to set up.
void Compile::register_library_intrinsics() {
  // Nothing to do here.
}
 638 
 639 JVMState* LibraryIntrinsic::generate(JVMState* jvms) {
 640   LibraryCallKit kit(jvms, this);
 641   Compile* C = kit.C;
 642   int nodes = C->unique();
 643 #ifndef PRODUCT
 644   if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
 645     char buf[1000];
 646     const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
 647     tty->print_cr("Intrinsic %s", str);
 648   }
 649 #endif
 650   ciMethod* callee = kit.callee();




 287   Node* get_state_from_sha_object(Node *sha_object);
 288   Node* get_state_from_sha5_object(Node *sha_object);
 289   Node* inline_digestBase_implCompressMB_predicate(int predicate);
 290   bool inline_encodeISOArray();
 291   bool inline_updateCRC32();
 292   bool inline_updateBytesCRC32();
 293   bool inline_updateByteBufferCRC32();
 294   Node* get_table_from_crc32c_class(ciInstanceKlass *crc32c_class);
 295   bool inline_updateBytesCRC32C();
 296   bool inline_updateDirectByteBufferCRC32C();
 297   bool inline_multiplyToLen();
 298   bool inline_squareToLen();
 299   bool inline_mulAdd();
 300   bool inline_montgomeryMultiply();
 301   bool inline_montgomerySquare();
 302 
 303   bool inline_profileBoolean();
 304   bool inline_isCompileConstant();
 305 };
 306 
 307 bool Compile::intrinsic_does_virtual_dispatch_for(Method* method) {
 308   vmIntrinsics::ID id = method->intrinsic_id();


 309   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 310   switch(id) {
 311   case vmIntrinsics::_hashCode:
 312   case vmIntrinsics::_clone:
 313     return true;
 314     break;
 315   default:
 316     return false;
 317   }
 318 }
 319 int Compile::intrinsic_predicates_needed_for(Method* method) {
 320   vmIntrinsics::ID id = method->intrinsic_id();
 321   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 322   switch (id) {
 323   case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
 324   case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
 325     return 1;
 326   case vmIntrinsics::_digestBase_implCompressMB:
 327     return 3;
 328   default:
 329     return 0;
 330   }
 331 }
 332 
 333 bool Compile::is_intrinsic_available_for(Method* method, Method* compilation_context, bool is_virtual) {
 334   vmIntrinsics::ID id = method->intrinsic_id();
 335   if (id < vmIntrinsics::FIRST_ID || id >= vmIntrinsics::LAST_COMPILER_INLINE) {
 336     return false;
 337   }
 338 
 339   // Check if the intrinsic corresponding to 'method' has been disabled on
 340   // the command line by using the DisableIntrinsic flag (either globally
 341   // or on a per-method level, see src/share/vm/compiler/abstractCompiler.hpp
 342   // for details).
 343   // Usually, the compilation context is the caller of the method 'method'.
 344   // The only case when for a non-recursive method 'method' the compilation context
 345   // is not the caller of the 'method' (but it is the method itself) is
 346   // java.lang.ref.Referene::get.
 347   // For java.lang.ref.Reference::get, the intrinsic version is used
 348   // instead of the C2-compiled version so that the value in the referent
 349   // field can be registered by the G1 pre-barrier code. The intrinsified
 350   // version of Reference::get also adds a memory barrier to prevent
 351   // commoning reads from the referent field across safepoint since GC
 352   // can change the referent field's value. See Compile::Compile()
 353   // in src/share/vm/opto/compile.cpp for more details.
 354   ccstr disable_intr = NULL;
 355   if ((DisableIntrinsic[0] != '\0' && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
 356       (compilation_context != NULL &&
 357           CompilerOracle::has_option_value(compilation_context, "DisableIntrinsic", disable_intr) &&
 358           strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)
 359   ) {
 360     return false;
 361   }
 362 
 363   // Only Object.hashCode and Object.clone intrinsics implement also a virtual
 364   // dispatch because calling both methods is expensive but both methods are
 365   // frequently overridden. All other intrinsics implement only a non-virtual
 366   // dispatch.
 367   if (is_virtual) {
 368     switch (id) {
 369     case vmIntrinsics::_hashCode:
 370     case vmIntrinsics::_clone:

 371       break;
 372     default:
 373       return false;
 374     }
 375   }
 376 
 377   // -XX:-InlineNatives disables nearly all intrinsics except the ones listed in
 378   // the following switch statement.
 379   if (!InlineNatives) {
 380     switch (id) {
 381     case vmIntrinsics::_indexOf:
 382     case vmIntrinsics::_compareTo:
 383     case vmIntrinsics::_equals:
 384     case vmIntrinsics::_equalsC:
 385     case vmIntrinsics::_getAndAddInt:
 386     case vmIntrinsics::_getAndAddLong:
 387     case vmIntrinsics::_getAndSetInt:
 388     case vmIntrinsics::_getAndSetLong:
 389     case vmIntrinsics::_getAndSetObject:
 390     case vmIntrinsics::_loadFence:
 391     case vmIntrinsics::_storeFence:
 392     case vmIntrinsics::_fullFence:

 393     case vmIntrinsics::_Reference_get:
 394       break;
 395     default:
 396       return false;
 397     }
 398   }
 399 



 400   switch (id) {
 401   case vmIntrinsics::_compareTo:
 402     if (!SpecialStringCompareTo) return false;
 403     if (!Matcher::match_rule_supported(Op_StrComp)) return false;
 404     break;
 405   case vmIntrinsics::_indexOf:
 406     if (!SpecialStringIndexOf) return false;
 407     break;
 408   case vmIntrinsics::_equals:
 409     if (!SpecialStringEquals) return false;
 410     if (!Matcher::match_rule_supported(Op_StrEquals)) return false;
 411     break;
 412   case vmIntrinsics::_equalsC:
 413     if (!SpecialArraysEquals) return false;
 414     if (!Matcher::match_rule_supported(Op_AryEq)) return false;
 415     break;
 416   case vmIntrinsics::_arraycopy:
 417     if (!InlineArrayCopy) return false;
 418     break;
 419   case vmIntrinsics::_copyMemory:
 420     if (!InlineArrayCopy) return false;
 421     if (StubRoutines::unsafe_arraycopy() == NULL) return false;
 422     break;
 423   case vmIntrinsics::_hashCode:
 424     if (!InlineObjectHash) return false;

 425     break;
 426   case vmIntrinsics::_clone:

 427   case vmIntrinsics::_copyOf:
 428   case vmIntrinsics::_copyOfRange:
 429     // These intrinsics use both the objectcopy and the arraycopy
 430     // intrinsic mechanism.
 431     if (!InlineObjectCopy || !InlineArrayCopy) return false;
 432     break;
 433   case vmIntrinsics::_encodeISOArray:
 434     if (!SpecialEncodeISOArray) return false;
 435     if (!Matcher::match_rule_supported(Op_EncodeISOArray)) return false;
 436     break;
 437   case vmIntrinsics::_checkIndex:
 438     // We do not intrinsify this.  The optimizer does fine with it.
 439     return false;

 440   case vmIntrinsics::_getCallerClass:
 441     if (!InlineReflectionGetCallerClass ||
 442         SystemDictionary::reflect_CallerSensitive_klass() == NULL) {
 443       return false;
 444     }
 445     break;

 446   case vmIntrinsics::_bitCount_i:
 447     if (!Matcher::match_rule_supported(Op_PopCountI)) return false;
 448     break;

 449   case vmIntrinsics::_bitCount_l:
 450     if (!Matcher::match_rule_supported(Op_PopCountL)) return false;
 451     break;

 452   case vmIntrinsics::_numberOfLeadingZeros_i:
 453     if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return false;
 454     break;

 455   case vmIntrinsics::_numberOfLeadingZeros_l:
 456     if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return false;
 457     break;

 458   case vmIntrinsics::_numberOfTrailingZeros_i:
 459     if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return false;
 460     break;

 461   case vmIntrinsics::_numberOfTrailingZeros_l:
 462     if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return false;
 463     break;

 464   case vmIntrinsics::_reverseBytes_c:
 465     if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return false;
 466     break;
 467   case vmIntrinsics::_reverseBytes_s:
 468     if (!Matcher::match_rule_supported(Op_ReverseBytesS)) return false;
 469     break;
 470   case vmIntrinsics::_reverseBytes_i:
 471     if (!Matcher::match_rule_supported(Op_ReverseBytesI)) return false;
 472     break;
 473   case vmIntrinsics::_reverseBytes_l:
 474     if (!Matcher::match_rule_supported(Op_ReverseBytesL)) return false;







 475     break;

 476   case vmIntrinsics::_compareAndSwapObject:
 477 #ifdef _LP64
 478 if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return false;
 479 #endif
 480 break;

 481   case vmIntrinsics::_compareAndSwapLong:
 482     if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return false;
 483     break;

 484   case vmIntrinsics::_getAndAddInt:
 485     if (!Matcher::match_rule_supported(Op_GetAndAddI)) return false;
 486     break;

 487   case vmIntrinsics::_getAndAddLong:
 488     if (!Matcher::match_rule_supported(Op_GetAndAddL)) return false;
 489     break;

 490   case vmIntrinsics::_getAndSetInt:
 491     if (!Matcher::match_rule_supported(Op_GetAndSetI)) return false;
 492     break;

 493   case vmIntrinsics::_getAndSetLong:
 494     if (!Matcher::match_rule_supported(Op_GetAndSetL)) return false;
 495     break;

 496   case vmIntrinsics::_getAndSetObject:
 497 #ifdef _LP64
 498     if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return false;
 499     if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return false;
 500     break;
 501 #else
 502     if (!Matcher::match_rule_supported(Op_GetAndSetP)) return false;
 503     break;
 504 #endif

 505   case vmIntrinsics::_aescrypt_encryptBlock:
 506   case vmIntrinsics::_aescrypt_decryptBlock:
 507     if (!UseAESIntrinsics) return false;
 508     break;

 509   case vmIntrinsics::_multiplyToLen:
 510     if (!UseMultiplyToLenIntrinsic) return false;
 511     break;

 512   case vmIntrinsics::_squareToLen:
 513     if (!UseSquareToLenIntrinsic) return false;
 514     break;

 515   case vmIntrinsics::_mulAdd:
 516     if (!UseMulAddIntrinsic) return false;
 517     break;

 518   case vmIntrinsics::_montgomeryMultiply:
 519     if (!UseMontgomeryMultiplyIntrinsic) return false;
 520     break;
 521   case vmIntrinsics::_montgomerySquare:
 522     if (!UseMontgomerySquareIntrinsic) return false;
 523     break;

 524   case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
 525   case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
 526     if (!UseAESIntrinsics) return false;


 527     break;

 528   case vmIntrinsics::_sha_implCompress:
 529     if (!UseSHA1Intrinsics) return false;
 530     break;

 531   case vmIntrinsics::_sha2_implCompress:
 532     if (!UseSHA256Intrinsics) return false;
 533     break;

 534   case vmIntrinsics::_sha5_implCompress:
 535     if (!UseSHA512Intrinsics) return false;
 536     break;

 537   case vmIntrinsics::_digestBase_implCompressMB:
 538     if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return false;

 539     break;

 540   case vmIntrinsics::_ghash_processBlocks:
 541     if (!UseGHASHIntrinsics) return false;
 542     break;

 543   case vmIntrinsics::_updateCRC32:
 544   case vmIntrinsics::_updateBytesCRC32:
 545   case vmIntrinsics::_updateByteBufferCRC32:
 546     if (!UseCRC32Intrinsics) return false;
 547     break;

 548   case vmIntrinsics::_updateBytesCRC32C:
 549   case vmIntrinsics::_updateDirectByteBufferCRC32C:
 550     if (!UseCRC32CIntrinsics) return false;
 551     break;

 552   case vmIntrinsics::_incrementExactI:
 553   case vmIntrinsics::_addExactI:
 554     if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowAddI)) return false;
 555     break;
 556   case vmIntrinsics::_incrementExactL:
 557   case vmIntrinsics::_addExactL:
 558     if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowAddL)) return false;
 559     break;
 560   case vmIntrinsics::_decrementExactI:
 561   case vmIntrinsics::_subtractExactI:
 562   case vmIntrinsics::_negateExactI:
 563     if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowSubI)) return false;
 564     break;
 565   case vmIntrinsics::_decrementExactL:
 566   case vmIntrinsics::_subtractExactL:





 567   case vmIntrinsics::_negateExactL:
 568     if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowSubL)) return false;
 569     break;
 570   case vmIntrinsics::_multiplyExactI:
 571     if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowMulI)) return false;
 572     break;
 573   case vmIntrinsics::_multiplyExactL:
 574     if (!Matcher::match_rule_supported(Op_OverflowMulL) || !UseMathExactIntrinsics) return false;
 575     break;

 576   case vmIntrinsics::_getShortUnaligned:
 577   case vmIntrinsics::_getCharUnaligned:
 578   case vmIntrinsics::_getIntUnaligned:
 579   case vmIntrinsics::_getLongUnaligned:
 580   case vmIntrinsics::_putShortUnaligned:
 581   case vmIntrinsics::_putCharUnaligned:
 582   case vmIntrinsics::_putIntUnaligned:
 583   case vmIntrinsics::_putLongUnaligned:
 584     if (!UseUnalignedAccesses) return false;
 585     break;

 586   default:


 587     break;
 588   }
 589 
 590   // -XX:-InlineClassNatives disables natives from the Class class.
 591   // The flag applies to all reflective calls, notably Array.newArray
 592   // (visible to Java programmers as Array.newInstance).
 593   if (method->method_holder()->name() == vmSymbols::java_lang_Class() ||
 594       method->method_holder()->name() == vmSymbols::java_lang_reflect_Array()) {
 595     if (!InlineClassNatives) return false;
 596   }
 597 
 598   // -XX:-InlineThreadNatives disables natives from the Thread class.
 599   if (method->method_holder()->name() == vmSymbols::java_lang_Thread()) {
 600     if (!InlineThreadNatives) return false;
 601   }
 602 
 603   // -XX:-InlineMathNatives disables natives from the Math,Float and Double classes.
 604   if (method->method_holder()->name() == vmSymbols::java_lang_Math() ||
 605       method->method_holder()->name() == vmSymbols::java_lang_Float() ||
 606       method->method_holder()->name() == vmSymbols::java_lang_Double()) {
 607     if (!InlineMathNatives) return false;
 608   }
 609 
 610   // -XX:-InlineUnsafeOps disables natives from the Unsafe class.
 611   if (method->method_holder()->name() == vmSymbols::sun_misc_Unsafe()) {
 612     if (!InlineUnsafeOps) return false;
 613   }
 614 
 615   return true;
 616 }
 617 
 618 //---------------------------make_vm_intrinsic----------------------------
 619 CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
 620   vmIntrinsics::ID id = m->intrinsic_id();
 621   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 622 
 623   if (!m->is_loaded()) {
 624     // Do not attempt to inline unloaded methods.
 625     return NULL;
 626   }
 627 
 628   Method* method = m->get_Method();
 629   Method* compilation_context = Compile::method()->get_Method();
 630   if (is_intrinsic_available_for(method, compilation_context, is_virtual)) {
 631     assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
 632     assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
 633     return new LibraryIntrinsic(m, is_virtual,
 634                                 intrinsic_predicates_needed_for(method),
 635                                 intrinsic_does_virtual_dispatch_for(method),
 636                                 (vmIntrinsics::ID) id);
 637   } else {
 638     return NULL;
 639   }
 640 }
 641 
//----------------------register_library_intrinsics-----------------------
// Initialize this file's data structures, for each Compile instance.
// Intentionally empty: LibraryIntrinsic call generators are created lazily
// by make_vm_intrinsic(), so there is no per-Compile registry to set up.
void Compile::register_library_intrinsics() {
  // Nothing to do here.
}
 647 
 648 JVMState* LibraryIntrinsic::generate(JVMState* jvms) {
 649   LibraryCallKit kit(jvms, this);
 650   Compile* C = kit.C;
 651   int nodes = C->unique();
 652 #ifndef PRODUCT
 653   if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
 654     char buf[1000];
 655     const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
 656     tty->print_cr("Intrinsic %s", str);
 657   }
 658 #endif
 659   ciMethod* callee = kit.callee();


src/share/vm/opto/library_call.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File