src/share/vm/opto/library_call.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File 8130832-review Sdiff src/share/vm/opto

src/share/vm/opto/library_call.cpp

Print this page
rev 8688 : 1234567: Invalid bug number
Summary: This is a local commit to reduce jprt submit time.
Reviewed-by: kvn


 287   Node* get_state_from_sha_object(Node *sha_object);
 288   Node* get_state_from_sha5_object(Node *sha_object);
 289   Node* inline_digestBase_implCompressMB_predicate(int predicate);
 290   bool inline_encodeISOArray();
 291   bool inline_updateCRC32();
 292   bool inline_updateBytesCRC32();
 293   bool inline_updateByteBufferCRC32();
 294   Node* get_table_from_crc32c_class(ciInstanceKlass *crc32c_class);
 295   bool inline_updateBytesCRC32C();
 296   bool inline_updateDirectByteBufferCRC32C();
 297   bool inline_multiplyToLen();
 298   bool inline_squareToLen();
 299   bool inline_mulAdd();
 300   bool inline_montgomeryMultiply();
 301   bool inline_montgomerySquare();
 302 
 303   bool inline_profileBoolean();
 304   bool inline_isCompileConstant();
 305 };
 306 
 307 
 308 //---------------------------make_vm_intrinsic----------------------------
// Returns a LibraryIntrinsic call generator for ciMethod m, or NULL when the
// intrinsic is disabled on the command line, unsupported by the Matcher, or
// switched off by one of the Inline*/Use* flags checked below.
 309 CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
 310   vmIntrinsics::ID id = m->intrinsic_id();
 311   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 312 
 313   ccstr disable_intr = NULL;
 314 
 315   if ((DisableIntrinsic[0] != '\0'
 316        && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
 317       (method_has_option_value("DisableIntrinsic", disable_intr)
 318        && strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)) {
 319     // disabled by a user request on the command line:
 320     // example: -XX:DisableIntrinsic=_hashCode,_getClass
 321     return NULL;
 322   }













 323 
 324   if (!m->is_loaded()) {
 325     // do not attempt to inline unloaded methods
 326     return NULL;

























 327   }
 328 
 329   // Only a few intrinsics implement a virtual dispatch.
 330   // They are expensive calls which are also frequently overridden.


 331   if (is_virtual) {
 332     switch (id) {
 333     case vmIntrinsics::_hashCode:
 334     case vmIntrinsics::_clone:
 335       // OK, Object.hashCode and Object.clone intrinsics come in both flavors
 336       break;
 337     default:
 338       return NULL;
 339     }
 340   }
 341 
 342   // -XX:-InlineNatives disables nearly all intrinsics:

 343   if (!InlineNatives) {
 344     switch (id) {
 345     case vmIntrinsics::_indexOf:
 346     case vmIntrinsics::_compareTo:
 347     case vmIntrinsics::_equals:
 348     case vmIntrinsics::_equalsC:
 349     case vmIntrinsics::_getAndAddInt:
 350     case vmIntrinsics::_getAndAddLong:
 351     case vmIntrinsics::_getAndSetInt:
 352     case vmIntrinsics::_getAndSetLong:
 353     case vmIntrinsics::_getAndSetObject:
 354     case vmIntrinsics::_loadFence:
 355     case vmIntrinsics::_storeFence:
 356     case vmIntrinsics::_fullFence:
 357       break;  // InlineNatives does not control String.compareTo
 358     case vmIntrinsics::_Reference_get:
 359       break;  // InlineNatives does not control Reference.get
 360     default:
 361       return NULL;
 362     }
 363   }
 364 
 365   int predicates = 0;
 366   bool does_virtual_dispatch = false;
 367 
  // Per-intrinsic availability: feature flags and Matcher rule support.
 368   switch (id) {
 369   case vmIntrinsics::_compareTo:
 370     if (!SpecialStringCompareTo)  return NULL;
 371     if (!Matcher::match_rule_supported(Op_StrComp))  return NULL;
 372     break;
 373   case vmIntrinsics::_indexOf:
 374     if (!SpecialStringIndexOf)  return NULL;
 375     break;
 376   case vmIntrinsics::_equals:
 377     if (!SpecialStringEquals)  return NULL;
 378     if (!Matcher::match_rule_supported(Op_StrEquals))  return NULL;
 379     break;
 380   case vmIntrinsics::_equalsC:
 381     if (!SpecialArraysEquals)  return NULL;
 382     if (!Matcher::match_rule_supported(Op_AryEq))  return NULL;
 383     break;
 384   case vmIntrinsics::_arraycopy:
 385     if (!InlineArrayCopy)  return NULL;
 386     break;
 387   case vmIntrinsics::_copyMemory:
 388     if (StubRoutines::unsafe_arraycopy() == NULL)  return NULL;
 389     if (!InlineArrayCopy)  return NULL;
 390     break;
 391   case vmIntrinsics::_hashCode:
 392     if (!InlineObjectHash)  return NULL;
 393     does_virtual_dispatch = true;
 394     break;
 395   case vmIntrinsics::_clone:
 396     does_virtual_dispatch = true;
      // fall through: _clone is also subject to the object/array-copy flag checks below
 397   case vmIntrinsics::_copyOf:
 398   case vmIntrinsics::_copyOfRange:
 399     if (!InlineObjectCopy)  return NULL;
 400     // These also use the arraycopy intrinsic mechanism:
 401     if (!InlineArrayCopy)  return NULL;
 402     break;
 403   case vmIntrinsics::_encodeISOArray:
 404     if (!SpecialEncodeISOArray)  return NULL;
 405     if (!Matcher::match_rule_supported(Op_EncodeISOArray))  return NULL;
 406     break;
 407   case vmIntrinsics::_checkIndex:
 408     // We do not intrinsify this.  The optimizer does fine with it.
 409     return NULL;
 410 
 411   case vmIntrinsics::_getCallerClass:
 412     if (!InlineReflectionGetCallerClass)  return NULL;
 413     if (SystemDictionary::reflect_CallerSensitive_klass() == NULL)  return NULL;


 414     break;
 415 
 416   case vmIntrinsics::_bitCount_i:
 417     if (!Matcher::match_rule_supported(Op_PopCountI)) return NULL;
 418     break;
 419 
 420   case vmIntrinsics::_bitCount_l:
 421     if (!Matcher::match_rule_supported(Op_PopCountL)) return NULL;
 422     break;
 423 
 424   case vmIntrinsics::_numberOfLeadingZeros_i:
 425     if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return NULL;
 426     break;
 427 
 428   case vmIntrinsics::_numberOfLeadingZeros_l:
 429     if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return NULL;
 430     break;
 431 
 432   case vmIntrinsics::_numberOfTrailingZeros_i:
 433     if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return NULL;
 434     break;
 435 
 436   case vmIntrinsics::_numberOfTrailingZeros_l:
 437     if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return NULL;
 438     break;
 439 
 440   case vmIntrinsics::_reverseBytes_c:
 441     if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return NULL;
 442     break;
 443   case vmIntrinsics::_reverseBytes_s:
 444     if (!Matcher::match_rule_supported(Op_ReverseBytesS))  return NULL;
 445     break;
 446   case vmIntrinsics::_reverseBytes_i:
 447     if (!Matcher::match_rule_supported(Op_ReverseBytesI))  return NULL;
 448     break;
 449   case vmIntrinsics::_reverseBytes_l:
 450     if (!Matcher::match_rule_supported(Op_ReverseBytesL))  return NULL;
 451     break;
 452 
 453   case vmIntrinsics::_Reference_get:
 454     // Use the intrinsic version of Reference.get() so that the value in
 455     // the referent field can be registered by the G1 pre-barrier code.
 456     // Also add memory barrier to prevent commoning reads from this field
 457     // across safepoint since GC can change it value.
 458     break;
 459 
 460   case vmIntrinsics::_compareAndSwapObject:
 461 #ifdef _LP64
 462     if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return NULL;
 463 #endif
 464     break;
 465 
 466   case vmIntrinsics::_compareAndSwapLong:
 467     if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return NULL;
 468     break;
 469 
 470   case vmIntrinsics::_getAndAddInt:
 471     if (!Matcher::match_rule_supported(Op_GetAndAddI)) return NULL;
 472     break;
 473 
 474   case vmIntrinsics::_getAndAddLong:
 475     if (!Matcher::match_rule_supported(Op_GetAndAddL)) return NULL;
 476     break;
 477 
 478   case vmIntrinsics::_getAndSetInt:
 479     if (!Matcher::match_rule_supported(Op_GetAndSetI)) return NULL;
 480     break;
 481 
 482   case vmIntrinsics::_getAndSetLong:
 483     if (!Matcher::match_rule_supported(Op_GetAndSetL)) return NULL;
 484     break;
 485 
 486   case vmIntrinsics::_getAndSetObject:
 487 #ifdef _LP64
 488     if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return NULL;
 489     if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return NULL;
 490     break;
 491 #else
 492     if (!Matcher::match_rule_supported(Op_GetAndSetP)) return NULL;
 493     break;
 494 #endif
 495 
 496   case vmIntrinsics::_aescrypt_encryptBlock:
 497   case vmIntrinsics::_aescrypt_decryptBlock:
 498     if (!UseAESIntrinsics) return NULL;
 499     break;
 500 
 501   case vmIntrinsics::_multiplyToLen:
 502     if (!UseMultiplyToLenIntrinsic) return NULL;
 503     break;
 504 
 505   case vmIntrinsics::_squareToLen:
 506     if (!UseSquareToLenIntrinsic) return NULL;
 507     break;
 508 
 509   case vmIntrinsics::_mulAdd:
 510     if (!UseMulAddIntrinsic) return NULL;
 511     break;
 512 
 513   case vmIntrinsics::_montgomeryMultiply:
 514      if (!UseMontgomeryMultiplyIntrinsic) return NULL;
 515     break;
 516   case vmIntrinsics::_montgomerySquare:
 517      if (!UseMontgomerySquareIntrinsic) return NULL;
 518     break;
 519 
 520   case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
 521   case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
 522     if (!UseAESIntrinsics) return NULL;
 523     // these two require the predicated logic
 524     predicates = 1;
 525     break;
 526 
 527   case vmIntrinsics::_sha_implCompress:
 528     if (!UseSHA1Intrinsics) return NULL;
 529     break;
 530 
 531   case vmIntrinsics::_sha2_implCompress:
 532     if (!UseSHA256Intrinsics) return NULL;
 533     break;
 534 
 535   case vmIntrinsics::_sha5_implCompress:
 536     if (!UseSHA512Intrinsics) return NULL;
 537     break;
 538 
 539   case vmIntrinsics::_digestBase_implCompressMB:
 540     if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return NULL;
 541     predicates = 3;
 542     break;
 543 
 544   case vmIntrinsics::_ghash_processBlocks:
 545     if (!UseGHASHIntrinsics) return NULL;
 546     break;
 547 
 548   case vmIntrinsics::_updateCRC32:
 549   case vmIntrinsics::_updateBytesCRC32:
 550   case vmIntrinsics::_updateByteBufferCRC32:
 551     if (!UseCRC32Intrinsics) return NULL;
 552     break;
 553 
 554   case vmIntrinsics::_updateBytesCRC32C:
 555   case vmIntrinsics::_updateDirectByteBufferCRC32C:
 556     if (!UseCRC32CIntrinsics) return NULL;
 557     break;
 558 
 559   case vmIntrinsics::_incrementExactI:
 560   case vmIntrinsics::_addExactI:
 561     if (!Matcher::match_rule_supported(Op_OverflowAddI) || !UseMathExactIntrinsics) return NULL;
 562     break;
 563   case vmIntrinsics::_incrementExactL:
 564   case vmIntrinsics::_addExactL:
 565     if (!Matcher::match_rule_supported(Op_OverflowAddL) || !UseMathExactIntrinsics) return NULL;
 566     break;
 567   case vmIntrinsics::_decrementExactI:
 568   case vmIntrinsics::_subtractExactI:
 569     if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;

 570     break;
 571   case vmIntrinsics::_decrementExactL:
 572   case vmIntrinsics::_subtractExactL:
 573     if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
 574     break;
 575   case vmIntrinsics::_negateExactI:
 576     if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
 577     break;
 578   case vmIntrinsics::_negateExactL:
 579     if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
 580     break;
 581   case vmIntrinsics::_multiplyExactI:
 582     if (!Matcher::match_rule_supported(Op_OverflowMulI) || !UseMathExactIntrinsics) return NULL;
 583     break;
 584   case vmIntrinsics::_multiplyExactL:
 585     if (!Matcher::match_rule_supported(Op_OverflowMulL) || !UseMathExactIntrinsics) return NULL;
 586     break;
 587 
 588   case vmIntrinsics::_getShortUnaligned:
 589   case vmIntrinsics::_getCharUnaligned:
 590   case vmIntrinsics::_getIntUnaligned:
 591   case vmIntrinsics::_getLongUnaligned:
 592   case vmIntrinsics::_putShortUnaligned:
 593   case vmIntrinsics::_putCharUnaligned:
 594   case vmIntrinsics::_putIntUnaligned:
 595   case vmIntrinsics::_putLongUnaligned:
 596     if (!UseUnalignedAccesses) return NULL;
 597     break;
 598 
 599  default:
 600     assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
 601     assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
 602     break;
 603   }
 604 

 605   // -XX:-InlineClassNatives disables natives from the Class class.
 606   // The flag applies to all reflective calls, notably Array.newArray
 607   // (visible to Java programmers as Array.newInstance).
 608   if (m->holder()->name() == ciSymbol::java_lang_Class() ||
 609       m->holder()->name() == ciSymbol::java_lang_reflect_Array()) {
 610     if (!InlineClassNatives)  return NULL;
 611   }
 612 
 613   // -XX:-InlineThreadNatives disables natives from the Thread class.
 614   if (m->holder()->name() == ciSymbol::java_lang_Thread()) {
 615     if (!InlineThreadNatives)  return NULL;
 616   }
 617 
 618   // -XX:-InlineMathNatives disables natives from the Math,Float and Double classes.
 619   if (m->holder()->name() == ciSymbol::java_lang_Math() ||
 620       m->holder()->name() == ciSymbol::java_lang_Float() ||
 621       m->holder()->name() == ciSymbol::java_lang_Double()) {
 622     if (!InlineMathNatives)  return NULL;
 623   }
 624 
 625   // -XX:-InlineUnsafeOps disables natives from the Unsafe class.
 626   if (m->holder()->name() == ciSymbol::sun_misc_Unsafe()) {
 627     if (!InlineUnsafeOps)  return NULL;
 628   }
 629 
 630   return new LibraryIntrinsic(m, is_virtual, predicates, does_virtual_dispatch, (vmIntrinsics::ID) id);

































 631 }
 632 
 633 //----------------------register_library_intrinsics-----------------------
 634 // Initialize this file's data structures, for each Compile instance.
// Intentionally empty: all intrinsic bookkeeping visible here is done lazily
// in make_vm_intrinsic(), so no per-Compile registration is required.
 635 void Compile::register_library_intrinsics() {
 636   // Nothing to do here.
 637 }
 638 
 639 JVMState* LibraryIntrinsic::generate(JVMState* jvms) {
 640   LibraryCallKit kit(jvms, this);
 641   Compile* C = kit.C;
 642   int nodes = C->unique();
 643 #ifndef PRODUCT
 644   if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
 645     char buf[1000];
 646     const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
 647     tty->print_cr("Intrinsic %s", str);
 648   }
 649 #endif
 650   ciMethod* callee = kit.callee();




 287   Node* get_state_from_sha_object(Node *sha_object);
 288   Node* get_state_from_sha5_object(Node *sha_object);
 289   Node* inline_digestBase_implCompressMB_predicate(int predicate);
 290   bool inline_encodeISOArray();
 291   bool inline_updateCRC32();
 292   bool inline_updateBytesCRC32();
 293   bool inline_updateByteBufferCRC32();
 294   Node* get_table_from_crc32c_class(ciInstanceKlass *crc32c_class);
 295   bool inline_updateBytesCRC32C();
 296   bool inline_updateDirectByteBufferCRC32C();
 297   bool inline_multiplyToLen();
 298   bool inline_squareToLen();
 299   bool inline_mulAdd();
 300   bool inline_montgomeryMultiply();
 301   bool inline_montgomerySquare();
 302 
 303   bool inline_profileBoolean();
 304   bool inline_isCompileConstant();
 305 };
 306 
 307 bool Compile::intrinsic_does_virtual_dispatch_for(vmIntrinsics::ID id) {



 308   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 309   switch(id) {
 310   case vmIntrinsics::_hashCode:
 311   case vmIntrinsics::_clone:
 312     return true;
 313     break;
 314   default:
 315     return false;



 316   }
 317 }
 318 int Compile::intrinsic_predicates_needed_for(vmIntrinsics::ID id) {
 319   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 320   switch (id) {
 321   case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
 322   case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
 323     return 1;
 324   case vmIntrinsics::_digestBase_implCompressMB:
 325     return 3;
 326   default:
 327     return 0;
 328   }
 329 }
 330 
 331 bool Compile::is_intrinsic_available_for(Method* method, Method* compilation_context, bool is_virtual) {
 332   vmIntrinsics::ID id = method->intrinsic_id();
 333   if (id < vmIntrinsics::FIRST_ID || id >= vmIntrinsics::LAST_COMPILER_INLINE) {
 334     return false;
 335   }
 336 
 337   // Check if the intrinsic corresponding to 'method' has been disabled on
 338   // the command line by using the DisableIntrinsic flag (either globally
 339   // or on a per-method level, see src/share/vm/compiler/abstractCompiler.hpp
 340   // for details).
 341   // Usually, the compilation context is the caller of the method 'method'.
 342   // The only case when for a non-recursive method 'method' the compilation context
 343   // is not the caller of the 'method' (but it is the method itself) is
 344   // java.lang.ref.Referene::get.
 345   // For java.lang.ref.Reference::get, the intrinsic version is used
 346   // instead of the C2-compiled version so that the value in the referent
 347   // field can be registered by the G1 pre-barrier code. The intrinsified
 348   // version of Reference::get also adds a memory barrier to prevent
 349   // commoning reads from the referent field across safepoint since GC
 350   // can change the referent field's value. See Compile::Compile()
 351   // in src/share/vm/opto/compile.cpp for more details.
 352   ccstr disable_intr = NULL;
 353   if ((DisableIntrinsic[0] != '\0' && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
 354       (compilation_context != NULL &&
 355           CompilerOracle::has_option_value(compilation_context, "DisableIntrinsic", disable_intr) &&
 356           strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)
 357   ) {
 358     return false;
 359   }
 360 
 361   // Only Object.hashCode and Object.clone intrinsics implement also a virtual
 362   // dispatch because calling both methods is expensive but both methods are
 363   // frequently overridden. All other intrinsics implement only a non-virtual
 364   // dispatch.
 365   if (is_virtual) {
 366     switch (id) {
 367     case vmIntrinsics::_hashCode:
 368     case vmIntrinsics::_clone:

 369       break;
 370     default:
 371       return false;
 372     }
 373   }
 374 
 375   // -XX:-InlineNatives disables nearly all intrinsics except the ones listed in
 376   // the following switch statement.
 377   if (!InlineNatives) {
 378     switch (id) {
 379     case vmIntrinsics::_indexOf:
 380     case vmIntrinsics::_compareTo:
 381     case vmIntrinsics::_equals:
 382     case vmIntrinsics::_equalsC:
 383     case vmIntrinsics::_getAndAddInt:
 384     case vmIntrinsics::_getAndAddLong:
 385     case vmIntrinsics::_getAndSetInt:
 386     case vmIntrinsics::_getAndSetLong:
 387     case vmIntrinsics::_getAndSetObject:
 388     case vmIntrinsics::_loadFence:
 389     case vmIntrinsics::_storeFence:
 390     case vmIntrinsics::_fullFence:

 391     case vmIntrinsics::_Reference_get:
 392       break;
 393     default:
 394       return false;
 395     }
 396   }
 397 



 398   switch (id) {
 399   case vmIntrinsics::_compareTo:
 400     if (!SpecialStringCompareTo) return false;
 401     if (!Matcher::match_rule_supported(Op_StrComp)) return false;
 402     break;
 403   case vmIntrinsics::_indexOf:
 404     if (!SpecialStringIndexOf) return false;
 405     break;
 406   case vmIntrinsics::_equals:
 407     if (!SpecialStringEquals) return false;
 408     if (!Matcher::match_rule_supported(Op_StrEquals)) return false;
 409     break;
 410   case vmIntrinsics::_equalsC:
 411     if (!SpecialArraysEquals) return false;
 412     if (!Matcher::match_rule_supported(Op_AryEq)) return false;
 413     break;
 414   case vmIntrinsics::_arraycopy:
 415     if (!InlineArrayCopy) return false;
 416     break;
 417   case vmIntrinsics::_copyMemory:
 418     if (!InlineArrayCopy) return false;
 419     if (StubRoutines::unsafe_arraycopy() == NULL) return false;
 420     break;
 421   case vmIntrinsics::_hashCode:
 422     if (!InlineObjectHash) return false;

 423     break;
 424   case vmIntrinsics::_clone:

 425   case vmIntrinsics::_copyOf:
 426   case vmIntrinsics::_copyOfRange:
 427     // These intrinsics use both the objectcopy and the arraycopy
 428     // intrinsic mechanism.
 429     if (!InlineObjectCopy || !InlineArrayCopy) return false;
 430     break;
 431   case vmIntrinsics::_encodeISOArray:
 432     if (!SpecialEncodeISOArray) return false;
 433     if (!Matcher::match_rule_supported(Op_EncodeISOArray)) return false;
 434     break;
 435   case vmIntrinsics::_checkIndex:
 436     // We do not intrinsify this.  The optimizer does fine with it.
 437     return false;

 438   case vmIntrinsics::_getCallerClass:
 439     if (!InlineReflectionGetCallerClass ||
 440         SystemDictionary::reflect_CallerSensitive_klass() == NULL) {
 441       return false;
 442     }
 443     break;

 444   case vmIntrinsics::_bitCount_i:
 445     if (!Matcher::match_rule_supported(Op_PopCountI)) return false;
 446     break;

 447   case vmIntrinsics::_bitCount_l:
 448     if (!Matcher::match_rule_supported(Op_PopCountL)) return false;
 449     break;

 450   case vmIntrinsics::_numberOfLeadingZeros_i:
 451     if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return false;
 452     break;

 453   case vmIntrinsics::_numberOfLeadingZeros_l:
 454     if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return false;
 455     break;

 456   case vmIntrinsics::_numberOfTrailingZeros_i:
 457     if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return false;
 458     break;

 459   case vmIntrinsics::_numberOfTrailingZeros_l:
 460     if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return false;
 461     break;

 462   case vmIntrinsics::_reverseBytes_c:
 463     if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return false;
 464     break;
 465   case vmIntrinsics::_reverseBytes_s:
 466     if (!Matcher::match_rule_supported(Op_ReverseBytesS)) return false;
 467     break;
 468   case vmIntrinsics::_reverseBytes_i:
 469     if (!Matcher::match_rule_supported(Op_ReverseBytesI)) return false;
 470     break;
 471   case vmIntrinsics::_reverseBytes_l:
 472     if (!Matcher::match_rule_supported(Op_ReverseBytesL)) return false;







 473     break;

 474   case vmIntrinsics::_compareAndSwapObject:
 475 #ifdef _LP64
 476 if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return false;
 477 #endif
 478 break;

 479   case vmIntrinsics::_compareAndSwapLong:
 480     if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return false;
 481     break;

 482   case vmIntrinsics::_getAndAddInt:
 483     if (!Matcher::match_rule_supported(Op_GetAndAddI)) return false;
 484     break;

 485   case vmIntrinsics::_getAndAddLong:
 486     if (!Matcher::match_rule_supported(Op_GetAndAddL)) return false;
 487     break;

 488   case vmIntrinsics::_getAndSetInt:
 489     if (!Matcher::match_rule_supported(Op_GetAndSetI)) return false;
 490     break;

 491   case vmIntrinsics::_getAndSetLong:
 492     if (!Matcher::match_rule_supported(Op_GetAndSetL)) return false;
 493     break;

 494   case vmIntrinsics::_getAndSetObject:
 495 #ifdef _LP64
 496     if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return false;
 497     if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return false;
 498     break;
 499 #else
 500     if (!Matcher::match_rule_supported(Op_GetAndSetP)) return false;
 501     break;
 502 #endif

 503   case vmIntrinsics::_aescrypt_encryptBlock:
 504   case vmIntrinsics::_aescrypt_decryptBlock:
 505     if (!UseAESIntrinsics) return false;
 506     break;

 507   case vmIntrinsics::_multiplyToLen:
 508     if (!UseMultiplyToLenIntrinsic) return false;
 509     break;

 510   case vmIntrinsics::_squareToLen:
 511     if (!UseSquareToLenIntrinsic) return false;
 512     break;

 513   case vmIntrinsics::_mulAdd:
 514     if (!UseMulAddIntrinsic) return false;
 515     break;

 516   case vmIntrinsics::_montgomeryMultiply:
 517     if (!UseMontgomeryMultiplyIntrinsic) return false;
 518     break;
 519   case vmIntrinsics::_montgomerySquare:
 520     if (!UseMontgomerySquareIntrinsic) return false;
 521     break;

 522   case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
 523   case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
 524     if (!UseAESIntrinsics) return false;


 525     break;

 526   case vmIntrinsics::_sha_implCompress:
 527     if (!UseSHA1Intrinsics) return false;
 528     break;

 529   case vmIntrinsics::_sha2_implCompress:
 530     if (!UseSHA256Intrinsics) return false;
 531     break;

 532   case vmIntrinsics::_sha5_implCompress:
 533     if (!UseSHA512Intrinsics) return false;
 534     break;

 535   case vmIntrinsics::_digestBase_implCompressMB:
 536     if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return false;

 537     break;

 538   case vmIntrinsics::_ghash_processBlocks:
 539     if (!UseGHASHIntrinsics) return false;
 540     break;

 541   case vmIntrinsics::_updateCRC32:
 542   case vmIntrinsics::_updateBytesCRC32:
 543   case vmIntrinsics::_updateByteBufferCRC32:
 544     if (!UseCRC32Intrinsics) return false;
 545     break;

 546   case vmIntrinsics::_updateBytesCRC32C:
 547   case vmIntrinsics::_updateDirectByteBufferCRC32C:
 548     if (!UseCRC32CIntrinsics) return false;
 549     break;

 550   case vmIntrinsics::_incrementExactI:
 551   case vmIntrinsics::_addExactI:
 552     if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowAddI)) return false;
 553     break;
 554   case vmIntrinsics::_incrementExactL:
 555   case vmIntrinsics::_addExactL:
 556     if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowAddL)) return false;
 557     break;
 558   case vmIntrinsics::_decrementExactI:
 559   case vmIntrinsics::_subtractExactI:
 560   case vmIntrinsics::_negateExactI:
 561     if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowSubI)) return false;
 562     break;
 563   case vmIntrinsics::_decrementExactL:
 564   case vmIntrinsics::_subtractExactL:





 565   case vmIntrinsics::_negateExactL:
 566     if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowSubL)) return false;
 567     break;
 568   case vmIntrinsics::_multiplyExactI:
 569     if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowMulI)) return false;
 570     break;
 571   case vmIntrinsics::_multiplyExactL:
 572     if (!Matcher::match_rule_supported(Op_OverflowMulL) || !UseMathExactIntrinsics) return false;
 573     break;

 574   case vmIntrinsics::_getShortUnaligned:
 575   case vmIntrinsics::_getCharUnaligned:
 576   case vmIntrinsics::_getIntUnaligned:
 577   case vmIntrinsics::_getLongUnaligned:
 578   case vmIntrinsics::_putShortUnaligned:
 579   case vmIntrinsics::_putCharUnaligned:
 580   case vmIntrinsics::_putIntUnaligned:
 581   case vmIntrinsics::_putLongUnaligned:
 582     if (!UseUnalignedAccesses) return false;
 583     break;

 584   default:


 585     break;
 586   }
 587 
 588   Symbol* method_holder = method->method_holder()->name();
 589   // -XX:-InlineClassNatives disables natives from the Class class.
 590   // The flag applies to all reflective calls, notably Array.newArray
 591   // (visible to Java programmers as Array.newInstance).
 592   if (method_holder == vmSymbols::java_lang_Class() ||
 593       method_holder == vmSymbols::java_lang_reflect_Array()) {
 594     if (!InlineClassNatives) return false;
 595   }
 596 
 597   // -XX:-InlineThreadNatives disables natives from the Thread class.
 598   if (method_holder == vmSymbols::java_lang_Thread()) {
 599     if (!InlineThreadNatives) return false;
 600   }
 601 
 602   // -XX:-InlineMathNatives disables natives from the Math,Float and Double classes.
 603   if (method_holder == vmSymbols::java_lang_Math() ||
 604       method_holder == vmSymbols::java_lang_Float() ||
 605       method_holder == vmSymbols::java_lang_Double()) {
 606     if (!InlineMathNatives) return false;
 607   }
 608 
 609   // -XX:-InlineUnsafeOps disables natives from the Unsafe class.
 610   if (method_holder == vmSymbols::sun_misc_Unsafe()) {
 611     if (!InlineUnsafeOps) return false;
 612   }
 613 
 614   return true;
 615 }
 616 
 617 //---------------------------make_vm_intrinsic----------------------------
// Returns a LibraryIntrinsic call generator for ciMethod m, or NULL when the
// intrinsic is unavailable (disabled, unsupported, or m is not loaded).
// Availability is decided by is_intrinsic_available_for() below.
 618 CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
 619   vmIntrinsics::ID id = m->intrinsic_id();
 620   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 621 
 622   if (!m->is_loaded()) {
 623     // Do not attempt to inline unloaded methods.
 624     return NULL;
 625   }
 626 
 627   bool is_available = false;
 628   {
 629     // For calling is_intrinsic_available_for() we need to transition to
 630     // the '_thread_in_vm' state because is_intrinsic_available_for()
 631     // accesses VM-internal data.
 632     VM_ENTRY_MARK;
 633     methodHandle mh(THREAD, m->get_Method());
 634     methodHandle ct(THREAD, method()->get_Method());
 635     is_available = is_intrinsic_available_for(mh(), ct(), is_virtual);
 636   }
 637 
 638   if (is_available) {
 639     assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
 640     assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
 641     return new LibraryIntrinsic(m, is_virtual,
 642                                 intrinsic_predicates_needed_for(id),
 643                                 intrinsic_does_virtual_dispatch_for(id),
 644                                 (vmIntrinsics::ID) id);
 645   } else {
 646     return NULL;
 647   }
 648 }
 649 
 650 //----------------------register_library_intrinsics-----------------------
 651 // Initialize this file's data structures, for each Compile instance.
// Intentionally empty: all intrinsic bookkeeping visible here is done lazily
// in make_vm_intrinsic(), so no per-Compile registration is required.
 652 void Compile::register_library_intrinsics() {
 653   // Nothing to do here.
 654 }
 655 
 656 JVMState* LibraryIntrinsic::generate(JVMState* jvms) {
 657   LibraryCallKit kit(jvms, this);
 658   Compile* C = kit.C;
 659   int nodes = C->unique();
 660 #ifndef PRODUCT
 661   if ((C->print_intrinsics() || C->print_inlining()) && Verbose) {
 662     char buf[1000];
 663     const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
 664     tty->print_cr("Intrinsic %s", str);
 665   }
 666 #endif
 667   ciMethod* callee = kit.callee();


src/share/vm/opto/library_call.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File