rev 2237 : [mq]: initial-intrinsification-changes
rev 2238 : [mq]: code-review-comments-vladimir
rev 2239 : [mq]: client_assertion_fauilure
rev 2240 : [mq]: code-review-comments-tom
rev 2242 : [mq]: reflection-unsafe-read-barrier

          --- old/src/share/vm/opto/library_call.cpp
          +++ new/src/share/vm/opto/library_call.cpp
[... 157 lines elided ...]
 158  158    bool inline_trans(vmIntrinsics::ID id);
 159  159    bool inline_abs(vmIntrinsics::ID id);
 160  160    bool inline_sqrt(vmIntrinsics::ID id);
 161  161    bool inline_pow(vmIntrinsics::ID id);
 162  162    bool inline_exp(vmIntrinsics::ID id);
 163  163    bool inline_min_max(vmIntrinsics::ID id);
 164  164    Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 165  165    // This returns Type::AnyPtr, RawPtr, or OopPtr.
 166  166    int classify_unsafe_addr(Node* &base, Node* &offset);
 167  167    Node* make_unsafe_address(Node* base, Node* offset);
      168 +  // Helper for inline_unsafe_access.
      169 +  // Generates the guards that check whether the result of
      170 +  // Unsafe.getObject should be recorded in an SATB log buffer.
      171 +  void insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val);
 168  172    bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
 169  173    bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
 170  174    bool inline_unsafe_allocate();
 171  175    bool inline_unsafe_copyMemory();
 172  176    bool inline_native_currentThread();
 173  177    bool inline_native_time_funcs(bool isNano);
 174  178    bool inline_native_isInterrupted();
 175  179    bool inline_native_Class_query(vmIntrinsics::ID id);
 176  180    bool inline_native_subtype_check();
 177  181  
[... 54 lines elided ...]
 232  236                                      Node* src,  Node* src_offset,
 233  237                                      Node* dest, Node* dest_offset,
 234  238                                      Node* copy_length, bool dest_uninitialized);
 235  239    bool inline_unsafe_CAS(BasicType type);
 236  240    bool inline_unsafe_ordered_store(BasicType type);
 237  241    bool inline_fp_conversions(vmIntrinsics::ID id);
 238  242    bool inline_numberOfLeadingZeros(vmIntrinsics::ID id);
 239  243    bool inline_numberOfTrailingZeros(vmIntrinsics::ID id);
 240  244    bool inline_bitCount(vmIntrinsics::ID id);
 241  245    bool inline_reverseBytes(vmIntrinsics::ID id);
      246 +
      247 +  bool inline_reference_get();
 242  248  };
 243  249  
 244  250  
 245  251  //---------------------------make_vm_intrinsic----------------------------
 246  252  CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
 247  253    vmIntrinsics::ID id = m->intrinsic_id();
 248  254    assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 249  255  
 250  256    if (DisableIntrinsic[0] != '\0'
 251  257        && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) {
[... 76 lines elided ...]
 328  334      if (!UseNewReflection)  return NULL;
 329  335      if (!InlineReflectionGetCallerClass)  return NULL;
 330  336      if (!JDK_Version::is_gte_jdk14x_version())  return NULL;
 331  337      break;
 332  338  
 333  339    case vmIntrinsics::_bitCount_i:
 334  340    case vmIntrinsics::_bitCount_l:
 335  341      if (!UsePopCountInstruction)  return NULL;
 336  342      break;
 337  343  
      344 +  case vmIntrinsics::_Reference_get:
      345 +    // It is only when G1 is enabled that we absolutely
      346 +    // need to use the intrinsic version of Reference.get()
      347 +    // so that the value in the referent field, if necessary,
      348 +    // can be registered by the pre-barrier code.
      349 +    if (!UseG1GC) return NULL;
      350 +    break;
      351 +
 338  352    default:
 339  353      assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
 340  354      assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
 341  355      break;
 342  356    }
 343  357  
 344  358    // -XX:-InlineClassNatives disables natives from the Class class.
 345  359    // The flag applies to all reflective calls, notably Array.newArray
 346  360    // (visible to Java programmers as Array.newInstance).
 347  361    if (m->holder()->name() == ciSymbol::java_lang_Class() ||
[... 31 lines elided ...]
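The _Reference_get case above gates the new intrinsic on UseG1GC alone. A standalone sketch of why (hypothetical, simplified types; not HotSpot code): under G1's snapshot-at-the-beginning (SATB) concurrent marking, a referent handed out by Reference.get() without being logged may be the only remaining path to a live object, so the load must feed a pre-barrier.

    #include <cstddef>
    #include <vector>

    // Hypothetical stand-in for a per-thread SATB log buffer (simplified).
    struct SATBQueue {
      std::vector<void*> _buf;
      void enqueue(void* pre_val) { if (pre_val != NULL) _buf.push_back(pre_val); }
    };

    // Model of a referent load under G1: while concurrent marking is active,
    // the loaded value is logged so the marker still visits it, even though
    // the Reference object itself may never be rescanned.
    void* referent_load_model(void** referent_slot, SATBQueue& q, bool marking_active) {
      void* v = *referent_slot;
      if (marking_active) q.enqueue(v);   // the pre-barrier's guarantee
      return v;
    }
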
 379  393    LibraryCallKit kit(jvms, this);
 380  394    Compile* C = kit.C;
 381  395    int nodes = C->unique();
 382  396  #ifndef PRODUCT
 383  397    if ((PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) && Verbose) {
 384  398      char buf[1000];
 385  399      const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
 386  400      tty->print_cr("Intrinsic %s", str);
 387  401    }
 388  402  #endif
      403 +
 389  404    if (kit.try_to_inline()) {
 390  405      if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
 391      -      tty->print("Inlining intrinsic %s%s at bci:%d in",
 392      -                 vmIntrinsics::name_at(intrinsic_id()),
 393      -                 (is_virtual() ? " (virtual)" : ""), kit.bci());
 394      -      kit.caller()->print_short_name(tty);
 395      -      tty->print_cr(" (%d bytes)", kit.caller()->code_size());
      406 +      if (jvms->has_method()) {
      407 +        // Not a root compile.
      408 +        tty->print("Inlining intrinsic %s%s at bci:%d in",
      409 +                   vmIntrinsics::name_at(intrinsic_id()),
      410 +                   (is_virtual() ? " (virtual)" : ""), kit.bci());
      411 +        kit.caller()->print_short_name(tty);
      412 +        tty->print_cr(" (%d bytes)", kit.caller()->code_size());
      413 +      } else {
      414 +        // Root compile
      415 +        tty->print_cr("Generating intrinsic %s%s at bci:%d",
      416 +                       vmIntrinsics::name_at(intrinsic_id()),
      417 +                       (is_virtual() ? " (virtual)" : ""), kit.bci());
      418 +      }
 396  419      }
 397  420      C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_worked);
 398  421      if (C->log()) {
 399  422        C->log()->elem("intrinsic id='%s'%s nodes='%d'",
 400  423                       vmIntrinsics::name_at(intrinsic_id()),
 401  424                       (is_virtual() ? " virtual='1'" : ""),
 402  425                       C->unique() - nodes);
 403  426      }
 404  427      return kit.transfer_exceptions_into_jvms();
 405  428    }
 406  429  
 407  430    if (PrintIntrinsics) {
 408      -    tty->print("Did not inline intrinsic %s%s at bci:%d in",
      431 +    if (jvms->has_method()) {
      432 +      // Not a root compile.
      433 +      tty->print("Did not inline intrinsic %s%s at bci:%d in",
      434 +                 vmIntrinsics::name_at(intrinsic_id()),
      435 +                 (is_virtual() ? " (virtual)" : ""), kit.bci());
      436 +      kit.caller()->print_short_name(tty);
      437 +      tty->print_cr(" (%d bytes)", kit.caller()->code_size());
      438 +    } else {
      439 +      // Root compile
      440 +      tty->print("Did not generate intrinsic %s%s at bci:%d in",
 409  441                 vmIntrinsics::name_at(intrinsic_id()),
 410  442                 (is_virtual() ? " (virtual)" : ""), kit.bci());
 411      -    kit.caller()->print_short_name(tty);
 412      -    tty->print_cr(" (%d bytes)", kit.caller()->code_size());
      443 +    }
 413  444    }
 414  445    C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
 415  446    return NULL;
 416  447  }
 417  448  
 418  449  bool LibraryCallKit::try_to_inline() {
 419  450    // Handle symbolic names for otherwise undistinguished boolean switches:
 420  451    const bool is_store       = true;
 421  452    const bool is_native_ptr  = true;
 422  453    const bool is_static      = true;
 423  454  
      455 +  if (!jvms()->has_method()) {
      456 +    // Root JVMState has a null method.
      457 +    assert(map()->memory()->Opcode() == Op_Parm, "");
      458 +    // Insert the memory aliasing node
      459 +    set_all_memory(reset_memory());
      460 +  }
      461 +  assert(merged_memory(), "");
      462 +
 424  463    switch (intrinsic_id()) {
 425  464    case vmIntrinsics::_hashCode:
 426  465      return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
 427  466    case vmIntrinsics::_identityHashCode:
 428  467      return inline_native_hashcode(/*!virtual*/ false, is_static);
 429  468    case vmIntrinsics::_getClass:
 430  469      return inline_native_getClass();
 431  470  
 432  471    case vmIntrinsics::_dsin:
 433  472    case vmIntrinsics::_dcos:
[... 220 lines elided ...]
 654  693      return inline_reverseBytes((vmIntrinsics::ID) intrinsic_id());
 655  694  
 656  695    case vmIntrinsics::_get_AtomicLong:
 657  696      return inline_native_AtomicLong_get();
 658  697    case vmIntrinsics::_attemptUpdate:
 659  698      return inline_native_AtomicLong_attemptUpdate();
 660  699  
 661  700    case vmIntrinsics::_getCallerClass:
 662  701      return inline_native_Reflection_getCallerClass();
 663  702  
      703 +  case vmIntrinsics::_Reference_get:
      704 +    return inline_reference_get();
      705 +
 664  706    default:
 665  707      // If you get here, it may be that someone has added a new intrinsic
 666  708      // to the list in vmSymbols.hpp without implementing it here.
 667  709  #ifndef PRODUCT
 668  710      if ((PrintMiscellaneous && (Verbose || WizardMode)) || PrintOpto) {
 669  711        tty->print_cr("*** Warning: Unimplemented intrinsic %s(%d)",
 670  712                      vmIntrinsics::name_at(intrinsic_id()), intrinsic_id());
 671  713      }
 672  714  #endif
 673  715      return false;
[... 1398 lines elided ...]
2072 2114    default:
2073 2115      ;
2074 2116    }
2075 2117    return true;
2076 2118  }
2077 2119  
2078 2120  //----------------------------inline_unsafe_access----------------------------
2079 2121  
2080 2122  const static BasicType T_ADDRESS_HOLDER = T_LONG;
2081 2123  
     2124 +// Helper that guards and inserts a G1 pre-barrier.
     2125 +void LibraryCallKit::insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val) {
     2126 +  assert(UseG1GC, "should not call this otherwise");
     2127 +
     2128 +  // We need to generate the following....
     2129 +  //
     2130 +  // if (offset == java_lang_ref_Reference::_reference_offset) {
     2131 +  //   if (base != null) {
      2132 +  //     if (klass(base)->reference_type() != REF_NONE) {
     2133 +  //       pre_barrier(_, pre_val, ...);
     2134 +  //     }
     2135 +  //   }
     2136 +  // }
     2137 +
      2138 +  float likely   = PROB_LIKELY(0.999);
      2139 +  float unlikely = PROB_UNLIKELY(0.999);
     2140 +
     2141 +  IdealKit ideal(gvn(), control(),  merged_memory());
     2142 +#define __ ideal.
     2143 +
     2144 +  const int reference_type_offset = instanceKlass::reference_type_offset_in_bytes() +
     2145 +                                        sizeof(oopDesc);
     2146 +
     2147 +  Node* referent_off = __ ConI(java_lang_ref_Reference::referent_offset);
     2148 +
     2149 +  __ if_then(offset, BoolTest::eq, referent_off, unlikely); {
     2150 +    __ if_then(base_oop, BoolTest::ne, null(), likely); {
     2151 +      Node* k_adr = __ AddP(base_oop, base_oop, __ ConX(oopDesc::klass_offset_in_bytes()));
     2152 +
     2153 +      Node* klass = _gvn.transform( LoadKlassNode::make(gvn(), immutable_memory(), k_adr,
     2154 +                                                        TypeRawPtr::BOTTOM,
     2155 +                                                        TypeKlassPtr::OBJECT_OR_NULL) );
     2156 +
     2157 +      Node* ref_typ_adr = __ AddP(klass, klass, __ ConX(reference_type_offset));
     2158 +      Node* ref_none_val = __ ConI(REF_NONE);
     2159 +
     2160 +      Node* ref_typ = __ load(__ ctrl(), ref_typ_adr, TypeInt::INT, T_INT, Compile::AliasIdxRaw);
     2161 +
      2162 +      __ if_then(ref_typ, BoolTest::ne, ref_none_val, unlikely); {
      2163 +
      2164 +        // Sync IdealKit and GraphKit.
      2165 +        set_all_memory( __ merged_memory());
      2166 +
     2167 +        // Use the pre-barrier to record the value in the referent field
     2168 +        pre_barrier(false /* do_load */,
     2169 +                    __ ctrl(),
     2170 +                    NULL /* obj */, NULL /* adr */, -1 /* alias_idx */, NULL /* val */, NULL /* val_type */,
     2171 +                    pre_val /* pre_val */,
     2172 +                    T_OBJECT);
     2173 +        // Update IdealKit memory.
     2174 +        __ set_all_memory(merged_memory());
     2175 +        __ set_ctrl(control());
     2176 +      } __ end_if(); // _ref_type != ref_none
     2177 +    } __ end_if(); // base  != NULL
     2178 +  } __ end_if(); // offset == referent_offset
      2179 +
     2180 +  // Final sync IdealKit and GraphKit.
     2181 +  sync_kit(ideal);
     2182 +#undef __
     2183 +}
     2184 +
     2185 +
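For comparison, a standalone model of the guard structure that insert_g1_pre_barrier emits (hypothetical, simplified types; the real code builds this control flow as C2 nodes via IdealKit):

    #include <cstddef>

    // Simplified, hypothetical stand-ins for the HotSpot types involved.
    enum ReferenceType { REF_NONE, REF_OTHER };
    struct Klass { ReferenceType _reference_type; };
    struct Oop   { Klass* _klass; };

    static void satb_enqueue(Oop* pre_val) { /* record pre_val in the SATB log */ }

    // The three guards, in the order the IdealKit code above emits them.
    static void insert_g1_pre_barrier_model(Oop* base, ptrdiff_t offset,
                                            ptrdiff_t referent_offset, Oop* pre_val) {
      if (offset == referent_offset) {                      // referent field?
        if (base != NULL) {                                 // non-null base oop?
          if (base->_klass->_reference_type != REF_NONE) {  // a Reference subclass?
            satb_enqueue(pre_val);                          // pre_barrier(..., pre_val, ...)
          }
        }
      }
    }

The real version additionally loads the klass and reference_type fields through raw memory, which is why the IdealKit and GraphKit memory states have to be resynchronized around the pre_barrier call.
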
2082 2186  // Interpret Unsafe.fieldOffset cookies correctly:
2083 2187  extern jlong Unsafe_field_offset_to_byte_offset(jlong field_offset);
2084 2188  
2085 2189  bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile) {
2086 2190    if (callee()->is_static())  return false;  // caller must have the capability!
2087 2191  
2088 2192  #ifndef PRODUCT
2089 2193    {
2090 2194      ResourceMark rm;
2091 2195      // Check the signatures.
[... 56 lines elided ...]
2148 2252        val = pop_pair();
2149 2253        break;
2150 2254      default:
2151 2255        val = pop();
2152 2256      }
2153 2257    }
2154 2258  
2155 2259    // Build address expression.  See the code in inline_unsafe_prefetch.
2156 2260    Node *adr;
2157 2261    Node *heap_base_oop = top();
     2262 +  Node* offset = top();
     2263 +
2158 2264    if (!is_native_ptr) {
2159 2265      // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
2160      -    Node* offset = pop_pair();
     2266 +    offset = pop_pair();
2161 2267      // The base is either a Java object or a value produced by Unsafe.staticFieldBase
2162 2268      Node* base   = pop();
2163 2269      // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2164 2270      // to be plain byte offsets, which are also the same as those accepted
2165 2271      // by oopDesc::field_base.
2166 2272      assert(Unsafe_field_offset_to_byte_offset(11) == 11,
2167 2273             "fieldOffset must be byte-scaled");
2168 2274      // 32-bit machines ignore the high half!
2169 2275      offset = ConvL2X(offset);
2170 2276      adr = make_unsafe_address(base, offset);
[... 20 lines elided ...]
2191 2297    Compile::AliasType* alias_type = C->alias_type(adr_type);
2192 2298    assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2193 2299  
2194 2300    // We will need memory barriers unless we can determine a unique
2195 2301    // alias category for this reference.  (Note:  If for some reason
2196 2302    // the barriers get omitted and the unsafe reference begins to "pollute"
2197 2303    // the alias analysis of the rest of the graph, either Compile::can_alias
2198 2304    // or Compile::must_alias will throw a diagnostic assert.)
2199 2305    bool need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
2200 2306  
     2307 +  // If we are reading the value of the referent field of a Reference
     2308 +  // object (either by using Unsafe directly or through reflection)
     2309 +  // then, if G1 is enabled, we need to record the referent in an
     2310 +  // SATB log buffer using the pre-barrier mechanism.
     2311 +  bool need_read_barrier = UseG1GC && !is_native_ptr && !is_store &&
     2312 +                           offset != top() && heap_base_oop != top();
     2313 +
2201 2314    if (!is_store && type == T_OBJECT) {
2202 2315      // Attempt to infer a sharper value type from the offset and base type.
2203 2316      ciKlass* sharpened_klass = NULL;
2204 2317  
2205 2318      // See if it is an instance field, with an object type.
2206 2319      if (alias_type->field() != NULL) {
2207 2320        assert(!is_native_ptr, "native pointer op cannot use a java address");
2208 2321        if (alias_type->field()->type()->is_klass()) {
2209 2322          sharpened_klass = alias_type->field()->type()->as_klass();
2210 2323        }
[... 63 lines elided ...]
2274 2387    if (!is_store) {
2275 2388      Node* p = make_load(control(), adr, value_type, type, adr_type, is_volatile);
2276 2389      // load value and push onto stack
2277 2390      switch (type) {
2278 2391      case T_BOOLEAN:
2279 2392      case T_CHAR:
2280 2393      case T_BYTE:
2281 2394      case T_SHORT:
2282 2395      case T_INT:
2283 2396      case T_FLOAT:
     2397 +      push(p);
     2398 +      break;
2284 2399      case T_OBJECT:
2285      -      push( p );
     2400 +      if (need_read_barrier) {
     2401 +        insert_g1_pre_barrier(heap_base_oop, offset, p);
     2402 +      }
     2403 +      push(p);
2286 2404        break;
2287 2405      case T_ADDRESS:
2288 2406        // Cast to an int type.
2289 2407        p = _gvn.transform( new (C, 2) CastP2XNode(NULL,p) );
2290 2408        p = ConvX2L(p);
2291 2409        push_pair(p);
2292 2410        break;
2293 2411      case T_DOUBLE:
2294 2412      case T_LONG:
2295 2413        push_pair( p );
[... 236 lines elided ...]
2532 2650    switch(type) {
2533 2651    case T_INT:
2534 2652      cas = _gvn.transform(new (C, 5) CompareAndSwapINode(control(), mem, adr, newval, oldval));
2535 2653      break;
2536 2654    case T_LONG:
2537 2655      cas = _gvn.transform(new (C, 5) CompareAndSwapLNode(control(), mem, adr, newval, oldval));
2538 2656      break;
2539 2657    case T_OBJECT:
2540 2658       // reference stores need a store barrier.
2541 2659      // (They don't if CAS fails, but it isn't worth checking.)
2542      -    pre_barrier(control(), base, adr, alias_idx, newval, value_type->make_oopptr(), T_OBJECT);
     2660 +    pre_barrier(true /* do_load*/,
     2661 +                control(), base, adr, alias_idx, newval, value_type->make_oopptr(),
     2662 +                NULL /* pre_val*/,
     2663 +                T_OBJECT);
2543 2664  #ifdef _LP64
2544 2665      if (adr->bottom_type()->is_ptr_to_narrowoop()) {
2545 2666        Node *newval_enc = _gvn.transform(new (C, 2) EncodePNode(newval, newval->bottom_type()->make_narrowoop()));
2546 2667        Node *oldval_enc = _gvn.transform(new (C, 2) EncodePNode(oldval, oldval->bottom_type()->make_narrowoop()));
2547 2668        cas = _gvn.transform(new (C, 5) CompareAndSwapNNode(control(), mem, adr,
2548 2669                                                            newval_enc, oldval_enc));
2549 2670      } else
2550 2671  #endif
2551 2672      {
2552 2673        cas = _gvn.transform(new (C, 5) CompareAndSwapPNode(control(), mem, adr, newval, oldval));
[... 2755 lines elided ...]
5308 5429    address     copyfunc_addr =
5309 5430        basictype2arraycopy(basic_elem_type, src_offset, dest_offset,
5310 5431                            disjoint_bases, copyfunc_name, dest_uninitialized);
5311 5432  
5312 5433    // Call it.  Note that the count_ix value is not scaled to a byte-size.
5313 5434    make_runtime_call(RC_LEAF|RC_NO_FP,
5314 5435                      OptoRuntime::fast_arraycopy_Type(),
5315 5436                      copyfunc_addr, copyfunc_name, adr_type,
5316 5437                      src_start, dest_start, copy_length XTOP);
5317 5438  }
     5439 +
     5440 +//----------------------------inline_reference_get----------------------------
     5441 +
     5442 +bool LibraryCallKit::inline_reference_get() {
     5443 +  const int nargs = 1; // self
     5444 +
      5445 +  guarantee(java_lang_ref_Reference::referent_offset > 0,
      5446 +            "should have already been set");
      5447 +
     5448 +  int referent_offset = java_lang_ref_Reference::referent_offset;
     5449 +
     5450 +  // Restore the stack and pop off the argument
     5451 +  _sp += nargs;
     5452 +  Node *reference_obj = pop();
     5453 +
     5454 +  // Null check on self without removing any arguments.
     5455 +  _sp += nargs;
     5456 +  reference_obj = do_null_check(reference_obj, T_OBJECT);
      5457 +  _sp -= nargs;
     5458 +
     5459 +  if (stopped()) return true;
     5460 +
     5461 +  Node *adr = basic_plus_adr(reference_obj, reference_obj, referent_offset);
     5462 +
     5463 +  ciInstanceKlass* klass = env()->Object_klass();
     5464 +  const TypeOopPtr* object_type = TypeOopPtr::make_from_klass(klass);
     5465 +
     5466 +  Node* no_ctrl = NULL;
     5467 +  Node *result = make_load(no_ctrl, adr, object_type, T_OBJECT);
     5468 +
     5469 +  // Use the pre-barrier to record the value in the referent field
     5470 +  pre_barrier(false /* do_load */,
     5471 +              control(),
     5472 +              NULL /* obj */, NULL /* adr */, -1 /* alias_idx */, NULL /* val */, NULL /* val_type */,
     5473 +              result /* pre_val */,
     5474 +              T_OBJECT);
     5475 +
     5476 +  push(result);
     5477 +  return true;
     5478 +}
     5479 +
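pre_barrier() is used in two modes in this patch: with do_load == true at the CAS site (the barrier loads the old value from adr itself) and with do_load == false here and in inline_unsafe_access (the caller supplies the already-loaded value as pre_val). A minimal standalone model of that contract (hypothetical; not the HotSpot implementation):

    #include <cassert>
    #include <cstddef>

    static void satb_enqueue(void* pre_val) { /* record for concurrent marking */ }

    // do_load == true : the barrier loads the old value from adr itself.
    // do_load == false: the caller already loaded the value and passes pre_val.
    static void pre_barrier_model(bool do_load, void** adr, void* pre_val) {
      if (do_load) {
        assert(adr != NULL && pre_val == NULL);
        satb_enqueue(*adr);
      } else {
        assert(adr == NULL && pre_val != NULL);
        satb_enqueue(pre_val);
      }
    }
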
    