          --- old/src/cpu/x86/vm/templateTable_x86_64.cpp
          +++ new/src/cpu/x86/vm/templateTable_x86_64.cpp
          ... 195 lines elided ...
 196  196        ShouldNotReachHere();
 197  197  
 198  198    }
 199  199  }
 200  200  
 201  201  Address TemplateTable::at_bcp(int offset) {
 202  202    assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 203  203    return Address(r13, offset);
 204  204  }
 205  205  
 206      -void TemplateTable::patch_bytecode(Bytecodes::Code bytecode, Register bc,
 207      -                                   Register scratch,
 208      -                                   bool load_bc_into_scratch/*=true*/) {
 209      -  if (!RewriteBytecodes) {
 210      -    return;
 211      -  }
 212      -  // the pair bytecodes have already done the load.
 213      -  if (load_bc_into_scratch) {
 214      -    __ movl(bc, bytecode);
      206 +void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
      207 +                                   Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
      208 +                                   int byte_no) {
      209 +  if (!RewriteBytecodes)  return;
      210 +  Label L_patch_done;
      211 +
      212 +  switch (bc) {
      213 +  case Bytecodes::_fast_aputfield:
      214 +  case Bytecodes::_fast_bputfield:
      215 +  case Bytecodes::_fast_cputfield:
      216 +  case Bytecodes::_fast_dputfield:
      217 +  case Bytecodes::_fast_fputfield:
      218 +  case Bytecodes::_fast_iputfield:
      219 +  case Bytecodes::_fast_lputfield:
      220 +  case Bytecodes::_fast_sputfield:
      221 +    {
      222 +      // We skip bytecode quickening for putfield instructions when
      223 +      // the put_code written to the constant pool cache is zero.
      224 +      // This is required so that every execution of this instruction
      225 +      // calls out to InterpreterRuntime::resolve_get_put to do
      226 +      // additional, required work.
      227 +      assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
      228 +      assert(load_bc_into_bc_reg, "we use bc_reg as temp");
      229 +      __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
      230 +      __ movl(bc_reg, bc);
      231 +      __ cmpl(temp_reg, (int) 0);
      232 +      __ jcc(Assembler::zero, L_patch_done);  // don't patch
      233 +    }
      234 +    break;
      235 +  default:
      236 +    assert(byte_no == -1, "sanity");
      237 +    // the pair bytecodes have already done the load.
      238 +    if (load_bc_into_bc_reg) {
      239 +      __ movl(bc_reg, bc);
      240 +    }
 215  241    }
 216      -  Label patch_done;
      242 +
 217  243    if (JvmtiExport::can_post_breakpoint()) {
 218      -    Label fast_patch;
      244 +    Label L_fast_patch;
 219  245      // if a breakpoint is present we can't rewrite the stream directly
 220      -    __ movzbl(scratch, at_bcp(0));
 221      -    __ cmpl(scratch, Bytecodes::_breakpoint);
 222      -    __ jcc(Assembler::notEqual, fast_patch);
 223      -    __ get_method(scratch);
      246 +    __ movzbl(temp_reg, at_bcp(0));
      247 +    __ cmpl(temp_reg, Bytecodes::_breakpoint);
      248 +    __ jcc(Assembler::notEqual, L_fast_patch);
      249 +    __ get_method(temp_reg);
 224  250      // Let breakpoint table handling rewrite to quicker bytecode
 225      -    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, r13, bc);
      251 +    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), temp_reg, r13, bc_reg);
 226  252  #ifndef ASSERT
 227      -    __ jmpb(patch_done);
      253 +    __ jmpb(L_patch_done);
 228  254  #else
 229      -    __ jmp(patch_done);
      255 +    __ jmp(L_patch_done);
 230  256  #endif
 231      -    __ bind(fast_patch);
      257 +    __ bind(L_fast_patch);
 232  258    }
      259 +
 233  260  #ifdef ASSERT
 234      -  Label okay;
 235      -  __ load_unsigned_byte(scratch, at_bcp(0));
 236      -  __ cmpl(scratch, (int) Bytecodes::java_code(bytecode));
 237      -  __ jcc(Assembler::equal, okay);
 238      -  __ cmpl(scratch, bc);
 239      -  __ jcc(Assembler::equal, okay);
      261 +  Label L_okay;
      262 +  __ load_unsigned_byte(temp_reg, at_bcp(0));
      263 +  __ cmpl(temp_reg, (int) Bytecodes::java_code(bc));
      264 +  __ jcc(Assembler::equal, L_okay);
      265 +  __ cmpl(temp_reg, bc_reg);
      266 +  __ jcc(Assembler::equal, L_okay);
 240  267    __ stop("patching the wrong bytecode");
 241      -  __ bind(okay);
      268 +  __ bind(L_okay);
 242  269  #endif
      270 +
 243  271    // patch bytecode
 244      -  __ movb(at_bcp(0), bc);
 245      -  __ bind(patch_done);
      272 +  __ movb(at_bcp(0), bc_reg);
      273 +  __ bind(L_patch_done);
 246  274  }
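For the _fast_*putfield cases, the new code above reads the put_code byte from the constant pool cache entry and skips patching while it is still zero, so every execution keeps calling InterpreterRuntime::resolve_get_put. A minimal C++ sketch of that guard (bcp and cp_cache_put_code are hypothetical stand-ins for what the generated assembly actually reads):

    #include <cstdint>

    // Sketch only: cp_cache_put_code stands for the bytecode byte fetched via
    // get_cache_and_index_and_bytecode_at_bcp; bcp points at the bytecode that
    // is a candidate for rewriting.
    static void maybe_quicken_putfield(uint8_t* bcp, uint8_t cp_cache_put_code,
                                       uint8_t fast_putfield_bc) {
      if (cp_cache_put_code == 0) {
        return;                  // don't patch: resolve_get_put must still run
      }
      *bcp = fast_putfield_bc;   // quicken: overwrite with the fast variant
    }
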
 247  275  
 248  276  
 249  277  // Individual instructions
 250  278  
 251  279  void TemplateTable::nop() {
 252  280    transition(vtos, vtos);
 253  281    // nothing to do
 254  282  }
 255  283  
          ... 1835 lines elided ...
2091 2119  
2092 2120  void TemplateTable::resolve_cache_and_index(int byte_no,
2093 2121                                              Register result,
2094 2122                                              Register Rcache,
2095 2123                                              Register index,
2096 2124                                              size_t index_size) {
2097 2125    const Register temp = rbx;
2098 2126    assert_different_registers(result, Rcache, index, temp);
2099 2127  
2100 2128    Label resolved;
2101      -  __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2102 2129    if (byte_no == f1_oop) {
2103 2130      // We are resolved if the f1 field contains a non-null object (CallSite, etc.)
2104 2131      // This kind of CP cache entry does not need to match the flags byte, because
2105 2132      // there is a 1-1 relation between bytecode type and CP entry type.
2106 2133      assert(result != noreg, ""); //else do cmpptr(Address(...), (int32_t) NULL_WORD)
     2134 +    __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2107 2135      __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()));
2108 2136      __ testptr(result, result);
2109 2137      __ jcc(Assembler::notEqual, resolved);
2110 2138    } else {
2111 2139      assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2112 2140      assert(result == noreg, "");  //else change code for setting result
2113      -    const int shift_count = (1 + byte_no) * BitsPerByte;
2114      -    __ movl(temp, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset()));
2115      -    __ shrl(temp, shift_count);
2116      -    // have we resolved this bytecode?
2117      -    __ andl(temp, 0xFF);
2118      -    __ cmpl(temp, (int) bytecode());
     2141 +    __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
     2142 +    __ cmpl(temp, (int) bytecode());  // have we resolved this bytecode?
2119 2143      __ jcc(Assembler::equal, resolved);
2120 2144    }
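Both the removed shift/mask sequence and its replacement, get_cache_and_index_and_bytecode_at_bcp, fetch the resolved-bytecode byte for byte_no from the indices word of the constant pool cache entry. A minimal sketch of that extraction, with kBitsPerByte standing in for HotSpot's BitsPerByte:

    #include <cstdint>

    static const int kBitsPerByte = 8;  // stand-in for HotSpot's BitsPerByte

    // Mirrors the removed sequence: shrl by (1 + byte_no) * BitsPerByte, andl 0xFF.
    // A zero result means this bytecode has not been resolved yet.
    static int resolved_bytecode(uintptr_t indices, int byte_no) {
      const int shift_count = (1 + byte_no) * kBitsPerByte;
      return (int)((indices >> shift_count) & 0xFF);
    }
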
2121 2145  
2122 2146    // resolve first time through
2123 2147    address entry;
2124 2148    switch (bytecode()) {
2125 2149    case Bytecodes::_getstatic:
2126 2150    case Bytecodes::_putstatic:
2127 2151    case Bytecodes::_getfield:
2128 2152    case Bytecodes::_putfield:
          ... 371 lines elided ...
2500 2524    const Address field(obj, off, Address::times_1);
2501 2525  
2502 2526    Label notByte, notInt, notShort, notChar,
2503 2527          notLong, notFloat, notObj, notDouble;
2504 2528  
2505 2529    __ shrl(flags, ConstantPoolCacheEntry::tosBits);
2506 2530  
2507 2531    assert(btos == 0, "change code, btos != 0");
2508 2532    __ andl(flags, 0x0f);
2509 2533    __ jcc(Assembler::notZero, notByte);
     2534 +
2510 2535    // btos
2511      -  __ pop(btos);
2512      -  if (!is_static) pop_and_check_object(obj);
2513      -  __ movb(field, rax);
2514      -  if (!is_static) {
2515      -    patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx);
     2536 +  {
     2537 +    __ pop(btos);
     2538 +    if (!is_static) pop_and_check_object(obj);
     2539 +    __ movb(field, rax);
     2540 +    if (!is_static) {
     2541 +      patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
     2542 +    }
     2543 +    __ jmp(Done);
2516 2544    }
2517      -  __ jmp(Done);
2518 2545  
2519 2546    __ bind(notByte);
2520 2547    __ cmpl(flags, atos);
2521 2548    __ jcc(Assembler::notEqual, notObj);
2522      -  // atos
2523      -  __ pop(atos);
2524      -  if (!is_static) pop_and_check_object(obj);
2525 2549  
2526      -  // Store into the field
2527      -  do_oop_store(_masm, field, rax, _bs->kind(), false);
2528      -
2529      -  if (!is_static) {
2530      -    patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx);
     2550 +  // atos
     2551 +  {
     2552 +    __ pop(atos);
     2553 +    if (!is_static) pop_and_check_object(obj);
     2554 +    // Store into the field
     2555 +    do_oop_store(_masm, field, rax, _bs->kind(), false);
     2556 +    if (!is_static) {
     2557 +      patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
     2558 +    }
     2559 +    __ jmp(Done);
2531 2560    }
2532      -  __ jmp(Done);
2533 2561  
2534 2562    __ bind(notObj);
2535 2563    __ cmpl(flags, itos);
2536 2564    __ jcc(Assembler::notEqual, notInt);
     2565 +
2537 2566    // itos
2538      -  __ pop(itos);
2539      -  if (!is_static) pop_and_check_object(obj);
2540      -  __ movl(field, rax);
2541      -  if (!is_static) {
2542      -    patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx);
     2567 +  {
     2568 +    __ pop(itos);
     2569 +    if (!is_static) pop_and_check_object(obj);
     2570 +    __ movl(field, rax);
     2571 +    if (!is_static) {
     2572 +      patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
     2573 +    }
     2574 +    __ jmp(Done);
2543 2575    }
2544      -  __ jmp(Done);
2545 2576  
2546 2577    __ bind(notInt);
2547 2578    __ cmpl(flags, ctos);
2548 2579    __ jcc(Assembler::notEqual, notChar);
     2580 +
2549 2581    // ctos
2550      -  __ pop(ctos);
2551      -  if (!is_static) pop_and_check_object(obj);
2552      -  __ movw(field, rax);
2553      -  if (!is_static) {
2554      -    patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx);
     2582 +  {
     2583 +    __ pop(ctos);
     2584 +    if (!is_static) pop_and_check_object(obj);
     2585 +    __ movw(field, rax);
     2586 +    if (!is_static) {
     2587 +      patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
     2588 +    }
     2589 +    __ jmp(Done);
2555 2590    }
2556      -  __ jmp(Done);
2557 2591  
2558 2592    __ bind(notChar);
2559 2593    __ cmpl(flags, stos);
2560 2594    __ jcc(Assembler::notEqual, notShort);
     2595 +
2561 2596    // stos
2562      -  __ pop(stos);
2563      -  if (!is_static) pop_and_check_object(obj);
2564      -  __ movw(field, rax);
2565      -  if (!is_static) {
2566      -    patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx);
     2597 +  {
     2598 +    __ pop(stos);
     2599 +    if (!is_static) pop_and_check_object(obj);
     2600 +    __ movw(field, rax);
     2601 +    if (!is_static) {
     2602 +      patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
     2603 +    }
     2604 +    __ jmp(Done);
2567 2605    }
2568      -  __ jmp(Done);
2569 2606  
2570 2607    __ bind(notShort);
2571 2608    __ cmpl(flags, ltos);
2572 2609    __ jcc(Assembler::notEqual, notLong);
     2610 +
2573 2611    // ltos
2574      -  __ pop(ltos);
2575      -  if (!is_static) pop_and_check_object(obj);
2576      -  __ movq(field, rax);
2577      -  if (!is_static) {
2578      -    patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx);
     2612 +  {
     2613 +    __ pop(ltos);
     2614 +    if (!is_static) pop_and_check_object(obj);
     2615 +    __ movq(field, rax);
     2616 +    if (!is_static) {
     2617 +      patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
     2618 +    }
     2619 +    __ jmp(Done);
2579 2620    }
2580      -  __ jmp(Done);
2581 2621  
2582 2622    __ bind(notLong);
2583 2623    __ cmpl(flags, ftos);
2584 2624    __ jcc(Assembler::notEqual, notFloat);
     2625 +
2585 2626    // ftos
2586      -  __ pop(ftos);
2587      -  if (!is_static) pop_and_check_object(obj);
2588      -  __ movflt(field, xmm0);
2589      -  if (!is_static) {
2590      -    patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx);
     2627 +  {
     2628 +    __ pop(ftos);
     2629 +    if (!is_static) pop_and_check_object(obj);
     2630 +    __ movflt(field, xmm0);
     2631 +    if (!is_static) {
     2632 +      patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
     2633 +    }
     2634 +    __ jmp(Done);
2591 2635    }
2592      -  __ jmp(Done);
2593 2636  
2594 2637    __ bind(notFloat);
2595 2638  #ifdef ASSERT
2596 2639    __ cmpl(flags, dtos);
2597 2640    __ jcc(Assembler::notEqual, notDouble);
2598 2641  #endif
     2642 +
2599 2643    // dtos
2600      -  __ pop(dtos);
2601      -  if (!is_static) pop_and_check_object(obj);
2602      -  __ movdbl(field, xmm0);
2603      -  if (!is_static) {
2604      -    patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx);
     2644 +  {
     2645 +    __ pop(dtos);
     2646 +    if (!is_static) pop_and_check_object(obj);
     2647 +    __ movdbl(field, xmm0);
     2648 +    if (!is_static) {
     2649 +      patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
     2650 +    }
2605 2651    }
2606 2652  
2607 2653  #ifdef ASSERT
2608 2654    __ jmp(Done);
2609 2655  
2610 2656    __ bind(notDouble);
2611 2657    __ stop("Bad state");
2612 2658  #endif
2613 2659  
2614 2660    __ bind(Done);
     2661 +
2615 2662    // Check for volatile store
2616 2663    __ testl(rdx, rdx);
2617 2664    __ jcc(Assembler::zero, notVolatile);
2618 2665    volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2619 2666                                                 Assembler::StoreStore));
2620      -
2621 2667    __ bind(notVolatile);
2622 2668  }
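Every tos case in putfield_or_static follows the same shape: pop the value in the matching tos state, check the receiver for non-static puts, store into the field, and patch the bytecode to its _fast_*putfield form, now passing byte_no through. A minimal C++ sketch of the per-type store dispatch; kTosBits and the enum values below are placeholders, not HotSpot's real constants:

    #include <cstdint>
    #include <cstring>

    static const int kTosBits = 28;                            // placeholder for ConstantPoolCacheEntry::tosBits
    enum TosSketch { kBtos = 0, kItos, kLtos, kFtos, kDtos };  // illustrative values; only btos == 0 is asserted above

    static void store_field_sketch(uint32_t flags, void* field, const void* value) {
      switch ((flags >> kTosBits) & 0x0f) {                    // extract the field's tos state from flags
        case kBtos: std::memcpy(field, value, 1); break;       // movb
        case kItos: std::memcpy(field, value, 4); break;       // movl
        case kLtos: std::memcpy(field, value, 8); break;       // movq
        case kFtos: std::memcpy(field, value, 4); break;       // movflt
        case kDtos: std::memcpy(field, value, 8); break;       // movdbl
        default: break;                                        // atos/ctos/stos handled analogously in the real code
      }
    }
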
2623 2669  
2624 2670  void TemplateTable::putfield(int byte_no) {
2625 2671    putfield_or_static(byte_no, false);
2626 2672  }
2627 2673  
2628 2674  void TemplateTable::putstatic(int byte_no) {
2629 2675    putfield_or_static(byte_no, true);
2630 2676  }
          ... 1051 lines elided ...