src/cpu/ppc/vm/interp_masm_ppc_64.cpp

rev 6728 : 8050942: PPC64: implement template interpreter for ppc64le
Contributed-by: asmundak@google.com


 102   assert(scratch_reg != R0, "can't use R0 as scratch_reg here");
 103   if (JvmtiExport::can_pop_frame()) {
 104     Label L;
 105 
 106     // Check the "pending popframe condition" flag in the current thread.
 107     lwz(scratch_reg, in_bytes(JavaThread::popframe_condition_offset()), R16_thread);
 108 
 109     // Initiate popframe handling only if it is not already being
 110     // processed. If the flag has the popframe_processing bit set, it
 111     // means that this code is called *during* popframe handling - we
 112     // don't want to reenter.
 113     andi_(R0, scratch_reg, JavaThread::popframe_pending_bit);
 114     beq(CCR0, L);
 115 
 116     andi_(R0, scratch_reg, JavaThread::popframe_processing_bit);
 117     bne(CCR0, L);
 118 
 119     // Call the Interpreter::remove_activation_preserving_args_entry()
 120     // func to get the address of the same-named entrypoint in the
 121     // generated interpreter code.
 122     call_c(CAST_FROM_FN_PTR(FunctionDescriptor*,
 123                             Interpreter::remove_activation_preserving_args_entry),
 124            relocInfo::none);
 125 
 126     // Jump to Interpreter::_remove_activation_preserving_args_entry.
 127     mtctr(R3_RET);
 128     bctr();
 129 
 130     align(32, 12);
 131     bind(L);
 132   }
 133 }
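
The two andi_/branch pairs above implement a simple two-bit test on the popframe condition word: proceed only when the pending bit is set and the processing bit is clear. A minimal C++ sketch of that test (the bit values shown are hypothetical placeholders; the real constants are defined in JavaThread):

  // Hypothetical bit values, for illustration only; the real constants live in JavaThread.
  enum { popframe_pending_bit = 0x2, popframe_processing_bit = 0x4 };

  static bool should_start_popframe(int condition) {
    // Start handling only if popframe is pending and we are not already
    // inside popframe handling (matches the andi_/beq and andi_/bne pair).
    return (condition & popframe_pending_bit) != 0 &&
           (condition & popframe_processing_bit) == 0;
  }
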
 134 
 135 void InterpreterMacroAssembler::check_and_handle_earlyret(Register scratch_reg) {
 136   const Register Rthr_state_addr = scratch_reg;
 137   if (JvmtiExport::can_force_early_return()) {
 138     Label Lno_early_ret;
 139     ld(Rthr_state_addr, in_bytes(JavaThread::jvmti_thread_state_offset()), R16_thread);
 140     cmpdi(CCR0, Rthr_state_addr, 0);
 141     beq(CCR0, Lno_early_ret);
 142 
 143     lwz(R0, in_bytes(JvmtiThreadState::earlyret_state_offset()), Rthr_state_addr);
 144     cmpwi(CCR0, R0, JvmtiThreadState::earlyret_pending);


 314     case btos:
 315     case ctos:
 316     case stos:
 317     case itos: pop_i();              break;
 318     case ltos: pop_l();              break;
 319     case ftos: pop_f();              break;
 320     case dtos: pop_d();              break;
 321     case vtos: /* nothing to do */   break;
 322     default  : ShouldNotReachHere();
 323   }
 324   verify_oop(R17_tos, state);
 325 }
 326 
 327 void InterpreterMacroAssembler::empty_expression_stack() {
 328   addi(R15_esp, R26_monitor, - Interpreter::stackElementSize);
 329 }
 330 
 331 void InterpreterMacroAssembler::get_2_byte_integer_at_bcp(int         bcp_offset,
 332                                                           Register    Rdst,
 333                                                           signedOrNot is_signed) {
 334   // Read Java big endian format.
 335   if (is_signed == Signed) {
 336     lha(Rdst, bcp_offset, R14_bcp);
 337   } else {
 338     lhz(Rdst, bcp_offset, R14_bcp);
 339   }
 340 #if 0
 341   assert(Rtmp != Rdst, "need separate temp register");
 342   Register Rfirst = Rtmp;
 343   lbz(Rfirst, bcp_offset, R14_bcp); // first byte
 344   lbz(Rdst, bcp_offset+1, R14_bcp); // second byte
 345 
 346   // Rdst = ((Rfirst<<8) & 0xFF00) | (Rdst &~ 0xFF00)
 347   rldimi(/*RA=*/Rdst, /*RS=*/Rfirst, /*sh=*/8, /*mb=*/48);
 348   if (is_signed == Signed) {
 349     extsh(Rdst, Rdst);
 350   }
 351 #endif
 352 }
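
The disabled #if 0 path documents how the big-endian halfword would be assembled byte by byte with rldimi; the live path lets lha/lhz do the same in a single load. A minimal C++ sketch of the value being computed, assuming a plain byte pointer in place of (bcp_offset, R14_bcp):

  #include <cstdint>

  // Assemble the big-endian (Java order) halfword from two bytes, matching
  //   Rdst = ((Rfirst << 8) & 0xFF00) | (Rdst & ~0xFF00)
  // where Rfirst holds the first byte and Rdst the second.
  static int32_t read_java_u2(const uint8_t* p, bool is_signed) {
    uint16_t v = (uint16_t)(((unsigned)p[0] << 8) | p[1]);  // first byte is the high byte
    return is_signed ? (int32_t)(int16_t)v                  // extsh: sign-extend halfword
                     : (int32_t)v;                          // zero-extended, as with lhz
  }
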
 353 
 354 void InterpreterMacroAssembler::get_4_byte_integer_at_bcp(int         bcp_offset,
 355                                                           Register    Rdst,
 356                                                           signedOrNot is_signed) {
 357   // Read Java big endian format.
 358   if (bcp_offset & 3) { // Offset unaligned?
 359     load_const_optimized(Rdst, bcp_offset);
 360     if (is_signed == Signed) {
 361       lwax(Rdst, R14_bcp, Rdst);
 362     } else {
 363       lwzx(Rdst, R14_bcp, Rdst);
 364     }
 365   } else {
 366     if (is_signed == Signed) {
 367       lwa(Rdst, bcp_offset, R14_bcp);
 368     } else {
 369       lwz(Rdst, bcp_offset, R14_bcp);
 370     }
 371   }
 372 }
 373 
 374 // Load the constant pool cache index from the bytecode stream.
 375 //
 376 // Kills / writes:
 377 //   - Rdst, Rscratch
 378 void InterpreterMacroAssembler::get_cache_index_at_bcp(Register Rdst, int bcp_offset, size_t index_size) {
 379   assert(bcp_offset > 0, "bcp is still pointing to start of bytecode");
 380   if (index_size == sizeof(u2)) {
 381     get_2_byte_integer_at_bcp(bcp_offset, Rdst, Unsigned);
 382   } else if (index_size == sizeof(u4)) {
 383     get_4_byte_integer_at_bcp(bcp_offset, Rdst, Signed);
 384     assert(ConstantPool::decode_invokedynamic_index(~123) == 123, "else change next line");
 385     nand(Rdst, Rdst, Rdst); // convert to plain index
 386   } else if (index_size == sizeof(u1)) {
 387     lbz(Rdst, bcp_offset, R14_bcp);
 388   } else {
 389     ShouldNotReachHere();
 390   }
 391   // Rdst now contains cp cache index.
 392 }
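
The nand here relies on NAND(x, x) == ~x, and the preceding assert pins down that ConstantPool::decode_invokedynamic_index() is exactly a bitwise complement, so one instruction converts the encoded invokedynamic index back to a plain index. A small stand-alone sketch of the identity:

  #include <cstdint>

  // nand(x, x) == ~(x & x) == ~x, so a single NAND undoes the complement
  // encoding that the assert above documents for invokedynamic indexes.
  static int32_t decode_invokedynamic_index_sketch(int32_t encoded) {
    return ~(encoded & encoded);   // identical to ~encoded
  }
  // e.g. decode_invokedynamic_index_sketch(~123) == 123, as the assert requires.
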
 393 
 394 void InterpreterMacroAssembler::get_cache_and_index_at_bcp(Register cache, int bcp_offset, size_t index_size) {
 395   get_cache_index_at_bcp(cache, bcp_offset, index_size);
 396   sldi(cache, cache, exact_log2(in_words(ConstantPoolCacheEntry::size()) * BytesPerWord));
 397   add(cache, R27_constPoolCache, cache);
 398 }
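
The sldi/add pair is plain address arithmetic: scale the cache index by the ConstantPoolCacheEntry size (a power of two, hence a shift) and add the cache base held in R27_constPoolCache. A minimal C++ sketch, with the shift amount passed in rather than derived from ConstantPoolCacheEntry::size():

  #include <cstdint>

  static uintptr_t cache_entry_address(uintptr_t cache_base,        // R27_constPoolCache
                                       uintptr_t index,             // plain cp cache index
                                       unsigned  log2_entry_bytes)  // log2(entry size in bytes)
  {
    // sldi: scale the index by the power-of-two entry size; add: apply the base.
    return cache_base + (index << log2_entry_bytes);
  }
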
 399 
 400 // Load object from cpool->resolved_references(index).
 401 void InterpreterMacroAssembler::load_resolved_reference_at_index(Register result, Register index) {
 402   assert_different_registers(result, index);
 403   get_constant_pool(result);
 404 
 405   // Convert from field index to resolved_references() index and from
 406   // word index to byte offset. Since this is a java object, it can be compressed.
 407   Register tmp = index;  // reuse
 408   sldi(tmp, index, LogBytesPerHeapOop);
 409   // Load pointer for resolved_references[] objArray.
 410   ld(result, ConstantPool::resolved_references_offset_in_bytes(), result);
 411   // JNIHandles::resolve(result)
 412   ld(result, 0, result);
 413 #ifdef ASSERT
 414   Label index_ok;
 415   lwa(R0, arrayOopDesc::length_offset_in_bytes(), result);
 416   sldi(R0, R0, LogBytesPerHeapOop);
 417   cmpd(CCR0, tmp, R0);




 102   assert(scratch_reg != R0, "can't use R0 as scratch_reg here");
 103   if (JvmtiExport::can_pop_frame()) {
 104     Label L;
 105 
 106     // Check the "pending popframe condition" flag in the current thread.
 107     lwz(scratch_reg, in_bytes(JavaThread::popframe_condition_offset()), R16_thread);
 108 
 109     // Initiate popframe handling only if it is not already being
 110     // processed. If the flag has the popframe_processing bit set, it
 111     // means that this code is called *during* popframe handling - we
 112     // don't want to reenter.
 113     andi_(R0, scratch_reg, JavaThread::popframe_pending_bit);
 114     beq(CCR0, L);
 115 
 116     andi_(R0, scratch_reg, JavaThread::popframe_processing_bit);
 117     bne(CCR0, L);
 118 
 119     // Call the Interpreter::remove_activation_preserving_args_entry()
 120     // func to get the address of the same-named entrypoint in the
 121     // generated interpreter code.
 122 #if defined(ABI_ELFv2)
 123     call_c(CAST_FROM_FN_PTR(address,
 124                             Interpreter::remove_activation_preserving_args_entry),
 125                             relocInfo::none);
 126 #else
 127     call_c(CAST_FROM_FN_PTR(FunctionDescriptor*,
 128                             Interpreter::remove_activation_preserving_args_entry),
 129            relocInfo::none);
 130 #endif
 131 
 132     // Jump to Interpreter::_remove_activation_preserving_args_entry.
 133     mtctr(R3_RET);
 134     bctr();
 135 
 136     align(32, 12);
 137     bind(L);
 138   }
 139 }
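
The new #if defined(ABI_ELFv2) branch is the ppc64le-specific part: under the ELFv2 ABI a C function pointer is the entry address itself, while under ELFv1 (big-endian) it points at a function descriptor, which is why the two branches cast to address and FunctionDescriptor* respectively. A rough sketch of the ELFv1 descriptor layout this distinction refers to (field names are illustrative; HotSpot's own FunctionDescriptor class is the authoritative definition):

  #include <cstdint>

  // ELFv1: a "function pointer" points at a three-doubleword descriptor.
  struct FunctionDescriptorSketch {
    uint64_t entry;  // address of the first instruction
    uint64_t toc;    // TOC base (r2) to load before the call
    uint64_t env;    // environment pointer, unused for C
  };
  // ELFv2 (ppc64le): the function pointer is the entry address itself, which is
  // why the #if branch above can cast straight to 'address'.
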
 140 
 141 void InterpreterMacroAssembler::check_and_handle_earlyret(Register scratch_reg) {
 142   const Register Rthr_state_addr = scratch_reg;
 143   if (JvmtiExport::can_force_early_return()) {
 144     Label Lno_early_ret;
 145     ld(Rthr_state_addr, in_bytes(JavaThread::jvmti_thread_state_offset()), R16_thread);
 146     cmpdi(CCR0, Rthr_state_addr, 0);
 147     beq(CCR0, Lno_early_ret);
 148 
 149     lwz(R0, in_bytes(JvmtiThreadState::earlyret_state_offset()), Rthr_state_addr);
 150     cmpwi(CCR0, R0, JvmtiThreadState::earlyret_pending);


 320     case btos:
 321     case ctos:
 322     case stos:
 323     case itos: pop_i();              break;
 324     case ltos: pop_l();              break;
 325     case ftos: pop_f();              break;
 326     case dtos: pop_d();              break;
 327     case vtos: /* nothing to do */   break;
 328     default  : ShouldNotReachHere();
 329   }
 330   verify_oop(R17_tos, state);
 331 }
 332 
 333 void InterpreterMacroAssembler::empty_expression_stack() {
 334   addi(R15_esp, R26_monitor, - Interpreter::stackElementSize);
 335 }
 336 
 337 void InterpreterMacroAssembler::get_2_byte_integer_at_bcp(int         bcp_offset,
 338                                                           Register    Rdst,
 339                                                           signedOrNot is_signed) {
 340 #if defined(VM_LITTLE_ENDIAN)
 341   if (bcp_offset) {
 342     load_const_optimized(Rdst, bcp_offset);
 343     lhbrx(Rdst, R14_bcp, Rdst);
 344   } else {
 345     lhbrx(Rdst, R14_bcp);
 346   }
 347   if (is_signed == Signed) {
 348     extsh(Rdst, Rdst);
 349   }
 350 #else
 351   // Read Java big endian format.
 352   if (is_signed == Signed) {
 353     lha(Rdst, bcp_offset, R14_bcp);
 354   } else {
 355     lhz(Rdst, bcp_offset, R14_bcp);
 356   }
 357 #endif
 358 }
 359 
 360 void InterpreterMacroAssembler::get_4_byte_integer_at_bcp(int         bcp_offset,
 361                                                           Register    Rdst,
 362                                                           signedOrNot is_signed) {
 363 #if defined(VM_LITTLE_ENDIAN)
 364   if (bcp_offset) {
 365     load_const_optimized(Rdst, bcp_offset);
 366     lwbrx(Rdst, R14_bcp, Rdst);
 367   } else {
 368     lwbrx(Rdst, R14_bcp);
 369   }
 370   if (is_signed == Signed) {
 371     extsw(Rdst, Rdst);
 372   }
 373 #else
 374   // Read Java big endian format.
 375   if (bcp_offset & 3) { // Offset unaligned?
 376     load_const_optimized(Rdst, bcp_offset);
 377     if (is_signed == Signed) {
 378       lwax(Rdst, R14_bcp, Rdst);
 379     } else {
 380       lwzx(Rdst, R14_bcp, Rdst);
 381     }
 382   } else {
 383     if (is_signed == Signed) {
 384       lwa(Rdst, bcp_offset, R14_bcp);
 385     } else {
 386       lwz(Rdst, bcp_offset, R14_bcp);
 387     }
 388   }
 389 #endif
 390 }
 391 
 392 
 393 // Load the constant pool cache index from the bytecode stream.
 394 //
 395 // Kills / writes:
 396 //   - Rdst, Rscratch
 397 void InterpreterMacroAssembler::get_cache_index_at_bcp(Register Rdst, int bcp_offset, size_t index_size) {
 398   assert(bcp_offset > 0, "bcp is still pointing to start of bytecode");
 399   // Cache index is always in the native format, courtesy of Rewriter.
 400   if (index_size == sizeof(u2)) {
 401     lhz(Rdst, bcp_offset, R14_bcp);
 402   } else if (index_size == sizeof(u4)) {
 403     if (bcp_offset & 3) {
 404       load_const_optimized(Rdst, bcp_offset);
 405       lwax(Rdst, R14_bcp, Rdst);
 406     } else {
 407       lwa(Rdst, bcp_offset, R14_bcp);
 408     }
 409     assert(ConstantPool::decode_invokedynamic_index(~123) == 123, "else change next line");
 410     nand(Rdst, Rdst, Rdst); // convert to plain index
 411   } else if (index_size == sizeof(u1)) {
 412     lbz(Rdst, bcp_offset, R14_bcp);
 413   } else {
 414     ShouldNotReachHere();
 415   }
 416   // Rdst now contains cp cache index.
 417 }
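
No byte swap is needed for these loads because, as the comment notes, the Rewriter stores constant pool cache indexes in the bytecode stream in native byte order; a plain lhz/lwa therefore reads them correctly on either endianness. A trivial C++ sketch of such a native-order read, assuming a raw byte pointer:

  #include <cstdint>
  #include <cstring>

  // Native-order read, like lhz: no byte reversal on either endianness.
  static uint16_t read_native_u2(const uint8_t* p) {
    uint16_t v;
    std::memcpy(&v, p, sizeof(v));
    return v;
  }
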
 418 
 419 void InterpreterMacroAssembler::get_cache_and_index_at_bcp(Register cache, int bcp_offset, size_t index_size) {
 420   get_cache_index_at_bcp(cache, bcp_offset, index_size);
 421   sldi(cache, cache, exact_log2(in_words(ConstantPoolCacheEntry::size()) * BytesPerWord));
 422   add(cache, R27_constPoolCache, cache);
 423 }
 424 
 425 // Load 4-byte signed or unsigned integer in Java format (that is, big-endian format)
 426 // from (Rsrc)+offset.
 427 void InterpreterMacroAssembler::get_u4(Register Rdst, Register Rsrc, int offset,
 428                                        signedOrNot is_signed) {
 429 #if defined(VM_LITTLE_ENDIAN)
 430   if (offset) {
 431     load_const_optimized(Rdst, offset);
 432     lwbrx(Rdst, Rdst, Rsrc);
 433   } else {
 434     lwbrx(Rdst, Rsrc);
 435   }
 436   if (is_signed == Signed) {
 437     extsw(Rdst, Rdst);
 438   }
 439 #else
 440   if (is_signed == Signed) {
 441     lwa(Rdst, offset, Rsrc);
 442   } else {
 443     lwz(Rdst, offset, Rsrc);
 444   }
 445 #endif
 446 }
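
On little-endian hosts lwbrx performs the byte reversal during the load and extsw then sign-extends the 32-bit result when Signed is requested (get_2_byte_integer_at_bcp above does the same with lhbrx/extsh); on big-endian the plain lwa/lwz already read Java order. A minimal C++ sketch of the value get_u4 produces, assuming a raw byte pointer in place of (offset, Rsrc):

  #include <cstdint>

  static int64_t read_java_u4(const uint8_t* p, bool is_signed) {
    // Java bytecode data is big-endian: p[0] is the most significant byte.
    uint32_t v = ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
                 ((uint32_t)p[2] <<  8) |  (uint32_t)p[3];
    return is_signed ? (int64_t)(int32_t)v   // extsw: sign-extend the word
                     : (int64_t)v;           // zero-extended
  }
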
 447 
 448 // Load object from cpool->resolved_references(index).
 449 void InterpreterMacroAssembler::load_resolved_reference_at_index(Register result, Register index) {
 450   assert_different_registers(result, index);
 451   get_constant_pool(result);
 452 
 453   // Convert from field index to resolved_references() index and from
 454   // word index to byte offset. Since this is a java object, it can be compressed.
 455   Register tmp = index;  // reuse
 456   sldi(tmp, index, LogBytesPerHeapOop);
 457   // Load pointer for resolved_references[] objArray.
 458   ld(result, ConstantPool::resolved_references_offset_in_bytes(), result);
 459   // JNIHandles::resolve(result)
 460   ld(result, 0, result);
 461 #ifdef ASSERT
 462   Label index_ok;
 463   lwa(R0, arrayOopDesc::length_offset_in_bytes(), result);
 464   sldi(R0, R0, LogBytesPerHeapOop);
 465   cmpd(CCR0, tmp, R0);