
src/cpu/ppc/vm/interp_masm_ppc_64.cpp

rev 8107 : 8077838: Recent developments for ppc.


 429 #if defined(VM_LITTLE_ENDIAN)
 430   if (offset) {
 431     load_const_optimized(Rdst, offset);
 432     lwbrx(Rdst, Rdst, Rsrc);
 433   } else {
 434     lwbrx(Rdst, Rsrc);
 435   }
 436   if (is_signed == Signed) {
 437     extsw(Rdst, Rdst);
 438   }
 439 #else
 440   if (is_signed == Signed) {
 441     lwa(Rdst, offset, Rsrc);
 442   } else {
 443     lwz(Rdst, offset, Rsrc);
 444   }
 445 #endif
 446 }
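
Both branches above yield the big-endian interpretation of the 4-byte field at Rsrc+offset: on little-endian builds that takes the byte-reversing lwbrx (with an extsw tacked on for the signed case), while big-endian builds can use a plain lwa/lwz. A minimal C++ sketch of the same idea, using GCC/Clang builtins; the function and parameter names are illustrative, not HotSpot's:

```cpp
#include <cstdint>
#include <cstring>

// Sketch only: load a big-endian 4-byte field and sign- or zero-extend it,
// mirroring the lwbrx/extsw vs. lwa/lwz split above.
static std::int64_t load_be_u4(const void* src, int offset, bool is_signed) {
  std::uint32_t raw;
  std::memcpy(&raw, static_cast<const char*>(src) + offset, sizeof raw);
#if defined(__ORDER_LITTLE_ENDIAN__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
  raw = __builtin_bswap32(raw);   // lwbrx: byte-reversed load on LE hosts
#endif                            // BE hosts: the plain load already matches
  return is_signed ? static_cast<std::int64_t>(static_cast<std::int32_t>(raw))  // extsw / lwa
                   : static_cast<std::int64_t>(raw);                            // lwz (zero-extend)
}
```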
 447 
 448 // Load object from cpool->resolved_references(index).
 449 void InterpreterMacroAssembler::load_resolved_reference_at_index(Register result, Register index) {
 450   assert_different_registers(result, index);
 451   get_constant_pool(result);
 452 
 453   // Convert from field index to resolved_references() index and from
 454   // word index to byte offset. Since this is a java object, it can be compressed.
 455   Register tmp = index;  // reuse
 456   sldi(tmp, index, LogBytesPerHeapOop);
 457   // Load pointer for resolved_references[] objArray.
 458   ld(result, ConstantPool::resolved_references_offset_in_bytes(), result);
 459   // JNIHandles::resolve(result)
 460   ld(result, 0, result);
 461 #ifdef ASSERT
 462   Label index_ok;
 463   lwa(R0, arrayOopDesc::length_offset_in_bytes(), result);
 464   sldi(R0, R0, LogBytesPerHeapOop);
 465   cmpd(CCR0, tmp, R0);
 466   blt(CCR0, index_ok);
 467   stop("resolved reference index out of bounds", 0x09256);
 468   bind(index_ok);
 469 #endif
 470   // Add in the index.
 471   add(result, tmp, result);
 472   load_heap_oop(result, arrayOopDesc::base_offset_in_bytes(T_OBJECT), result);
 473 }
 474 
 475 // Generate a subtype check: branch to ok_is_subtype if sub_klass is
 476 // a subtype of super_klass. Blows registers Rsub_klass, tmp1, tmp2.
 477 void InterpreterMacroAssembler::gen_subtype_check(Register Rsub_klass, Register Rsuper_klass, Register Rtmp1,
 478                                                   Register Rtmp2, Register Rtmp3, Label &ok_is_subtype) {
 479   // Profile the not-null value's klass.
 480   profile_typecheck(Rsub_klass, Rtmp1, Rtmp2);
 481   check_klass_subtype(Rsub_klass, Rsuper_klass, Rtmp1, Rtmp2, ok_is_subtype);
 482   profile_typecheck_failed(Rtmp1, Rtmp2);
 483 }
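
check_klass_subtype comes from the shared MacroAssembler base class; conceptually it is a two-level test: a cheap check against a supertype slot at a known position, falling back to a linear scan of the secondary supertypes. A rough sketch of that shape follows; the field names, the fixed depth of 8, and the layout are assumptions for exposition, not HotSpot's actual Klass class:

```cpp
// Illustrative sketch only, not HotSpot's Klass layout.
struct KlassSketch {
  const KlassSketch*  primary_supers[8];   // fixed-depth superclass chain
  int                 super_depth;         // this klass's depth in that chain
  int                 secondary_count;
  const KlassSketch** secondary_supers;    // interfaces and other "secondary" supertypes
};

static bool is_subtype_of(const KlassSketch* sub, const KlassSketch* super) {
  // Fast path: a superclass at a known depth can be checked with a single load.
  if (super->super_depth < 8 &&
      sub->primary_supers[super->super_depth] == super) {
    return true;
  }
  // Slow path: linear scan of the secondary supertypes.
  for (int i = 0; i < sub->secondary_count; i++) {
    if (sub->secondary_supers[i] == super) return true;
  }
  return false;
}
```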
 484 
 485 void InterpreterMacroAssembler::generate_stack_overflow_check_with_compare_and_throw(Register Rmem_frame_size, Register Rscratch1) {
 486   Label done;
 487   sub(Rmem_frame_size, R1_SP, Rmem_frame_size);
 488   ld(Rscratch1, thread_(stack_overflow_limit));
 489   cmpld(CCR0/*is_stack_overflow*/, Rmem_frame_size, Rscratch1);
 490   bgt(CCR0/*is_stack_overflow*/, done);
 491 
 492   // Load target address of the runtime stub.
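
The comparison itself is simple: the prospective new SP (current SP minus the frame size) must stay above the thread's stack_overflow_limit, and only then does the bgt skip the throw path. A one-function sketch of that test, with illustrative names:

```cpp
#include <cstdint>

// Sketch of the test above: sub(Rmem_frame_size, R1_SP, Rmem_frame_size),
// unsigned cmpld against stack_overflow_limit, bgt(done).
static bool frame_fits_on_stack(std::uintptr_t sp,
                                std::uintptr_t frame_size,
                                std::uintptr_t stack_overflow_limit) {
  std::uintptr_t new_sp = sp - frame_size;
  return new_sp > stack_overflow_limit;   // true: branch over the overflow path
}
```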


 859 
 860     // } else if (THREAD->is_lock_owned((address)displaced_header))
 861     //   // Simple recursive case.
 862     //   monitor->lock()->set_displaced_header(NULL);
 863 
 864     // We did not see an unlocked object so try the fast recursive case.
 865 
 866     // Check if owner is self by comparing the value in the markOop of object
 867     // (current_header) with the stack pointer.
 868     sub(current_header, current_header, R1_SP);
 869 
 870     assert(os::vm_page_size() > 0xfff, "page size too small - change the constant");
 871     load_const_optimized(tmp,
 872                          (address) (~(os::vm_page_size()-1) |
 873                                     markOopDesc::lock_mask_in_place));
 874 
 875     and_(R0/*==0?*/, current_header, tmp);
 876     // If condition is true we are done and hence we can store 0 in the displaced
 877     // header indicating it is a recursive lock.
 878     bne(CCR0, slow_case);
 879     release();
 880     std(R0/*==0!*/, BasicObjectLock::lock_offset_in_bytes() +
 881         BasicLock::displaced_header_offset_in_bytes(), monitor);
 882     b(done);
 883 
 884     // } else {
 885     //   // Slow path.
 886     //   InterpreterRuntime::monitorenter(THREAD, monitor);
 887 
 888     // None of the above fast optimizations worked so we have to get into the
 889     // slow case of monitor enter.
 890     bind(slow_case);
 891     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
 892             monitor, /*check_for_exceptions=*/true CC_INTERP_ONLY(&& false));
 893     // }
 894     align(32, 12);
 895     bind(done);
 896   }
 897 }
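
The fast recursive case relies on the mark word of a stack-locked object pointing at a BasicLock on the owning thread's stack. Subtracting SP and masking with ~(page_size-1) | lock_mask therefore answers two questions at once: is that pointer within one page above our SP, and are the low lock bits clear? A hedged sketch of that predicate; the names are illustrative and the "within one page" reading is an interpretation of the mask:

```cpp
#include <cstdint>

// Illustrative predicate for the and_/bne pair above: treat the mark word as
// a recursive stack lock if it points into the current thread's stack close
// to SP and carries no lock bits.
static bool looks_like_recursive_stack_lock(std::uintptr_t mark_word,
                                            std::uintptr_t sp,
                                            std::uintptr_t page_size,    // os::vm_page_size()
                                            std::uintptr_t lock_mask) {  // markOopDesc::lock_mask_in_place
  std::uintptr_t diff = mark_word - sp;                // sub(current_header, current_header, R1_SP)
  std::uintptr_t mask = ~(page_size - 1) | lock_mask;  // load_const_optimized(tmp, ...)
  return (diff & mask) == 0;                           // and_(R0, ...): zero => store NULL displaced header
}
```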
 898 
 899 // Unlocks an object. Used in monitorexit bytecode and remove_activation.


1844   if (ProfileInterpreter && MethodData::profile_parameters()) {
1845     Label profile_continue, done;
1846 
1847     test_method_data_pointer(profile_continue);
1848 
1849     // Load the offset of the area within the MDO used for
1850     // parameters. If it's negative we're not profiling any parameters.
1851     lwz(tmp1, in_bytes(MethodData::parameters_type_data_di_offset()) - in_bytes(MethodData::data_offset()), R28_mdx);
1852     cmpwi(CCR0, tmp1, 0);
1853     blt(CCR0, profile_continue);
1854 
1855     // Compute a pointer to the area for parameters from the offset
1856     // and move the pointer to the slot for the last
1857     // parameters. Collect profiling from last parameter down.
1858     // mdo start + parameters offset + array length - 1
1859 
1860     // Pointer to the parameter area in the MDO.
1861     const Register mdp = tmp1;
1862     add(mdp, tmp1, R28_mdx);
1863 
1864     // Pffset of the current profile entry to update.
1865     const Register entry_offset = tmp2;
1866     // entry_offset = array len in number of cells
1867     ld(entry_offset, in_bytes(ArrayData::array_len_offset()), mdp);
1868 
1869     int off_base = in_bytes(ParametersTypeData::stack_slot_offset(0));
1870     assert(off_base % DataLayout::cell_size == 0, "should be a number of cells");
1871 
1872     // entry_offset (number of cells)  = array len - size of 1 entry + offset of the stack slot field
1873     addi(entry_offset, entry_offset, -TypeStackSlotEntries::per_arg_count() + (off_base / DataLayout::cell_size));
1874     // entry_offset in bytes
1875     sldi(entry_offset, entry_offset, exact_log2(DataLayout::cell_size));
1876 
1877     Label loop;
1878     align(32, 12);
1879     bind(loop);
1880 
1881     // Load offset on the stack from the slot for this parameter.
1882     ld(tmp3, entry_offset, mdp);
1883     sldi(tmp3, tmp3, Interpreter::logStackElementSize);
1884     neg(tmp3, tmp3);




 429 #if defined(VM_LITTLE_ENDIAN)
 430   if (offset) {
 431     load_const_optimized(Rdst, offset);
 432     lwbrx(Rdst, Rdst, Rsrc);
 433   } else {
 434     lwbrx(Rdst, Rsrc);
 435   }
 436   if (is_signed == Signed) {
 437     extsw(Rdst, Rdst);
 438   }
 439 #else
 440   if (is_signed == Signed) {
 441     lwa(Rdst, offset, Rsrc);
 442   } else {
 443     lwz(Rdst, offset, Rsrc);
 444   }
 445 #endif
 446 }
 447 
 448 // Load object from cpool->resolved_references(index).
 449 void InterpreterMacroAssembler::load_resolved_reference_at_index(Register result, Register index, Label *is_null) {
 450   assert_different_registers(result, index);
 451   get_constant_pool(result);
 452 
 453   // Convert from field index to resolved_references() index and from
 454   // word index to byte offset. Since this is a java object, it can be compressed.
 455   Register tmp = index;  // reuse
 456   sldi(tmp, index, LogBytesPerHeapOop);
 457   // Load pointer for resolved_references[] objArray.
 458   ld(result, ConstantPool::resolved_references_offset_in_bytes(), result);
 459   // JNIHandles::resolve(result)
 460   ld(result, 0, result);
 461 #ifdef ASSERT
 462   Label index_ok;
 463   lwa(R0, arrayOopDesc::length_offset_in_bytes(), result);
 464   sldi(R0, R0, LogBytesPerHeapOop);
 465   cmpd(CCR0, tmp, R0);
 466   blt(CCR0, index_ok);
 467   stop("resolved reference index out of bounds", 0x09256);
 468   bind(index_ok);
 469 #endif
 470   // Add in the index.
 471   add(result, tmp, result);
 472   load_heap_oop(result, arrayOopDesc::base_offset_in_bytes(T_OBJECT), result, is_null);
 473 }
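
Here load_heap_oop is handed the new is_null label, so the element load itself reports an unresolved (NULL) slot and the caller can branch straight to its resolve/null handling without testing the result again. A conceptual sketch of that contract, using plain pointers instead of compressed oops; all names are illustrative, not HotSpot's API:

```cpp
#include <cstddef>

// Conceptual stand-in for the tail of the sequence above, once
// JNIHandles::resolve has yielded the element area of the
// resolved_references objArray: index it and report whether the slot
// is still NULL (what the is_null label captures).
static void* load_resolved_reference(void* const* element_base,
                                     int index, bool& is_null) {
  void* oop = element_base[index];   // result + (index << LogBytesPerHeapOop)
  is_null = (oop == nullptr);        // load_heap_oop(..., is_null)
  return oop;
}
```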
 474 
 475 // Generate a subtype check: branch to ok_is_subtype if sub_klass is
 476 // a subtype of super_klass. Blows registers Rsub_klass, tmp1, tmp2.
 477 void InterpreterMacroAssembler::gen_subtype_check(Register Rsub_klass, Register Rsuper_klass, Register Rtmp1,
 478                                                   Register Rtmp2, Register Rtmp3, Label &ok_is_subtype) {
 479   // Profile the not-null value's klass.
 480   profile_typecheck(Rsub_klass, Rtmp1, Rtmp2);
 481   check_klass_subtype(Rsub_klass, Rsuper_klass, Rtmp1, Rtmp2, ok_is_subtype);
 482   profile_typecheck_failed(Rtmp1, Rtmp2);
 483 }
 484 
 485 void InterpreterMacroAssembler::generate_stack_overflow_check_with_compare_and_throw(Register Rmem_frame_size, Register Rscratch1) {
 486   Label done;
 487   sub(Rmem_frame_size, R1_SP, Rmem_frame_size);
 488   ld(Rscratch1, thread_(stack_overflow_limit));
 489   cmpld(CCR0/*is_stack_overflow*/, Rmem_frame_size, Rscratch1);
 490   bgt(CCR0/*is_stack_overflow*/, done);
 491 
 492   // Load target address of the runtime stub.


 859 
 860     // } else if (THREAD->is_lock_owned((address)displaced_header))
 861     //   // Simple recursive case.
 862     //   monitor->lock()->set_displaced_header(NULL);
 863 
 864     // We did not see an unlocked object so try the fast recursive case.
 865 
 866     // Check if owner is self by comparing the value in the markOop of object
 867     // (current_header) with the stack pointer.
 868     sub(current_header, current_header, R1_SP);
 869 
 870     assert(os::vm_page_size() > 0xfff, "page size too small - change the constant");
 871     load_const_optimized(tmp,
 872                          (address) (~(os::vm_page_size()-1) |
 873                                     markOopDesc::lock_mask_in_place));
 874 
 875     and_(R0/*==0?*/, current_header, tmp);
 876     // If condition is true we are done and hence we can store 0 in the displaced
 877     // header indicating it is a recursive lock.
 878     bne(CCR0, slow_case);

 879     std(R0/*==0!*/, BasicObjectLock::lock_offset_in_bytes() +
 880         BasicLock::displaced_header_offset_in_bytes(), monitor);
 881     b(done);
 882 
 883     // } else {
 884     //   // Slow path.
 885     //   InterpreterRuntime::monitorenter(THREAD, monitor);
 886 
 887     // None of the above fast optimizations worked so we have to get into the
 888     // slow case of monitor enter.
 889     bind(slow_case);
 890     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
 891             monitor, /*check_for_exceptions=*/true CC_INTERP_ONLY(&& false));
 892     // }
 893     align(32, 12);
 894     bind(done);
 895   }
 896 }
 897 
 898 // Unlocks an object. Used in monitorexit bytecode and remove_activation.


1843   if (ProfileInterpreter && MethodData::profile_parameters()) {
1844     Label profile_continue, done;
1845 
1846     test_method_data_pointer(profile_continue);
1847 
1848     // Load the offset of the area within the MDO used for
1849     // parameters. If it's negative we're not profiling any parameters.
1850     lwz(tmp1, in_bytes(MethodData::parameters_type_data_di_offset()) - in_bytes(MethodData::data_offset()), R28_mdx);
1851     cmpwi(CCR0, tmp1, 0);
1852     blt(CCR0, profile_continue);
1853 
1854     // Compute a pointer to the area for parameters from the offset
1855     // and move the pointer to the slot for the last
1856     // parameters. Collect profiling from last parameter down.
1857     // mdo start + parameters offset + array length - 1
1858 
1859     // Pointer to the parameter area in the MDO.
1860     const Register mdp = tmp1;
1861     add(mdp, tmp1, R28_mdx);
1862 
1863     // Offset of the current profile entry to update.
1864     const Register entry_offset = tmp2;
1865     // entry_offset = array len in number of cells
1866     ld(entry_offset, in_bytes(ArrayData::array_len_offset()), mdp);
1867 
1868     int off_base = in_bytes(ParametersTypeData::stack_slot_offset(0));
1869     assert(off_base % DataLayout::cell_size == 0, "should be a number of cells");
1870 
1871     // entry_offset (number of cells)  = array len - size of 1 entry + offset of the stack slot field
1872     addi(entry_offset, entry_offset, -TypeStackSlotEntries::per_arg_count() + (off_base / DataLayout::cell_size));
1873     // entry_offset in bytes
1874     sldi(entry_offset, entry_offset, exact_log2(DataLayout::cell_size));
1875 
1876     Label loop;
1877     align(32, 12);
1878     bind(loop);
1879 
1880     // Load offset on the stack from the slot for this parameter.
1881     ld(tmp3, entry_offset, mdp);
1882     sldi(tmp3, tmp3, Interpreter::logStackElementSize);
1883     neg(tmp3, tmp3);

