
src/cpu/x86/vm/macroAssembler_x86.cpp

rev 12310 : [mq]: gcinterface.patch


Old version (before the patch):

2918 // !defined(COMPILER2) is because of stupid core builds
2919 #if !defined(_LP64) || defined(COMPILER1) || !defined(COMPILER2) || INCLUDE_JVMCI
2920 void MacroAssembler::empty_FPU_stack() {
2921   if (VM_Version::supports_mmx()) {
2922     emms();
2923   } else {
2924     for (int i = 8; i-- > 0; ) ffree(i);
2925   }
2926 }
2927 #endif // !LP64 || C1 || !C2 || INCLUDE_JVMCI
2928 
2929 
2930 // Defines obj, preserves var_size_in_bytes
2931 void MacroAssembler::eden_allocate(Register obj,
2932                                    Register var_size_in_bytes,
2933                                    int con_size_in_bytes,
2934                                    Register t1,
2935                                    Label& slow_case) {
2936   assert(obj == rax, "obj must be in rax for cmpxchg");
2937   assert_different_registers(obj, var_size_in_bytes, t1);
2938   if (!Universe::heap()->supports_inline_contig_alloc()) {
2939     jmp(slow_case);
2940   } else {
2941     Register end = t1;
2942     Label retry;
2943     bind(retry);
2944     ExternalAddress heap_top((address) Universe::heap()->top_addr());
2945     movptr(obj, heap_top);
2946     if (var_size_in_bytes == noreg) {
2947       lea(end, Address(obj, con_size_in_bytes));
2948     } else {
2949       lea(end, Address(obj, var_size_in_bytes, Address::times_1));
2950     }
2951     // if end < obj then we wrapped around => object too long => slow case
2952     cmpptr(end, obj);
2953     jcc(Assembler::below, slow_case);
2954     cmpptr(end, ExternalAddress((address) Universe::heap()->end_addr()));
2955     jcc(Assembler::above, slow_case);
2956     // Compare obj with the current top; if they are still equal, store the
2957     // new top (held in end) at the top address. Sets ZF if the values were
2958     // equal and clears it otherwise. The lock prefix makes it atomic on MPs.
2959     locked_cmpxchgptr(end, heap_top);
2960     jcc(Assembler::notEqual, retry);
2961   }
2962 }
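
eden_allocate() above is a lock-free bump-pointer allocator: read top, compute
end, range-check, then CAS the new top and retry on contention. A minimal
self-contained C++ sketch of the emitted logic (std::atomic stands in for the
lock-prefixed cmpxchg; names are hypothetical, not HotSpot API):

    #include <atomic>
    #include <cstddef>

    // Sketch of the bump-pointer CAS loop emitted by eden_allocate().
    static char* eden_allocate_sketch(std::atomic<char*>& top, char* heap_end,
                                      size_t size_in_bytes) {
      for (;;) {
        char* obj = top.load();                    // movptr(obj, heap_top)
        char* end = obj + size_in_bytes;           // lea(end, ...)
        if (end < obj)      return nullptr;        // wrapped: object too long
        if (end > heap_end) return nullptr;        // past heap end: slow case
        if (top.compare_exchange_strong(obj, end)) // locked_cmpxchgptr
          return obj;                              // we installed the new top
        // CAS lost the race: another thread moved top; retry (jcc notEqual)
      }
    }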
2963 
2964 void MacroAssembler::enter() {
2965   push(rbp);
2966   mov(rbp, rsp);
2967 }
2968 
2969 // A 5 byte nop that is safe for patching (see patch_verified_entry)
2970 void MacroAssembler::fat_nop() {
2971   if (UseAddressNop) {
2972     addr_nop_5();
2973   } else {
2974     emit_int8(0x26); // es:


5244   if(tosca_live) pop(rax);
5245 
5246   bind(done);
5247 }
5248 
5249 void MacroAssembler::g1_write_barrier_post(Register store_addr,
5250                                            Register new_val,
5251                                            Register thread,
5252                                            Register tmp,
5253                                            Register tmp2) {
5254 #ifdef _LP64
5255   assert(thread == r15_thread, "must be");
5256 #endif // _LP64
5257 
5258   Address queue_index(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
5259                                        DirtyCardQueue::byte_offset_of_index()));
5260   Address buffer(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
5261                                        DirtyCardQueue::byte_offset_of_buf()));
5262 
5263   CardTableModRefBS* ct =
5264     barrier_set_cast<CardTableModRefBS>(Universe::heap()->barrier_set());
5265   assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
5266 
5267   Label done;
5268   Label runtime;
5269 
5270   // Does store cross heap regions?
5271 
5272   movptr(tmp, store_addr);
5273   xorptr(tmp, new_val);
5274   shrptr(tmp, HeapRegion::LogOfHRGrainBytes);
5275   jcc(Assembler::equal, done);
5276 
5277   // crosses regions, storing NULL?
5278 
5279   cmpptr(new_val, (int32_t) NULL_WORD);
5280   jcc(Assembler::equal, done);
5281 
5282   // storing region crossing non-NULL, is card already dirty?
5283 
5284   const Register card_addr = tmp;


5329   call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::g1_wb_post), card_addr, thread);
5330   pop(thread);
5331 #endif
5332   pop(new_val);
5333   pop(store_addr);
5334 
5335   bind(done);
5336 }
5337 
5338 #endif // INCLUDE_ALL_GCS
5339 //////////////////////////////////////////////////////////////////////////////////
5340 
5341 
5342 void MacroAssembler::store_check(Register obj, Address dst) {
5343   store_check(obj);
5344 }
5345 
5346 void MacroAssembler::store_check(Register obj) {
5347   // Does a store check for the oop in register obj. The content of
5348   // register obj is destroyed afterwards.
5349   BarrierSet* bs = Universe::heap()->barrier_set();
5350   assert(bs->kind() == BarrierSet::CardTableForRS ||
5351          bs->kind() == BarrierSet::CardTableExtension,
5352          "Wrong barrier set kind");
5353 
5354   CardTableModRefBS* ct = barrier_set_cast<CardTableModRefBS>(bs);
5355   assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
5356 
5357   shrptr(obj, CardTableModRefBS::card_shift);
5358 
5359   Address card_addr;
5360 
5361   // The calculation for byte_map_base is as follows:
5362   // byte_map_base = _byte_map - (uintptr_t(low_bound) >> card_shift);
5363   // So this essentially converts an address to a displacement and it will
5364   // never need to be relocated. On 64bit however the value may be too
5365   // large for a 32bit displacement.
5366   intptr_t disp = (intptr_t) ct->byte_map_base;
5367   if (is_simm32(disp)) {
5368     card_addr = Address(noreg, obj, Address::times_1, disp);
5369   } else {


5451   movptr(Address(thread, JavaThread::tlab_top_offset()), end);
5452 
5453   // recover var_size_in_bytes if necessary
5454   if (var_size_in_bytes == end) {
5455     subptr(var_size_in_bytes, obj);
5456   }
5457   verify_tlab();
5458 }
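
The TLAB fast path finishing above needs no atomics because top and end are
thread-local; the subptr recovers the size when the size register was reused
to hold end (end == obj + size). A self-contained sketch, assuming the
end-vs-tlab-end check happens earlier in the real code (hypothetical names):

    #include <cstddef>

    struct TlabSketch { char* top; char* end; };

    // Thread-local bump allocation as finished by the code above.
    static char* tlab_allocate_sketch(TlabSketch& tlab, size_t size_in_bytes) {
      char* obj = tlab.top;
      char* end = obj + size_in_bytes;
      if (end > tlab.end) return nullptr;  // slow path in the real code
      tlab.top = end;                      // movptr(Address(thread, tlab_top), end)
      // size can be re-derived as end - obj if its register was clobbered
      return obj;
    }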
5459 
5460 // Preserves rbx and rdx.
5461 Register MacroAssembler::tlab_refill(Label& retry,
5462                                      Label& try_eden,
5463                                      Label& slow_case) {
5464   Register top = rax;
5465   Register t1  = rcx; // object size
5466   Register t2  = rsi;
5467   Register thread_reg = NOT_LP64(rdi) LP64_ONLY(r15_thread);
5468   assert_different_registers(top, thread_reg, t1, t2, /* preserve: */ rbx, rdx);
5469   Label do_refill, discard_tlab;
5470 
5471   if (!Universe::heap()->supports_inline_contig_alloc()) {
5472     // No allocation in the shared eden.
5473     jmp(slow_case);
5474   }
5475 
5476   NOT_LP64(get_thread(thread_reg));
5477 
5478   movptr(top, Address(thread_reg, in_bytes(JavaThread::tlab_top_offset())));
5479   movptr(t1,  Address(thread_reg, in_bytes(JavaThread::tlab_end_offset())));
5480 
5481   // calculate amount of free space
5482   subptr(t1, top);
5483   shrptr(t1, LogHeapWordSize);
5484 
5485   // Retain tlab and allocate object in shared space if
5486   // the amount free in the tlab is too large to discard.
5487   cmpptr(t1, Address(thread_reg, in_bytes(JavaThread::tlab_refill_waste_limit_offset())));
5488   jcc(Assembler::lessEqual, discard_tlab);
5489 
5490   // Retain
5491   // %%% yuck as movptr...


6594     movl(dst, (int32_t)NULL_WORD);
6595   } else {
6596     movslq(dst, (int32_t)NULL_WORD);
6597   }
6598 #else
6599   movl(dst, (int32_t)NULL_WORD);
6600 #endif
6601 }
6602 
6603 #ifdef _LP64
6604 void MacroAssembler::store_klass_gap(Register dst, Register src) {
6605   if (UseCompressedClassPointers) {
6606     // Store to klass gap in destination
6607     movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
6608   }
6609 }
6610 
6611 #ifdef ASSERT
6612 void MacroAssembler::verify_heapbase(const char* msg) {
6613   assert (UseCompressedOops, "should be compressed");
6614   assert (Universe::heap() != NULL, "java heap should be initialized");
6615   if (CheckCompressedOops) {
6616     Label ok;
6617     push(rscratch1); // cmpptr trashes rscratch1
6618     cmpptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));
6619     jcc(Assembler::equal, ok);
6620     STOP(msg);
6621     bind(ok);
6622     pop(rscratch1);
6623   }
6624 }
6625 #endif
6626 
6627 // Algorithm must match oop.inline.hpp encode_heap_oop.
6628 void MacroAssembler::encode_heap_oop(Register r) {
6629 #ifdef ASSERT
6630   verify_heapbase("MacroAssembler::encode_heap_oop: heap base corrupted?");
6631 #endif
6632   verify_oop(r, "broken oop in encode_heap_oop");
6633   if (Universe::narrow_oop_base() == NULL) {
6634     if (Universe::narrow_oop_shift() != 0) {


6693   verify_heapbase("MacroAssembler::decode_heap_oop: heap base corrupted?");
6694 #endif
6695   if (Universe::narrow_oop_base() == NULL) {
6696     if (Universe::narrow_oop_shift() != 0) {
6697       assert (LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6698       shlq(r, LogMinObjAlignmentInBytes);
6699     }
6700   } else {
6701     Label done;
6702     shlq(r, LogMinObjAlignmentInBytes);
6703     jccb(Assembler::equal, done);
6704     addq(r, r12_heapbase);
6705     bind(done);
6706   }
6707   verify_oop(r, "broken oop in decode_heap_oop");
6708 }
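
One subtlety in decode_heap_oop() above: a null narrow oop must decode to NULL,
not to the heap base. shlq sets ZF when the shifted value is zero, so the
jccb(equal, done) skips the addq for null. In plain C++ (a sketch using plain
types rather than HotSpot's oop/narrowOop):

    #include <cstdint>

    // Heap-oop decode with a non-NULL base.
    static void* decode_heap_oop_sketch(uint32_t narrow, uintptr_t base,
                                        int shift) {
      if (narrow == 0) return 0;               // shlq set ZF; addq was skipped
      return (void*)(base + ((uintptr_t)narrow << shift)); // shlq + addq r12
    }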
6709 
6710 void  MacroAssembler::decode_heap_oop_not_null(Register r) {
6711   // Note: it will change flags
6712   assert (UseCompressedOops, "should only be used for compressed headers");
6713   assert (Universe::heap() != NULL, "java heap should be initialized");
6714   // Cannot assert, unverified entry point counts instructions (see .ad file)
6715   // vtableStubs also counts instructions in pd_code_size_limit.
6716   // Also do not verify_oop as this is called by verify_oop.
6717   if (Universe::narrow_oop_shift() != 0) {
6718     assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6719     shlq(r, LogMinObjAlignmentInBytes);
6720     if (Universe::narrow_oop_base() != NULL) {
6721       addq(r, r12_heapbase);
6722     }
6723   } else {
6724     assert (Universe::narrow_oop_base() == NULL, "sanity");
6725   }
6726 }
6727 
6728 void  MacroAssembler::decode_heap_oop_not_null(Register dst, Register src) {
6729   // Note: it will change flags
6730   assert (UseCompressedOops, "should only be used for compressed headers");
6731   assert (Universe::heap() != NULL, "java heap should be initialized");
6732   // Cannot assert, unverified entry point counts instructions (see .ad file)
6733   // vtableStubs also counts instructions in pd_code_size_limit.
6734   // Also do not verify_oop as this is called by verify_oop.
6735   if (Universe::narrow_oop_shift() != 0) {
6736     assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6737     if (LogMinObjAlignmentInBytes == Address::times_8) {
6738       leaq(dst, Address(r12_heapbase, src, Address::times_8, 0));
6739     } else {
6740       if (dst != src) {
6741         movq(dst, src);
6742       }
6743       shlq(dst, LogMinObjAlignmentInBytes);
6744       if (Universe::narrow_oop_base() != NULL) {
6745         addq(dst, r12_heapbase);
6746       }
6747     }
6748   } else {
6749     assert (Universe::narrow_oop_base() == NULL, "sanity");
6750     if (dst != src) {
6751       movq(dst, src);


6772 void MacroAssembler::encode_klass_not_null(Register dst, Register src) {
6773   if (dst == src) {
6774     encode_klass_not_null(src);
6775   } else {
6776     if (Universe::narrow_klass_base() != NULL) {
6777       mov64(dst, (int64_t)Universe::narrow_klass_base());
6778       negq(dst);
6779       addq(dst, src);
6780     } else {
6781       movptr(dst, src);
6782     }
6783     if (Universe::narrow_klass_shift() != 0) {
6784       assert (LogKlassAlignmentInBytes == Universe::narrow_klass_shift(), "decode alg wrong");
6785       shrq(dst, LogKlassAlignmentInBytes);
6786     }
6787   }
6788 }
6789 
6790 // Function instr_size_for_decode_klass_not_null() counts the instructions
6791 // generated by decode_klass_not_null(Register r) and reinit_heapbase(),
6792 // when (Universe::heap() != NULL).  Hence, if the instructions they
6793 // generate change, then this method needs to be updated.
6794 int MacroAssembler::instr_size_for_decode_klass_not_null() {
6795   assert (UseCompressedClassPointers, "only for compressed klass ptrs");
6796   if (Universe::narrow_klass_base() != NULL) {
6797     // mov64 + addq + shlq? + mov64  (for reinit_heapbase()).
6798     return (Universe::narrow_klass_shift() == 0 ? 20 : 24);
6799   } else {
6800     // longest load decode klass function, mov64, leaq
6801     return 16;
6802   }
6803 }
6804 
6805 // !!! If the instructions that get generated here change then function
6806 // instr_size_for_decode_klass_not_null() needs to get updated.
6807 void  MacroAssembler::decode_klass_not_null(Register r) {
6808   // Note: it will change flags
6809   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6810   assert(r != r12_heapbase, "Decoding a klass in r12");
6811   // Cannot assert, unverified entry point counts instructions (see .ad file)
6812   // vtableStubs also counts instructions in pd_code_size_limit.


6828   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6829   if (dst == src) {
6830     decode_klass_not_null(dst);
6831   } else {
6832     // Cannot assert, unverified entry point counts instructions (see .ad file)
6833     // vtableStubs also counts instructions in pd_code_size_limit.
6834     // Also do not verify_oop as this is called by verify_oop.
6835     mov64(dst, (int64_t)Universe::narrow_klass_base());
6836     if (Universe::narrow_klass_shift() != 0) {
6837       assert(LogKlassAlignmentInBytes == Universe::narrow_klass_shift(), "decode alg wrong");
6838       assert(LogKlassAlignmentInBytes == Address::times_8, "klass not aligned on 64bits?");
6839       leaq(dst, Address(dst, src, Address::times_8, 0));
6840     } else {
6841       addq(dst, src);
6842     }
6843   }
6844 }
6845 
6846 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
6847   assert (UseCompressedOops, "should only be used for compressed headers");
6848   assert (Universe::heap() != NULL, "java heap should be initialized");
6849   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6850   int oop_index = oop_recorder()->find_index(obj);
6851   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6852   mov_narrow_oop(dst, oop_index, rspec);
6853 }
6854 
6855 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
6856   assert (UseCompressedOops, "should only be used for compressed headers");
6857   assert (Universe::heap() != NULL, "java heap should be initialized");
6858   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6859   int oop_index = oop_recorder()->find_index(obj);
6860   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6861   mov_narrow_oop(dst, oop_index, rspec);
6862 }
6863 
6864 void  MacroAssembler::set_narrow_klass(Register dst, Klass* k) {
6865   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6866   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6867   int klass_index = oop_recorder()->find_index(k);
6868   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6869   mov_narrow_oop(dst, Klass::encode_klass(k), rspec);
6870 }
6871 
6872 void  MacroAssembler::set_narrow_klass(Address dst, Klass* k) {
6873   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6874   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6875   int klass_index = oop_recorder()->find_index(k);
6876   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6877   mov_narrow_oop(dst, Klass::encode_klass(k), rspec);
6878 }
6879 
6880 void  MacroAssembler::cmp_narrow_oop(Register dst, jobject obj) {
6881   assert (UseCompressedOops, "should only be used for compressed headers");
6882   assert (Universe::heap() != NULL, "java heap should be initialized");
6883   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6884   int oop_index = oop_recorder()->find_index(obj);
6885   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6886   Assembler::cmp_narrow_oop(dst, oop_index, rspec);
6887 }
6888 
6889 void  MacroAssembler::cmp_narrow_oop(Address dst, jobject obj) {
6890   assert (UseCompressedOops, "should only be used for compressed headers");
6891   assert (Universe::heap() != NULL, "java heap should be initialized");
6892   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6893   int oop_index = oop_recorder()->find_index(obj);
6894   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6895   Assembler::cmp_narrow_oop(dst, oop_index, rspec);
6896 }
6897 
6898 void  MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
6899   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6900   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6901   int klass_index = oop_recorder()->find_index(k);
6902   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6903   Assembler::cmp_narrow_oop(dst, Klass::encode_klass(k), rspec);
6904 }
6905 
6906 void  MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
6907   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6908   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6909   int klass_index = oop_recorder()->find_index(k);
6910   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6911   Assembler::cmp_narrow_oop(dst, Klass::encode_klass(k), rspec);
6912 }
6913 
6914 void MacroAssembler::reinit_heapbase() {
6915   if (UseCompressedOops || UseCompressedClassPointers) {
6916     if (Universe::heap() != NULL) {
6917       if (Universe::narrow_oop_base() == NULL) {
6918         MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
6919       } else {
6920         mov64(r12_heapbase, (int64_t)Universe::narrow_ptrs_base());
6921       }
6922     } else {
6923       movptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));
6924     }
6925   }
6926 }
6927 
6928 #endif // _LP64
6929 
6930 
6931 // C2 compiled method's prolog code.
6932 void MacroAssembler::verified_entry(int framesize, int stack_bang_size, bool fp_mode_24b) {
6933 
6934   // WARNING: Initial instruction MUST be 5 bytes or longer so that
6935   // NativeJump::patch_verified_entry will be able to patch out the entry
6936   // code safely. The push to verify stack depth is ok at 5 bytes,



New version (after the patch):

2918 // !defined(COMPILER2) is because of stupid core builds
2919 #if !defined(_LP64) || defined(COMPILER1) || !defined(COMPILER2) || INCLUDE_JVMCI
2920 void MacroAssembler::empty_FPU_stack() {
2921   if (VM_Version::supports_mmx()) {
2922     emms();
2923   } else {
2924     for (int i = 8; i-- > 0; ) ffree(i);
2925   }
2926 }
2927 #endif // !LP64 || C1 || !C2 || INCLUDE_JVMCI
2928 
2929 
2930 // Defines obj, preserves var_size_in_bytes
2931 void MacroAssembler::eden_allocate(Register obj,
2932                                    Register var_size_in_bytes,
2933                                    int con_size_in_bytes,
2934                                    Register t1,
2935                                    Label& slow_case) {
2936   assert(obj == rax, "obj must be in rax for cmpxchg");
2937   assert_different_registers(obj, var_size_in_bytes, t1);
2938   if (!GC::gc()->heap()->supports_inline_contig_alloc()) {
2939     jmp(slow_case);
2940   } else {
2941     Register end = t1;
2942     Label retry;
2943     bind(retry);
2944     ExternalAddress heap_top((address) GC::gc()->heap()->top_addr());
2945     movptr(obj, heap_top);
2946     if (var_size_in_bytes == noreg) {
2947       lea(end, Address(obj, con_size_in_bytes));
2948     } else {
2949       lea(end, Address(obj, var_size_in_bytes, Address::times_1));
2950     }
2951     // if end < obj then we wrapped around => object too long => slow case
2952     cmpptr(end, obj);
2953     jcc(Assembler::below, slow_case);
2954     cmpptr(end, ExternalAddress((address) GC::gc()->heap()->end_addr()));
2955     jcc(Assembler::above, slow_case);
2956     // Compare obj with the current top; if they are still equal, store the
2957     // new top (held in end) at the top address. Sets ZF if the values were
2958     // equal and clears it otherwise. The lock prefix makes it atomic on MPs.
2959     locked_cmpxchgptr(end, heap_top);
2960     jcc(Assembler::notEqual, retry);
2961   }
2962 }
2963 
2964 void MacroAssembler::enter() {
2965   push(rbp);
2966   mov(rbp, rsp);
2967 }
2968 
2969 // A 5 byte nop that is safe for patching (see patch_verified_entry)
2970 void MacroAssembler::fat_nop() {
2971   if (UseAddressNop) {
2972     addr_nop_5();
2973   } else {
2974     emit_int8(0x26); // es:


5244   if(tosca_live) pop(rax);
5245 
5246   bind(done);
5247 }
5248 
5249 void MacroAssembler::g1_write_barrier_post(Register store_addr,
5250                                            Register new_val,
5251                                            Register thread,
5252                                            Register tmp,
5253                                            Register tmp2) {
5254 #ifdef _LP64
5255   assert(thread == r15_thread, "must be");
5256 #endif // _LP64
5257 
5258   Address queue_index(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
5259                                        DirtyCardQueue::byte_offset_of_index()));
5260   Address buffer(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
5261                                        DirtyCardQueue::byte_offset_of_buf()));
5262 
5263   CardTableModRefBS* ct =
5264     barrier_set_cast<CardTableModRefBS>(GC::gc()->heap()->barrier_set());
5265   assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
5266 
5267   Label done;
5268   Label runtime;
5269 
5270   // Does store cross heap regions?
5271 
5272   movptr(tmp, store_addr);
5273   xorptr(tmp, new_val);
5274   shrptr(tmp, HeapRegion::LogOfHRGrainBytes);
5275   jcc(Assembler::equal, done);
5276 
5277   // crosses regions, storing NULL?
5278 
5279   cmpptr(new_val, (int32_t) NULL_WORD);
5280   jcc(Assembler::equal, done);
5281 
5282   // storing region crossing non-NULL, is card already dirty?
5283 
5284   const Register card_addr = tmp;


5329   call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::g1_wb_post), card_addr, thread);
5330   pop(thread);
5331 #endif
5332   pop(new_val);
5333   pop(store_addr);
5334 
5335   bind(done);
5336 }
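
The xor/shift at the top of g1_write_barrier_post() is the standard
cross-region filter: two addresses share a heap region exactly when their bits
above LogOfHRGrainBytes agree, so xor-then-shift is zero for same-region
stores. A sketch of the filtering decisions (plain types, hypothetical helper):

    #include <cstdint>

    // Filters emitted before the card check in the G1 post-barrier.
    static bool needs_post_barrier_sketch(uintptr_t store_addr,
                                          uintptr_t new_val,
                                          int log_region_bytes) {
      if (((store_addr ^ new_val) >> log_region_bytes) == 0)
        return false;            // same region: jcc(equal, done)
      if (new_val == 0)
        return false;            // storing NULL: jcc(equal, done)
      return true;               // cross-region, non-NULL: go check the card
    }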
5337 
5338 #endif // INCLUDE_ALL_GCS
5339 //////////////////////////////////////////////////////////////////////////////////
5340 
5341 
5342 void MacroAssembler::store_check(Register obj, Address dst) {
5343   store_check(obj);
5344 }
5345 
5346 void MacroAssembler::store_check(Register obj) {
5347   // Does a store check for the oop in register obj. The content of
5348   // register obj is destroyed afterwards.
5349   BarrierSet* bs = GC::gc()->heap()->barrier_set();
5350   assert(bs->kind() == BarrierSet::CardTableForRS ||
5351          bs->kind() == BarrierSet::CardTableExtension,
5352          "Wrong barrier set kind");
5353 
5354   CardTableModRefBS* ct = barrier_set_cast<CardTableModRefBS>(bs);
5355   assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
5356 
5357   shrptr(obj, CardTableModRefBS::card_shift);
5358 
5359   Address card_addr;
5360 
5361   // The calculation for byte_map_base is as follows:
5362   // byte_map_base = _byte_map - (uintptr_t(low_bound) >> card_shift);
5363   // So this essentially converts an address to a displacement and it will
5364   // never need to be relocated. On 64bit however the value may be too
5365   // large for a 32bit displacement.
5366   intptr_t disp = (intptr_t) ct->byte_map_base;
5367   if (is_simm32(disp)) {
5368     card_addr = Address(noreg, obj, Address::times_1, disp);
5369   } else {


5451   movptr(Address(thread, JavaThread::tlab_top_offset()), end);
5452 
5453   // recover var_size_in_bytes if necessary
5454   if (var_size_in_bytes == end) {
5455     subptr(var_size_in_bytes, obj);
5456   }
5457   verify_tlab();
5458 }
5459 
5460 // Preserves rbx and rdx.
5461 Register MacroAssembler::tlab_refill(Label& retry,
5462                                      Label& try_eden,
5463                                      Label& slow_case) {
5464   Register top = rax;
5465   Register t1  = rcx; // object size
5466   Register t2  = rsi;
5467   Register thread_reg = NOT_LP64(rdi) LP64_ONLY(r15_thread);
5468   assert_different_registers(top, thread_reg, t1, t2, /* preserve: */ rbx, rdx);
5469   Label do_refill, discard_tlab;
5470 
5471   if (!GC::gc()->heap()->supports_inline_contig_alloc()) {
5472     // No allocation in the shared eden.
5473     jmp(slow_case);
5474   }
5475 
5476   NOT_LP64(get_thread(thread_reg));
5477 
5478   movptr(top, Address(thread_reg, in_bytes(JavaThread::tlab_top_offset())));
5479   movptr(t1,  Address(thread_reg, in_bytes(JavaThread::tlab_end_offset())));
5480 
5481   // calculate amount of free space
5482   subptr(t1, top);
5483   shrptr(t1, LogHeapWordSize);
5484 
5485   // Retain tlab and allocate object in shared space if
5486   // the amount free in the tlab is too large to discard.
5487   cmpptr(t1, Address(thread_reg, in_bytes(JavaThread::tlab_refill_waste_limit_offset())));
5488   jcc(Assembler::lessEqual, discard_tlab);
5489 
5490   // Retain
5491   // %%% yuck as movptr...
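
The comparison above implements the refill-waste policy: free space is
converted to heap words (subptr, then shrptr by LogHeapWordSize) and compared
with the per-thread refill_waste_limit. A small leftover is discarded and the
TLAB refilled; a large one is retained and the object goes to the shared eden.
Sketch (plain types, hypothetical helper):

    #include <cstddef>

    // jcc(lessEqual, discard_tlab): small leftover => discard and refill.
    static bool should_discard_tlab_sketch(char* top, char* end,
                                           size_t waste_limit_words,
                                           int log_heap_word_size) {
      size_t free_words = (size_t)(end - top) >> log_heap_word_size; // subptr + shrptr
      return free_words <= waste_limit_words;
    }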


6594     movl(dst, (int32_t)NULL_WORD);
6595   } else {
6596     movslq(dst, (int32_t)NULL_WORD);
6597   }
6598 #else
6599   movl(dst, (int32_t)NULL_WORD);
6600 #endif
6601 }
6602 
6603 #ifdef _LP64
6604 void MacroAssembler::store_klass_gap(Register dst, Register src) {
6605   if (UseCompressedClassPointers) {
6606     // Store to klass gap in destination
6607     movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
6608   }
6609 }
6610 
6611 #ifdef ASSERT
6612 void MacroAssembler::verify_heapbase(const char* msg) {
6613   assert (UseCompressedOops, "should be compressed");
6614   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6615   if (CheckCompressedOops) {
6616     Label ok;
6617     push(rscratch1); // cmpptr trashes rscratch1
6618     cmpptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));
6619     jcc(Assembler::equal, ok);
6620     STOP(msg);
6621     bind(ok);
6622     pop(rscratch1);
6623   }
6624 }
6625 #endif
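
verify_heapbase() is a debug-only invariant check: r12 must still hold the
current narrow-oop base; rscratch1 is saved around it because cmpptr against
an ExternalAddress may clobber it. What the emitted code checks, as a sketch
(plain types, hypothetical helper):

    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    // Runtime invariant enforced by the generated check.
    static void verify_heapbase_sketch(uintptr_t r12_value,
                                       const uintptr_t* narrow_base_addr,
                                       const char* msg) {
      if (r12_value != *narrow_base_addr) {
        fprintf(stderr, "%s\n", msg);  // STOP(msg)
        abort();
      }
    }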
6626 
6627 // Algorithm must match oop.inline.hpp encode_heap_oop.
6628 void MacroAssembler::encode_heap_oop(Register r) {
6629 #ifdef ASSERT
6630   verify_heapbase("MacroAssembler::encode_heap_oop: heap base corrupted?");
6631 #endif
6632   verify_oop(r, "broken oop in encode_heap_oop");
6633   if (Universe::narrow_oop_base() == NULL) {
6634     if (Universe::narrow_oop_shift() != 0) {


6693   verify_heapbase("MacroAssembler::decode_heap_oop: heap base corrupted?");
6694 #endif
6695   if (Universe::narrow_oop_base() == NULL) {
6696     if (Universe::narrow_oop_shift() != 0) {
6697       assert (LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6698       shlq(r, LogMinObjAlignmentInBytes);
6699     }
6700   } else {
6701     Label done;
6702     shlq(r, LogMinObjAlignmentInBytes);
6703     jccb(Assembler::equal, done);
6704     addq(r, r12_heapbase);
6705     bind(done);
6706   }
6707   verify_oop(r, "broken oop in decode_heap_oop");
6708 }
6709 
6710 void  MacroAssembler::decode_heap_oop_not_null(Register r) {
6711   // Note: it will change flags
6712   assert (UseCompressedOops, "should only be used for compressed headers");
6713   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6714   // Cannot assert, unverified entry point counts instructions (see .ad file)
6715   // vtableStubs also counts instructions in pd_code_size_limit.
6716   // Also do not verify_oop as this is called by verify_oop.
6717   if (Universe::narrow_oop_shift() != 0) {
6718     assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6719     shlq(r, LogMinObjAlignmentInBytes);
6720     if (Universe::narrow_oop_base() != NULL) {
6721       addq(r, r12_heapbase);
6722     }
6723   } else {
6724     assert (Universe::narrow_oop_base() == NULL, "sanity");
6725   }
6726 }
6727 
6728 void  MacroAssembler::decode_heap_oop_not_null(Register dst, Register src) {
6729   // Note: it will change flags
6730   assert (UseCompressedOops, "should only be used for compressed headers");
6731   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6732   // Cannot assert, unverified entry point counts instructions (see .ad file)
6733   // vtableStubs also counts instructions in pd_code_size_limit.
6734   // Also do not verify_oop as this is called by verify_oop.
6735   if (Universe::narrow_oop_shift() != 0) {
6736     assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6737     if (LogMinObjAlignmentInBytes == Address::times_8) {
6738       leaq(dst, Address(r12_heapbase, src, Address::times_8, 0));
6739     } else {
6740       if (dst != src) {
6741         movq(dst, src);
6742       }
6743       shlq(dst, LogMinObjAlignmentInBytes);
6744       if (Universe::narrow_oop_base() != NULL) {
6745         addq(dst, r12_heapbase);
6746       }
6747     }
6748   } else {
6749     assert (Universe::narrow_oop_base() == NULL, "sanity");
6750     if (dst != src) {
6751       movq(dst, src);


6772 void MacroAssembler::encode_klass_not_null(Register dst, Register src) {
6773   if (dst == src) {
6774     encode_klass_not_null(src);
6775   } else {
6776     if (Universe::narrow_klass_base() != NULL) {
6777       mov64(dst, (int64_t)Universe::narrow_klass_base());
6778       negq(dst);
6779       addq(dst, src);
6780     } else {
6781       movptr(dst, src);
6782     }
6783     if (Universe::narrow_klass_shift() != 0) {
6784       assert (LogKlassAlignmentInBytes == Universe::narrow_klass_shift(), "decode alg wrong");
6785       shrq(dst, LogKlassAlignmentInBytes);
6786     }
6787   }
6788 }
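
The mov64/negq/addq triple above computes dst = src - base without touching
src: dst = (-base) + src. Followed by the shift, the whole encode is
(klass - base) >> shift. As arithmetic (plain types, hypothetical helper):

    #include <cstdint>

    // Compressed-klass encode matching encode_klass_not_null(dst, src).
    static uint32_t encode_klass_sketch(uintptr_t klass, uintptr_t base,
                                        int shift) {
      uintptr_t diff = klass - base;     // mov64(dst, base); negq; addq(dst, src)
      return (uint32_t)(diff >> shift);  // shrq(dst, LogKlassAlignmentInBytes)
    }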
6789 
6790 // Function instr_size_for_decode_klass_not_null() counts the instructions
6791 // generated by decode_klass_not_null(Register r) and reinit_heapbase(),
6792 // when (GC::gc()->heap() != NULL).  Hence, if the instructions they
6793 // generate change, then this method needs to be updated.
6794 int MacroAssembler::instr_size_for_decode_klass_not_null() {
6795   assert (UseCompressedClassPointers, "only for compressed klass ptrs");
6796   if (Universe::narrow_klass_base() != NULL) {
6797     // mov64 + addq + shlq? + mov64  (for reinit_heapbase()).
6798     return (Universe::narrow_klass_shift() == 0 ? 20 : 24);
6799   } else {
6800     // longest load decode klass function, mov64, leaq
6801     return 16;
6802   }
6803 }
6804 
6805 // !!! If the instructions that get generated here change then function
6806 // instr_size_for_decode_klass_not_null() needs to get updated.
6807 void  MacroAssembler::decode_klass_not_null(Register r) {
6808   // Note: it will change flags
6809   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6810   assert(r != r12_heapbase, "Decoding a klass in r12");
6811   // Cannot assert, unverified entry point counts instructions (see .ad file)
6812   // vtableStubs also counts instructions in pd_code_size_limit.


6828   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6829   if (dst == src) {
6830     decode_klass_not_null(dst);
6831   } else {
6832     // Cannot assert, unverified entry point counts instructions (see .ad file)
6833     // vtableStubs also counts instructions in pd_code_size_limit.
6834     // Also do not verify_oop as this is called by verify_oop.
6835     mov64(dst, (int64_t)Universe::narrow_klass_base());
6836     if (Universe::narrow_klass_shift() != 0) {
6837       assert(LogKlassAlignmentInBytes == Universe::narrow_klass_shift(), "decode alg wrong");
6838       assert(LogKlassAlignmentInBytes == Address::times_8, "klass not aligned on 64bits?");
6839       leaq(dst, Address(dst, src, Address::times_8, 0));
6840     } else {
6841       addq(dst, src);
6842     }
6843   }
6844 }
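
When the klass shift equals Address::times_8, the decode above collapses to a
single leaq computing base + (src << 3); otherwise it falls back to
mov/shl/add. Both paths implement the same arithmetic (sketch, plain types):

    #include <cstdint>

    // Compressed-klass decode: base + (narrow << shift).
    static uintptr_t decode_klass_sketch(uint32_t narrow_klass, uintptr_t base,
                                         int shift) {
      return base + ((uintptr_t)narrow_klass << shift);  // leaq when shift == 3
    }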
6845 
6846 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
6847   assert (UseCompressedOops, "should only be used for compressed headers");
6848   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6849   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6850   int oop_index = oop_recorder()->find_index(obj);
6851   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6852   mov_narrow_oop(dst, oop_index, rspec);
6853 }
6854 
6855 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
6856   assert (UseCompressedOops, "should only be used for compressed headers");
6857   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6858   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6859   int oop_index = oop_recorder()->find_index(obj);
6860   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6861   mov_narrow_oop(dst, oop_index, rspec);
6862 }
6863 
6864 void  MacroAssembler::set_narrow_klass(Register dst, Klass* k) {
6865   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6866   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6867   int klass_index = oop_recorder()->find_index(k);
6868   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6869   mov_narrow_oop(dst, Klass::encode_klass(k), rspec);
6870 }
6871 
6872 void  MacroAssembler::set_narrow_klass(Address dst, Klass* k) {
6873   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6874   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6875   int klass_index = oop_recorder()->find_index(k);
6876   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6877   mov_narrow_oop(dst, Klass::encode_klass(k), rspec);
6878 }
6879 
6880 void  MacroAssembler::cmp_narrow_oop(Register dst, jobject obj) {
6881   assert (UseCompressedOops, "should only be used for compressed headers");
6882   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6883   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6884   int oop_index = oop_recorder()->find_index(obj);
6885   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6886   Assembler::cmp_narrow_oop(dst, oop_index, rspec);
6887 }
6888 
6889 void  MacroAssembler::cmp_narrow_oop(Address dst, jobject obj) {
6890   assert (UseCompressedOops, "should only be used for compressed headers");
6891   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6892   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6893   int oop_index = oop_recorder()->find_index(obj);
6894   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6895   Assembler::cmp_narrow_oop(dst, oop_index, rspec);
6896 }
6897 
6898 void  MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
6899   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6900   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6901   int klass_index = oop_recorder()->find_index(k);
6902   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6903   Assembler::cmp_narrow_oop(dst, Klass::encode_klass(k), rspec);
6904 }
6905 
6906 void  MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
6907   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6908   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6909   int klass_index = oop_recorder()->find_index(k);
6910   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6911   Assembler::cmp_narrow_oop(dst, Klass::encode_klass(k), rspec);
6912 }
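
All six set_/cmp_narrow_* helpers above share one shape: register the oop or
Klass* with the OopRecorder, wrap the resulting index in a RelocationHolder,
and emit the instruction with that relocation so the embedded narrow constant
can be fixed up if the referent moves or the code is relocated. The recurring
three lines, extracted from the helpers above (not new API):

    int oop_index = oop_recorder()->find_index(obj);          // record referent
    RelocationHolder rspec = oop_Relocation::spec(oop_index); // relocation info
    mov_narrow_oop(dst, oop_index, rspec);                    // emit with reloc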
6913 
6914 void MacroAssembler::reinit_heapbase() {
6915   if (UseCompressedOops || UseCompressedClassPointers) {
6916     if (GC::is_initialized()) {
6917       if (Universe::narrow_oop_base() == NULL) {
6918         MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
6919       } else {
6920         mov64(r12_heapbase, (int64_t)Universe::narrow_ptrs_base());
6921       }
6922     } else {
6923       movptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));
6924     }
6925   }
6926 }
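
Note the one behavioral change the patch makes in this file: reinit_heapbase()
now gates the constant-load fast path on GC::is_initialized() where the old
code tested Universe::heap() != NULL. The distinction matters because until
the GC is initialized the narrow base may still change, so the value must be
loaded indirectly rather than baked in as an immediate (both lines quoted from
the function above):

    // base is final:        mov64(r12_heapbase, (int64_t)Universe::narrow_ptrs_base());
    // base may still move:  movptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));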
6927 
6928 #endif // _LP64
6929 
6930 
6931 // C2 compiled method's prolog code.
6932 void MacroAssembler::verified_entry(int framesize, int stack_bang_size, bool fp_mode_24b) {
6933 
6934   // WARNING: Initial instruction MUST be 5 bytes or longer so that
6935   // NativeJump::patch_verified_entry will be able to patch out the entry
6936   // code safely. The push to verify stack depth is ok at 5 bytes,

