
src/cpu/x86/vm/macroAssembler_x86.cpp

rev 12854 : [mq]: gcinterface.patch


Old version:

2918 // !defined(COMPILER2) is because of stupid core builds
2919 #if !defined(_LP64) || defined(COMPILER1) || !defined(COMPILER2) || INCLUDE_JVMCI
2920 void MacroAssembler::empty_FPU_stack() {
2921   if (VM_Version::supports_mmx()) {
2922     emms();
2923   } else {
2924     for (int i = 8; i-- > 0; ) ffree(i);
2925   }
2926 }
2927 #endif // !LP64 || C1 || !C2 || INCLUDE_JVMCI
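
For reference, the non-MMX fallback frees every x87 register from ST(7) down to ST(0); unrolled, the loop above is equivalent to:

    // Unrolled form of the fallback loop (i runs 7 down to 0):
    ffree(7); ffree(6); ffree(5); ffree(4);
    ffree(3); ffree(2); ffree(1); ffree(0);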
2928 
2929 
2930 // Defines obj, preserves var_size_in_bytes
2931 void MacroAssembler::eden_allocate(Register obj,
2932                                    Register var_size_in_bytes,
2933                                    int con_size_in_bytes,
2934                                    Register t1,
2935                                    Label& slow_case) {
2936   assert(obj == rax, "obj must be in rax, for cmpxchg");
2937   assert_different_registers(obj, var_size_in_bytes, t1);
2938   if (!Universe::heap()->supports_inline_contig_alloc()) {
2939     jmp(slow_case);
2940   } else {
2941     Register end = t1;
2942     Label retry;
2943     bind(retry);
2944     ExternalAddress heap_top((address) Universe::heap()->top_addr());
2945     movptr(obj, heap_top);
2946     if (var_size_in_bytes == noreg) {
2947       lea(end, Address(obj, con_size_in_bytes));
2948     } else {
2949       lea(end, Address(obj, var_size_in_bytes, Address::times_1));
2950     }
2951     // if end < obj then we wrapped around => object too long => slow case
2952     cmpptr(end, obj);
2953     jcc(Assembler::below, slow_case);
2954     cmpptr(end, ExternalAddress((address) Universe::heap()->end_addr()));
2955     jcc(Assembler::above, slow_case);
2951     // Compare obj with the top addr; if they are still equal, store the new top
2952     // addr (in end) at the address of the top addr pointer. Sets ZF if they were
2953     // equal, and clears it otherwise. Use the lock prefix for atomicity on MPs.
2959     locked_cmpxchgptr(end, heap_top);
2960     jcc(Assembler::notEqual, retry);
2961   }
2962 }
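
The runtime effect of the generated code can be modeled in plain C++. A minimal sketch, not HotSpot code: std::atomic stands in for the emitted lock cmpxchg, and the two early returns correspond to the two jcc branches to slow_case.

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Bump-pointer allocation in a contiguous eden with a CAS retry loop.
    // heap_top/heap_end model the addresses taken from the heap's
    // top_addr()/end_addr() above.
    std::atomic<uintptr_t>* heap_top;
    uintptr_t               heap_end;

    uintptr_t eden_allocate_sketch(size_t size_in_bytes) {
      for (;;) {
        uintptr_t obj = heap_top->load();
        uintptr_t end = obj + size_in_bytes;
        if (end < obj)      return 0;  // wrapped around: object too long, slow case
        if (end > heap_end) return 0;  // past the end of eden: slow case
        // lock cmpxchg: publish the new top only if nobody raced us
        if (heap_top->compare_exchange_weak(obj, end)) return obj;
        // lost the race: retry with the fresh top
      }
    }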
2963 
2964 void MacroAssembler::enter() {
2965   push(rbp);
2966   mov(rbp, rsp);
2967 }
2968 
2969 // A 5-byte nop that is safe for patching (see patch_verified_entry)
2970 void MacroAssembler::fat_nop() {
2971   if (UseAddressNop) {
2972     addr_nop_5();
2973   } else {
2974     emit_int8(0x26); // es:


5281   if (tosca_live) pop(rax);
5282 
5283   bind(done);
5284 }
5285 
5286 void MacroAssembler::g1_write_barrier_post(Register store_addr,
5287                                            Register new_val,
5288                                            Register thread,
5289                                            Register tmp,
5290                                            Register tmp2) {
5291 #ifdef _LP64
5292   assert(thread == r15_thread, "must be");
5293 #endif // _LP64
5294 
5295   Address queue_index(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
5296                                        DirtyCardQueue::byte_offset_of_index()));
5297   Address buffer(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
5298                                        DirtyCardQueue::byte_offset_of_buf()));
5299 
5300   CardTableModRefBS* ct =
5301     barrier_set_cast<CardTableModRefBS>(Universe::heap()->barrier_set());
5302   assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
5303 
5304   Label done;
5305   Label runtime;
5306 
5307   // Does store cross heap regions?
5308 
5309   movptr(tmp, store_addr);
5310   xorptr(tmp, new_val);
5311   shrptr(tmp, HeapRegion::LogOfHRGrainBytes);
5312   jcc(Assembler::equal, done);
5313 
5314   // crosses regions, storing NULL?
5315 
5316   cmpptr(new_val, (int32_t) NULL_WORD);
5317   jcc(Assembler::equal, done);
5318 
5319   // storing region crossing non-NULL, is card already dirty?
5320 
5321   const Register card_addr = tmp;


5366   call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::g1_wb_post), card_addr, thread);
5367   pop(thread);
5368 #endif
5369   pop(new_val);
5370   pop(store_addr);
5371 
5372   bind(done);
5373 }
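
The filter at the top of this barrier (the xorptr/shrptr sequence in the previous hunk) reduces to two cheap checks. A standalone sketch; the region-size log here is illustrative, not the real HeapRegion value:

    #include <cstdint>

    const int kLogOfHRGrainBytes = 20; // illustrative: 1 MB regions

    // Two addresses lie in the same region iff their XOR has no bits at or
    // above the region-size log, so most stores exit the barrier early.
    bool needs_post_barrier(uintptr_t store_addr, uintptr_t new_val) {
      if (((store_addr ^ new_val) >> kLogOfHRGrainBytes) == 0) return false; // same region
      if (new_val == 0)                                        return false; // storing NULL
      return true; // region-crossing non-NULL store: check and dirty the card
    }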
5374 
5375 #endif // INCLUDE_ALL_GCS
5376 //////////////////////////////////////////////////////////////////////////////////
5377 
5378 
5379 void MacroAssembler::store_check(Register obj, Address dst) {
5380   store_check(obj);
5381 }
5382 
5383 void MacroAssembler::store_check(Register obj) {
5384   // Does a store check for the oop in register obj. The content of
5385   // register obj is destroyed afterwards.
5386   BarrierSet* bs = Universe::heap()->barrier_set();
5387   assert(bs->kind() == BarrierSet::CardTableForRS ||
5388          bs->kind() == BarrierSet::CardTableExtension,
5389          "Wrong barrier set kind");
5390 
5391   CardTableModRefBS* ct = barrier_set_cast<CardTableModRefBS>(bs);
5392   assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
5393 
5394   shrptr(obj, CardTableModRefBS::card_shift);
5395 
5396   Address card_addr;
5397 
5398   // The calculation for byte_map_base is as follows:
5399   // byte_map_base = _byte_map - (uintptr_t(low_bound) >> card_shift);
5400   // So this essentially converts an address to a displacement, and it will
5401   // never need to be relocated. On 64-bit, however, the value may be too
5402   // large for a 32-bit displacement.
5403   intptr_t disp = (intptr_t) ct->byte_map_base;
5404   if (is_simm32(disp)) {
5405     card_addr = Address(noreg, obj, Address::times_1, disp);
5406   } else {
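
The hunk truncates here, but the displacement trick in the comment above can be stated directly: because byte_map_base is pre-biased by the low heap bound, indexing it with the shifted address yields the card byte with no extra arithmetic. A sketch; the card_shift value of 9 (512-byte cards) is the usual one but assumed here:

    #include <cstdint>

    const int card_shift = 9; // assumed: 512-byte cards

    // byte_map_base == _byte_map - (low_bound >> card_shift), so the card
    // for any in-heap address is byte_map_base[addr >> card_shift].
    int8_t* card_for(int8_t* byte_map_base, uintptr_t addr) {
      return byte_map_base + (addr >> card_shift);
    }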


5488   movptr(Address(thread, JavaThread::tlab_top_offset()), end);
5489 
5490   // recover var_size_in_bytes if necessary
5491   if (var_size_in_bytes == end) {
5492     subptr(var_size_in_bytes, obj);
5493   }
5494   verify_tlab();
5495 }
5496 
5497 // Preserves rbx and rdx.
5498 Register MacroAssembler::tlab_refill(Label& retry,
5499                                      Label& try_eden,
5500                                      Label& slow_case) {
5501   Register top = rax;
5502   Register t1  = rcx; // object size
5503   Register t2  = rsi;
5504   Register thread_reg = NOT_LP64(rdi) LP64_ONLY(r15_thread);
5505   assert_different_registers(top, thread_reg, t1, t2, /* preserve: */ rbx, rdx);
5506   Label do_refill, discard_tlab;
5507 
5508   if (!Universe::heap()->supports_inline_contig_alloc()) {
5509     // No allocation in the shared eden.
5510     jmp(slow_case);
5511   }
5512 
5513   NOT_LP64(get_thread(thread_reg));
5514 
5515   movptr(top, Address(thread_reg, in_bytes(JavaThread::tlab_top_offset())));
5516   movptr(t1,  Address(thread_reg, in_bytes(JavaThread::tlab_end_offset())));
5517 
5518   // calculate amount of free space
5519   subptr(t1, top);
5520   shrptr(t1, LogHeapWordSize);
5521 
5522   // Retain tlab and allocate object in shared space if
5523   // the amount free in the tlab is too large to discard.
5524   cmpptr(t1, Address(thread_reg, in_bytes(JavaThread::tlab_refill_waste_limit_offset())));
5525   jcc(Assembler::lessEqual, discard_tlab);
5526 
5527   // Retain
5528   // %%% yuck as movptr...
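
The retain-or-discard decision above comes down to one comparison once the free space is expressed in heap words. A sketch of what the subptr/shrptr/cmpptr sequence computes:

    #include <cstddef>
    #include <cstdint>

    // Free space left in the TLAB, in heap words (subptr + shrptr above).
    size_t tlab_free_words(uintptr_t top, uintptr_t end, int log_heap_word_size) {
      return (end - top) >> log_heap_word_size;
    }

    // jcc(lessEqual, discard_tlab): a small remainder is cheap to waste, so the
    // TLAB is discarded and refilled; a large one means the TLAB is retained
    // and the object is allocated in the shared eden instead.
    bool discard_tlab(size_t free_words, size_t refill_waste_limit) {
      return free_words <= refill_waste_limit;
    }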


6631     movl(dst, (int32_t)NULL_WORD);
6632   } else {
6633     movslq(dst, (int32_t)NULL_WORD);
6634   }
6635 #else
6636   movl(dst, (int32_t)NULL_WORD);
6637 #endif
6638 }
6639 
6640 #ifdef _LP64
6641 void MacroAssembler::store_klass_gap(Register dst, Register src) {
6642   if (UseCompressedClassPointers) {
6643     // Store to klass gap in destination
6644     movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
6645   }
6646 }
6647 
6648 #ifdef ASSERT
6649 void MacroAssembler::verify_heapbase(const char* msg) {
6650   assert (UseCompressedOops, "should be compressed");
6651   assert (Universe::heap() != NULL, "java heap should be initialized");
6652   if (CheckCompressedOops) {
6653     Label ok;
6654     push(rscratch1); // cmpptr trashes rscratch1
6655     cmpptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));
6656     jcc(Assembler::equal, ok);
6657     STOP(msg);
6658     bind(ok);
6659     pop(rscratch1);
6660   }
6661 }
6662 #endif
6663 
6664 // Algorithm must match oop.inline.hpp encode_heap_oop.
6665 void MacroAssembler::encode_heap_oop(Register r) {
6666 #ifdef ASSERT
6667   verify_heapbase("MacroAssembler::encode_heap_oop: heap base corrupted?");
6668 #endif
6669   verify_oop(r, "broken oop in encode_heap_oop");
6670   if (Universe::narrow_oop_base() == NULL) {
6671     if (Universe::narrow_oop_shift() != 0) {


6730   verify_heapbase("MacroAssembler::decode_heap_oop: heap base corrupted?");
6731 #endif
6732   if (Universe::narrow_oop_base() == NULL) {
6733     if (Universe::narrow_oop_shift() != 0) {
6734       assert (LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6735       shlq(r, LogMinObjAlignmentInBytes);
6736     }
6737   } else {
6738     Label done;
6739     shlq(r, LogMinObjAlignmentInBytes);
6740     jccb(Assembler::equal, done);
6741     addq(r, r12_heapbase);
6742     bind(done);
6743   }
6744   verify_oop(r, "broken oop in decode_heap_oop");
6745 }
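
The branch in the base != NULL path is a null-preserving trick: shlq sets ZF when its result is zero, so jccb(Assembler::equal, done) skips the base add and NULL decodes back to NULL. The arithmetic, as a sketch rather than HotSpot code:

    #include <cstdint>

    uintptr_t decode_heap_oop_sketch(uint32_t narrow, uintptr_t base, int shift) {
      uintptr_t r = (uintptr_t)narrow << shift; // shlq; sets ZF when r == 0
      if (r != 0) r += base;                    // addq r12_heapbase, skipped for NULL
      return r;
    }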
6746 
6747 void  MacroAssembler::decode_heap_oop_not_null(Register r) {
6748   // Note: it will change flags
6749   assert (UseCompressedOops, "should only be used for compressed headers");
6750   assert (Universe::heap() != NULL, "java heap should be initialized");
6751   // Cannot assert, unverified entry point counts instructions (see .ad file)
6752   // vtableStubs also counts instructions in pd_code_size_limit.
6753   // Also do not verify_oop as this is called by verify_oop.
6754   if (Universe::narrow_oop_shift() != 0) {
6755     assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6756     shlq(r, LogMinObjAlignmentInBytes);
6757     if (Universe::narrow_oop_base() != NULL) {
6758       addq(r, r12_heapbase);
6759     }
6760   } else {
6761     assert (Universe::narrow_oop_base() == NULL, "sanity");
6762   }
6763 }
6764 
6765 void  MacroAssembler::decode_heap_oop_not_null(Register dst, Register src) {
6766   // Note: it will change flags
6767   assert (UseCompressedOops, "should only be used for compressed headers");
6768   assert (Universe::heap() != NULL, "java heap should be initialized");
6769   // Cannot assert, unverified entry point counts instructions (see .ad file)
6770   // vtableStubs also counts instructions in pd_code_size_limit.
6771   // Also do not verify_oop as this is called by verify_oop.
6772   if (Universe::narrow_oop_shift() != 0) {
6773     assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6774     if (LogMinObjAlignmentInBytes == Address::times_8) {
6775       leaq(dst, Address(r12_heapbase, src, Address::times_8, 0));
6776     } else {
6777       if (dst != src) {
6778         movq(dst, src);
6779       }
6780       shlq(dst, LogMinObjAlignmentInBytes);
6781       if (Universe::narrow_oop_base() != NULL) {
6782         addq(dst, r12_heapbase);
6783       }
6784     }
6785   } else {
6786     assert (Universe::narrow_oop_base() == NULL, "sanity");
6787     if (dst != src) {
6788       movq(dst, src);


6809 void MacroAssembler::encode_klass_not_null(Register dst, Register src) {
6810   if (dst == src) {
6811     encode_klass_not_null(src);
6812   } else {
6813     if (Universe::narrow_klass_base() != NULL) {
6814       mov64(dst, (int64_t)Universe::narrow_klass_base());
6815       negq(dst);
6816       addq(dst, src);
6817     } else {
6818       movptr(dst, src);
6819     }
6820     if (Universe::narrow_klass_shift() != 0) {
6821       assert (LogKlassAlignmentInBytes == Universe::narrow_klass_shift(), "decode alg wrong");
6822       shrq(dst, LogKlassAlignmentInBytes);
6823     }
6824   }
6825 }
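
The mov64/negq/addq sequence above computes src - base without clobbering src: load the base, negate it, then add src. As plain arithmetic (a sketch, not HotSpot code):

    #include <cstdint>

    uint32_t encode_klass_sketch(uintptr_t klass, uintptr_t base, int shift) {
      uintptr_t dst = (0 - base) + klass; // mov64 + negq + addq == klass - base
      return (uint32_t)(dst >> shift);    // shrq, emitted only when shift != 0
    }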
6826 
6827 // Function instr_size_for_decode_klass_not_null() counts the instructions
6828 // generated by decode_klass_not_null(register r) and reinit_heapbase(),
6829 // when (Universe::heap() != NULL).  Hence, if the instructions they
6830 // generate change, then this method needs to be updated.
6831 int MacroAssembler::instr_size_for_decode_klass_not_null() {
6832   assert (UseCompressedClassPointers, "only for compressed klass ptrs");
6833   if (Universe::narrow_klass_base() != NULL) {
6834     // mov64 + addq + shlq? + mov64  (for reinit_heapbase()).
6835     return (Universe::narrow_klass_shift() == 0 ? 20 : 24);
6836   } else {
6837     // longest load decode klass function, mov64, leaq
6838     return 16;
6839   }
6840 }
6841 
6842 // !!! If the instructions that get generated here change then function
6843 // instr_size_for_decode_klass_not_null() needs to get updated.
6844 void  MacroAssembler::decode_klass_not_null(Register r) {
6845   // Note: it will change flags
6846   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6847   assert(r != r12_heapbase, "Decoding a klass in r12");
6848   // Cannot assert, unverified entry point counts instructions (see .ad file)
6849   // vtableStubs also counts instructions in pd_code_size_limit.


6865   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6866   if (dst == src) {
6867     decode_klass_not_null(dst);
6868   } else {
6869     // Cannot assert, unverified entry point counts instructions (see .ad file)
6870     // vtableStubs also counts instructions in pd_code_size_limit.
6871     // Also do not verify_oop as this is called by verify_oop.
6872     mov64(dst, (int64_t)Universe::narrow_klass_base());
6873     if (Universe::narrow_klass_shift() != 0) {
6874       assert(LogKlassAlignmentInBytes == Universe::narrow_klass_shift(), "decode alg wrong");
6875       assert(LogKlassAlignmentInBytes == Address::times_8, "klass not aligned on 64bits?");
6876       leaq(dst, Address(dst, src, Address::times_8, 0));
6877     } else {
6878       addq(dst, src);
6879     }
6880   }
6881 }
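
When the shift equals Address::times_8 (3), the leaq above folds the whole decode into a single instruction: dst = base + narrow * 8. A worked sketch with illustrative values:

    #include <cstdint>

    uintptr_t decode_klass_sketch(uint32_t narrow, uintptr_t base) {
      return base + ((uintptr_t)narrow << 3); // leaq dst, [base + src*8]
    }
    // Example: decode_klass_sketch(0x1234, 0x800000000) == 0x8000091a0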
6882 
6883 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
6884   assert (UseCompressedOops, "should only be used for compressed headers");
6885   assert (Universe::heap() != NULL, "java heap should be initialized");
6886   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6887   int oop_index = oop_recorder()->find_index(obj);
6888   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6889   mov_narrow_oop(dst, oop_index, rspec);
6890 }
6891 
6892 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
6893   assert (UseCompressedOops, "should only be used for compressed headers");
6894   assert (Universe::heap() != NULL, "java heap should be initialized");
6895   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6896   int oop_index = oop_recorder()->find_index(obj);
6897   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6898   mov_narrow_oop(dst, oop_index, rspec);
6899 }
6900 
6901 void  MacroAssembler::set_narrow_klass(Register dst, Klass* k) {
6902   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6903   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6904   int klass_index = oop_recorder()->find_index(k);
6905   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6906   mov_narrow_oop(dst, Klass::encode_klass(k), rspec);
6907 }
6908 
6909 void  MacroAssembler::set_narrow_klass(Address dst, Klass* k) {
6910   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6911   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6912   int klass_index = oop_recorder()->find_index(k);
6913   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6914   mov_narrow_oop(dst, Klass::encode_klass(k), rspec);
6915 }
6916 
6917 void  MacroAssembler::cmp_narrow_oop(Register dst, jobject obj) {
6918   assert (UseCompressedOops, "should only be used for compressed headers");
6919   assert (Universe::heap() != NULL, "java heap should be initialized");
6920   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6921   int oop_index = oop_recorder()->find_index(obj);
6922   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6923   Assembler::cmp_narrow_oop(dst, oop_index, rspec);
6924 }
6925 
6926 void  MacroAssembler::cmp_narrow_oop(Address dst, jobject obj) {
6927   assert (UseCompressedOops, "should only be used for compressed headers");
6928   assert (Universe::heap() != NULL, "java heap should be initialized");
6929   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6930   int oop_index = oop_recorder()->find_index(obj);
6931   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6932   Assembler::cmp_narrow_oop(dst, oop_index, rspec);
6933 }
6934 
6935 void  MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
6936   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6937   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6938   int klass_index = oop_recorder()->find_index(k);
6939   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6940   Assembler::cmp_narrow_oop(dst, Klass::encode_klass(k), rspec);
6941 }
6942 
6943 void  MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
6944   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6945   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6946   int klass_index = oop_recorder()->find_index(k);
6947   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6948   Assembler::cmp_narrow_oop(dst, Klass::encode_klass(k), rspec);
6949 }
6950 
6951 void MacroAssembler::reinit_heapbase() {
6952   if (UseCompressedOops || UseCompressedClassPointers) {
6953     if (Universe::heap() != NULL) {
6954       if (Universe::narrow_oop_base() == NULL) {
6955         MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
6956       } else {
6957         mov64(r12_heapbase, (int64_t)Universe::narrow_ptrs_base());
6958       }
6959     } else {
6960       movptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));
6961     }
6962   }
6963 }
6964 
6965 #endif // _LP64
6966 
6967 
6968 // C2 compiled method's prolog code.
6969 void MacroAssembler::verified_entry(int framesize, int stack_bang_size, bool fp_mode_24b) {
6970 
6971   // WARNING: Initial instruction MUST be 5 bytes or longer so that
6972   // NativeJump::patch_verified_entry will be able to patch out the entry
6973   // code safely. The push to verify stack depth is ok at 5 bytes,




New version (with gcinterface.patch applied):

2918 // !defined(COMPILER2) is because of stupid core builds
2919 #if !defined(_LP64) || defined(COMPILER1) || !defined(COMPILER2) || INCLUDE_JVMCI
2920 void MacroAssembler::empty_FPU_stack() {
2921   if (VM_Version::supports_mmx()) {
2922     emms();
2923   } else {
2924     for (int i = 8; i-- > 0; ) ffree(i);
2925   }
2926 }
2927 #endif // !LP64 || C1 || !C2 || INCLUDE_JVMCI
2928 
2929 
2930 // Defines obj, preserves var_size_in_bytes
2931 void MacroAssembler::eden_allocate(Register obj,
2932                                    Register var_size_in_bytes,
2933                                    int con_size_in_bytes,
2934                                    Register t1,
2935                                    Label& slow_case) {
2936   assert(obj == rax, "obj must be in rax, for cmpxchg");
2937   assert_different_registers(obj, var_size_in_bytes, t1);
2938   if (!GC::gc()->heap()->supports_inline_contig_alloc()) {
2939     jmp(slow_case);
2940   } else {
2941     Register end = t1;
2942     Label retry;
2943     bind(retry);
2944     ExternalAddress heap_top((address) GC::gc()->heap()->top_addr());
2945     movptr(obj, heap_top);
2946     if (var_size_in_bytes == noreg) {
2947       lea(end, Address(obj, con_size_in_bytes));
2948     } else {
2949       lea(end, Address(obj, var_size_in_bytes, Address::times_1));
2950     }
2951     // if end < obj then we wrapped around => object too long => slow case
2952     cmpptr(end, obj);
2953     jcc(Assembler::below, slow_case);
2954     cmpptr(end, ExternalAddress((address) GC::gc()->heap()->end_addr()));
2955     jcc(Assembler::above, slow_case);
2951     // Compare obj with the top addr; if they are still equal, store the new top
2952     // addr (in end) at the address of the top addr pointer. Sets ZF if they were
2953     // equal, and clears it otherwise. Use the lock prefix for atomicity on MPs.
2959     locked_cmpxchgptr(end, heap_top);
2960     jcc(Assembler::notEqual, retry);
2961   }
2962 }
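
The substantive change in this version of the hunk is the indirection: Universe::heap() becomes GC::gc()->heap(). A hypothetical sketch of the accessor's shape, for orientation only; everything beyond the three names used by this file is assumed, and the real declarations live elsewhere in the patch:

    // Hypothetical shape of the GC interface (illustrative, not the patch's
    // actual declarations).
    class CollectedHeap;

    class GC {
    public:
      static GC*  gc();             // the single GC instance
      static bool is_initialized(); // used by reinit_heapbase() below
      CollectedHeap* heap();        // the heap formerly reached via Universe::heap()
    };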
2963 
2964 void MacroAssembler::enter() {
2965   push(rbp);
2966   mov(rbp, rsp);
2967 }
2968 
2969 // A 5-byte nop that is safe for patching (see patch_verified_entry)
2970 void MacroAssembler::fat_nop() {
2971   if (UseAddressNop) {
2972     addr_nop_5();
2973   } else {
2974     emit_int8(0x26); // es:


5281   if (tosca_live) pop(rax);
5282 
5283   bind(done);
5284 }
5285 
5286 void MacroAssembler::g1_write_barrier_post(Register store_addr,
5287                                            Register new_val,
5288                                            Register thread,
5289                                            Register tmp,
5290                                            Register tmp2) {
5291 #ifdef _LP64
5292   assert(thread == r15_thread, "must be");
5293 #endif // _LP64
5294 
5295   Address queue_index(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
5296                                        DirtyCardQueue::byte_offset_of_index()));
5297   Address buffer(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
5298                                        DirtyCardQueue::byte_offset_of_buf()));
5299 
5300   CardTableModRefBS* ct =
5301     barrier_set_cast<CardTableModRefBS>(GC::gc()->heap()->barrier_set());
5302   assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
5303 
5304   Label done;
5305   Label runtime;
5306 
5307   // Does store cross heap regions?
5308 
5309   movptr(tmp, store_addr);
5310   xorptr(tmp, new_val);
5311   shrptr(tmp, HeapRegion::LogOfHRGrainBytes);
5312   jcc(Assembler::equal, done);
5313 
5314   // crosses regions, storing NULL?
5315 
5316   cmpptr(new_val, (int32_t) NULL_WORD);
5317   jcc(Assembler::equal, done);
5318 
5319   // storing region crossing non-NULL, is card already dirty?
5320 
5321   const Register card_addr = tmp;


5366   call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::g1_wb_post), card_addr, thread);
5367   pop(thread);
5368 #endif
5369   pop(new_val);
5370   pop(store_addr);
5371 
5372   bind(done);
5373 }
5374 
5375 #endif // INCLUDE_ALL_GCS
5376 //////////////////////////////////////////////////////////////////////////////////
5377 
5378 
5379 void MacroAssembler::store_check(Register obj, Address dst) {
5380   store_check(obj);
5381 }
5382 
5383 void MacroAssembler::store_check(Register obj) {
5384   // Does a store check for the oop in register obj. The content of
5385   // register obj is destroyed afterwards.
5386   BarrierSet* bs = GC::gc()->heap()->barrier_set();
5387   assert(bs->kind() == BarrierSet::CardTableForRS ||
5388          bs->kind() == BarrierSet::CardTableExtension,
5389          "Wrong barrier set kind");
5390 
5391   CardTableModRefBS* ct = barrier_set_cast<CardTableModRefBS>(bs);
5392   assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
5393 
5394   shrptr(obj, CardTableModRefBS::card_shift);
5395 
5396   Address card_addr;
5397 
5398   // The calculation for byte_map_base is as follows:
5399   // byte_map_base = _byte_map - (uintptr_t(low_bound) >> card_shift);
5400   // So this essentially converts an address to a displacement, and it will
5401   // never need to be relocated. On 64-bit, however, the value may be too
5402   // large for a 32-bit displacement.
5403   intptr_t disp = (intptr_t) ct->byte_map_base;
5404   if (is_simm32(disp)) {
5405     card_addr = Address(noreg, obj, Address::times_1, disp);
5406   } else {


5488   movptr(Address(thread, JavaThread::tlab_top_offset()), end);
5489 
5490   // recover var_size_in_bytes if necessary
5491   if (var_size_in_bytes == end) {
5492     subptr(var_size_in_bytes, obj);
5493   }
5494   verify_tlab();
5495 }
5496 
5497 // Preserves rbx and rdx.
5498 Register MacroAssembler::tlab_refill(Label& retry,
5499                                      Label& try_eden,
5500                                      Label& slow_case) {
5501   Register top = rax;
5502   Register t1  = rcx; // object size
5503   Register t2  = rsi;
5504   Register thread_reg = NOT_LP64(rdi) LP64_ONLY(r15_thread);
5505   assert_different_registers(top, thread_reg, t1, t2, /* preserve: */ rbx, rdx);
5506   Label do_refill, discard_tlab;
5507 
5508   if (!GC::gc()->heap()->supports_inline_contig_alloc()) {
5509     // No allocation in the shared eden.
5510     jmp(slow_case);
5511   }
5512 
5513   NOT_LP64(get_thread(thread_reg));
5514 
5515   movptr(top, Address(thread_reg, in_bytes(JavaThread::tlab_top_offset())));
5516   movptr(t1,  Address(thread_reg, in_bytes(JavaThread::tlab_end_offset())));
5517 
5518   // calculate amount of free space
5519   subptr(t1, top);
5520   shrptr(t1, LogHeapWordSize);
5521 
5522   // Retain tlab and allocate object in shared space if
5523   // the amount free in the tlab is too large to discard.
5524   cmpptr(t1, Address(thread_reg, in_bytes(JavaThread::tlab_refill_waste_limit_offset())));
5525   jcc(Assembler::lessEqual, discard_tlab);
5526 
5527   // Retain
5528   // %%% yuck as movptr...


6631     movl(dst, (int32_t)NULL_WORD);
6632   } else {
6633     movslq(dst, (int32_t)NULL_WORD);
6634   }
6635 #else
6636   movl(dst, (int32_t)NULL_WORD);
6637 #endif
6638 }
6639 
6640 #ifdef _LP64
6641 void MacroAssembler::store_klass_gap(Register dst, Register src) {
6642   if (UseCompressedClassPointers) {
6643     // Store to klass gap in destination
6644     movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
6645   }
6646 }
6647 
6648 #ifdef ASSERT
6649 void MacroAssembler::verify_heapbase(const char* msg) {
6650   assert (UseCompressedOops, "should be compressed");
6651   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6652   if (CheckCompressedOops) {
6653     Label ok;
6654     push(rscratch1); // cmpptr trashes rscratch1
6655     cmpptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));
6656     jcc(Assembler::equal, ok);
6657     STOP(msg);
6658     bind(ok);
6659     pop(rscratch1);
6660   }
6661 }
6662 #endif
6663 
6664 // Algorithm must match oop.inline.hpp encode_heap_oop.
6665 void MacroAssembler::encode_heap_oop(Register r) {
6666 #ifdef ASSERT
6667   verify_heapbase("MacroAssembler::encode_heap_oop: heap base corrupted?");
6668 #endif
6669   verify_oop(r, "broken oop in encode_heap_oop");
6670   if (Universe::narrow_oop_base() == NULL) {
6671     if (Universe::narrow_oop_shift() != 0) {


6730   verify_heapbase("MacroAssembler::decode_heap_oop: heap base corrupted?");
6731 #endif
6732   if (Universe::narrow_oop_base() == NULL) {
6733     if (Universe::narrow_oop_shift() != 0) {
6734       assert (LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6735       shlq(r, LogMinObjAlignmentInBytes);
6736     }
6737   } else {
6738     Label done;
6739     shlq(r, LogMinObjAlignmentInBytes);
6740     jccb(Assembler::equal, done);
6741     addq(r, r12_heapbase);
6742     bind(done);
6743   }
6744   verify_oop(r, "broken oop in decode_heap_oop");
6745 }
6746 
6747 void  MacroAssembler::decode_heap_oop_not_null(Register r) {
6748   // Note: it will change flags
6749   assert (UseCompressedOops, "should only be used for compressed headers");
6750   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6751   // Cannot assert, unverified entry point counts instructions (see .ad file)
6752   // vtableStubs also counts instructions in pd_code_size_limit.
6753   // Also do not verify_oop as this is called by verify_oop.
6754   if (Universe::narrow_oop_shift() != 0) {
6755     assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6756     shlq(r, LogMinObjAlignmentInBytes);
6757     if (Universe::narrow_oop_base() != NULL) {
6758       addq(r, r12_heapbase);
6759     }
6760   } else {
6761     assert (Universe::narrow_oop_base() == NULL, "sanity");
6762   }
6763 }
6764 
6765 void  MacroAssembler::decode_heap_oop_not_null(Register dst, Register src) {
6766   // Note: it will change flags
6767   assert (UseCompressedOops, "should only be used for compressed headers");
6768   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6769   // Cannot assert, unverified entry point counts instructions (see .ad file)
6770   // vtableStubs also counts instructions in pd_code_size_limit.
6771   // Also do not verify_oop as this is called by verify_oop.
6772   if (Universe::narrow_oop_shift() != 0) {
6773     assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
6774     if (LogMinObjAlignmentInBytes == Address::times_8) {
6775       leaq(dst, Address(r12_heapbase, src, Address::times_8, 0));
6776     } else {
6777       if (dst != src) {
6778         movq(dst, src);
6779       }
6780       shlq(dst, LogMinObjAlignmentInBytes);
6781       if (Universe::narrow_oop_base() != NULL) {
6782         addq(dst, r12_heapbase);
6783       }
6784     }
6785   } else {
6786     assert (Universe::narrow_oop_base() == NULL, "sanity");
6787     if (dst != src) {
6788       movq(dst, src);


6809 void MacroAssembler::encode_klass_not_null(Register dst, Register src) {
6810   if (dst == src) {
6811     encode_klass_not_null(src);
6812   } else {
6813     if (Universe::narrow_klass_base() != NULL) {
6814       mov64(dst, (int64_t)Universe::narrow_klass_base());
6815       negq(dst);
6816       addq(dst, src);
6817     } else {
6818       movptr(dst, src);
6819     }
6820     if (Universe::narrow_klass_shift() != 0) {
6821       assert (LogKlassAlignmentInBytes == Universe::narrow_klass_shift(), "decode alg wrong");
6822       shrq(dst, LogKlassAlignmentInBytes);
6823     }
6824   }
6825 }
6826 
6827 // Function instr_size_for_decode_klass_not_null() counts the instructions
6828 // generated by decode_klass_not_null(register r) and reinit_heapbase(),
6829 // when (GC::gc()->heap() != NULL).  Hence, if the instructions they
6830 // generate change, then this method needs to be updated.
6831 int MacroAssembler::instr_size_for_decode_klass_not_null() {
6832   assert (UseCompressedClassPointers, "only for compressed klass ptrs");
6833   if (Universe::narrow_klass_base() != NULL) {
6834     // mov64 + addq + shlq? + mov64  (for reinit_heapbase()).
6835     return (Universe::narrow_klass_shift() == 0 ? 20 : 24);
6836   } else {
6837     // longest load decode klass function, mov64, leaq
6838     return 16;
6839   }
6840 }
6841 
6842 // !!! If the instructions that get generated here change then function
6843 // instr_size_for_decode_klass_not_null() needs to get updated.
6844 void  MacroAssembler::decode_klass_not_null(Register r) {
6845   // Note: it will change flags
6846   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6847   assert(r != r12_heapbase, "Decoding a klass in r12");
6848   // Cannot assert, unverified entry point counts instructions (see .ad file)
6849   // vtableStubs also counts instructions in pd_code_size_limit.


6865   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6866   if (dst == src) {
6867     decode_klass_not_null(dst);
6868   } else {
6869     // Cannot assert, unverified entry point counts instructions (see .ad file)
6870     // vtableStubs also counts instructions in pd_code_size_limit.
6871     // Also do not verify_oop as this is called by verify_oop.
6872     mov64(dst, (int64_t)Universe::narrow_klass_base());
6873     if (Universe::narrow_klass_shift() != 0) {
6874       assert(LogKlassAlignmentInBytes == Universe::narrow_klass_shift(), "decode alg wrong");
6875       assert(LogKlassAlignmentInBytes == Address::times_8, "klass not aligned on 64bits?");
6876       leaq(dst, Address(dst, src, Address::times_8, 0));
6877     } else {
6878       addq(dst, src);
6879     }
6880   }
6881 }
6882 
6883 void  MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
6884   assert (UseCompressedOops, "should only be used for compressed headers");
6885   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6886   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6887   int oop_index = oop_recorder()->find_index(obj);
6888   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6889   mov_narrow_oop(dst, oop_index, rspec);
6890 }
6891 
6892 void  MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
6893   assert (UseCompressedOops, "should only be used for compressed headers");
6894   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6895   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6896   int oop_index = oop_recorder()->find_index(obj);
6897   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6898   mov_narrow_oop(dst, oop_index, rspec);
6899 }
6900 
6901 void  MacroAssembler::set_narrow_klass(Register dst, Klass* k) {
6902   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6903   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6904   int klass_index = oop_recorder()->find_index(k);
6905   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6906   mov_narrow_oop(dst, Klass::encode_klass(k), rspec);
6907 }
6908 
6909 void  MacroAssembler::set_narrow_klass(Address dst, Klass* k) {
6910   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6911   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6912   int klass_index = oop_recorder()->find_index(k);
6913   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6914   mov_narrow_oop(dst, Klass::encode_klass(k), rspec);
6915 }
6916 
6917 void  MacroAssembler::cmp_narrow_oop(Register dst, jobject obj) {
6918   assert (UseCompressedOops, "should only be used for compressed headers");
6919   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6920   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6921   int oop_index = oop_recorder()->find_index(obj);
6922   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6923   Assembler::cmp_narrow_oop(dst, oop_index, rspec);
6924 }
6925 
6926 void  MacroAssembler::cmp_narrow_oop(Address dst, jobject obj) {
6927   assert (UseCompressedOops, "should only be used for compressed headers");
6928   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
6929   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6930   int oop_index = oop_recorder()->find_index(obj);
6931   RelocationHolder rspec = oop_Relocation::spec(oop_index);
6932   Assembler::cmp_narrow_oop(dst, oop_index, rspec);
6933 }
6934 
6935 void  MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
6936   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6937   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6938   int klass_index = oop_recorder()->find_index(k);
6939   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6940   Assembler::cmp_narrow_oop(dst, Klass::encode_klass(k), rspec);
6941 }
6942 
6943 void  MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
6944   assert (UseCompressedClassPointers, "should only be used for compressed headers");
6945   assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
6946   int klass_index = oop_recorder()->find_index(k);
6947   RelocationHolder rspec = metadata_Relocation::spec(klass_index);
6948   Assembler::cmp_narrow_oop(dst, Klass::encode_klass(k), rspec);
6949 }
6950 
6951 void MacroAssembler::reinit_heapbase() {
6952   if (UseCompressedOops || UseCompressedClassPointers) {
6953     if (GC::is_initialized()) {
6954       if (Universe::narrow_oop_base() == NULL) {
6955         MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
6956       } else {
6957         mov64(r12_heapbase, (int64_t)Universe::narrow_ptrs_base());
6958       }
6959     } else {
6960       movptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));
6961     }
6962   }
6963 }
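
Note the guard change relative to the old version: Universe::heap() != NULL becomes GC::is_initialized(). The decision being compiled is the same either way; a sketch of the value the two paths materialize into r12_heapbase:

    #include <cstdint>

    // Once the GC is initialized the narrow-ptrs base is final, so it can be
    // baked in as an immediate (xorptr when 0, mov64 otherwise); before that
    // it must be loaded through its address, since the value may still change.
    uintptr_t heapbase_sketch(bool gc_initialized,
                              uintptr_t narrow_ptrs_base,
                              const uintptr_t* narrow_ptrs_base_addr) {
      return gc_initialized ? narrow_ptrs_base : *narrow_ptrs_base_addr;
    }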
6964 
6965 #endif // _LP64
6966 
6967 
6968 // C2 compiled method's prolog code.
6969 void MacroAssembler::verified_entry(int framesize, int stack_bang_size, bool fp_mode_24b) {
6970 
6971   // WARNING: Initial instruction MUST be 5 bytes or longer so that
6972   // NativeJump::patch_verified_entry will be able to patch out the entry
6973   // code safely. The push to verify stack depth is ok at 5 bytes,

