
src/cpu/x86/vm/macroAssembler_x86.cpp

rev 12310 : [mq]: gcinterface.patch

*** 2933,2959 ****
                                     int con_size_in_bytes,
                                     Register t1,
                                     Label& slow_case) {
    assert(obj == rax, "obj must be in rax, for cmpxchg");
    assert_different_registers(obj, var_size_in_bytes, t1);
!   if (!Universe::heap()->supports_inline_contig_alloc()) {
      jmp(slow_case);
    } else {
      Register end = t1;
      Label retry;
      bind(retry);
!     ExternalAddress heap_top((address) Universe::heap()->top_addr());
      movptr(obj, heap_top);
      if (var_size_in_bytes == noreg) {
        lea(end, Address(obj, con_size_in_bytes));
      } else {
        lea(end, Address(obj, var_size_in_bytes, Address::times_1));
      }
      // if end < obj then we wrapped around => object too long => slow case
      cmpptr(end, obj);
      jcc(Assembler::below, slow_case);
!     cmpptr(end, ExternalAddress((address) Universe::heap()->end_addr()));
      jcc(Assembler::above, slow_case);
      // Compare obj with the top addr, and if still equal, store the new top addr in
      // end at the address of the top addr pointer. Sets ZF if was equal, and clears
      // it otherwise. Use lock prefix for atomicity on MPs.
      locked_cmpxchgptr(end, heap_top);
--- 2933,2959 ----
                                     int con_size_in_bytes,
                                     Register t1,
                                     Label& slow_case) {
    assert(obj == rax, "obj must be in rax, for cmpxchg");
    assert_different_registers(obj, var_size_in_bytes, t1);
!   if (!GC::gc()->heap()->supports_inline_contig_alloc()) {
      jmp(slow_case);
    } else {
      Register end = t1;
      Label retry;
      bind(retry);
!     ExternalAddress heap_top((address) GC::gc()->heap()->top_addr());
      movptr(obj, heap_top);
      if (var_size_in_bytes == noreg) {
        lea(end, Address(obj, con_size_in_bytes));
      } else {
        lea(end, Address(obj, var_size_in_bytes, Address::times_1));
      }
      // if end < obj then we wrapped around => object too long => slow case
      cmpptr(end, obj);
      jcc(Assembler::below, slow_case);
!     cmpptr(end, ExternalAddress((address) GC::gc()->heap()->end_addr()));
      jcc(Assembler::above, slow_case);
      // Compare obj with the top addr, and if still equal, store the new top addr in
      // end at the address of the top addr pointer. Sets ZF if was equal, and clears
      // it otherwise. Use lock prefix for atomicity on MPs.
      locked_cmpxchgptr(end, heap_top);
*** 5259,5269 ****
                                         DirtyCardQueue::byte_offset_of_index()));
    Address buffer(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
                                         DirtyCardQueue::byte_offset_of_buf()));

    CardTableModRefBS* ct =
!     barrier_set_cast<CardTableModRefBS>(Universe::heap()->barrier_set());
    assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");

    Label done;
    Label runtime;

--- 5259,5269 ----
                                         DirtyCardQueue::byte_offset_of_index()));
    Address buffer(thread, in_bytes(JavaThread::dirty_card_queue_offset() +
                                         DirtyCardQueue::byte_offset_of_buf()));

    CardTableModRefBS* ct =
!     barrier_set_cast<CardTableModRefBS>(GC::gc()->heap()->barrier_set());
    assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");

    Label done;
    Label runtime;

*** 5344,5354 ****
  }

  void MacroAssembler::store_check(Register obj) {
    // Does a store check for the oop in register obj. The content of
    // register obj is destroyed afterwards.
!   BarrierSet* bs = Universe::heap()->barrier_set();
    assert(bs->kind() == BarrierSet::CardTableForRS ||
           bs->kind() == BarrierSet::CardTableExtension,
           "Wrong barrier set kind");

    CardTableModRefBS* ct = barrier_set_cast<CardTableModRefBS>(bs);
--- 5344,5354 ----
  }

  void MacroAssembler::store_check(Register obj) {
    // Does a store check for the oop in register obj. The content of
    // register obj is destroyed afterwards.
!   BarrierSet* bs = GC::gc()->heap()->barrier_set();
    assert(bs->kind() == BarrierSet::CardTableForRS ||
           bs->kind() == BarrierSet::CardTableExtension,
           "Wrong barrier set kind");

    CardTableModRefBS* ct = barrier_set_cast<CardTableModRefBS>(bs);
*** 5466,5476 ****
    Register t2 = rsi;
    Register thread_reg = NOT_LP64(rdi) LP64_ONLY(r15_thread);
    assert_different_registers(top, thread_reg, t1, t2, /* preserve: */ rbx, rdx);
    Label do_refill, discard_tlab;

!   if (!Universe::heap()->supports_inline_contig_alloc()) {
      // No allocation in the shared eden.
      jmp(slow_case);
    }

    NOT_LP64(get_thread(thread_reg));
--- 5466,5476 ----
    Register t2 = rsi;
    Register thread_reg = NOT_LP64(rdi) LP64_ONLY(r15_thread);
    assert_different_registers(top, thread_reg, t1, t2, /* preserve: */ rbx, rdx);
    Label do_refill, discard_tlab;

!   if (!GC::gc()->heap()->supports_inline_contig_alloc()) {
      // No allocation in the shared eden.
      jmp(slow_case);
    }

    NOT_LP64(get_thread(thread_reg));
*** 6609,6619 ****
  }

  #ifdef ASSERT
  void MacroAssembler::verify_heapbase(const char* msg) {
    assert (UseCompressedOops, "should be compressed");
!   assert (Universe::heap() != NULL, "java heap should be initialized");
    if (CheckCompressedOops) {
      Label ok;
      push(rscratch1); // cmpptr trashes rscratch1
      cmpptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));
      jcc(Assembler::equal, ok);
--- 6609,6619 ----
  }

  #ifdef ASSERT
  void MacroAssembler::verify_heapbase(const char* msg) {
    assert (UseCompressedOops, "should be compressed");
!   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
    if (CheckCompressedOops) {
      Label ok;
      push(rscratch1); // cmpptr trashes rscratch1
      cmpptr(r12_heapbase, ExternalAddress((address)Universe::narrow_ptrs_base_addr()));
      jcc(Assembler::equal, ok);
*** 6708,6718 ****
  }

  void MacroAssembler::decode_heap_oop_not_null(Register r) {
    // Note: it will change flags
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (Universe::heap() != NULL, "java heap should be initialized");
    // Cannot assert, unverified entry point counts instructions (see .ad file)
    // vtableStubs also counts instructions in pd_code_size_limit.
    // Also do not verify_oop as this is called by verify_oop.
    if (Universe::narrow_oop_shift() != 0) {
      assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
--- 6708,6718 ----
  }

  void MacroAssembler::decode_heap_oop_not_null(Register r) {
    // Note: it will change flags
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
    // Cannot assert, unverified entry point counts instructions (see .ad file)
    // vtableStubs also counts instructions in pd_code_size_limit.
    // Also do not verify_oop as this is called by verify_oop.
    if (Universe::narrow_oop_shift() != 0) {
      assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
*** 6726,6736 ****
  }

  void MacroAssembler::decode_heap_oop_not_null(Register dst, Register src) {
    // Note: it will change flags
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (Universe::heap() != NULL, "java heap should be initialized");
    // Cannot assert, unverified entry point counts instructions (see .ad file)
    // vtableStubs also counts instructions in pd_code_size_limit.
    // Also do not verify_oop as this is called by verify_oop.
    if (Universe::narrow_oop_shift() != 0) {
      assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
--- 6726,6736 ----
  }

  void MacroAssembler::decode_heap_oop_not_null(Register dst, Register src) {
    // Note: it will change flags
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
    // Cannot assert, unverified entry point counts instructions (see .ad file)
    // vtableStubs also counts instructions in pd_code_size_limit.
    // Also do not verify_oop as this is called by verify_oop.
    if (Universe::narrow_oop_shift() != 0) {
      assert(LogMinObjAlignmentInBytes == Universe::narrow_oop_shift(), "decode alg wrong");
*** 6787,6797 ****
    }
  }

  // Function instr_size_for_decode_klass_not_null() counts the instructions
  // generated by decode_klass_not_null(register r) and reinit_heapbase(),
! // when (Universe::heap() != NULL). Hence, if the instructions they
  // generate change, then this method needs to be updated.
  int MacroAssembler::instr_size_for_decode_klass_not_null() {
    assert (UseCompressedClassPointers, "only for compressed klass ptrs");
    if (Universe::narrow_klass_base() != NULL) {
      // mov64 + addq + shlq? + mov64 (for reinit_heapbase()).
--- 6787,6797 ----
    }
  }

  // Function instr_size_for_decode_klass_not_null() counts the instructions
  // generated by decode_klass_not_null(register r) and reinit_heapbase(),
! // when (GC::gc()->heap() != NULL). Hence, if the instructions they
  // generate change, then this method needs to be updated.
  int MacroAssembler::instr_size_for_decode_klass_not_null() {
    assert (UseCompressedClassPointers, "only for compressed klass ptrs");
    if (Universe::narrow_klass_base() != NULL) {
      // mov64 + addq + shlq? + mov64 (for reinit_heapbase()).
*** 6843,6862 ****
    }
  }

  void MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (Universe::heap() != NULL, "java heap should be initialized");
    assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
    int oop_index = oop_recorder()->find_index(obj);
    RelocationHolder rspec = oop_Relocation::spec(oop_index);
    mov_narrow_oop(dst, oop_index, rspec);
  }

  void MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (Universe::heap() != NULL, "java heap should be initialized");
    assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
    int oop_index = oop_recorder()->find_index(obj);
    RelocationHolder rspec = oop_Relocation::spec(oop_index);
    mov_narrow_oop(dst, oop_index, rspec);
  }
--- 6843,6862 ----
    }
  }

  void MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
    assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
    int oop_index = oop_recorder()->find_index(obj);
    RelocationHolder rspec = oop_Relocation::spec(oop_index);
    mov_narrow_oop(dst, oop_index, rspec);
  }

  void MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
    assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
    int oop_index = oop_recorder()->find_index(obj);
    RelocationHolder rspec = oop_Relocation::spec(oop_index);
    mov_narrow_oop(dst, oop_index, rspec);
  }
*** 6877,6896 ****
    mov_narrow_oop(dst, Klass::encode_klass(k), rspec);
  }

  void MacroAssembler::cmp_narrow_oop(Register dst, jobject obj) {
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (Universe::heap() != NULL, "java heap should be initialized");
    assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
    int oop_index = oop_recorder()->find_index(obj);
    RelocationHolder rspec = oop_Relocation::spec(oop_index);
    Assembler::cmp_narrow_oop(dst, oop_index, rspec);
  }

  void MacroAssembler::cmp_narrow_oop(Address dst, jobject obj) {
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (Universe::heap() != NULL, "java heap should be initialized");
    assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
    int oop_index = oop_recorder()->find_index(obj);
    RelocationHolder rspec = oop_Relocation::spec(oop_index);
    Assembler::cmp_narrow_oop(dst, oop_index, rspec);
  }
--- 6877,6896 ----
    mov_narrow_oop(dst, Klass::encode_klass(k), rspec);
  }

  void MacroAssembler::cmp_narrow_oop(Register dst, jobject obj) {
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
    assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
    int oop_index = oop_recorder()->find_index(obj);
    RelocationHolder rspec = oop_Relocation::spec(oop_index);
    Assembler::cmp_narrow_oop(dst, oop_index, rspec);
  }

  void MacroAssembler::cmp_narrow_oop(Address dst, jobject obj) {
    assert (UseCompressedOops, "should only be used for compressed headers");
!   assert (GC::gc()->heap() != NULL, "java heap should be initialized");
    assert (oop_recorder() != NULL, "this assembler needs an OopRecorder");
    int oop_index = oop_recorder()->find_index(obj);
    RelocationHolder rspec = oop_Relocation::spec(oop_index);
    Assembler::cmp_narrow_oop(dst, oop_index, rspec);
  }
*** 6911,6921 ****
    Assembler::cmp_narrow_oop(dst, Klass::encode_klass(k), rspec);
  }

  void MacroAssembler::reinit_heapbase() {
    if (UseCompressedOops || UseCompressedClassPointers) {
!     if (Universe::heap() != NULL) {
        if (Universe::narrow_oop_base() == NULL) {
          MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
        } else {
          mov64(r12_heapbase, (int64_t)Universe::narrow_ptrs_base());
        }
--- 6911,6921 ----
    Assembler::cmp_narrow_oop(dst, Klass::encode_klass(k), rspec);
  }

  void MacroAssembler::reinit_heapbase() {
    if (UseCompressedOops || UseCompressedClassPointers) {
!     if (GC::is_initialized()) {
        if (Universe::narrow_oop_base() == NULL) {
          MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
        } else {
          mov64(r12_heapbase, (int64_t)Universe::narrow_ptrs_base());
        }
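Every hunk above applies the same substitution: the statically reached Universe::heap() becomes a heap obtained through a GC singleton (GC::gc()->heap()), and the NULL check on the heap in reinit_heapbase() becomes GC::is_initialized(). As a reading aid only, the following is a minimal, hypothetical sketch of the accessor shape these call sites assume; the actual declarations come from gcinterface.patch itself and will differ in naming and layout.

#include <cstddef>                      // for NULL

class CollectedHeap;                    // opaque heap type, as in HotSpot

// Hypothetical sketch, not the actual gcinterface.patch declarations.
class GC {
 public:
  static GC*  gc()             { return _instance; }         // active GC singleton
  static bool is_initialized() { return _instance != NULL; } // has the GC/heap been set up?
  CollectedHeap* heap() const  { return _heap; }             // the Java heap

 private:
  static GC*     _instance;             // assumed to be installed during VM startup
  CollectedHeap* _heap;
};

Under that assumption, a call site that only needs to know whether the heap exists (reinit_heapbase) can ask GC::is_initialized(), while the allocation and barrier paths dereference GC::gc()->heap() exactly as shown in the new columns of the hunks above.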