src/cpu/sparc/vm/assembler_sparc.cpp
*** old/src/cpu/sparc/vm/assembler_sparc.cpp	Tue Dec 13 20:05:39 2011
--- new/src/cpu/sparc/vm/assembler_sparc.cpp	Tue Dec 13 20:05:39 2011

*** 3034,3047 ****
--- 3034,3045 ----
                               Register temp2_reg,
                               Label* L_success,
                               Label* L_failure,
                               Label* L_slow_path,
                               RegisterOrConstant super_check_offset) {
!   int sc_offset = (klassOopDesc::header_size() * HeapWordSize +
!                    Klass::secondary_super_cache_offset_in_bytes());
!   int sco_offset = (klassOopDesc::header_size() * HeapWordSize +
!                     Klass::super_check_offset_offset_in_bytes());
!   int sc_offset = in_bytes(Klass::secondary_super_cache_offset());
!   int sco_offset = in_bytes(Klass::super_check_offset_offset());
    bool must_load_sco = (super_check_offset.constant_or_zero() == -1);
    bool need_slow_path = (must_load_sco ||
                           super_check_offset.constant_or_zero() == sco_offset);
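
This hunk replaces open-coded offset arithmetic (klassOopDesc header words plus a Klass-relative byte count) with a single in_bytes(Klass::..._offset()) call. Below is a minimal, self-contained sketch of that before/after pattern; the ByteSize stand-in, the header_size of 2 words, and the offset value 24 are illustrative assumptions, not HotSpot's actual definitions.

// Mock types illustrating the offset refactoring in this hunk (not HotSpot source).
#include <cassert>
#include <cstddef>

typedef long ByteSize;                             // stand-in for HotSpot's ByteSize wrapper
static inline int in_bytes(ByteSize b) { return (int) b; }

const int HeapWordSize = sizeof(void*);            // assumption: word-sized heap cells

struct klassOopDesc {                              // mock: header words before the embedded Klass
  static int header_size() { return 2; }           // illustrative value only
};

struct Klass {
  // Old style: raw int offset, relative to the Klass part; caller adds the header.
  static int secondary_super_cache_offset_in_bytes() { return 24; }
  // New style: ByteSize accessor that already accounts for the klassOop header.
  static ByteSize secondary_super_cache_offset() {
    return ByteSize(klassOopDesc::header_size() * HeapWordSize + 24);
  }
};

int main() {
  // Old form: explicit header math at every call site.
  int sc_old = klassOopDesc::header_size() * HeapWordSize +
               Klass::secondary_super_cache_offset_in_bytes();
  // New form: the accessor hides the header math; in_bytes() unwraps the ByteSize.
  int sc_new = in_bytes(Klass::secondary_super_cache_offset());
  assert(sc_old == sc_new);
  return 0;
}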
*** 3157,3170 ****
--- 3155,3166 ----
    if (L_success == NULL) { L_success = &L_fallthrough; label_nulls++; }
    if (L_failure == NULL) { L_failure = &L_fallthrough; label_nulls++; }
    assert(label_nulls <= 1, "at most one NULL in the batch");

    // a couple of useful fields in sub_klass:
!   int ss_offset = (klassOopDesc::header_size() * HeapWordSize +
!                    Klass::secondary_supers_offset_in_bytes());
!   int sc_offset = (klassOopDesc::header_size() * HeapWordSize +
!                    Klass::secondary_super_cache_offset_in_bytes());
!   int ss_offset = in_bytes(Klass::secondary_supers_offset());
!   int sc_offset = in_bytes(Klass::secondary_super_cache_offset());

    // Do a linear scan of the secondary super-klass chain.
    // This code is rarely used, so simplicity is a virtue here.

#ifndef PRODUCT
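
For context on what the slow path uses ss_offset and sc_offset for: it walks the secondary_supers array and, on a hit, stores the found super into secondary_super_cache so the fast path succeeds next time. The sketch below is a plain-C++ mock of that algorithm; the MockKlass fields are hypothetical stand-ins for the real objArrayOop layout, and this is not the SPARC code itself.

// Mock of the secondary-supers linear scan with cache update (not HotSpot source).
#include <cstdio>

struct MockKlass {
  MockKlass*  secondary_super_cache;   // one-element cache checked by the fast path
  int         secondary_supers_len;
  MockKlass** secondary_supers;        // linear array of implemented supertypes
};

static bool check_klass_subtype_slow_path(MockKlass* sub, MockKlass* super) {
  for (int i = 0; i < sub->secondary_supers_len; i++) {
    if (sub->secondary_supers[i] == super) {
      sub->secondary_super_cache = super;   // remember the hit for next time
      return true;
    }
  }
  return false;
}

int main() {
  MockKlass super_k = { nullptr, 0, nullptr };
  MockKlass* supers[] = { &super_k };
  MockKlass sub = { nullptr, 1, supers };
  std::printf("subtype: %s\n", check_klass_subtype_slow_path(&sub, &super_k) ? "yes" : "no");
  std::printf("cached:  %s\n", sub.secondary_super_cache == &super_k ? "yes" : "no");
  return 0;
}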
*** 3334,3344 ****
--- 3330,3340 ----
    assert(markOopDesc::age_shift == markOopDesc::lock_bits + markOopDesc::biased_lock_bits,
           "biased locking makes assumptions about bit layout");
    and3(mark_reg, markOopDesc::biased_lock_mask_in_place, temp_reg);
    cmp_and_brx_short(temp_reg, markOopDesc::biased_lock_pattern, Assembler::notEqual, Assembler::pn, cas_label);

    load_klass(obj_reg, temp_reg);
-   ld_ptr(Address(temp_reg, Klass::prototype_header_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes()), temp_reg);
    or3(G2_thread, temp_reg, temp_reg);
    xor3(mark_reg, temp_reg, temp_reg);
    andcc(temp_reg, ~((int) markOopDesc::age_mask_in_place), temp_reg);
    if (counters != NULL) {
      cond_inc(Assembler::equal, (address) counters->biased_lock_entry_count_addr(), mark_reg, temp_reg);
*** 3411,3421 ****
--- 3407,3417 ----
    // the bias from one thread to another directly in this situation.
    //
    // FIXME: due to a lack of registers we currently blow away the age
    // bits in this situation. Should attempt to preserve them.
    load_klass(obj_reg, temp_reg);
-   ld_ptr(Address(temp_reg, Klass::prototype_header_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes()), temp_reg);
    or3(G2_thread, temp_reg, temp_reg);
    casn(mark_addr.base(), mark_reg, temp_reg);
    // If the biasing toward our thread failed, this means that
    // another thread succeeded in biasing it toward itself and we
    // need to revoke that bias. The revocation will occur in the
*** 3441,3451 ****
--- 3437,3447 ----
    // normal locking code.
    //
    // FIXME: due to a lack of registers we currently blow away the age
    // bits in this situation. Should attempt to preserve them.
    load_klass(obj_reg, temp_reg);
-   ld_ptr(Address(temp_reg, Klass::prototype_header_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes()), temp_reg);
    casn(mark_addr.base(), mark_reg, temp_reg);
    // Fall through to the normal CAS-based lock, because no matter what
    // the result of the above CAS, some thread must have succeeded in
    // removing the bias bit from the object's header.
    if (counters != NULL) {
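
All three biased-locking hunks load the klass's prototype header before combining it with the current thread and testing or installing it in the object's mark word. The sketch below mocks the ownership test from the first of these hunks (the and3/cmp, or3, xor3, andcc sequence) in portable C++; the bit-layout constants are illustrative assumptions, not the values from markOop.hpp.

// Mock of the "already biased toward the current thread?" test (not HotSpot source).
#include <cstdint>
#include <cstdio>

const uintptr_t biased_lock_pattern = 0x5;         // illustrative: "biasable and biased"
const uintptr_t biased_lock_mask    = 0x7;         // illustrative: low lock/bias bits
const uintptr_t age_mask_in_place   = 0xfUL << 3;  // illustrative: age field, ignored by the test

static bool biased_to_current_thread(uintptr_t mark, uintptr_t prototype, uintptr_t thread) {
  if ((mark & biased_lock_mask) != biased_lock_pattern)
    return false;                                  // not in the biased state at all
  uintptr_t expected = prototype | thread;         // or3(G2_thread, temp_reg, temp_reg)
  uintptr_t diff     = mark ^ expected;            // xor3(mark_reg, temp_reg, temp_reg)
  return (diff & ~age_mask_in_place) == 0;         // andcc(temp_reg, ~age_mask_in_place, ...)
}

int main() {
  uintptr_t thread    = 0x1000;                    // mock, suitably aligned thread pointer
  uintptr_t prototype = biased_lock_pattern;       // mock prototype header
  uintptr_t mark      = prototype | thread;        // header after a successful bias
  std::printf("%d\n", biased_to_current_thread(mark, prototype, thread));  // prints 1
  return 0;
}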
