src/cpu/sparc/vm/sharedRuntime_sparc.cpp
Index
Unified diffs
Context diffs
Sdiffs
Wdiffs
Patch
New
Old
Previous File
Next File
*** old/src/cpu/sparc/vm/sharedRuntime_sparc.cpp Thu Oct 1 06:20:39 2009
--- new/src/cpu/sparc/vm/sharedRuntime_sparc.cpp Thu Oct 1 06:20:39 2009
*** 538,574 ****
--- 538,575 ----
// return the amount of stack space these arguments will need.
return stk_reg_pairs;
}
- // Helper class mostly to avoid passing masm everywhere, and handle store
// displacement overflow logic for LP64
+ // store displacement overflow logic.
class AdapterGenerator {
MacroAssembler *masm;
#ifdef _LP64
Register Rdisp;
void set_Rdisp(Register r) { Rdisp = r; }
#endif // _LP64
void patch_callers_callsite();
void tag_c2i_arg(frame::Tag t, Register base, int st_off, Register scratch);
// base+st_off points to top of argument
int arg_offset(const int st_off) { return st_off + Interpreter::value_offset_in_bytes(); }
int next_arg_offset(const int st_off) {
return st_off - Interpreter::stackElementSize() + Interpreter::value_offset_in_bytes();
}
#ifdef _LP64
// On _LP64 argument slot values are loaded first into a register
// because they might not fit into displacement.
Register arg_slot(const int st_off);
Register next_arg_slot(const int st_off);
#else
int arg_slot(const int st_off) { return arg_offset(st_off); }
int next_arg_slot(const int st_off) { return next_arg_offset(st_off); }
#endif // _LP64
+ int tag_offset(const int st_off) { return st_off + Interpreter::tag_offset_in_bytes(); }
+ int next_tag_offset(const int st_off) {
+ return st_off - Interpreter::stackElementSize() + Interpreter::tag_offset_in_bytes();
+ }
+
+ // Argument slot values may be loaded first into a register because
+ // they might not fit into displacement.
+ RegisterOrConstant arg_slot(const int st_off);
+ RegisterOrConstant next_arg_slot(const int st_off);
+
+ RegisterOrConstant tag_slot(const int st_off);
+ RegisterOrConstant next_tag_slot(const int st_off);
// Stores long into offset pointed to by base
void store_c2i_long(Register r, Register base,
const int st_off, bool is_stack);
void store_c2i_object(Register r, Register base,
*** 654,701 ****
--- 655,708 ----
}
void AdapterGenerator::tag_c2i_arg(frame::Tag t, Register base, int st_off,
Register scratch) {
if (TaggedStackInterpreter) {
! int tag_off = st_off + Interpreter::tag_offset_in_bytes();
#ifdef _LP64
Register tag_slot = Rdisp;
__ set(tag_off, tag_slot);
#else
int tag_slot = tag_off;
#endif // _LP64
! RegisterOrConstant slot = tag_slot(st_off);
// have to store zero because local slots can be reused (rats!)
if (t == frame::TagValue) {
- __ st_ptr(G0, base, tag_slot);
} else if (t == frame::TagCategory2) {
- __ st_ptr(G0, base, tag_slot);
int next_tag_off = st_off - Interpreter::stackElementSize() +
Interpreter::tag_offset_in_bytes();
#ifdef _LP64
__ set(next_tag_off, tag_slot);
#else
tag_slot = next_tag_off;
#endif // _LP64
__ st_ptr(G0, base, tag_slot);
+ __ st_ptr(G0, base, next_tag_slot(st_off));
} else {
__ mov(t, scratch);
- __ st_ptr(scratch, base, tag_slot);
}
}
}
#ifdef _LP64
! RegisterOrConstant AdapterGenerator::arg_slot(const int st_off) {
! __ set( arg_offset(st_off), Rdisp);
! return Rdisp;
+
! int offset = arg_offset(st_off);
! if (Assembler::is_simm13(offset)) return RegisterOrConstant(offset);
+ __ set(offset, Rdisp);
+ return RegisterOrConstant(Rdisp);
}
! RegisterOrConstant AdapterGenerator::next_arg_slot(const int st_off) {
! __ set( next_arg_offset(st_off), Rdisp);
! return Rdisp;
! int offset = next_arg_offset(st_off);
! if (Assembler::is_simm13(offset)) return RegisterOrConstant(offset);
+ __ set(offset, Rdisp);
+ return RegisterOrConstant(Rdisp);
+ }
+
+
+ RegisterOrConstant AdapterGenerator::tag_slot(const int st_off) {
+ int offset = tag_offset(st_off);
+ if (Assembler::is_simm13(offset)) return RegisterOrConstant(offset);
+ __ set(offset, Rdisp);
+ return RegisterOrConstant(Rdisp);
+ }
+
+ RegisterOrConstant AdapterGenerator::next_tag_slot(const int st_off) {
+ int offset = next_tag_offset(st_off);
+ if (Assembler::is_simm13(offset)) return RegisterOrConstant(offset);
+ __ set(offset, Rdisp);
+ return RegisterOrConstant(Rdisp);
}
#endif // _LP64
+
// Stores long into offset pointed to by base
void AdapterGenerator::store_c2i_long(Register r, Register base,
const int st_off, bool is_stack) {
#ifdef _LP64
*** 1050,1062 ****
--- 1057,1067 ----
// 32-bit build and aligned in the 64-bit build. Look for the obvious
// ldx/lddf optimizations.
// Load in argument order going down.
const int ld_off = (total_args_passed-i)*Interpreter::stackElementSize();
#ifdef _LP64
set_Rdisp(G1_scratch);
#endif // _LP64
VMReg r_1 = regs[i].first();
VMReg r_2 = regs[i].second();
if (!r_1->is_valid()) {
assert(!r_2->is_valid(), "");
*** 1072,1082 ****
--- 1077,1087 ----
__ ld(Gargs, arg_slot(ld_off), r);
} else {
#ifdef _LP64
// In V9, longs are given 2 64-bit slots in the interpreter, but the
// data is passed in only 1 slot.
! Register slot = (sig_bt[i]==T_LONG) ?
! RegisterOrConstant slot = (sig_bt[i] == T_LONG) ?
next_arg_slot(ld_off) : arg_slot(ld_off);
__ ldx(Gargs, slot, r);
#else
// Need to load a 64-bit value into G1/G4, but G1/G4 is being used in the
// stack shuffle. Load the first 2 longs into G1/G4 later.
*** 1090,1100 ****
--- 1095,1105 ----
#ifdef _LP64
// In V9, doubles are given 2 64-bit slots in the interpreter, but the
// data is passed in only 1 slot. This code also handles longs that
// are passed on the stack, but need a stack-to-stack move through a
// spare float register.
! Register slot = (sig_bt[i]==T_LONG || sig_bt[i] == T_DOUBLE) ?
! RegisterOrConstant slot = (sig_bt[i] == T_LONG || sig_bt[i] == T_DOUBLE) ?
next_arg_slot(ld_off) : arg_slot(ld_off);
__ ldf(FloatRegisterImpl::D, Gargs, slot, r_1->as_FloatRegister());
#else
// Need to marshal 64-bit value from misaligned Lesp loads
__ ldf(FloatRegisterImpl::S, Gargs, next_arg_slot(ld_off), r_1->as_FloatRegister());
*** 1107,1118 ****
--- 1112,1126 ----
if (regs[i].first()->is_stack()) {
assert(r_1->as_FloatRegister() == F8, "fix this code");
// Convert stack slot to an SP offset
int st_off = reg2offset(regs[i].first()) + STACK_BIAS;
// Store down the shuffled stack word. Target address _is_ aligned.
if (!r_2->is_valid()) __ stf(FloatRegisterImpl::S, r_1->as_FloatRegister(), SP, st_off);
! else __ stf(FloatRegisterImpl::D, r_1->as_FloatRegister(), SP, st_off);
+ // If the following assert fails, don't use arg_slot as the offset will be wrong.
! assert(Interpreter::value_offset_in_bytes() == 0, "should be 0");
+ RegisterOrConstant slot = arg_slot(st_off);
+ if (!r_2->is_valid()) __ stf(FloatRegisterImpl::S, r_1->as_FloatRegister(), SP, slot);
+ else __ stf(FloatRegisterImpl::D, r_1->as_FloatRegister(), SP, slot);
}
}
bool made_space = false;
#ifndef _LP64
// May need to pick up a few long args in G1/G4
src/cpu/sparc/vm/sharedRuntime_sparc.cpp
Index
Unified diffs
Context diffs
Sdiffs
Wdiffs
Patch
New
Old
Previous File
Next File