< prev index next >

src/hotspot/cpu/x86/macroAssembler_x86.cpp

Print this page


6353     if (reg->is_stack()) {
6354       // Update source stack location by adding stack increment
6355       reg = VMRegImpl::stack2reg(reg->reg2stack() + sp_inc/VMRegImpl::stack_slot_size);
6356       regs[i] = reg;
6357     }
6358     assert(reg->value() >= 0 && reg->value() < max_reg, "reg value out of bounds");
6359     reg_state[reg->value()] = MacroAssembler::reg_readonly;
6360   }
6361   if (is_packing) {
6362     // The reserved entries are not used by the packed args, so make them writable
6363     mark_reserved_entries_writable(sig_cc, regs, num_regs, reg_state);
6364   }
6365 
6366   return reg_state;
6367 }
6368 
6369 int MacroAssembler::shuffle_value_args(bool is_packing, bool receiver_only, int extra_stack_offset,
6370                                        BasicType* sig_bt, const GrowableArray<SigEntry>* sig_cc,
6371                                        int args_passed, int args_on_stack, VMRegPair* regs,            // from
6372                                        int args_passed_to, int args_on_stack_to, VMRegPair* regs_to) { // to
6373   // Check if we need to extend the stack for unpacking
6374   int sp_inc = (args_on_stack_to - args_on_stack) * VMRegImpl::stack_slot_size;
6375   if (sp_inc > 0) {


6376     // Save the return address, adjust the stack (make sure it is properly
6377     // 16-byte aligned) and copy the return address to the new top of the stack.

6378     pop(r13);
6379     sp_inc = align_up(sp_inc, StackAlignmentInBytes);
6380     subptr(rsp, sp_inc);
6381     push(r13);

6382   } else {
6383     // The scalarized calling convention needs less stack space than the unscalarized one.
6384     // No need to extend the stack, the caller will take care of these adjustments.
6385     sp_inc = 0;
6386   }
6387 
6388   int ret_off; // make sure we don't overwrite the return address
6389   if (is_packing) {
6390     // For C1 code, the VVEP doesn't have reserved slots, so we store the return address at
6391     // rsp[0] during shuffling.
6392     ret_off = 0;
6393   } else {
6394     // C2 code ensures that sp_inc is a reserved slot.
6395     ret_off = sp_inc;
6396   }
6397 
6398   int max_stack = MAX2(args_on_stack + sp_inc/VMRegImpl::stack_slot_size, args_on_stack_to);
6399   RegState* reg_state = init_reg_state(is_packing, sig_cc, regs, args_passed, sp_inc, max_stack);
6400 
6401   // Emit code for packing/unpacking value type arguments




6353     if (reg->is_stack()) {
6354       // Update source stack location by adding stack increment
6355       reg = VMRegImpl::stack2reg(reg->reg2stack() + sp_inc/VMRegImpl::stack_slot_size);
6356       regs[i] = reg;
6357     }
6358     assert(reg->value() >= 0 && reg->value() < max_reg, "reg value out of bounds");
6359     reg_state[reg->value()] = MacroAssembler::reg_readonly;
6360   }
6361   if (is_packing) {
6362     // The reserved entries are not used by the packed args, so make them writable
6363     mark_reserved_entries_writable(sig_cc, regs, num_regs, reg_state);
6364   }
6365 
6366   return reg_state;
6367 }
6368 
6369 int MacroAssembler::shuffle_value_args(bool is_packing, bool receiver_only, int extra_stack_offset,
6370                                        BasicType* sig_bt, const GrowableArray<SigEntry>* sig_cc,
6371                                        int args_passed, int args_on_stack, VMRegPair* regs,            // from
6372                                        int args_passed_to, int args_on_stack_to, VMRegPair* regs_to) { // to
6373   // Check if we need to extend the stack for packing/unpacking
6374   int sp_inc = (args_on_stack_to - args_on_stack) * VMRegImpl::stack_slot_size;
6375   if (sp_inc > 0) {
6376     sp_inc = align_up(sp_inc, StackAlignmentInBytes);
6377     if (!is_packing) {
6378       // Save the return address, adjust the stack (make sure it is properly
6379       // 16-byte aligned) and copy the return address to the new top of the stack.
6380       // (Note: C1 does this in C1_MacroAssembler::scalarized_entry).
6381       pop(r13);

6382       subptr(rsp, sp_inc);
6383       push(r13);
6384     }
6385   } else {
6386     // The scalarized calling convention needs less stack space than the unscalarized one.
6387     // No need to extend the stack, the caller will take care of these adjustments.
6388     sp_inc = 0;
6389   }
6390 
6391   int ret_off; // make sure we don't overwrite the return address
6392   if (is_packing) {
6393     // For C1 code, the VVEP doesn't have reserved slots, so we store the return address at
6394     // rsp[0] during shuffling.
6395     ret_off = 0;
6396   } else {
6397     // C2 code ensures that sp_inc is a reserved slot.
6398     ret_off = sp_inc;
6399   }
6400 
6401   int max_stack = MAX2(args_on_stack + sp_inc/VMRegImpl::stack_slot_size, args_on_stack_to);
6402   RegState* reg_state = init_reg_state(is_packing, sig_cc, regs, args_passed, sp_inc, max_stack);
6403 
6404   // Emit code for packing/unpacking value type arguments


< prev index next >