< prev index next >
src/hotspot/share/opto/compile.cpp
Print this page
*** 545,554 ****
--- 545,560 ----
}
ResourceMark rm;
_scratch_const_size = const_size;
int size = C2Compiler::initial_code_buffer_size(const_size);
+ #ifdef ASSERT
+ if (C->has_scalarized_args()) {
+ // Oop verification for loading object fields from scalarized value types in the new entry point requires a lot of extra code buffer space
+ size += 5120;
+ }
+ #endif
blob = BufferBlob::create("Compile::scratch_buffer", size);
// Record the buffer blob for next time.
set_scratch_buffer_blob(blob);
// Have we run out of code space?
if (scratch_buffer_blob() == NULL) {
*** 609,627 ****
--- 615,638 ----
if (is_branch) {
MacroAssembler masm(&buf);
masm.bind(fakeL);
n->as_MachBranch()->save_label(&saveL, &save_bnum);
n->as_MachBranch()->label_set(&fakeL, 0);
+ } else if (n->is_MachProlog()) {
+ saveL = ((MachPrologNode*)n)->_verified_entry;
+ ((MachPrologNode*)n)->_verified_entry = &fakeL;
}
n->emit(buf, this->regalloc());
// Emitting into the scratch buffer should not fail
assert (!failing(), "Must not have pending failure. Reason is: %s", failure_reason());
// Restore label.
if (is_branch) {
n->as_MachBranch()->label_set(saveL, save_bnum);
+ } else if (n->is_MachProlog()) {
+ ((MachPrologNode*)n)->_verified_entry = saveL;
}
// End scratch_emit_size section.
set_in_scratch_emit_size(false);
*** 651,660 ****
--- 662,673 ----
_stub_name(NULL),
_stub_entry_point(NULL),
_max_node_limit(MaxNodeLimit),
_orig_pc_slot(0),
_orig_pc_slot_offset_in_bytes(0),
+ _sp_inc_slot(0),
+ _sp_inc_slot_offset_in_bytes(0),
_inlining_progress(false),
_inlining_incrementally(false),
_has_reserved_stack_access(target->has_reserved_stack_access()),
#ifndef PRODUCT
_trace_opto_output(directive->TraceOptoOutputOption),
*** 912,921 ****
--- 925,941 ----
// Now that we know the size of all the monitors we can add a fixed slot
// for the original deopt pc.
_orig_pc_slot = fixed_slots();
int next_slot = _orig_pc_slot + (sizeof(address) / VMRegImpl::stack_slot_size);
+
+ if (method()->get_Method()->needs_stack_repair()) {
+ // Reserve space for the special stack increment value (occupies two stack slots)
+ _sp_inc_slot = next_slot;
+ next_slot += 2;
+ }
+
set_fixed_slots(next_slot);
// Compute when to use implicit null checks. Used by matching trap based
// nodes and NullCheck optimization.
set_allowed_deopt_reasons();
*** 937,946 ****
--- 957,973 ----
if (is_osr_compilation()) {
_code_offsets.set_value(CodeOffsets::Verified_Entry, 0);
_code_offsets.set_value(CodeOffsets::OSR_Entry, _first_block_size);
} else {
_code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
+ if (_code_offsets.value(CodeOffsets::Verified_Value_Entry) == -1) {
+ _code_offsets.set_value(CodeOffsets::Verified_Value_Entry, _first_block_size);
+ }
+ if (_code_offsets.value(CodeOffsets::Entry) == -1) {
+ // We emitted a value type entry point; adjust the normal entry point accordingly
+ _code_offsets.set_value(CodeOffsets::Entry, _first_block_size);
+ }
_code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
}
env()->register_method(_method, _entry_bci,
&_code_offsets,
*** 982,991 ****
--- 1009,1020 ----
_stub_name(stub_name),
_stub_entry_point(NULL),
_max_node_limit(MaxNodeLimit),
_orig_pc_slot(0),
_orig_pc_slot_offset_in_bytes(0),
+ _sp_inc_slot(0),
+ _sp_inc_slot_offset_in_bytes(0),
_inlining_progress(false),
_inlining_incrementally(false),
_has_reserved_stack_access(false),
#ifndef PRODUCT
_trace_opto_output(directive->TraceOptoOutputOption),
< prev index next >