src/share/vm/c1/c1_LIRGenerator.cpp
Navigation: Index | Unified diffs | Context diffs | Sdiffs | Wdiffs | Patch | New | Old | Previous File | Next File — change 6986046-fallout, Sdiff view of src/share/vm/c1

src/share/vm/c1/c1_LIRGenerator.cpp

Print this page




2292       Local* receiver = x->state()->local_at(0)->as_Local();
2293       assert(receiver != NULL, "must already exist");
2294       obj = receiver->operand();
2295     }
2296     assert(obj->is_valid(), "must be valid");
2297 
2298     if (method()->is_synchronized() && GenerateSynchronizationCode) {
2299       LIR_Opr lock = new_register(T_INT);
2300       __ load_stack_address_monitor(0, lock);
2301 
2302       CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL);
2303       CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);
2304 
2305       // receiver is guaranteed non-NULL so don't need CodeEmitInfo
2306       __ lock_object(syncTempOpr(), obj, lock, new_register(T_OBJECT), slow_path, NULL);
2307     }
2308   }
2309 
2310   // increment invocation counters if needed
2311   if (!method()->is_accessor()) { // Accessors do not have MDOs, so no counting.
2312     CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state(), NULL);
2313     increment_invocation_counter(info);
2314   }
2315 
2316   // all blocks with a successor must end with an unconditional jump
2317   // to the successor even if they are consecutive
2318   __ jump(x->default_sux());
2319 }
2320 
2321 
2322 void LIRGenerator::do_OsrEntry(OsrEntry* x) {
2323   // construct our frame and model the production of incoming pointer
2324   // to the OSR buffer.
     // NOTE(review): osrBufferPointer() is the fixed location in which the
     // interpreter hands over the OSR buffer — confirm against the platform
     // LIR_Assembler definition.
2325   __ osr_entry(LIR_Assembler::osrBufferPointer());
     // Give the OsrEntry node a result operand and copy the buffer pointer
     // into it, so subsequent LIR uses a virtual register rather than the
     // fixed incoming location.
2326   LIR_Opr result = rlock_result(x);
2327   __ move(LIR_Assembler::osrBufferPointer(), result);
2328 }
2329 
2330 
2331 void LIRGenerator::invoke_load_arguments(Invoke* x, LIRItemList* args, const LIR_OprList* arg_list) {
2332   int i = (x->has_receiver() || x->is_invokedynamic()) ? 1 : 0;




2292       Local* receiver = x->state()->local_at(0)->as_Local();
2293       assert(receiver != NULL, "must already exist");
2294       obj = receiver->operand();
2295     }
2296     assert(obj->is_valid(), "must be valid");
2297 
2298     if (method()->is_synchronized() && GenerateSynchronizationCode) {
2299       LIR_Opr lock = new_register(T_INT);
2300       __ load_stack_address_monitor(0, lock);
2301 
2302       CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL);
2303       CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);
2304 
2305       // receiver is guaranteed non-NULL so don't need CodeEmitInfo
2306       __ lock_object(syncTempOpr(), obj, lock, new_register(T_OBJECT), slow_path, NULL);
2307     }
2308   }
2309 
2310   // increment invocation counters if needed
2311   if (!method()->is_accessor()) { // Accessors do not have MDOs, so no counting.
2312     CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL);
2313     increment_invocation_counter(info);
2314   }
2315 
2316   // all blocks with a successor must end with an unconditional jump
2317   // to the successor even if they are consecutive
2318   __ jump(x->default_sux());
2319 }
2320 
2321 
2322 void LIRGenerator::do_OsrEntry(OsrEntry* x) {
2323   // construct our frame and model the production of incoming pointer
2324   // to the OSR buffer.
     // NOTE(review): osrBufferPointer() is the fixed location in which the
     // interpreter hands over the OSR buffer — confirm against the platform
     // LIR_Assembler definition.
2325   __ osr_entry(LIR_Assembler::osrBufferPointer());
     // Give the OsrEntry node a result operand and copy the buffer pointer
     // into it, so subsequent LIR uses a virtual register rather than the
     // fixed incoming location.
2326   LIR_Opr result = rlock_result(x);
2327   __ move(LIR_Assembler::osrBufferPointer(), result);
2328 }
2329 
2330 
2331 void LIRGenerator::invoke_load_arguments(Invoke* x, LIRItemList* args, const LIR_OprList* arg_list) {
2332   int i = (x->has_receiver() || x->is_invokedynamic()) ? 1 : 0;


src/share/vm/c1/c1_LIRGenerator.cpp
Navigation: Index | Unified diffs | Context diffs | Sdiffs | Wdiffs | Patch | New | Old | Previous File | Next File