
src/hotspot/cpu/x86/c1_LIRAssembler_x86.cpp

old:
 344   Register ic_klass = IC_Klass;
 345   const int ic_cmp_size = LP64_ONLY(10) NOT_LP64(9);
 346   const bool do_post_padding = VerifyOops || UseCompressedClassPointers;
 347   if (!do_post_padding) {
 348     // insert some nops so that the verified entry point is aligned on CodeEntryAlignment
 349     __ align(CodeEntryAlignment, __ offset() + ic_cmp_size);
 350   }
 351   int offset = __ offset();
 352   __ inline_cache_check(receiver, IC_Klass);
 353   assert(__ offset() % CodeEntryAlignment == 0 || do_post_padding, "alignment must be correct");
 354   if (do_post_padding) {
 355     // force alignment after the cache check.
 356     // It's been verified to be aligned if !do_post_padding
 357     __ align(CodeEntryAlignment);
 358   }
 359   return offset;
 360 }
 361 
 362 void LIR_Assembler::clinit_barrier(ciMethod* method) {
 363   assert(VM_Version::supports_fast_class_init_checks(), "sanity");
 364   assert(method->holder()->is_being_initialized() || method->holder()->is_initialized(),
 365          "initialization should have been started");
 366 
 367   Label L_skip_barrier;
 368   Register klass = rscratch1;
 369   Register thread = LP64_ONLY( r15_thread ) NOT_LP64( noreg );
 370   assert(thread != noreg, "x86_32 not implemented");
 371 
 372   __ mov_metadata(klass, method->holder()->constant_encoding());
 373   __ clinit_barrier(klass, thread, &L_skip_barrier /*L_fast_path*/);
 374 
 375   __ jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub()));
 376 
 377   __ bind(L_skip_barrier);
 378 }
 379 
 380 void LIR_Assembler::jobject2reg_with_patching(Register reg, CodeEmitInfo* info) {
 381   jobject o = NULL;
 382   PatchingStub* patch = new PatchingStub(_masm, patching_id(info));
 383   __ movoop(reg, o);
 384   patching_epilog(patch, lir_patch_normal, reg, info);
 385 }

new:
 344   Register ic_klass = IC_Klass;
 345   const int ic_cmp_size = LP64_ONLY(10) NOT_LP64(9);
 346   const bool do_post_padding = VerifyOops || UseCompressedClassPointers;
 347   if (!do_post_padding) {
 348     // insert some nops so that the verified entry point is aligned on CodeEntryAlignment
 349     __ align(CodeEntryAlignment, __ offset() + ic_cmp_size);
 350   }
 351   int offset = __ offset();
 352   __ inline_cache_check(receiver, IC_Klass);
 353   assert(__ offset() % CodeEntryAlignment == 0 || do_post_padding, "alignment must be correct");
 354   if (do_post_padding) {
 355     // force alignment after the cache check.
 356     // It's been verified to be aligned if !do_post_padding
 357     __ align(CodeEntryAlignment);
 358   }
 359   return offset;
 360 }
 361 
 362 void LIR_Assembler::clinit_barrier(ciMethod* method) {
 363   assert(VM_Version::supports_fast_class_init_checks(), "sanity");
 364   assert(!method->holder()->is_not_initialized(), "initialization should have been started");
 365 
 366   Label L_skip_barrier;
 367   Register klass = rscratch1;
 368   Register thread = LP64_ONLY( r15_thread ) NOT_LP64( noreg );
 369   assert(thread != noreg, "x86_32 not implemented");
 370 
 371   __ mov_metadata(klass, method->holder()->constant_encoding());
 372   __ clinit_barrier(klass, thread, &L_skip_barrier /*L_fast_path*/);
 373 
 374   __ jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub()));
 375 
 376   __ bind(L_skip_barrier);
 377 }
 378 
 379 void LIR_Assembler::jobject2reg_with_patching(Register reg, CodeEmitInfo* info) {
 380   jobject o = NULL;
 381   PatchingStub* patch = new PatchingStub(_masm, patching_id(info));
 382   __ movoop(reg, o);
 383   patching_epilog(patch, lir_patch_normal, reg, info);
 384 }
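For orientation, a minimal standalone sketch of the padding arithmetic behind align(CodeEntryAlignment, __ offset() + ic_cmp_size) in the IC-check code above: nops are inserted first so that the verified entry point, which begins right after the ic_cmp_size-byte inline-cache compare, lands on a CodeEntryAlignment boundary. The 32-byte alignment is an assumed, typical value for illustration only.

#include <cstddef>
#include <cstdio>

// Pad bytes needed at 'offset' so that whatever follows an
// 'insn_size'-byte sequence starts on an 'alignment' boundary.
static size_t pad_before(size_t offset, size_t insn_size, size_t alignment) {
  size_t end = offset + insn_size;
  size_t rem = end % alignment;
  return rem == 0 ? 0 : alignment - rem;
}

int main() {
  const size_t alignment   = 32;  // assumed CodeEntryAlignment
  const size_t ic_cmp_size = 10;  // LP64 size from the listing
  for (size_t offset = 0; offset < 4; offset++) {
    size_t pad = pad_before(offset, ic_cmp_size, alignment);
    std::printf("offset %zu: pad %zu nops, verified entry at %zu\n",
                offset, pad, offset + pad + ic_cmp_size);
  }
  return 0;
}

This is the property the assert above checks: when no post-padding is requested, the offset after the inline-cache check is a multiple of CodeEntryAlignment.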

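The barrier emitted by clinit_barrier() takes the fast path (L_skip_barrier) when the holder class is fully initialized, or is being initialized by the current thread (a reentrant <clinit>); otherwise it jumps to the handle-wrong-method stub so the call is re-resolved, which can block until initialization completes. A compilable sketch of that decision follows; KlassSketch, init_state and init_thread are illustrative stand-ins, not the real InstanceKlass layout.

#include <cstdio>

// States mirror InstanceKlass's class-initialization life cycle.
enum InitState { allocated, loaded, linked, being_initialized,
                 fully_initialized, initialization_error };

struct KlassSketch {
  InitState   init_state;
  const void* init_thread;  // thread running <clinit>, if any
};

// true  -> fall through to L_skip_barrier (fast path)
// false -> jump to SharedRuntime::get_handle_wrong_method_stub()
static bool clinit_fast_path(const KlassSketch* k, const void* current_thread) {
  if (k->init_state == fully_initialized) return true;   // common case
  return k->init_state == being_initialized &&
         k->init_thread == current_thread;               // reentrant <clinit>
}

int main() {
  KlassSketch k = { being_initialized, /*init_thread=*/&k };
  std::printf("initializer thread: %d, other thread: %d\n",
              clinit_fast_path(&k, &k), clinit_fast_path(&k, nullptr));
  return 0;
}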

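jobject2reg_with_patching() emits movoop with a NULL placeholder and registers a PatchingStub so the runtime can later write the resolved constant into the instruction. A toy sketch of that placeholder-then-patch idea, using a hand-rolled mov rax, imm64 encoding; the real PatchingStub machinery (safepoints, relocation records, icache flushing) is deliberately not modeled.

#include <cstdint>
#include <cstring>
#include <vector>

struct PatchSite { size_t imm_offset; };  // where the immediate lives

// Emit 'mov rax, imm64' with a null placeholder and remember
// the offset of the 8-byte immediate for later patching.
static PatchSite emit_movoop_placeholder(std::vector<uint8_t>& code) {
  code.push_back(0x48);                            // REX.W prefix
  code.push_back(0xB8);                            // mov rax, imm64
  PatchSite site = { code.size() };
  for (int i = 0; i < 8; i++) code.push_back(0);   // 'jobject o = NULL'
  return site;
}

// Overwrite the placeholder with the resolved constant.
static void patch_oop(std::vector<uint8_t>& code, PatchSite s, uint64_t oop) {
  std::memcpy(&code[s.imm_offset], &oop, sizeof oop);
}

int main() {
  std::vector<uint8_t> code;
  PatchSite s = emit_movoop_placeholder(code);
  patch_oop(code, s, 0x7f0000001234ull);           // pretend-resolved oop
  return 0;
}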