// NOTE(review): this span is not compilable source as stored — it is an
// extraction artifact: two revisions of the same chunk of HotSpot's
// CompiledIC code (apparently compiledIC.cpp, original lines 521-583) laid
// out side by side, separated by a '|', with the original file's own line
// numbers fused into the text and all formatting collapsed.
//
// Observable differences between the left (older?) and right (newer?) copy:
//  - Non-optimized compiled-entry selection: the left copy always uses
//    method_code->entry_point() (and carries a commented-out FIXME assert
//    about c1 callers of methods with scalarized args); the right copy
//    instead selects method_code->value_entry_point() when caller_is_c1.
//  - The icholder / unverified path: the left copy always uses
//    method()->get_c2i_unverified_entry(); the right copy selects
//    method()->get_c2i_unverified_value_entry() for c1 callers.
// The right copy therefore appears to extend the c1 / value-entry
// (inline-type calling convention) handling that the optimized branch
// (verified_value_entry_point vs verified_entry_point) already has on both
// sides — TODO(review): confirm against repository history before relying
// on this reading.
//
// The code preceding 'bool CompiledIC::is_icholder_entry' begins
// mid-function (its enclosing definition starts before this chunk), so it
// cannot be reviewed, reformatted, or rewritten in isolation. Both copies
// of is_icholder_entry are textually identical to each other. All three
// original lines are kept byte-identical below.
521 // Call to compiled code 522 // 523 // Note: the following problem exists with Compiler1: 524 // - at compile time we may or may not know if the destination is final 525 // - if we know that the destination is final (is_optimized), we will emit 526 // an optimized virtual call (no inline cache), and need a Method* to make 527 // a call to the interpreter 528 // - if we don't know if the destination is final, we emit a standard 529 // virtual call, and use CompiledICHolder to call interpreted code 530 // (no static call stub has been generated) 531 // - In the case that we here notice the call is static bound we 532 // convert the call into what looks to be an optimized virtual call, 533 // but we must use the unverified entry point (since there will be no 534 // null check on a call when the target isn't loaded). 535 // This causes problems when verifying the IC because 536 // it looks vanilla but is optimized. Code in is_call_to_interpreted 537 // is aware of this and weakens its asserts. 538 if (is_optimized) { 539 entry = caller_is_c1 ? method_code->verified_value_entry_point() : method_code->verified_entry_point(); 540 } else { 541 //assert(!(caller_is_c1 && method->has_scalarized_args()), "FIXME - what to do with c1 caller??"); 542 entry = method_code->entry_point(); 543 } 544 } 545 bool far_c2a = entry != NULL && caller_is_nmethod && method_code->is_far_code(); 546 if (entry != NULL && !far_c2a) { 547 // Call to near compiled code (nmethod or aot). 548 info.set_compiled_entry(entry, is_optimized ? NULL : receiver_klass, is_optimized); 549 } else { 550 if (is_optimized) { 551 if (far_c2a) { 552 // Call to aot code from nmethod. 553 info.set_aot_entry(entry, method()); 554 } else { 555 // Use stub entry 556 address entry = caller_is_c1 ? 
method()->get_c2i_value_entry() : method()->get_c2i_entry(); 557 info.set_interpreter_entry(entry, method()); 558 } 559 } else { 560 // Use icholder entry 561 assert(method_code == NULL || method_code->is_compiled(), "must be compiled"); 562 CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass); 563 //assert(!(caller_is_c1 && method->has_scalarized_args()), "FIXME - what to do with c1 caller??"); 564 info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder); 565 } 566 } 567 assert(info.is_optimized() == is_optimized, "must agree"); 568 } 569 570 571 bool CompiledIC::is_icholder_entry(address entry) { 572 CodeBlob* cb = CodeCache::find_blob_unsafe(entry); 573 if (cb != NULL && cb->is_adapter_blob()) { 574 return true; 575 } 576 // itable stubs also use CompiledICHolder 577 if (cb != NULL && cb->is_vtable_blob()) { 578 VtableStub* s = VtableStubs::entry_point(entry); 579 return (s != NULL) && s->is_itable_stub(); 580 } 581 582 return false; 583 } 584 | 521 // Call to compiled code 522 // 523 // Note: the following problem exists with Compiler1: 524 // - at compile time we may or may not know if the destination is final 525 // - if we know that the destination is final (is_optimized), we will emit 526 // an optimized virtual call (no inline cache), and need a Method* to make 527 // a call to the interpreter 528 // - if we don't know if the destination is final, we emit a standard 529 // virtual call, and use CompiledICHolder to call interpreted code 530 // (no static call stub has been generated) 531 // - In the case that we here notice the call is static bound we 532 // convert the call into what looks to be an optimized virtual call, 533 // but we must use the unverified entry point (since there will be no 534 // null check on a call when the target isn't loaded). 535 // This causes problems when verifying the IC because 536 // it looks vanilla but is optimized. 
Code in is_call_to_interpreted 537 // is aware of this and weakens its asserts. 538 if (is_optimized) { 539 entry = caller_is_c1 ? method_code->verified_value_entry_point() : method_code->verified_entry_point(); 540 } else { 541 entry = caller_is_c1 ? method_code->value_entry_point() : method_code->entry_point(); 542 } 543 } 544 bool far_c2a = entry != NULL && caller_is_nmethod && method_code->is_far_code(); 545 if (entry != NULL && !far_c2a) { 546 // Call to near compiled code (nmethod or aot). 547 info.set_compiled_entry(entry, is_optimized ? NULL : receiver_klass, is_optimized); 548 } else { 549 if (is_optimized) { 550 if (far_c2a) { 551 // Call to aot code from nmethod. 552 info.set_aot_entry(entry, method()); 553 } else { 554 // Use stub entry 555 address entry = caller_is_c1 ? method()->get_c2i_value_entry() : method()->get_c2i_entry(); 556 info.set_interpreter_entry(entry, method()); 557 } 558 } else { 559 // Use icholder entry 560 assert(method_code == NULL || method_code->is_compiled(), "must be compiled"); 561 CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass); 562 entry = (caller_is_c1)? method()->get_c2i_unverified_value_entry() : method()->get_c2i_unverified_entry(); 563 info.set_icholder_entry(entry, holder); 564 } 565 } 566 assert(info.is_optimized() == is_optimized, "must agree"); 567 } 568 569 570 bool CompiledIC::is_icholder_entry(address entry) { 571 CodeBlob* cb = CodeCache::find_blob_unsafe(entry); 572 if (cb != NULL && cb->is_adapter_blob()) { 573 return true; 574 } 575 // itable stubs also use CompiledICHolder 576 if (cb != NULL && cb->is_vtable_blob()) { 577 VtableStub* s = VtableStubs::entry_point(entry); 578 return (s != NULL) && s->is_itable_stub(); 579 } 580 581 return false; 582 } 583 |