
src/share/vm/opto/library_call.cpp

rev 8979 : [mq]: vmerr_static

*** 131,141 ****
        }
      }
  
   private:
    void fatal_unexpected_iid(vmIntrinsics::ID iid) {
!     fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
    }
  
    void set_result(Node* n) { assert(_result == NULL, "only set once"); _result = n; }
    void set_result(RegionNode* region, PhiNode* value);
    Node* result() { return _result; }
--- 131,141 ----
        }
      }
  
   private:
    void fatal_unexpected_iid(vmIntrinsics::ID iid) {
!     fatal("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid));
    }
  
    void set_result(Node* n) { assert(_result == NULL, "only set once"); _result = n; }
    void set_result(RegionNode* region, PhiNode* value);
    Node* result() { return _result; }
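Every hunk in this change follows the same pattern: the call site stops wrapping its format string in err_msg_res(), because fatal() now accepts printf-style arguments directly. As a rough, standalone sketch of how such a variadic error-reporting macro can be built (illustrative only, not HotSpot's actual debug.hpp machinery; report_fatal_sketch and FATAL_SKETCH are made-up names):

  #include <cstdarg>
  #include <cstdio>
  #include <cstdlib>

  // Hypothetical stand-in for a variadic fatal(): it formats the message
  // itself, so callers no longer need an err_msg_res()-style wrapper.
  static void report_fatal_sketch(const char* file, int line, const char* fmt, ...) {
    va_list ap;
    va_start(ap, fmt);
    std::fprintf(stderr, "# Fatal error at %s:%d: ", file, line);
    std::vfprintf(stderr, fmt, ap);
    std::fputc('\n', stderr);
    va_end(ap);
    std::abort();
  }

  #define FATAL_SKETCH(...) report_fatal_sketch(__FILE__, __LINE__, __VA_ARGS__)

  int main() {
    int type = 42;
    // Call sites pass the format string and its arguments directly:
    FATAL_SKETCH("unexpected type %d: %s", type, "T_ILLEGAL");
  }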
*** 2464,2474 ****
      // Cast to an int type.
      p = _gvn.transform(new CastP2XNode(NULL, p));
      p = ConvX2UL(p);
      break;
    default:
!     fatal(err_msg_res("unexpected type %d: %s", type, type2name(type)));
      break;
    }
  }
  // The load node has the control of the preceding MemBarCPUOrder.  All
  // following nodes will have the control of the MemBarCPUOrder inserted at
--- 2464,2474 ----
      // Cast to an int type.
      p = _gvn.transform(new CastP2XNode(NULL, p));
      p = ConvX2UL(p);
      break;
    default:
!     fatal("unexpected type %d: %s", type, type2name(type));
      break;
    }
  }
  // The load node has the control of the preceding MemBarCPUOrder.  All
  // following nodes will have the control of the MemBarCPUOrder inserted at
*** 2753,2763 ****
      } else {
        post_barrier(control(), load_store, base, adr, alias_idx, newval, T_OBJECT, true);
      }
      break;
    default:
!     fatal(err_msg_res("unexpected type %d: %s", type, type2name(type)));
      break;
    }
  
    // SCMemProjNodes represent the memory state of a LoadStore. Their
    // main role is to prevent LoadStore nodes from being optimized away
--- 2753,2763 ----
      } else {
        post_barrier(control(), load_store, base, adr, alias_idx, newval, T_OBJECT, true);
      }
      break;
    default:
!     fatal("unexpected type %d: %s", type, type2name(type));
      break;
    }
  
    // SCMemProjNodes represent the memory state of a LoadStore. Their
    // main role is to prevent LoadStore nodes from being optimized away
*** 3805,3815 ****
  Node* LibraryCallKit::generate_virtual_guard(Node* obj_klass,
                                               RegionNode* slow_region) {
    ciMethod* method = callee();
    int vtable_index = method->vtable_index();
    assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index,
!          err_msg_res("bad index %d", vtable_index));
    // Get the Method* out of the appropriate vtable entry.
    int entry_offset = (InstanceKlass::vtable_start_offset() +
                        vtable_index*vtableEntry::size()) * wordSize +
                        vtableEntry::method_offset_in_bytes();
    Node* entry_addr = basic_plus_adr(obj_klass, entry_offset);
--- 3805,3815 ----
  Node* LibraryCallKit::generate_virtual_guard(Node* obj_klass,
                                               RegionNode* slow_region) {
    ciMethod* method = callee();
    int vtable_index = method->vtable_index();
    assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index,
!          "bad index %d", vtable_index);
    // Get the Method* out of the appropriate vtable entry.
    int entry_offset = (InstanceKlass::vtable_start_offset() +
                        vtable_index*vtableEntry::size()) * wordSize +
                        vtableEntry::method_offset_in_bytes();
    Node* entry_addr = basic_plus_adr(obj_klass, entry_offset);
*** 3857,3867 ****
        // hashCode and clone are not a miranda methods,
        // so the vtable index is fixed.
        // No need to use the linkResolver to get it.
        vtable_index = method->vtable_index();
        assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index,
!              err_msg_res("bad index %d", vtable_index));
      }
      slow_call = new CallDynamicJavaNode(tf,
                                          SharedRuntime::get_resolve_virtual_call_stub(),
                                          method, vtable_index, bci());
    } else {  // neither virtual nor static:  opt_virtual
--- 3857,3867 ----
        // hashCode and clone are not a miranda methods,
        // so the vtable index is fixed.
        // No need to use the linkResolver to get it.
        vtable_index = method->vtable_index();
        assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index,
!              "bad index %d", vtable_index);
      }
      slow_call = new CallDynamicJavaNode(tf,
                                          SharedRuntime::get_resolve_virtual_call_stub(),
                                          method, vtable_index, bci());
    } else {  // neither virtual nor static:  opt_virtual
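The assert() sites change the same way: the failure message's format string and arguments now follow the condition directly instead of being pre-formatted by err_msg_res(). A minimal sketch of a variadic assert-style macro under the same caveat (ASSERT_SKETCH and report_assert_failure_sketch are hypothetical names, not HotSpot's actual vmassert machinery):

  #include <cstdarg>
  #include <cstdio>
  #include <cstdlib>

  // Hypothetical variadic assert: on failure it formats the message itself,
  // printing the stringified condition alongside the caller's format arguments.
  static void report_assert_failure_sketch(const char* file, int line,
                                           const char* cond, const char* fmt, ...) {
    va_list ap;
    va_start(ap, fmt);
    std::fprintf(stderr, "# assert(%s) failed at %s:%d: ", cond, file, line);
    std::vfprintf(stderr, fmt, ap);
    std::fputc('\n', stderr);
    va_end(ap);
    std::abort();
  }

  #define ASSERT_SKETCH(cond, ...)                                            \
    do {                                                                      \
      if (!(cond)) {                                                          \
        report_assert_failure_sketch(__FILE__, __LINE__, #cond, __VA_ARGS__); \
      }                                                                       \
    } while (0)

  int main() {
    int vtable_index = -7;
    ASSERT_SKETCH(vtable_index >= 0, "bad index %d", vtable_index);
  }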
*** 6129,6139 ****
        stub_addr = StubRoutines::sha512_implCompressMB();
        long_state = true;
      }
      break;
    default:
!     fatal(err_msg_res("unknown SHA intrinsic predicate: %d", predicate));
    }
    if (klass_SHA_name != NULL) {
      // get DigestBase klass to lookup for SHA klass
      const TypeInstPtr* tinst = _gvn.type(digestBase_obj)->isa_instptr();
      assert(tinst != NULL, "digestBase_obj is not instance???");
--- 6129,6139 ----
        stub_addr = StubRoutines::sha512_implCompressMB();
        long_state = true;
      }
      break;
    default:
!     fatal("unknown SHA intrinsic predicate: %d", predicate);
    }
    if (klass_SHA_name != NULL) {
      // get DigestBase klass to lookup for SHA klass
      const TypeInstPtr* tinst = _gvn.type(digestBase_obj)->isa_instptr();
      assert(tinst != NULL, "digestBase_obj is not instance???");
*** 6234,6244 ****
      // we want to do an instanceof comparison against the SHA5 class
      klass_SHA_name = "sun/security/provider/SHA5";
    }
    break;
  default:
!   fatal(err_msg_res("unknown SHA intrinsic predicate: %d", predicate));
  }
  
  ciKlass* klass_SHA = NULL;
  if (klass_SHA_name != NULL) {
    klass_SHA = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make(klass_SHA_name));
--- 6234,6244 ----
      // we want to do an instanceof comparison against the SHA5 class
      klass_SHA_name = "sun/security/provider/SHA5";
    }
    break;
  default:
!   fatal("unknown SHA intrinsic predicate: %d", predicate);
  }
  
  ciKlass* klass_SHA = NULL;
  if (klass_SHA_name != NULL) {
    klass_SHA = tinst->klass()->as_instance_klass()->find_klass(ciSymbol::make(klass_SHA_name));