// Forwards to generate_array_guard_common with obj_array=false, not_array=true
// (the "kls is not an array" variant -- branch semantics live in the common helper).
180 Node* generate_non_array_guard(Node* kls, RegionNode* region) {
181 return generate_array_guard_common(kls, region, false, true); // obj_array=false, not_array=true
182 }
// Forwards to generate_array_guard_common with obj_array=true, not_array=false
// (the "kls is an object array" variant).
183 Node* generate_objArray_guard(Node* kls, RegionNode* region) {
184 return generate_array_guard_common(kls, region, true, false); // obj_array=true, not_array=false
185 }
// Forwards to generate_array_guard_common with obj_array=true, not_array=true
// (the "kls is not an object array" variant).
186 Node* generate_non_objArray_guard(Node* kls, RegionNode* region) {
187 return generate_array_guard_common(kls, region, true, true); // obj_array=true, not_array=true
188 }
189 Node* generate_array_guard_common(Node* kls, RegionNode* region,
190 bool obj_array, bool not_array);
191 Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
192 CallJavaNode* generate_method_call(vmIntrinsics::ID method_id,
193 bool is_virtual = false, bool is_static = false);
// Convenience wrapper: generate_method_call with is_virtual=false, is_static=true.
194 CallJavaNode* generate_method_call_static(vmIntrinsics::ID method_id) {
195 return generate_method_call(method_id, false, true); // is_virtual=false, is_static=true
196 }
// Convenience wrapper: generate_method_call with is_virtual=true, is_static=false.
197 CallJavaNode* generate_method_call_virtual(vmIntrinsics::ID method_id) {
198 return generate_method_call(method_id, true, false); // is_virtual=true, is_static=false
199 }
200 Node * load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static);
201
202 Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2);
203 Node* make_string_method_node(int opcode, Node* str1, Node* str2);
204 bool inline_string_compareTo();
205 bool inline_string_indexOf();
206 Node* string_indexOf(Node* string_object, ciTypeArray* target_array, jint offset, jint cache_i, jint md2_i);
207 bool inline_string_equals();
208 Node* round_double_node(Node* n);
209 bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
210 bool inline_math_native(vmIntrinsics::ID id);
211 bool inline_trig(vmIntrinsics::ID id);
212 bool inline_math(vmIntrinsics::ID id);
213 template <typename OverflowOp>
214 bool inline_math_overflow(Node* arg1, Node* arg2);
215 void inline_math_mathExact(Node* math, Node* test);
216 bool inline_math_addExactI(bool is_increment);
217 bool inline_math_addExactL(bool is_increment);
218 bool inline_math_multiplyExactI();
219 bool inline_math_multiplyExactL();
220 bool inline_math_negateExactI();
274 bool inline_reference_get();
275 bool inline_Class_cast();
276 bool inline_aescrypt_Block(vmIntrinsics::ID id);
277 bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
278 Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
279 Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
280 Node* get_original_key_start_from_aescrypt_object(Node* aescrypt_object);
281 bool inline_ghash_processBlocks();
282 bool inline_sha_implCompress(vmIntrinsics::ID id);
283 bool inline_digestBase_implCompressMB(int predicate);
284 bool inline_sha_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass_SHA,
285 bool long_state, address stubAddr, const char *stubName,
286 Node* src_start, Node* ofs, Node* limit);
287 Node* get_state_from_sha_object(Node *sha_object);
288 Node* get_state_from_sha5_object(Node *sha_object);
289 Node* inline_digestBase_implCompressMB_predicate(int predicate);
290 bool inline_encodeISOArray();
291 bool inline_updateCRC32();
292 bool inline_updateBytesCRC32();
293 bool inline_updateByteBufferCRC32();
294 bool inline_multiplyToLen();
295 bool inline_squareToLen();
296 bool inline_mulAdd();
297
298 bool inline_profileBoolean();
299 bool inline_isCompileConstant();
300 };
301
302
303 //---------------------------make_vm_intrinsic----------------------------
304 CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
305 vmIntrinsics::ID id = m->intrinsic_id();
306 assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
307
308 ccstr disable_intr = NULL;
309
310 if ((DisableIntrinsic[0] != '\0'
311 && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
312 (method_has_option_value("DisableIntrinsic", disable_intr)
313 && strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)) {
522
523 case vmIntrinsics::_sha5_implCompress:
524 if (!UseSHA512Intrinsics) return NULL;
525 break;
526
527 case vmIntrinsics::_digestBase_implCompressMB:
528 if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return NULL;
529 predicates = 3;
530 break;
531
532 case vmIntrinsics::_ghash_processBlocks:
533 if (!UseGHASHIntrinsics) return NULL;
534 break;
535
536 case vmIntrinsics::_updateCRC32:
537 case vmIntrinsics::_updateBytesCRC32:
538 case vmIntrinsics::_updateByteBufferCRC32:
539 if (!UseCRC32Intrinsics) return NULL;
540 break;
541
542 case vmIntrinsics::_incrementExactI:
543 case vmIntrinsics::_addExactI:
544 if (!Matcher::match_rule_supported(Op_OverflowAddI) || !UseMathExactIntrinsics) return NULL;
545 break;
546 case vmIntrinsics::_incrementExactL:
547 case vmIntrinsics::_addExactL:
548 if (!Matcher::match_rule_supported(Op_OverflowAddL) || !UseMathExactIntrinsics) return NULL;
549 break;
550 case vmIntrinsics::_decrementExactI:
551 case vmIntrinsics::_subtractExactI:
552 if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
553 break;
554 case vmIntrinsics::_decrementExactL:
555 case vmIntrinsics::_subtractExactL:
556 if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
557 break;
558 case vmIntrinsics::_negateExactI:
559 if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
560 break;
561 case vmIntrinsics::_negateExactL:
930
931 case vmIntrinsics::_squareToLen:
932 return inline_squareToLen();
933
934 case vmIntrinsics::_mulAdd:
935 return inline_mulAdd();
936
937 case vmIntrinsics::_ghash_processBlocks:
938 return inline_ghash_processBlocks();
939
940 case vmIntrinsics::_encodeISOArray:
941 return inline_encodeISOArray();
942
943 case vmIntrinsics::_updateCRC32:
944 return inline_updateCRC32();
945 case vmIntrinsics::_updateBytesCRC32:
946 return inline_updateBytesCRC32();
947 case vmIntrinsics::_updateByteBufferCRC32:
948 return inline_updateByteBufferCRC32();
949
950 case vmIntrinsics::_profileBoolean:
951 return inline_profileBoolean();
952 case vmIntrinsics::_isCompileConstant:
953 return inline_isCompileConstant();
954
955 default:
956 // If you get here, it may be that someone has added a new intrinsic
957 // to the list in vmSymbols.hpp without implementing it here.
958 #ifndef PRODUCT
959 if ((PrintMiscellaneous && (Verbose || WizardMode)) || PrintOpto) {
960 tty->print_cr("*** Warning: Unimplemented intrinsic %s(%d)",
961 vmIntrinsics::name_at(intrinsic_id()), intrinsic_id());
962 }
963 #endif
964 return false;
965 }
966 }
967
968 Node* LibraryCallKit::try_to_predicate(int predicate) {
969 if (!jvms()->has_method()) {
5519
5520 src = ConvL2X(src); // adjust Java long to machine word
5521 Node* base = _gvn.transform(new CastX2PNode(src));
5522 offset = ConvI2X(offset);
5523
5524 // 'src_start' points to src array + scaled offset
5525 Node* src_start = basic_plus_adr(top(), base, offset);
5526
5527 // Call the stub.
5528 address stubAddr = StubRoutines::updateBytesCRC32();
5529 const char *stubName = "updateBytesCRC32";
5530
5531 Node* call = make_runtime_call(RC_LEAF|RC_NO_FP, OptoRuntime::updateBytesCRC32_Type(),
5532 stubAddr, stubName, TypePtr::BOTTOM,
5533 crc, src_start, length);
5534 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5535 set_result(result);
5536 return true;
5537 }
5538
5539 //----------------------------inline_reference_get----------------------------
5540 // public T java.lang.ref.Reference.get();
// Intrinsic expansion of Reference.get(): null-check the receiver, load the
// referent field directly (unordered, no control dependence), then issue a GC
// pre-barrier with the loaded value (do_load=false, pre_val=result).
// NOTE(review): the pre-barrier presumably exists so SATB-style collectors
// (e.g. G1) observe the returned referent -- confirm against the GC code.
5541 bool LibraryCallKit::inline_reference_get() {
5542 const int referent_offset = java_lang_ref_Reference::referent_offset;
5543 guarantee(referent_offset > 0, "should have already been set");
5544
5545 // Get the argument:
5546 Node* reference_obj = null_check_receiver();
// A stopped() graph means the receiver is always null here; the intrinsic is
// still considered handled, so return true with no further code.
5547 if (stopped()) return true;
5548
5549 Node* adr = basic_plus_adr(reference_obj, reference_obj, referent_offset);
5550
// Type the loaded value as java.lang.Object (Reference.get() erases to Object).
5551 ciInstanceKlass* klass = env()->Object_klass();
5552 const TypeOopPtr* object_type = TypeOopPtr::make_from_klass(klass);
5553
5554 Node* no_ctrl = NULL;
5555 Node* result = make_load(no_ctrl, adr, object_type, T_OBJECT, MemNode::unordered);
5556
5557 // Use the pre-barrier to record the value in the referent field
5558 pre_barrier(false /* do_load */,
5559 control(),
5560 NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */, NULL /* val_type */,
5561 result /* pre_val */,
5562 T_OBJECT);
5563
5564 // Add memory barrier to prevent commoning reads from this field
5565 // across safepoint since GC can change its value.
5566 insert_mem_bar(Op_MemBarCPUOrder);
5567
5568 set_result(result);
5569 return true;
5570 }
5571
5572
5573 Node * LibraryCallKit::load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString,
5574 bool is_exact=true, bool is_static=false) {
5575
5576 const TypeInstPtr* tinst = _gvn.type(fromObj)->isa_instptr();
5577 assert(tinst != NULL, "obj is null");
5578 assert(tinst->klass()->is_loaded(), "obj is not loaded");
5579 assert(!is_exact || tinst->klass_is_exact(), "klass not exact");
5580
5581 ciField* field = tinst->klass()->as_instance_klass()->get_field_by_name(ciSymbol::make(fieldName),
5582 ciSymbol::make(fieldTypeString),
5583 is_static);
5584 if (field == NULL) return (Node *) NULL;
5585 assert (field != NULL, "undefined field");
5586
5587 // Next code copied from Parse::do_get_xxx():
5588
5589 // Compute address and memory type.
5590 int offset = field->offset_in_bytes();
5591 bool is_vol = field->is_volatile();
5592 ciType* field_klass = field->type();
5593 assert(field_klass->is_loaded(), "should be loaded");
5594 const TypePtr* adr_type = C->alias_type(field)->adr_type();
5595 Node *adr = basic_plus_adr(fromObj, fromObj, offset);
5596 BasicType bt = field->layout_type();
5597
5598 // Build the resultant type of the load
5599 const Type *type;
5600 if (bt == T_OBJECT) {
5601 type = TypeOopPtr::make_from_klass(field_klass->as_klass());
5602 } else {
5603 type = Type::get_const_basic_type(bt);
5604 }
5605
|
// Forwards to generate_array_guard_common with obj_array=false, not_array=true
// (the "kls is not an array" variant -- branch semantics live in the common helper).
180 Node* generate_non_array_guard(Node* kls, RegionNode* region) {
181 return generate_array_guard_common(kls, region, false, true); // obj_array=false, not_array=true
182 }
// Forwards to generate_array_guard_common with obj_array=true, not_array=false
// (the "kls is an object array" variant).
183 Node* generate_objArray_guard(Node* kls, RegionNode* region) {
184 return generate_array_guard_common(kls, region, true, false); // obj_array=true, not_array=false
185 }
// Forwards to generate_array_guard_common with obj_array=true, not_array=true
// (the "kls is not an object array" variant).
186 Node* generate_non_objArray_guard(Node* kls, RegionNode* region) {
187 return generate_array_guard_common(kls, region, true, true); // obj_array=true, not_array=true
188 }
189 Node* generate_array_guard_common(Node* kls, RegionNode* region,
190 bool obj_array, bool not_array);
191 Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
192 CallJavaNode* generate_method_call(vmIntrinsics::ID method_id,
193 bool is_virtual = false, bool is_static = false);
// Convenience wrapper: generate_method_call with is_virtual=false, is_static=true.
194 CallJavaNode* generate_method_call_static(vmIntrinsics::ID method_id) {
195 return generate_method_call(method_id, false, true); // is_virtual=false, is_static=true
196 }
// Convenience wrapper: generate_method_call with is_virtual=true, is_static=false.
197 CallJavaNode* generate_method_call_virtual(vmIntrinsics::ID method_id) {
198 return generate_method_call(method_id, true, false); // is_virtual=true, is_static=false
199 }
200 Node * load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass * fromKls);
201
202 Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2);
203 Node* make_string_method_node(int opcode, Node* str1, Node* str2);
204 bool inline_string_compareTo();
205 bool inline_string_indexOf();
206 Node* string_indexOf(Node* string_object, ciTypeArray* target_array, jint offset, jint cache_i, jint md2_i);
207 bool inline_string_equals();
208 Node* round_double_node(Node* n);
209 bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
210 bool inline_math_native(vmIntrinsics::ID id);
211 bool inline_trig(vmIntrinsics::ID id);
212 bool inline_math(vmIntrinsics::ID id);
213 template <typename OverflowOp>
214 bool inline_math_overflow(Node* arg1, Node* arg2);
215 void inline_math_mathExact(Node* math, Node* test);
216 bool inline_math_addExactI(bool is_increment);
217 bool inline_math_addExactL(bool is_increment);
218 bool inline_math_multiplyExactI();
219 bool inline_math_multiplyExactL();
220 bool inline_math_negateExactI();
274 bool inline_reference_get();
275 bool inline_Class_cast();
276 bool inline_aescrypt_Block(vmIntrinsics::ID id);
277 bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
278 Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
279 Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
280 Node* get_original_key_start_from_aescrypt_object(Node* aescrypt_object);
281 bool inline_ghash_processBlocks();
282 bool inline_sha_implCompress(vmIntrinsics::ID id);
283 bool inline_digestBase_implCompressMB(int predicate);
284 bool inline_sha_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass_SHA,
285 bool long_state, address stubAddr, const char *stubName,
286 Node* src_start, Node* ofs, Node* limit);
287 Node* get_state_from_sha_object(Node *sha_object);
288 Node* get_state_from_sha5_object(Node *sha_object);
289 Node* inline_digestBase_implCompressMB_predicate(int predicate);
290 bool inline_encodeISOArray();
291 bool inline_updateCRC32();
292 bool inline_updateBytesCRC32();
293 bool inline_updateByteBufferCRC32();
294 Node* get_table_from_crc32c_class(ciInstanceKlass *crc32c_class);
295 bool inline_updateBytesCRC32C();
296 bool inline_updateDirectByteBufferCRC32C();
297 bool inline_multiplyToLen();
298 bool inline_squareToLen();
299 bool inline_mulAdd();
300
301 bool inline_profileBoolean();
302 bool inline_isCompileConstant();
303 };
304
305
306 //---------------------------make_vm_intrinsic----------------------------
307 CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
308 vmIntrinsics::ID id = m->intrinsic_id();
309 assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
310
311 ccstr disable_intr = NULL;
312
313 if ((DisableIntrinsic[0] != '\0'
314 && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
315 (method_has_option_value("DisableIntrinsic", disable_intr)
316 && strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)) {
525
526 case vmIntrinsics::_sha5_implCompress:
527 if (!UseSHA512Intrinsics) return NULL;
528 break;
529
530 case vmIntrinsics::_digestBase_implCompressMB:
531 if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return NULL;
532 predicates = 3;
533 break;
534
535 case vmIntrinsics::_ghash_processBlocks:
536 if (!UseGHASHIntrinsics) return NULL;
537 break;
538
539 case vmIntrinsics::_updateCRC32:
540 case vmIntrinsics::_updateBytesCRC32:
541 case vmIntrinsics::_updateByteBufferCRC32:
542 if (!UseCRC32Intrinsics) return NULL;
543 break;
544
545 case vmIntrinsics::_updateBytesCRC32C:
546 case vmIntrinsics::_updateDirectByteBufferCRC32C:
547 if (!UseCRC32CIntrinsics) return NULL;
548 break;
549
550 case vmIntrinsics::_incrementExactI:
551 case vmIntrinsics::_addExactI:
552 if (!Matcher::match_rule_supported(Op_OverflowAddI) || !UseMathExactIntrinsics) return NULL;
553 break;
554 case vmIntrinsics::_incrementExactL:
555 case vmIntrinsics::_addExactL:
556 if (!Matcher::match_rule_supported(Op_OverflowAddL) || !UseMathExactIntrinsics) return NULL;
557 break;
558 case vmIntrinsics::_decrementExactI:
559 case vmIntrinsics::_subtractExactI:
560 if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
561 break;
562 case vmIntrinsics::_decrementExactL:
563 case vmIntrinsics::_subtractExactL:
564 if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
565 break;
566 case vmIntrinsics::_negateExactI:
567 if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
568 break;
569 case vmIntrinsics::_negateExactL:
938
939 case vmIntrinsics::_squareToLen:
940 return inline_squareToLen();
941
942 case vmIntrinsics::_mulAdd:
943 return inline_mulAdd();
944
945 case vmIntrinsics::_ghash_processBlocks:
946 return inline_ghash_processBlocks();
947
948 case vmIntrinsics::_encodeISOArray:
949 return inline_encodeISOArray();
950
951 case vmIntrinsics::_updateCRC32:
952 return inline_updateCRC32();
953 case vmIntrinsics::_updateBytesCRC32:
954 return inline_updateBytesCRC32();
955 case vmIntrinsics::_updateByteBufferCRC32:
956 return inline_updateByteBufferCRC32();
957
958 case vmIntrinsics::_updateBytesCRC32C:
959 return inline_updateBytesCRC32C();
960 case vmIntrinsics::_updateDirectByteBufferCRC32C:
961 return inline_updateDirectByteBufferCRC32C();
962
963 case vmIntrinsics::_profileBoolean:
964 return inline_profileBoolean();
965 case vmIntrinsics::_isCompileConstant:
966 return inline_isCompileConstant();
967
968 default:
969 // If you get here, it may be that someone has added a new intrinsic
970 // to the list in vmSymbols.hpp without implementing it here.
971 #ifndef PRODUCT
972 if ((PrintMiscellaneous && (Verbose || WizardMode)) || PrintOpto) {
973 tty->print_cr("*** Warning: Unimplemented intrinsic %s(%d)",
974 vmIntrinsics::name_at(intrinsic_id()), intrinsic_id());
975 }
976 #endif
977 return false;
978 }
979 }
980
981 Node* LibraryCallKit::try_to_predicate(int predicate) {
982 if (!jvms()->has_method()) {
5532
5533 src = ConvL2X(src); // adjust Java long to machine word
5534 Node* base = _gvn.transform(new CastX2PNode(src));
5535 offset = ConvI2X(offset);
5536
5537 // 'src_start' points to src array + scaled offset
5538 Node* src_start = basic_plus_adr(top(), base, offset);
5539
5540 // Call the stub.
5541 address stubAddr = StubRoutines::updateBytesCRC32();
5542 const char *stubName = "updateBytesCRC32";
5543
5544 Node* call = make_runtime_call(RC_LEAF|RC_NO_FP, OptoRuntime::updateBytesCRC32_Type(),
5545 stubAddr, stubName, TypePtr::BOTTOM,
5546 crc, src_start, length);
5547 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5548 set_result(result);
5549 return true;
5550 }
5551
5552 //------------------------------get_table_from_crc32c_class-----------------------
// Returns a Node for CRC32C.byteTable, the static int[] lookup table used by
// the CRC32C stubs. fromObj is NULL and is_static is true, so
// load_field_from_object resolves the field purely from the supplied klass.
Node * LibraryCallKit::get_table_from_crc32c_class(ciInstanceKlass *crc32c_class) {
5554 Node* table = load_field_from_object(NULL, "byteTable", "[I", /*is_exact*/ false, /*is_static*/ true, crc32c_class);
// Debug-build check that the expected field exists; in product builds a missing
// field would surface as a NULL table from load_field_from_object.
5555 assert (table != NULL, "wrong version of java.util.zip.CRC32C");
5556
5557 return table;
5558 }
5559
5560 //------------------------------inline_updateBytesCRC32C-----------------------
5561 //
5562 // Calculate CRC32C for byte[] array.
5563 // int java.util.zip.CRC32C.updateBytes(int crc, byte[] buf, int off, int end)
5564 //
// Expansion: compute length = end - off (the Java method passes an END index,
// not a length), verify the source really is a byte[], fetch the static
// CRC32C.byteTable, and call the updateBytesCRC32C stub; the stub's int result
// is the intrinsic's return value. Returns false (not intrinsified) when the
// receiver array's type cannot be proven.
5565 bool LibraryCallKit::inline_updateBytesCRC32C() {
5566 assert(UseCRC32CIntrinsics, "need CRC32C instruction support");
5567 assert(callee()->signature()->size() == 4, "updateBytes has 4 parameters");
5568 assert(callee()->holder()->is_loaded(), "CRC32C class must be loaded");
5569 // no receiver since it is a static method
5570 Node* crc = argument(0); // type: int
5571 Node* src = argument(1); // type: oop
5572 Node* offset = argument(2); // type: int
5573 Node* end = argument(3); // type: int
5574
5575 Node* length = _gvn.transform(new SubINode(end, offset));
5576
5577 const Type* src_type = src->Value(&_gvn);
5578 const TypeAryPtr* top_src = src_type->isa_aryptr();
5579 if (top_src == NULL || top_src->klass() == NULL) {
5580 // failed array check
// Bail out: fall back to the normal Java call.
5581 return false;
5582 }
5583
5584 // Figure out the size and type of the elements we will be copying.
5585 BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
5586 if (src_elem != T_BYTE) {
5587 return false;
5588 }
5589
5590 // 'src_start' points to src array + scaled offset
5591 Node* src_start = array_element_address(src, offset, src_elem);
5592
5593 // static final int[] byteTable in class CRC32C
5594 Node* table = get_table_from_crc32c_class(callee()->holder());
5595 Node* table_start = array_element_address(table, intcon(0), T_INT);
5596
5597 // We assume that range check is done by caller.
5598 // TODO: generate range check (offset+length < src.length) in debug VM.
5599
5600 // Call the stub.
5601 address stubAddr = StubRoutines::updateBytesCRC32C();
5602 const char *stubName = "updateBytesCRC32C";
5603
// NOTE(review): the CRC32 intrinsic earlier in this file passes
// RC_LEAF|RC_NO_FP; RC_NO_FP is deliberately absent here -- presumably the
// CRC32C stub may clobber FP/vector registers. Confirm against the stub code.
5604 Node* call = make_runtime_call(RC_LEAF, OptoRuntime::updateBytesCRC32C_Type(),
5605 stubAddr, stubName, TypePtr::BOTTOM,
5606 crc, src_start, length, table_start);
5607 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5608 set_result(result);
5609 return true;
5610 }
5611
5612 //------------------------------inline_updateDirectByteBufferCRC32C-----------------------
5613 //
5614 // Calculate CRC32C for DirectByteBuffer.
5615 //  int java.util.zip.CRC32C.updateDirectByteBuffer(int crc, long buf, int off, int end)
5616 //
// Same shape as inline_updateBytesCRC32C, but the source is a raw native
// address held in a Java long (direct buffer), so the start address is built
// with ConvL2X/CastX2P + basic_plus_adr instead of array_element_address, and
// no array-type check is needed.
5617 bool LibraryCallKit::inline_updateDirectByteBufferCRC32C() {
5618 assert(UseCRC32CIntrinsics, "need CRC32C instruction support");
5619 assert(callee()->signature()->size() == 5, "updateDirectByteBuffer has 4 parameters and one is long");
5620 assert(callee()->holder()->is_loaded(), "CRC32C class must be loaded");
5621 // no receiver since it is a static method
5622 Node* crc = argument(0); // type: int
5623 Node* src = argument(1); // type: long
// argument(2) is skipped: the long 'buf' occupies two argument slots
// (hence the size() == 5 assert above).
5624 Node* offset = argument(3); // type: int
5625 Node* end = argument(4); // type: int
5626
// Caller passes an END index; convert to a length for the stub.
5627 Node* length = _gvn.transform(new SubINode(end, offset));
5628
5629 src = ConvL2X(src); // adjust Java long to machine word
5630 Node* base = _gvn.transform(new CastX2PNode(src));
5631 offset = ConvI2X(offset);
5632
5633 // 'src_start' points to src array + scaled offset
5634 Node* src_start = basic_plus_adr(top(), base, offset);
5635
5636 // static final int[] byteTable in class CRC32C
5637 Node* table = get_table_from_crc32c_class(callee()->holder());
5638 Node* table_start = array_element_address(table, intcon(0), T_INT);
5639
5640 // Call the stub.
5641 address stubAddr = StubRoutines::updateBytesCRC32C();
5642 const char *stubName = "updateBytesCRC32C";
5643
// RC_LEAF without RC_NO_FP, matching inline_updateBytesCRC32C (see note there).
5644 Node* call = make_runtime_call(RC_LEAF, OptoRuntime::updateBytesCRC32C_Type(),
5645 stubAddr, stubName, TypePtr::BOTTOM,
5646 crc, src_start, length, table_start);
5647 Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
5648 set_result(result);
5649 return true;
5650 }
5651
5652 //----------------------------inline_reference_get----------------------------
5653 // public T java.lang.ref.Reference.get();
// Intrinsic expansion of Reference.get(): null-check the receiver, load the
// referent field directly (unordered, no control dependence), then issue a GC
// pre-barrier with the loaded value (do_load=false, pre_val=result).
// NOTE(review): the pre-barrier presumably exists so SATB-style collectors
// (e.g. G1) observe the returned referent -- confirm against the GC code.
5654 bool LibraryCallKit::inline_reference_get() {
5655 const int referent_offset = java_lang_ref_Reference::referent_offset;
5656 guarantee(referent_offset > 0, "should have already been set");
5657
5658 // Get the argument:
5659 Node* reference_obj = null_check_receiver();
// A stopped() graph means the receiver is always null here; the intrinsic is
// still considered handled, so return true with no further code.
5660 if (stopped()) return true;
5661
5662 Node* adr = basic_plus_adr(reference_obj, reference_obj, referent_offset);
5663
// Type the loaded value as java.lang.Object (Reference.get() erases to Object).
5664 ciInstanceKlass* klass = env()->Object_klass();
5665 const TypeOopPtr* object_type = TypeOopPtr::make_from_klass(klass);
5666
5667 Node* no_ctrl = NULL;
5668 Node* result = make_load(no_ctrl, adr, object_type, T_OBJECT, MemNode::unordered);
5669
5670 // Use the pre-barrier to record the value in the referent field
5671 pre_barrier(false /* do_load */,
5672 control(),
5673 NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */, NULL /* val_type */,
5674 result /* pre_val */,
5675 T_OBJECT);
5676
5677 // Add memory barrier to prevent commoning reads from this field
5678 // across safepoint since GC can change its value.
5679 insert_mem_bar(Op_MemBarCPUOrder);
5680
5681 set_result(result);
5682 return true;
5683 }
5684
5685
5686 Node * LibraryCallKit::load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString,
5687 bool is_exact=true, bool is_static=false,
5688 ciInstanceKlass * fromKls=NULL) {
5689 if (fromKls == NULL) {
5690 const TypeInstPtr* tinst = _gvn.type(fromObj)->isa_instptr();
5691 assert(tinst != NULL, "obj is null");
5692 assert(tinst->klass()->is_loaded(), "obj is not loaded");
5693 assert(!is_exact || tinst->klass_is_exact(), "klass not exact");
5694 fromKls = tinst->klass()->as_instance_klass();
5695 } else {
5696 assert(is_static, "only for static field access");
5697 }
5698 ciField* field = fromKls->get_field_by_name(ciSymbol::make(fieldName),
5699 ciSymbol::make(fieldTypeString),
5700 is_static);
5701
5702 assert (field != NULL, "undefined field");
5703 if (field == NULL) return (Node *) NULL;
5704
5705 if (is_static) {
5706 const TypeInstPtr* tip = TypeInstPtr::make(fromKls->java_mirror());
5707 fromObj = makecon(tip);
5708 }
5709
5710 // Next code copied from Parse::do_get_xxx():
5711
5712 // Compute address and memory type.
5713 int offset = field->offset_in_bytes();
5714 bool is_vol = field->is_volatile();
5715 ciType* field_klass = field->type();
5716 assert(field_klass->is_loaded(), "should be loaded");
5717 const TypePtr* adr_type = C->alias_type(field)->adr_type();
5718 Node *adr = basic_plus_adr(fromObj, fromObj, offset);
5719 BasicType bt = field->layout_type();
5720
5721 // Build the resultant type of the load
5722 const Type *type;
5723 if (bt == T_OBJECT) {
5724 type = TypeOopPtr::make_from_klass(field_klass->as_klass());
5725 } else {
5726 type = Type::get_const_basic_type(bt);
5727 }
5728
|