379 region->init_req(1, kit->control()); 380 PhiNode* oop = PhiNode::make(region, not_null_oop, value_ptr()); 381 PhiNode* io = PhiNode::make(region, kit->i_o(), Type::ABIO); 382 PhiNode* mem = PhiNode::make(region, kit->merged_memory(), Type::MEMORY, TypePtr::BOTTOM); 383 384 int bci = kit->bci(); 385 bool reexecute = kit->jvms()->should_reexecute(); 386 { 387 // Oop is NULL, allocate and initialize buffer 388 PreserveJVMState pjvms(kit); 389 // Propagate re-execution state and bci 390 kit->set_bci(bci); 391 kit->jvms()->set_bci(bci); 392 kit->jvms()->set_should_reexecute(reexecute); 393 kit->set_control(null_ctl); 394 kit->kill_dead_locals(); 395 ciValueKlass* vk = value_klass(); 396 Node* klass_node = kit->makecon(TypeKlassPtr::make(vk)); 397 Node* alloc_oop = kit->new_instance(klass_node, NULL, NULL, /* deoptimize_on_exception */ true, this); 398 store(kit, alloc_oop, alloc_oop, vk, 0); 399 region->init_req(2, kit->control()); 400 oop ->init_req(2, alloc_oop); 401 io ->init_req(2, kit->i_o()); 402 mem ->init_req(2, kit->merged_memory()); 403 } 404 405 // Update GraphKit 406 kit->set_control(kit->gvn().transform(region)); 407 kit->set_i_o(kit->gvn().transform(io)); 408 kit->set_all_memory(kit->gvn().transform(mem)); 409 kit->record_for_igvn(region); 410 kit->record_for_igvn(oop); 411 kit->record_for_igvn(io); 412 kit->record_for_igvn(mem); 413 414 // Use cloned ValueTypeNode to propagate oop from now on 415 Node* res_oop = kit->gvn().transform(oop); 416 ValueTypeBaseNode* vt = clone()->as_ValueTypeBase(); 417 vt->set_oop(res_oop); 418 vt = kit->gvn().transform(vt)->as_ValueTypeBase(); 419 if (safe_for_replace) { 420 kit->replace_in_map(this, vt); 421 } 422 return vt; 423 } 424 425 bool ValueTypeBaseNode::is_allocated(PhaseGVN* phase) const { 426 Node* oop = get_oop(); 427 const Type* oop_type = (phase != NULL) ? 
phase->type(oop) : oop->bottom_type(); 428 return !oop_type->maybe_null(); 429 } 430 431 // When a call returns multiple values, it has several result 432 // projections, one per field. Replacing the result of the call by a 433 // value type node (after late inlining) requires that for each result 434 // projection, we find the corresponding value type field. 435 void ValueTypeBaseNode::replace_call_results(GraphKit* kit, Node* call, Compile* C) { 436 ciValueKlass* vk = value_klass(); 437 for (DUIterator_Fast imax, i = call->fast_outs(imax); i < imax; i++) { 438 ProjNode* pn = call->fast_out(i)->as_Proj(); 439 uint con = pn->_con; 440 if (con >= TypeFunc::Parms+1) { 441 uint field_nb = con - (TypeFunc::Parms+1); 591 return kit->gvn().transform(vt)->as_ValueType(); 592 } 593 594 ValueTypeNode* ValueTypeNode::make_larval(GraphKit* kit, bool allocate) const { 595 ciValueKlass* vk = value_klass(); 596 ValueTypeNode* res = clone()->as_ValueType(); 597 if (allocate) { 598 // Re-execute if buffering triggers deoptimization 599 PreserveReexecuteState preexecs(kit); 600 kit->jvms()->set_should_reexecute(true); 601 Node* klass_node = kit->makecon(TypeKlassPtr::make(vk)); 602 Node* alloc_oop = kit->new_instance(klass_node, NULL, NULL, true); 603 AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_oop, &kit->gvn()); 604 alloc->_larval = true; 605 606 store(kit, alloc_oop, alloc_oop, vk, 0); 607 res->set_oop(alloc_oop); 608 } 609 res->set_type(TypeValueType::make(vk, true)); 610 res = kit->gvn().transform(res)->as_ValueType(); 611 return res; 612 } 613 614 ValueTypeNode* ValueTypeNode::finish_larval(GraphKit* kit) const { 615 Node* obj = get_oop(); 616 Node* mark_addr = kit->basic_plus_adr(obj, oopDesc::mark_offset_in_bytes()); 617 Node* mark = kit->make_load(NULL, mark_addr, TypeX_X, TypeX_X->basic_type(), MemNode::unordered); 618 mark = kit->gvn().transform(new AndXNode(mark, kit->MakeConX(~markWord::larval_mask_in_place))); 619 kit->store_to_memory(kit->control(), 
mark_addr, mark, TypeX_X->basic_type(), kit->gvn().type(mark_addr)->is_ptr(), MemNode::unordered); 620 621 ciValueKlass* vk = value_klass(); 622 ValueTypeNode* res = clone()->as_ValueType(); 623 res->set_type(TypeValueType::make(vk, false)); 624 res = kit->gvn().transform(res)->as_ValueType(); 625 return res; 626 } 627 628 Node* ValueTypeNode::is_loaded(PhaseGVN* phase, ciValueKlass* vk, Node* base, int holder_offset) { 629 if (vk == NULL) { 630 vk = value_klass(); 631 } 632 if (field_count() == 0) { 633 assert(is_allocated(phase), "must be allocated"); 634 return get_oop(); 635 } 636 for (uint i = 0; i < field_count(); ++i) { 637 int offset = holder_offset + field_offset(i); 638 Node* value = field_value(i); 639 if (value->is_ValueType()) { | 379 region->init_req(1, kit->control()); 380 PhiNode* oop = PhiNode::make(region, not_null_oop, value_ptr()); 381 PhiNode* io = PhiNode::make(region, kit->i_o(), Type::ABIO); 382 PhiNode* mem = PhiNode::make(region, kit->merged_memory(), Type::MEMORY, TypePtr::BOTTOM); 383 384 int bci = kit->bci(); 385 bool reexecute = kit->jvms()->should_reexecute(); 386 { 387 // Oop is NULL, allocate and initialize buffer 388 PreserveJVMState pjvms(kit); 389 // Propagate re-execution state and bci 390 kit->set_bci(bci); 391 kit->jvms()->set_bci(bci); 392 kit->jvms()->set_should_reexecute(reexecute); 393 kit->set_control(null_ctl); 394 kit->kill_dead_locals(); 395 ciValueKlass* vk = value_klass(); 396 Node* klass_node = kit->makecon(TypeKlassPtr::make(vk)); 397 Node* alloc_oop = kit->new_instance(klass_node, NULL, NULL, /* deoptimize_on_exception */ true, this); 398 store(kit, alloc_oop, alloc_oop, vk, 0); 399 400 // Do not let stores that initialize this buffer be reordered with a subsequent 401 // store that would make this buffer accessible by other threads. 
    // (continuation of the NULL-oop buffering path; the allocation and the
    // field-initializing stores are on the preceding lines)
    AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_oop, &kit->gvn());
    assert(alloc != NULL, "must have an allocation node");
    // StoreStore barrier keyed to this allocation's raw address: fences the
    // initializing stores against any later publishing store.
    kit->insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));

    // Wire the allocation path into the merge (req 2)
    region->init_req(2, kit->control());
    oop ->init_req(2, alloc_oop);
    io  ->init_req(2, kit->i_o());
    mem ->init_req(2, kit->merged_memory());
  }

  // Update GraphKit with the merged control, I/O and memory state
  kit->set_control(kit->gvn().transform(region));
  kit->set_i_o(kit->gvn().transform(io));
  kit->set_all_memory(kit->gvn().transform(mem));
  kit->record_for_igvn(region);
  kit->record_for_igvn(oop);
  kit->record_for_igvn(io);
  kit->record_for_igvn(mem);

  // Use cloned ValueTypeNode to propagate oop from now on
  Node* res_oop = kit->gvn().transform(oop);
  ValueTypeBaseNode* vt = clone()->as_ValueTypeBase();
  vt->set_oop(res_oop);
  vt = kit->gvn().transform(vt)->as_ValueTypeBase();
  if (safe_for_replace) {
    kit->replace_in_map(this, vt);
  }
  // On both merge paths the oop is non-null, so the result is buffered
  assert(vt->is_allocated(&kit->gvn()), "must be allocated");
  return vt;
}

// Returns true if the value type is known to be buffered, i.e. its oop
// cannot be NULL (GVN type when a phase is given, bottom type otherwise).
bool ValueTypeBaseNode::is_allocated(PhaseGVN* phase) const {
  Node* oop = get_oop();
  const Type* oop_type = (phase != NULL) ? phase->type(oop) : oop->bottom_type();
  return !oop_type->maybe_null();
}

// When a call returns multiple values, it has several result
// projections, one per field. Replacing the result of the call by a
// value type node (after late inlining) requires that for each result
// projection, we find the corresponding value type field.
void ValueTypeBaseNode::replace_call_results(GraphKit* kit, Node* call, Compile* C) {
  ciValueKlass* vk = value_klass();
  for (DUIterator_Fast imax, i = call->fast_outs(imax); i < imax; i++) {
    ProjNode* pn = call->fast_out(i)->as_Proj();
    uint con = pn->_con;
    // Projections above Parms map to value type fields (Parms itself is the oop)
    if (con >= TypeFunc::Parms+1) {
      uint field_nb = con - (TypeFunc::Parms+1);
      // (body of replace_call_results continues outside this chunk)

  // (tail of a function whose head is outside this chunk)
  return kit->gvn().transform(vt)->as_ValueType();
}

// Returns a clone of this value type, re-typed as larval (mutable during
// construction); optionally buffers it into a new heap instance flagged
// with _larval = true.
ValueTypeNode* ValueTypeNode::make_larval(GraphKit* kit, bool allocate) const {
  ciValueKlass* vk = value_klass();
  ValueTypeNode* res = clone()->as_ValueType();
  if (allocate) {
    // Re-execute if buffering triggers deoptimization
    PreserveReexecuteState preexecs(kit);
    kit->jvms()->set_should_reexecute(true);
    Node* klass_node = kit->makecon(TypeKlassPtr::make(vk));
    Node* alloc_oop = kit->new_instance(klass_node, NULL, NULL, true);
    AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_oop, &kit->gvn());
    // Mark the buffer as larval so the runtime treats it as under construction
    alloc->_larval = true;

    store(kit, alloc_oop, alloc_oop, vk, 0);
    res->set_oop(alloc_oop);
  }
  // TypeValueType::make(vk, true): larval variant of the value type
  res->set_type(TypeValueType::make(vk, true));
  res = kit->gvn().transform(res)->as_ValueType();
  assert(!allocate || res->is_allocated(&kit->gvn()), "must be allocated");
  return res;
}

// Clears the larval bits in the object's mark word (raw, unordered
// load-and-store with ~larval_mask_in_place), fences the initializing
// stores, and returns a clone re-typed as non-larval.
ValueTypeNode* ValueTypeNode::finish_larval(GraphKit* kit) const {
  Node* obj = get_oop();
  Node* mark_addr = kit->basic_plus_adr(obj, oopDesc::mark_offset_in_bytes());
  Node* mark = kit->make_load(NULL, mark_addr, TypeX_X, TypeX_X->basic_type(), MemNode::unordered);
  mark = kit->gvn().transform(new AndXNode(mark, kit->MakeConX(~markWord::larval_mask_in_place)));
  kit->store_to_memory(kit->control(), mark_addr, mark, TypeX_X->basic_type(), kit->gvn().type(mark_addr)->is_ptr(), MemNode::unordered);

  // Do not let stores that initialize this buffer be reordered with a subsequent
  // store that would make this buffer accessible by other threads.
  AllocateNode* alloc = AllocateNode::Ideal_allocation(obj, &kit->gvn());
  assert(alloc != NULL, "must have an allocation node");
  kit->insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));

  ciValueKlass* vk = value_klass();
  ValueTypeNode* res = clone()->as_ValueType();
  res->set_type(TypeValueType::make(vk, false));
  res = kit->gvn().transform(res)->as_ValueType();
  return res;
}

// Head of is_loaded; walks the fields by offset — presumably checking
// whether this value type was loaded from 'base' (TODO confirm: the rest
// of the loop body is outside this chunk).
Node* ValueTypeNode::is_loaded(PhaseGVN* phase, ciValueKlass* vk, Node* base, int holder_offset) {
  if (vk == NULL) {
    vk = value_klass();
  }
  if (field_count() == 0) {
    // No fields: the oop itself must already be buffered
    assert(is_allocated(phase), "must be allocated");
    return get_oop();
  }
  for (uint i = 0; i < field_count(); ++i) {
    int offset = holder_offset + field_offset(i);
    Node* value = field_value(i);
    if (value->is_ValueType()) {
// NOTE(review): stray diff-pane separator from the original paste follows.
|