
src/hotspot/share/opto/parse1.cpp





 108   BasicType bt = type->basic_type();
 109   if (type == TypePtr::NULL_PTR) {
 110     // Ptr types are mixed together with T_ADDRESS but NULL is
 111     // really for T_OBJECT types so correct it.
 112     bt = T_OBJECT;
 113   }
 114   Node *mem = memory(Compile::AliasIdxRaw);
 115   Node *adr = basic_plus_adr( local_addrs_base, local_addrs, -index*wordSize );
 116   Node *ctl = control();
 117 
 118   // Very similar to LoadNode::make, except we handle un-aligned longs and
 119   // doubles on Sparc.  Intel can handle them just fine directly.
 120   Node *l = NULL;
 121   switch (bt) {                // Signature is flattened
 122   case T_INT:     l = new LoadINode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInt::INT,        MemNode::unordered); break;
 123   case T_FLOAT:   l = new LoadFNode(ctl, mem, adr, TypeRawPtr::BOTTOM, Type::FLOAT,         MemNode::unordered); break;
 124   case T_ADDRESS: l = new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM,  MemNode::unordered); break;
 125   case T_OBJECT:  l = new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInstPtr::BOTTOM, MemNode::unordered); break;
 126   case T_VALUETYPE: {
 127     // Load oop and create a new ValueTypeNode
 128     const TypeValueTypePtr* vtptr_type = TypeValueTypePtr::make(type->is_valuetype(), TypePtr::NotNull);
 129     l = _gvn.transform(new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, vtptr_type, MemNode::unordered));
 130     l = ValueTypeNode::make(this, l);
 131     break;
 132   }
 133   case T_VALUETYPEPTR: {
 134     l = new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeValueTypePtr::NOTNULL, MemNode::unordered);
 135     break;
 136   }
 137   case T_LONG:
 138   case T_DOUBLE: {
 139     // Since arguments are in reverse order, the argument address 'adr'
 140     // refers to the back half of the long/double.  Recompute adr.
 141     adr = basic_plus_adr(local_addrs_base, local_addrs, -(index+1)*wordSize);
 142     if (Matcher::misaligned_doubles_ok) {
 143       l = (bt == T_DOUBLE)
 144         ? (Node*)new LoadDNode(ctl, mem, adr, TypeRawPtr::BOTTOM, Type::DOUBLE, MemNode::unordered)
 145         : (Node*)new LoadLNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeLong::LONG, MemNode::unordered);
 146     } else {
 147       l = (bt == T_DOUBLE)
 148         ? (Node*)new LoadD_unalignedNode(ctl, mem, adr, TypeRawPtr::BOTTOM, MemNode::unordered)
 149         : (Node*)new LoadL_unalignedNode(ctl, mem, adr, TypeRawPtr::BOTTOM, MemNode::unordered);
 150     }


 791       if (ret_bt == T_BOOLEAN ||
 792           ret_bt == T_CHAR ||
 793           ret_bt == T_BYTE ||
 794           ret_bt == T_SHORT) {
 795         ret_type = TypeInt::INT;
 796       }
 797     }
 798 
 799     // Don't "bind" an unloaded return klass to the ret_phi. If the klass
 800     // becomes loaded during the subsequent parsing, the loaded and unloaded
 801     // types will not join when we transform and push in do_exits().
 802     const TypeOopPtr* ret_oop_type = ret_type->isa_oopptr();
 803     if (ret_oop_type && !ret_oop_type->klass()->is_loaded()) {
 804       ret_type = TypeOopPtr::BOTTOM;
 805     }
 806     if ((_caller->has_method() || tf()->returns_value_type_as_fields()) &&
 807         ret_type->isa_valuetypeptr() &&
 808         !ret_type->is_valuetypeptr()->is__Value()) {
 809       // When inlining or with multiple return values: return value
 810       // type as ValueTypeNode not as oop
 811       ret_type = ret_type->is_valuetypeptr()->value_type();
 812     }
 813     int         ret_size = type2size[ret_type->basic_type()];
 814     Node*       ret_phi  = new PhiNode(region, ret_type);
 815     gvn().set_type_bottom(ret_phi);
 816     _exits.ensure_stack(ret_size);
 817     assert((int)(tf()->range_sig()->cnt() - TypeFunc::Parms) == ret_size, "good tf range");
 818     assert(method()->return_type()->size() == ret_size, "tf agrees w/ method");
 819     _exits.set_argument(0, ret_phi);  // here is where the parser finds it
 820     // Note:  ret_phi is not yet pushed, until do_exits.
 821   }
 822 }
 823 
 824 //----------------------------build_start_state-------------------------------
 825 // Construct a state which contains only the incoming arguments from an
 826 // unknown caller.  The method & bci will be NULL & InvocationEntryBci.
 827 JVMState* Compile::build_start_state(StartNode* start, const TypeFunc* tf) {
 828   int        arg_size_sig = tf->domain_sig()->cnt();
 829   int        max_size = MAX2(arg_size_sig, (int)tf->range_cc()->cnt());
 830   JVMState*  jvms     = new (this) JVMState(max_size - TypeFunc::Parms);
 831   SafePointNode* map  = new SafePointNode(max_size, NULL);


 841     set_default_node_notes(entry_nn);
 842   }
 843   PhaseGVN& gvn = *initial_gvn();
 844   uint j = 0;
 845   for (uint i = 0; i < (uint)arg_size_sig; i++) {
 846     assert(j >= i, "less actual arguments than in the signature?");
 847     if (ValueTypePassFieldsAsArgs) {
 848       if (i < TypeFunc::Parms) {
 849         assert(i == j, "no change before the actual arguments");
 850         Node* parm = gvn.transform(new ParmNode(start, i));
 851         map->init_req(i, parm);
 852         // Record all these guys for later GVN.
 853         record_for_igvn(parm);
 854         j++;
 855       } else {
 856         // Value type arguments are not passed by reference: we get an
 857         // argument per field of the value type. Build ValueTypeNodes
 858         // from the value type arguments.
 859         const Type* t = tf->domain_sig()->field_at(i);
 860         if (t->isa_valuetypeptr() && !t->is_valuetypeptr()->is__Value()) {
 861           ciValueKlass* vk = t->is_valuetypeptr()->value_type()->value_klass();
 862           Node* ctl = map->control();
 863           Node* vt = ValueTypeNode::make(gvn, ctl, map->memory(), start, vk, j, true);
 864           map->set_control(ctl);
 865           map->init_req(i, gvn.transform(vt));
 866           j += vk->value_arg_slots();
 867         } else {
 868           Node* parm = gvn.transform(new ParmNode(start, j));
 869           map->init_req(i, parm);
 870           // Record all these guys for later GVN.
 871           record_for_igvn(parm);
 872           j++;
 873         }
 874       }
 875     } else {
 876       Node* parm = gvn.transform(new ParmNode(start, i));
 877       // Check if parameter is a value type pointer
 878       if (gvn.type(parm)->isa_valuetypeptr()) {
 879         // Create ValueTypeNode from the oop and replace the parameter
 880         Node* ctl = map->control();
 881         parm = ValueTypeNode::make(gvn, ctl, map->memory(), parm);
 882         map->set_control(ctl);
 883       }
 884       map->init_req(i, parm);
 885       // Record all these guys for later GVN.
 886       record_for_igvn(parm);
 887       j++;
 888     }
 889   }
 890   for (; j < map->req(); j++) {
 891     map->init_req(j, top());
 892   }
 893   assert(jvms->argoff() == TypeFunc::Parms, "parser gets arguments here");
 894   set_default_node_notes(old_nn);
 895   map->set_jvms(jvms);
 896   jvms->set_map(map);
 897   return jvms;
 898 }
 899 
 900 //-----------------------------make_node_notes---------------------------------
 901 Node_Notes* Parse::make_node_notes(Node_Notes* caller_nn) {




 108   BasicType bt = type->basic_type();
 109   if (type == TypePtr::NULL_PTR) {
 110     // Ptr types are mixed together with T_ADDRESS but NULL is
 111     // really for T_OBJECT types so correct it.
 112     bt = T_OBJECT;
 113   }
 114   Node *mem = memory(Compile::AliasIdxRaw);
 115   Node *adr = basic_plus_adr( local_addrs_base, local_addrs, -index*wordSize );
 116   Node *ctl = control();
 117 
 118   // Very similar to LoadNode::make, except we handle un-aligned longs and
 119   // doubles on Sparc.  Intel can handle them just fine directly.
 120   Node *l = NULL;
 121   switch (bt) {                // Signature is flattened
 122   case T_INT:     l = new LoadINode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInt::INT,        MemNode::unordered); break;
 123   case T_FLOAT:   l = new LoadFNode(ctl, mem, adr, TypeRawPtr::BOTTOM, Type::FLOAT,         MemNode::unordered); break;
 124   case T_ADDRESS: l = new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM,  MemNode::unordered); break;
 125   case T_OBJECT:  l = new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInstPtr::BOTTOM, MemNode::unordered); break;
 126   case T_VALUETYPE: {
 127     // Load oop and create a new ValueTypeNode
 128     const TypeValueTypePtr* vtptr_type = TypeValueTypePtr::make(TypePtr::NotNull, type->is_valuetype()->value_klass());
 129     l = _gvn.transform(new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, vtptr_type, MemNode::unordered));
 130     l = ValueTypeNode::make_from_oop(this, l);
 131     break;
 132   }
 133   case T_VALUETYPEPTR: {
 134     l = new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeValueTypePtr::NOTNULL, MemNode::unordered);
 135     break;
 136   }
 137   case T_LONG:
 138   case T_DOUBLE: {
 139     // Since arguments are in reverse order, the argument address 'adr'
 140     // refers to the back half of the long/double.  Recompute adr.
 141     adr = basic_plus_adr(local_addrs_base, local_addrs, -(index+1)*wordSize);
 142     if (Matcher::misaligned_doubles_ok) {
 143       l = (bt == T_DOUBLE)
 144         ? (Node*)new LoadDNode(ctl, mem, adr, TypeRawPtr::BOTTOM, Type::DOUBLE, MemNode::unordered)
 145         : (Node*)new LoadLNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeLong::LONG, MemNode::unordered);
 146     } else {
 147       l = (bt == T_DOUBLE)
 148         ? (Node*)new LoadD_unalignedNode(ctl, mem, adr, TypeRawPtr::BOTTOM, MemNode::unordered)
 149         : (Node*)new LoadL_unalignedNode(ctl, mem, adr, TypeRawPtr::BOTTOM, MemNode::unordered);
 150     }
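
The offset arithmetic in the T_LONG/T_DOUBLE case above is the subtle part: interpreter locals sit at decreasing raw addresses below local_addrs (hence the -index*wordSize offset handed to basic_plus_adr), so the address first computed for slot 'index' points at the back half of a two-slot long/double, and the load has to start one word lower, at -(index+1)*wordSize. A minimal self-contained sketch of that layout in plain C++ follows; kWordSize, slot_addr and two_slot_addr are made-up names for illustration, not HotSpot APIs.

    #include <cstdint>
    #include <cstdio>

    static const intptr_t kWordSize = 8;   // stand-in for wordSize on a 64-bit VM

    // Address of interpreter local slot 'index', assuming slot 0 is at
    // 'local_addrs' and slots grow toward lower addresses, mirroring the
    // -index*wordSize offset passed to basic_plus_adr above.
    static intptr_t slot_addr(intptr_t local_addrs, int index) {
      return local_addrs - index * kWordSize;
    }

    // Start address of a two-slot long/double occupying slots index and
    // index+1: the value begins at the lower address, which is why the
    // parser recomputes adr with -(index+1)*wordSize.
    static intptr_t two_slot_addr(intptr_t local_addrs, int index) {
      return slot_addr(local_addrs, index + 1);
    }

    int main() {
      intptr_t base = 0x7000;   // made-up frame address
      printf("int slot 2:        %#lx\n", (long)slot_addr(base, 2));       // 0x6ff0
      printf("long in slots 2/3: %#lx\n", (long)two_slot_addr(base, 2));   // 0x6fe8
      return 0;
    }

With the start address fixed up, the Matcher::misaligned_doubles_ok branch only decides whether that single 8-byte load can be issued as a plain LoadD/LoadL or must go through the LoadD_unaligned/LoadL_unaligned nodes.
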


 791       if (ret_bt == T_BOOLEAN ||
 792           ret_bt == T_CHAR ||
 793           ret_bt == T_BYTE ||
 794           ret_bt == T_SHORT) {
 795         ret_type = TypeInt::INT;
 796       }
 797     }
 798 
 799     // Don't "bind" an unloaded return klass to the ret_phi. If the klass
 800     // becomes loaded during the subsequent parsing, the loaded and unloaded
 801     // types will not join when we transform and push in do_exits().
 802     const TypeOopPtr* ret_oop_type = ret_type->isa_oopptr();
 803     if (ret_oop_type && !ret_oop_type->klass()->is_loaded()) {
 804       ret_type = TypeOopPtr::BOTTOM;
 805     }
 806     if ((_caller->has_method() || tf()->returns_value_type_as_fields()) &&
 807         ret_type->isa_valuetypeptr() &&
 808         !ret_type->is_valuetypeptr()->is__Value()) {
 809       // When inlining or with multiple return values: return value
 810       // type as ValueTypeNode not as oop
 811       ret_type = TypeValueType::make(ret_type->is_valuetypeptr()->value_klass());
 812     }
 813     int         ret_size = type2size[ret_type->basic_type()];
 814     Node*       ret_phi  = new PhiNode(region, ret_type);
 815     gvn().set_type_bottom(ret_phi);
 816     _exits.ensure_stack(ret_size);
 817     assert((int)(tf()->range_sig()->cnt() - TypeFunc::Parms) == ret_size, "good tf range");
 818     assert(method()->return_type()->size() == ret_size, "tf agrees w/ method");
 819     _exits.set_argument(0, ret_phi);  // here is where the parser finds it
 820     // Note:  ret_phi is not yet pushed, until do_exits.
 821   }
 822 }
 823 
 824 //----------------------------build_start_state-------------------------------
 825 // Construct a state which contains only the incoming arguments from an
 826 // unknown caller.  The method & bci will be NULL & InvocationEntryBci.
 827 JVMState* Compile::build_start_state(StartNode* start, const TypeFunc* tf) {
 828   int        arg_size_sig = tf->domain_sig()->cnt();
 829   int        max_size = MAX2(arg_size_sig, (int)tf->range_cc()->cnt());
 830   JVMState*  jvms     = new (this) JVMState(max_size - TypeFunc::Parms);
 831   SafePointNode* map  = new SafePointNode(max_size, NULL);
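
In the exit-map setup earlier in this hunk, sub-int return types (boolean, char, byte, short) are widened to TypeInt::INT before ret_phi is created, since such values are produced as full ints on the JVM expression stack; type2size then supplies the number of stack slots the return value needs (two for long/double, one otherwise) for _exits.ensure_stack(ret_size). A rough free-standing illustration of both rules, using a stand-in enum rather than HotSpot's BasicType:

    #include <cassert>

    // Free-standing stand-ins for the HotSpot BasicType constants used above.
    enum BasicType { T_BOOLEAN, T_CHAR, T_BYTE, T_SHORT, T_INT,
                     T_LONG, T_FLOAT, T_DOUBLE, T_OBJECT };

    // Sub-int returns live on the JVM expression stack as ints, which is why
    // ret_type is forced to TypeInt::INT for them in the code above.
    static BasicType widen_sub_int_return(BasicType bt) {
      switch (bt) {
        case T_BOOLEAN: case T_CHAR: case T_BYTE: case T_SHORT: return T_INT;
        default:                                                return bt;
      }
    }

    // Stack slots occupied by the return value, mirroring what type2size[]
    // supplies to _exits.ensure_stack(ret_size): two for long/double, one else.
    static int return_slots(BasicType bt) {
      return (bt == T_LONG || bt == T_DOUBLE) ? 2 : 1;
    }

    int main() {
      assert(widen_sub_int_return(T_BYTE) == T_INT);
      assert(return_slots(widen_sub_int_return(T_BYTE)) == 1);
      assert(return_slots(T_DOUBLE) == 2);
      return 0;
    }
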


 841     set_default_node_notes(entry_nn);
 842   }
 843   PhaseGVN& gvn = *initial_gvn();
 844   uint j = 0;
 845   for (uint i = 0; i < (uint)arg_size_sig; i++) {
 846     assert(j >= i, "less actual arguments than in the signature?");
 847     if (ValueTypePassFieldsAsArgs) {
 848       if (i < TypeFunc::Parms) {
 849         assert(i == j, "no change before the actual arguments");
 850         Node* parm = gvn.transform(new ParmNode(start, i));
 851         map->init_req(i, parm);
 852         // Record all these guys for later GVN.
 853         record_for_igvn(parm);
 854         j++;
 855       } else {
 856         // Value type arguments are not passed by reference: we get an
 857         // argument per field of the value type. Build ValueTypeNodes
 858         // from the value type arguments.
 859         const Type* t = tf->domain_sig()->field_at(i);
 860         if (t->isa_valuetypeptr() && !t->is_valuetypeptr()->is__Value()) {
 861           ciValueKlass* vk = t->is_valuetypeptr()->value_klass();
 862           Node* ctl = map->control();
 863           ValueTypeNode* vt = ValueTypeNode::make_from_multi(gvn, ctl, map->memory(), start, vk, j, true);
 864           map->set_control(ctl);
 865           map->init_req(i, vt);
 866           j += vk->value_arg_slots();
 867         } else {
 868           Node* parm = gvn.transform(new ParmNode(start, j));
 869           map->init_req(i, parm);
 870           // Record all these guys for later GVN.
 871           record_for_igvn(parm);
 872           j++;
 873         }
 874       }
 875     } else {
 876       Node* parm = gvn.transform(new ParmNode(start, i));
 877       // Check if parameter is a value type pointer
 878       if (gvn.type(parm)->isa_valuetypeptr()) {
 879         // Create ValueTypeNode from the oop and replace the parameter
 880         Node* ctl = map->control();
 881         parm = ValueTypeNode::make_from_oop(gvn, ctl, map->memory(), parm);
 882         map->set_control(ctl);
 883       }
 884       map->init_req(i, parm);
 885       // Record all these guys for later GVN.
 886       record_for_igvn(parm);
 887       j++;
 888     }
 889   }
 890   for (; j < map->req(); j++) {
 891     map->init_req(j, top());
 892   }
 893   assert(jvms->argoff() == TypeFunc::Parms, "parser gets arguments here");
 894   set_default_node_notes(old_nn);
 895   map->set_jvms(jvms);
 896   jvms->set_map(map);
 897   return jvms;
 898 }
 899 
 900 //-----------------------------make_node_notes---------------------------------
 901 Node_Notes* Parse::make_node_notes(Node_Notes* caller_nn) {
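
With ValueTypePassFieldsAsArgs, the argument loop in build_start_state above keeps two indices in parallel: i walks the fields of tf->domain_sig(), while j walks the actual incoming ParmNode positions, jumping ahead by vk->value_arg_slots() whenever a value type argument arrives scalarized as its individual fields. The sketch below shows only that index bookkeeping (ignoring the fixed TypeFunc::Parms slots); ArgDesc and first_incoming_index are illustrative names, not part of HotSpot.

    #include <cstdio>
    #include <vector>

    // Made-up descriptor: a scalarized value type argument contributes
    // 'arg_slots' incoming positions, an ordinary argument contributes one.
    struct ArgDesc {
      bool is_scalarized_value_type;
      int  arg_slots;   // stands in for ciValueKlass::value_arg_slots()
    };

    // For each signature position i, compute the index j of its first incoming
    // argument, mirroring how build_start_state advances j in its loop.
    static std::vector<int> first_incoming_index(const std::vector<ArgDesc>& sig) {
      std::vector<int> first(sig.size());
      int j = 0;
      for (size_t i = 0; i < sig.size(); i++) {
        first[i] = j;
        j += sig[i].is_scalarized_value_type ? sig[i].arg_slots : 1;
      }
      return first;
    }

    int main() {
      // receiver, a value type passed as 3 fields, then an int
      std::vector<ArgDesc> sig = { {false, 1}, {true, 3}, {false, 1} };
      std::vector<int> first = first_incoming_index(sig);
      for (size_t i = 0; i < first.size(); i++) {
        printf("signature arg %zu starts at incoming index %d\n", i, first[i]);
      }
      return 0;   // prints 0, 1, 4
    }

In the listing itself, that same advance of j is what lets map->init_req(i, vt) install a single node built from several incoming fields via ValueTypeNode::make_from_multi, while the non-scalarized branch simply installs one ParmNode per signature entry.
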

