
src/share/vm/opto/parse1.cpp

rev 10494 : more
rev 10496 : more
rev 10512 : value type calling convention
rev 10513 : rebase


 771     // types will not join when we transform and push in do_exits().
 772     const TypeOopPtr* ret_oop_type = ret_type->isa_oopptr();
 773     if (ret_oop_type && !ret_oop_type->klass()->is_loaded()) {
 774       ret_type = TypeOopPtr::BOTTOM;
 775     }
 776     if (_caller->has_method() && ret_type->isa_valuetypeptr()) {
 777       // When inlining, return the value type as a ValueTypeNode rather than as an oop
 778       ret_type = ret_type->is_valuetypeptr()->value_type();
 779     }
 780     int         ret_size = type2size[ret_type->basic_type()];
 781     Node*       ret_phi  = new PhiNode(region, ret_type);
 782     gvn().set_type_bottom(ret_phi);
 783     _exits.ensure_stack(ret_size);
 784     assert((int)(tf()->range()->cnt() - TypeFunc::Parms) == ret_size, "good tf range");
 785     assert(method()->return_type()->size() == ret_size, "tf agrees w/ method");
 786     _exits.set_argument(0, ret_phi);  // here is where the parser finds it
 787     // Note:  ret_phi is not yet pushed, until do_exits.
 788   }
 789 }
 790
 791 
 792 //----------------------------build_start_state-------------------------------
 793 // Construct a state which contains only the incoming arguments from an
 794 // unknown caller.  The method & bci will be NULL & InvocationEntryBci.
 795 JVMState* Compile::build_start_state(StartNode* start, const TypeFunc* tf) {
 796   int        arg_size = tf->domain()->cnt();
 797   int        max_size = MAX2(arg_size, (int)tf->range()->cnt());
 798   JVMState*  jvms     = new (this) JVMState(max_size - TypeFunc::Parms);
 799   SafePointNode* map  = new SafePointNode(max_size, NULL);
 800   record_for_igvn(map);
 801   assert(arg_size == TypeFunc::Parms + (is_osr_compilation() ? 1 : method()->arg_size()), "correct arg_size");
 802   Node_Notes* old_nn = default_node_notes();
 803   if (old_nn != NULL && has_method()) {
 804     Node_Notes* entry_nn = old_nn->clone(this);
 805     JVMState* entry_jvms = new(this) JVMState(method(), old_nn->jvms());
 806     entry_jvms->set_offsets(0);
 807     entry_jvms->set_bci(entry_bci());
 808     entry_nn->set_jvms(entry_jvms);
 809     set_default_node_notes(entry_nn);
 810   }
 811   uint i;
 812   for (i = 0; i < (uint)arg_size; i++) {
 813     PhaseGVN& gvn = *initial_gvn();
 814     Node* parm = gvn.transform(new ParmNode(start, i));
 815     // Check if parameter is a value type pointer
 816     if (gvn.type(parm)->isa_valuetypeptr()) {
 817       // Create ValueTypeNode from the oop and replace the parameter
 818       parm = ValueTypeNode::make(gvn, map->memory(), parm);
 819     }
 820     map->init_req(i, parm);
 821     // Record all these guys for later GVN.
 822     record_for_igvn(parm);
 823   }
 824   for (; i < map->req(); i++) {
 825     map->init_req(i, top());
 826   }
 827   assert(jvms->argoff() == TypeFunc::Parms, "parser gets arguments here");
 828   set_default_node_notes(old_nn);
 829   map->set_jvms(jvms);
 830   jvms->set_map(map);
 831   return jvms;
 832 }
 833 
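In this pre-change version of build_start_state, every signature entry maps to exactly one incoming ParmNode; a value type argument arrives as a single oop and is only rewrapped by ValueTypeNode::make so the parser sees a scalarized node. A standalone sketch of that idea, not HotSpot code (PointValue, ScalarizedPoint and scalarize are illustrative names):

    // Sketch only: a value type passed as one reference, then scalarized by
    // loading its fields, which is conceptually what ValueTypeNode::make does
    // with the incoming oop here.
    struct PointValue      { int x; int y; };  // hypothetical value type layout
    struct ScalarizedPoint { int x; int y; };  // per-field (flattened) form

    ScalarizedPoint scalarize(const PointValue* oop) {
      // One incoming argument (the reference); the field loads happen afterwards.
      return ScalarizedPoint{ oop->x, oop->y };
    }
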
 834 //-----------------------------make_node_notes---------------------------------
 835 Node_Notes* Parse::make_node_notes(Node_Notes* caller_nn) {
 836   if (caller_nn == NULL)  return NULL;
 837   Node_Notes* nn = caller_nn->clone(C);
 838   JVMState* caller_jvms = nn->jvms();
 839   JVMState* jvms = new (C) JVMState(method(), caller_jvms);
 840   jvms->set_offsets(0);
 841   jvms->set_bci(_entry_bci);
 842   nn->set_jvms(jvms);
 843   return nn;
 844 }
 845 


1144   SafePointNode* inmap = _caller->map();
1145   assert(inmap != NULL, "must have inmap");
1146   // In case of null check on receiver above
1147   map()->transfer_replaced_nodes_from(inmap, _new_idx);
1148 
1149   uint i;
1150 
1151   // Pass thru the predefined input parameters.
1152   for (i = 0; i < TypeFunc::Parms; i++) {
1153     map()->init_req(i, inmap->in(i));
1154   }
1155 
1156   if (depth() == 1) {
1157     assert(map()->memory()->Opcode() == Op_Parm, "");
1158     // Insert the memory aliasing node
1159     set_all_memory(reset_memory());
1160   }
1161   assert(merged_memory(), "");
1162 
1163   // Now add the locals which are initially bound to arguments:
1164   uint arg_size = tf()->domain()->cnt();
1165   ensure_stack(arg_size - TypeFunc::Parms);  // OSR methods have funny args
1166   for (i = TypeFunc::Parms; i < arg_size; i++) {
1167     map()->init_req(i, inmap->argument(_caller, i - TypeFunc::Parms));
1168   }
1169 
1170   // Clear out the rest of the map (locals and stack)
1171   for (i = arg_size; i < len; i++) {
1172     map()->init_req(i, top());
1173   }
1174 
1175   SafePointNode* entry_map = stop();
1176   return entry_map;
1177 }
1178 
1179 //-----------------------------do_method_entry--------------------------------
1180 // Emit any code needed in the pseudo-block before BCI zero.
1181 // The main thing to do is lock the receiver of a synchronized method.
1182 void Parse::do_method_entry() {
1183   set_parse_bci(InvocationEntryBci); // Pseudo-BCP
1184   set_sp(0);                      // Java Stack Pointer




 771     // types will not join when we transform and push in do_exits().
 772     const TypeOopPtr* ret_oop_type = ret_type->isa_oopptr();
 773     if (ret_oop_type && !ret_oop_type->klass()->is_loaded()) {
 774       ret_type = TypeOopPtr::BOTTOM;
 775     }
 776     if (_caller->has_method() && ret_type->isa_valuetypeptr()) {
 777       // When inlining, return the value type as a ValueTypeNode rather than as an oop
 778       ret_type = ret_type->is_valuetypeptr()->value_type();
 779     }
 780     int         ret_size = type2size[ret_type->basic_type()];
 781     Node*       ret_phi  = new PhiNode(region, ret_type);
 782     gvn().set_type_bottom(ret_phi);
 783     _exits.ensure_stack(ret_size);
 784     assert((int)(tf()->range()->cnt() - TypeFunc::Parms) == ret_size, "good tf range");
 785     assert(method()->return_type()->size() == ret_size, "tf agrees w/ method");
 786     _exits.set_argument(0, ret_phi);  // here is where the parser finds it
 787     // Note:  ret_phi is not yet pushed, until do_exits.
 788   }
 789 }
 790 
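Two details in the block above are easy to miss: when this parse is an inlined callee (the caller has a method), a value type return is represented as a ValueTypeNode instead of an oop, and ret_size is the number of JVM stack slots the return value occupies. A tiny standalone sketch of the slot count, not HotSpot's actual type2size table, just the usual JVM slot rules:

    // Hypothetical stand-in for BasicType; only the cases needed here.
    enum class Bt { Void, Int, Float, Object, Long, Double };

    // long/double return values occupy two stack slots, void occupies none,
    // everything else (including references) occupies one.
    int return_slots(Bt bt) {
      if (bt == Bt::Void)                      return 0;
      if (bt == Bt::Long || bt == Bt::Double)  return 2;
      return 1;
    }
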
 791 // Helper function to create a ValueTypeNode from its fields passed as
 792 // arguments. Fields are passed in order of increasing offsets.
 793 static Node* create_vt_node(StartNode* start, ciValueKlass* vk, ciValueKlass* base_vk, int base_offset, int base_input, Compile* C) {
 794   assert(base_offset >= 0, "offset in value type always positive");
 795   PhaseGVN& gvn = *C->initial_gvn();
 796   ValueTypeNode* vt = ValueTypeNode::make(gvn, vk);
 797   for (uint i = 0; i < vt->field_count(); i++) {
 798     ciType* field_type = vt->get_field_type(i);
 799     int offset = base_offset + vt->get_field_offset(i) - (base_offset > 0 ? vk->first_field_offset() : 0);
 800     if (field_type->is_valuetype()) {
 801       ciValueKlass* embedded_vk = field_type->as_value_klass();
 802       Node* embedded_vt = create_vt_node(start, embedded_vk, base_vk, offset, base_input, C);
 803       vt->set_field_value(i, embedded_vt);
 804     } else {
 805       int j = 0; int extra = 0;
 806       for (; j < base_vk->nof_nonstatic_fields(); j++) {
 807         ciField* f = base_vk->nonstatic_field_at(j);
 808         if (offset == f->offset()) {
 809           assert(f->type() == field_type, "inconsistent field type");
 810           break;
 811         }
 812         BasicType bt = f->type()->basic_type();
 813         if (bt == T_LONG || bt == T_DOUBLE) {
 814           extra++;
 815         }
 816       }
 817       assert(j != base_vk->nof_nonstatic_fields(), "must find");
 818       Node* parm = gvn.transform(new ParmNode(start, base_input + j + extra));
 819       vt->set_field_value(i, parm);
 820       // Record all these guys for later GVN.
 821       C->record_for_igvn(parm);
 822     }
 823   }
 824   return gvn.transform(vt);
 825 }
 826 
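The parameter index computed in create_vt_node deserves a note: for a field at a given offset, j is the field's position among base_vk's non-static fields and extra counts the long/double fields that precede it, since each of those occupies two argument slots. A standalone sketch of the same scan, not HotSpot code (FieldDesc and parm_index_for are illustrative names):

    #include <vector>

    // Illustrative stand-in for ciField: just the offset and whether the field
    // needs two argument slots (long/double).
    struct FieldDesc {
      int  offset;
      bool two_slots;
    };

    // Walk the holder's non-static fields in offset order, counting one extra
    // slot for every long/double field that comes before the requested one.
    int parm_index_for(const std::vector<FieldDesc>& fields, int offset, int base_input) {
      int extra = 0;
      for (int j = 0; j < (int)fields.size(); j++) {
        if (fields[j].offset == offset) {
          return base_input + j + extra;
        }
        if (fields[j].two_slots) {
          extra++;
        }
      }
      return -1;  // not found; the real code asserts instead
    }

    // Example: fields at offsets 12 (int), 16 (long), 24 (int). The field at
    // offset 24 is third (j == 2) and one long precedes it (extra == 1), so
    // with base_input == 5 its ParmNode index is 5 + 2 + 1 == 8.
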
 827 //----------------------------build_start_state-------------------------------
 828 // Construct a state which contains only the incoming arguments from an
 829 // unknown caller.  The method & bci will be NULL & InvocationEntryBci.
 830 JVMState* Compile::build_start_state(StartNode* start, const TypeFunc* tf) {
 831   int        arg_size_sig = tf->domain_sig()->cnt();
 832   int        max_size = MAX2(arg_size_sig, (int)tf->range()->cnt());
 833   JVMState*  jvms     = new (this) JVMState(max_size - TypeFunc::Parms);
 834   SafePointNode* map  = new SafePointNode(max_size, NULL);
 835   record_for_igvn(map);
 836   assert(arg_size_sig == TypeFunc::Parms + (is_osr_compilation() ? 1 : method()->arg_size()), "correct arg_size");
 837   Node_Notes* old_nn = default_node_notes();
 838   if (old_nn != NULL && has_method()) {
 839     Node_Notes* entry_nn = old_nn->clone(this);
 840     JVMState* entry_jvms = new(this) JVMState(method(), old_nn->jvms());
 841     entry_jvms->set_offsets(0);
 842     entry_jvms->set_bci(entry_bci());
 843     entry_nn->set_jvms(entry_jvms);
 844     set_default_node_notes(entry_nn);
 845   }
 846   PhaseGVN& gvn = *initial_gvn();
 847   uint j = 0;
 848   for (uint i = 0; i < (uint)arg_size_sig; i++) {
 849     assert(j >= i, "fewer actual arguments than in the signature?");
 850     if (ValueTypePassFieldsAsArgs) {
 851       if (i < TypeFunc::Parms) {
 852         assert(i == j, "no change before the actual arguments");
 853         Node* parm = gvn.transform(new ParmNode(start, i));
 854         map->init_req(i, parm);
 855         // Record all these guys for later GVN.
 856         record_for_igvn(parm);
 857         j++;
 858       } else {
 859         // Value type arguments are not passed by reference: we get an
 860         // argument per field of the value type. Build ValueTypeNodes
 861         // from the value type arguments.
 862         const Type* t = tf->domain_sig()->field_at(i);
 863         if (t->isa_valuetypeptr()) {
 864           ciValueKlass* vk = t->is_valuetypeptr()->value_type()->value_klass();
 865           Node* vt = create_vt_node(start, vk, vk, 0, j, C);
 866           map->init_req(i, gvn.transform(vt));
 867           int extra = 0;
 868           for (int k = 0; k < vk->nof_nonstatic_fields(); k++) {
 869             ciField* f = vk->nonstatic_field_at(k);
 870             BasicType bt = f->type()->basic_type();
 871             if (bt == T_LONG || bt == T_DOUBLE) {
 872               extra++;
 873             }
 874           }
 875           j += extra + vk->nof_nonstatic_fields();
 876         } else {
 877           Node* parm = gvn.transform(new ParmNode(start, j));
 878           map->init_req(i, parm);
 879           // Record all these guys for later GVN.
 880           record_for_igvn(parm);
 881           j++;
 882         }
 883       }
 884     } else {
 885       Node* parm = gvn.transform(new ParmNode(start, i));
 886       // Check if parameter is a value type pointer
 887       if (gvn.type(parm)->isa_valuetypeptr()) {
 888         // Create ValueTypeNode from the oop and replace the parameter
 889         parm = ValueTypeNode::make(gvn, map->memory(), parm);
 890       }
 891       map->init_req(i, parm);
 892       // Record all these guys for later GVN.
 893       record_for_igvn(parm);
 894       j++;
 895     }
 896   }
 897   for (; j < map->req(); j++) {
 898     map->init_req(j, top());
 899   }
 900   assert(jvms->argoff() == TypeFunc::Parms, "parser gets arguments here");
 901   set_default_node_notes(old_nn);
 902   map->set_jvms(jvms);
 903   jvms->set_map(map);
 904   return jvms;
 905 }
 906 
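With ValueTypePassFieldsAsArgs, the loop above keeps two cursors: i walks the signature while j walks the actual incoming ParmNodes, and a value type entry advances j by its non-static field count plus one extra slot per long/double field. A small standalone sketch of that bookkeeping, not HotSpot code (SigEntry and incoming_slots are illustrative names):

    #include <vector>

    // Hypothetical signature entry: either a regular argument or a value type
    // scalarized into field_count fields, wide_fields of which are long/double
    // and therefore consume two incoming slots each.
    struct SigEntry {
      bool is_value_type;
      int  field_count;
      int  wide_fields;
    };

    // Number of incoming argument positions one signature entry consumes.
    int incoming_slots(const SigEntry& e) {
      return e.is_value_type ? e.field_count + e.wide_fields : 1;
    }

    // Each signature entry contributes its slot count, so the incoming index j
    // only ever runs ahead of the signature index (hence the assert j >= i).
    int total_incoming(const std::vector<SigEntry>& sig) {
      int j = 0;
      for (const SigEntry& e : sig) {
        j += incoming_slots(e);
      }
      return j;
    }

    // Example: (int, value{int, long, int}) consumes 1 + (3 + 1) = 5 incoming
    // positions even though the signature has only two entries.
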
 907 //-----------------------------make_node_notes---------------------------------
 908 Node_Notes* Parse::make_node_notes(Node_Notes* caller_nn) {
 909   if (caller_nn == NULL)  return NULL;
 910   Node_Notes* nn = caller_nn->clone(C);
 911   JVMState* caller_jvms = nn->jvms();
 912   JVMState* jvms = new (C) JVMState(method(), caller_jvms);
 913   jvms->set_offsets(0);
 914   jvms->set_bci(_entry_bci);
 915   nn->set_jvms(jvms);
 916   return nn;
 917 }
 918 


1217   SafePointNode* inmap = _caller->map();
1218   assert(inmap != NULL, "must have inmap");
1219   // In case of null check on receiver above
1220   map()->transfer_replaced_nodes_from(inmap, _new_idx);
1221 
1222   uint i;
1223 
1224   // Pass thru the predefined input parameters.
1225   for (i = 0; i < TypeFunc::Parms; i++) {
1226     map()->init_req(i, inmap->in(i));
1227   }
1228 
1229   if (depth() == 1) {
1230     assert(map()->memory()->Opcode() == Op_Parm, "");
1231     // Insert the memory aliasing node
1232     set_all_memory(reset_memory());
1233   }
1234   assert(merged_memory(), "");
1235 
1236   // Now add the locals which are initially bound to arguments:
1237   uint arg_size = tf()->domain_sig()->cnt();
1238   ensure_stack(arg_size - TypeFunc::Parms);  // OSR methods have funny args
1239   for (i = TypeFunc::Parms; i < arg_size; i++) {
1240     map()->init_req(i, inmap->argument(_caller, i - TypeFunc::Parms));
1241   }
1242 
1243   // Clear out the rest of the map (locals and stack)
1244   for (i = arg_size; i < len; i++) {
1245     map()->init_req(i, top());
1246   }
1247 
1248   SafePointNode* entry_map = stop();
1249   return entry_map;
1250 }
1251 
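The entry map built here has a simple partition: indices below TypeFunc::Parms hold the predefined inputs copied from the caller map, indices up to arg_size hold the locals bound to incoming arguments, and everything beyond that starts out as top. A standalone sketch of that layout, not HotSpot code (entry_map_layout is an illustrative name):

    #include <string>
    #include <vector>

    // Returns a label per map slot, mirroring the three init_req loops above.
    std::vector<std::string> entry_map_layout(int parms, int arg_size, int len) {
      std::vector<std::string> map(len);
      for (int i = 0; i < parms; i++)         map[i] = "predefined input (from caller map)";
      for (int i = parms; i < arg_size; i++)  map[i] = "local bound to incoming argument";
      for (int i = arg_size; i < len; i++)    map[i] = "top";
      return map;
    }
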
1252 //-----------------------------do_method_entry--------------------------------
1253 // Emit any code needed in the pseudo-block before BCI zero.
1254 // The main thing to do is lock the receiver of a synchronized method.
1255 void Parse::do_method_entry() {
1256   set_parse_bci(InvocationEntryBci); // Pseudo-BCP
1257   set_sp(0);                      // Java Stack Pointer

