764 // types will not join when we transform and push in do_exits().
765 const TypeOopPtr* ret_oop_type = ret_type->isa_oopptr();
766 if (ret_oop_type && !ret_oop_type->klass()->is_loaded()) {
767 ret_type = TypeOopPtr::BOTTOM;
768 }
769 if (_caller->has_method() && ret_type->isa_valuetypeptr()) {
770 // When inlining, return value type as ValueTypeNode not as oop
771 ret_type = ret_type->is_valuetypeptr()->value_type();
772 }
773 int ret_size = type2size[ret_type->basic_type()];
774 Node* ret_phi = new PhiNode(region, ret_type);
775 gvn().set_type_bottom(ret_phi);
776 _exits.ensure_stack(ret_size);
777 assert((int)(tf()->range()->cnt() - TypeFunc::Parms) == ret_size, "good tf range");
778 assert(method()->return_type()->size() == ret_size, "tf agrees w/ method");
779 _exits.set_argument(0, ret_phi); // here is where the parser finds it
780 // Note: ret_phi is not yet pushed, until do_exits.
781 }
782 }
783
784
785 //----------------------------build_start_state-------------------------------
786 // Construct a state which contains only the incoming arguments from an
787 // unknown caller. The method & bci will be NULL & InvocationEntryBci.
788 JVMState* Compile::build_start_state(StartNode* start, const TypeFunc* tf) {
789 int arg_size = tf->domain()->cnt();
790 int max_size = MAX2(arg_size, (int)tf->range()->cnt());
791 JVMState* jvms = new (this) JVMState(max_size - TypeFunc::Parms);
792 SafePointNode* map = new SafePointNode(max_size, NULL);
793 record_for_igvn(map);
794 assert(arg_size == TypeFunc::Parms + (is_osr_compilation() ? 1 : method()->arg_size()), "correct arg_size");
795 Node_Notes* old_nn = default_node_notes();
796 if (old_nn != NULL && has_method()) {
797 Node_Notes* entry_nn = old_nn->clone(this);
798 JVMState* entry_jvms = new(this) JVMState(method(), old_nn->jvms());
799 entry_jvms->set_offsets(0);
800 entry_jvms->set_bci(entry_bci());
801 entry_nn->set_jvms(entry_jvms);
802 set_default_node_notes(entry_nn);
803 }
804 uint i;
805 for (i = 0; i < (uint)arg_size; i++) {
806 PhaseGVN& gvn = *initial_gvn();
807 Node* parm = gvn.transform(new ParmNode(start, i));
808 // Check if parameter is a value type pointer
809 if (gvn.type(parm)->isa_valuetypeptr()) {
810 // Create ValueTypeNode from the oop and replace the parameter
811 parm = ValueTypeNode::make(gvn, map->memory(), parm);
812 }
813 map->init_req(i, parm);
814 // Record all these guys for later GVN.
815 record_for_igvn(parm);
816 }
817 for (; i < map->req(); i++) {
818 map->init_req(i, top());
819 }
820 assert(jvms->argoff() == TypeFunc::Parms, "parser gets arguments here");
821 set_default_node_notes(old_nn);
822 map->set_jvms(jvms);
823 jvms->set_map(map);
824 return jvms;
825 }
826
827 //-----------------------------make_node_notes---------------------------------
828 Node_Notes* Parse::make_node_notes(Node_Notes* caller_nn) {
829 if (caller_nn == NULL) return NULL;
830 Node_Notes* nn = caller_nn->clone(C);
831 JVMState* caller_jvms = nn->jvms();
832 JVMState* jvms = new (C) JVMState(method(), caller_jvms);
833 jvms->set_offsets(0);
834 jvms->set_bci(_entry_bci);
835 nn->set_jvms(jvms);
836 return nn;
837 }
838
1137 SafePointNode* inmap = _caller->map();
1138 assert(inmap != NULL, "must have inmap");
1139 // In case of null check on receiver above
1140 map()->transfer_replaced_nodes_from(inmap, _new_idx);
1141
1142 uint i;
1143
1144 // Pass thru the predefined input parameters.
1145 for (i = 0; i < TypeFunc::Parms; i++) {
1146 map()->init_req(i, inmap->in(i));
1147 }
1148
1149 if (depth() == 1) {
1150 assert(map()->memory()->Opcode() == Op_Parm, "");
1151 // Insert the memory aliasing node
1152 set_all_memory(reset_memory());
1153 }
1154 assert(merged_memory(), "");
1155
1156 // Now add the locals which are initially bound to arguments:
1157 uint arg_size = tf()->domain()->cnt();
1158 ensure_stack(arg_size - TypeFunc::Parms); // OSR methods have funny args
1159 for (i = TypeFunc::Parms; i < arg_size; i++) {
1160 map()->init_req(i, inmap->argument(_caller, i - TypeFunc::Parms));
1161 }
1162
1163 // Clear out the rest of the map (locals and stack)
1164 for (i = arg_size; i < len; i++) {
1165 map()->init_req(i, top());
1166 }
1167
1168 SafePointNode* entry_map = stop();
1169 return entry_map;
1170 }
1171
1172 //-----------------------------do_method_entry--------------------------------
1173 // Emit any code needed in the pseudo-block before BCI zero.
1174 // The main thing to do is lock the receiver of a synchronized method.
1175 void Parse::do_method_entry() {
1176 set_parse_bci(InvocationEntryBci); // Pseudo-BCP
1177 set_sp(0); // Java Stack Pointer
|
764 // types will not join when we transform and push in do_exits().
765 const TypeOopPtr* ret_oop_type = ret_type->isa_oopptr();
766 if (ret_oop_type && !ret_oop_type->klass()->is_loaded()) {
767 ret_type = TypeOopPtr::BOTTOM;
768 }
769 if (_caller->has_method() && ret_type->isa_valuetypeptr()) {
770 // When inlining, return value type as ValueTypeNode not as oop
771 ret_type = ret_type->is_valuetypeptr()->value_type();
772 }
773 int ret_size = type2size[ret_type->basic_type()];
774 Node* ret_phi = new PhiNode(region, ret_type);
775 gvn().set_type_bottom(ret_phi);
776 _exits.ensure_stack(ret_size);
777 assert((int)(tf()->range()->cnt() - TypeFunc::Parms) == ret_size, "good tf range");
778 assert(method()->return_type()->size() == ret_size, "tf agrees w/ method");
779 _exits.set_argument(0, ret_phi); // here is where the parser finds it
780 // Note: ret_phi is not yet pushed, until do_exits.
781 }
782 }
783
784 // Helper function to create a ValueTypeNode from its fields passed as
785 // arguments. Fields are passed in order of increasing offsets.
786 static Node* create_vt_node(StartNode* start, ciValueKlass* vk, ciValueKlass* base_vk, int base_offset, int base_input, Compile* C) {
// Recursively builds a ValueTypeNode for 'vk' whose field values come from the
// incoming ParmNodes of 'start'. 'base_vk' is the outermost value klass whose
// flattened nonstatic field list defines the argument order; 'base_offset' is
// the offset of 'vk' within 'base_vk' (0 for the outermost call), and
// 'base_input' is the index of the first ParmNode carrying a field of base_vk.
787 assert(base_offset >= 0, "");
788 PhaseGVN& gvn = *C->initial_gvn();
789 ValueTypeNode* vt = ValueTypeNode::make(gvn, vk);
790 for (uint i = 0; i < vt->field_count(); i++) {
791 ciType* field_type = vt->get_field_type(i);
// Offset of this field relative to base_vk. For an embedded value type
// (base_offset > 0) vk's first-field offset is subtracted out so the lookup
// below matches base_vk's flattened field offsets.
792 int offset = base_offset + vt->get_field_offset(i) - (base_offset > 0 ? vk->get_first_field_offset() : 0);
793 if (field_type->is_valuetype()) {
// Embedded value type field: recurse, drawing from the same parm list.
794 ciValueKlass* embedded_vk = field_type->as_value_klass();
795 Node* embedded_vt = create_vt_node(start, embedded_vk, base_vk, offset, base_input, C);
796 vt->set_field_value(i, embedded_vt);
797 } else {
// Scalar/oop field: find its position 'j' in base_vk's nonstatic field list.
// 'extra' counts preceding T_LONG/T_DOUBLE fields — presumably because each
// of those occupies an additional argument slot.
798 int j = 0; int extra = 0;
799 for (; j < base_vk->nof_nonstatic_fields(); j++) {
800 ciField* f = base_vk->nonstatic_field_at(j);
801 if (offset == f->offset()) {
802 assert(f->type() == field_type, "");
803 break;
804 }
805 BasicType bt = f->type()->basic_type();
806 if (bt == T_LONG || bt == T_DOUBLE) {
807 extra++;
808 }
809 }
810 assert(j != base_vk->nof_nonstatic_fields(), "must find");
// The field's value arrives as the (base_input + j + extra)-th parameter.
811 Node* parm = gvn.transform(new ParmNode(start, base_input + j + extra));
812 vt->set_field_value(i, parm);
813 // Record all these guys for later GVN.
814 C->record_for_igvn(parm);
815 }
816 }
817 return gvn.transform(vt);
818 }
819
820 //----------------------------build_start_state-------------------------------
821 // Construct a state which contains only the incoming arguments from an
822 // unknown caller. The method & bci will be NULL & InvocationEntryBci.
823 JVMState* Compile::build_start_state(StartNode* start, const TypeFunc* tf) {
// Builds the initial SafePointNode/JVMState for an unknown caller. When
// ValueTypePassFieldsAsArgs is set, a value type argument arrives as one
// incoming ParmNode per flattened field, so the signature slot index 'i' and
// the incoming-edge index 'j' can diverge.
824 int arg_size_sig = tf->domain_sig()->cnt();
825 int max_size = MAX2(arg_size_sig, (int)tf->range()->cnt());
826 JVMState* jvms = new (this) JVMState(max_size - TypeFunc::Parms);
827 SafePointNode* map = new SafePointNode(max_size, NULL);
828 record_for_igvn(map);
829 assert(arg_size_sig == TypeFunc::Parms + (is_osr_compilation() ? 1 : method()->arg_size()), "correct arg_size");
830 Node_Notes* old_nn = default_node_notes();
831 if (old_nn != NULL && has_method()) {
// Temporarily install node notes carrying this method's entry JVMState;
// restored below via set_default_node_notes(old_nn).
832 Node_Notes* entry_nn = old_nn->clone(this);
833 JVMState* entry_jvms = new(this) JVMState(method(), old_nn->jvms());
834 entry_jvms->set_offsets(0);
835 entry_jvms->set_bci(entry_bci());
836 entry_nn->set_jvms(entry_jvms);
837 set_default_node_notes(entry_nn);
838 }
839 PhaseGVN& gvn = *initial_gvn();
// i walks signature slots (map inputs); j walks incoming ParmNode indexes.
840 uint j = 0;
841 for (uint i = 0; i < (uint)arg_size_sig; i++) {
842 assert(j >= i, "");
843 if (ValueTypePassFieldsAsArgs) {
844 if (i < TypeFunc::Parms) {
// Predefined inputs (control, memory, ...) map one-to-one.
845 assert(i == j, "");
846 Node* parm = gvn.transform(new ParmNode(start, i));
847 map->init_req(i, parm);
848 // Record all these guys for later GVN.
849 record_for_igvn(parm);
850 j++;
851 } else {
852 // Value type arguments are not passed by reference: we get an
853 // argument per field of the value type. Build ValueTypeNodes
854 // from the value type arguments.
855 const Type* t = tf->domain_sig()->field_at(i);
856 if (t->isa_valuetypeptr()) {
857 ciValueKlass* vk = t->is_valuetypeptr()->value_type()->value_klass();
858 Node* vt = create_vt_node(start, vk, vk, 0, j, C);
859 map->init_req(i, gvn.transform(vt));
// Advance j past all field arguments consumed by this value type;
// T_LONG/T_DOUBLE fields each count once more (extra slot).
860 int extra = 0;
861 for (int k = 0; k < vk->nof_nonstatic_fields(); k++) {
862 ciField* f = vk->nonstatic_field_at(k);
863 BasicType bt = f->type()->basic_type();
864 if (bt == T_LONG || bt == T_DOUBLE) {
865 extra++;
866 }
867 }
868 j += extra + vk->nof_nonstatic_fields();
869 } else {
// Non-value-type argument: consume exactly one incoming parm.
870 Node* parm = gvn.transform(new ParmNode(start, j));
871 map->init_req(i, parm);
872 // Record all these guys for later GVN.
873 record_for_igvn(parm);
874 j++;
875 }
876 }
877 } else {
878 Node* parm = gvn.transform(new ParmNode(start, i));
879 // Check if parameter is a value type pointer
880 if (gvn.type(parm)->isa_valuetypeptr()) {
881 // Create ValueTypeNode from the oop and replace the parameter
882 parm = ValueTypeNode::make(gvn, map->memory(), parm);
883 }
884 map->init_req(i, parm);
885 // Record all these guys for later GVN.
886 record_for_igvn(parm);
887 j++;
888 }
889 }
// NOTE(review): this tail loop starts at j (the parm index), not at
// arg_size_sig (the map slot index). If any value type expanded to more than
// one field argument, j > arg_size_sig here, and map slots in
// [arg_size_sig, j) would be left uninitialized whenever map->req() exceeds
// arg_size_sig — confirm range()->cnt() can never exceed arg_size_sig in
// that configuration.
890 for (; j < map->req(); j++) {
891 map->init_req(j, top());
892 }
893 assert(jvms->argoff() == TypeFunc::Parms, "parser gets arguments here");
894 set_default_node_notes(old_nn);
895 map->set_jvms(jvms);
896 jvms->set_map(map);
897 return jvms;
898 }
899
900 //-----------------------------make_node_notes---------------------------------
901 Node_Notes* Parse::make_node_notes(Node_Notes* caller_nn) {
902 if (caller_nn == NULL) return NULL;
903 Node_Notes* nn = caller_nn->clone(C);
904 JVMState* caller_jvms = nn->jvms();
905 JVMState* jvms = new (C) JVMState(method(), caller_jvms);
906 jvms->set_offsets(0);
907 jvms->set_bci(_entry_bci);
908 nn->set_jvms(jvms);
909 return nn;
910 }
911
1210 SafePointNode* inmap = _caller->map();
1211 assert(inmap != NULL, "must have inmap");
1212 // In case of null check on receiver above
1213 map()->transfer_replaced_nodes_from(inmap, _new_idx);
1214
1215 uint i;
1216
1217 // Pass thru the predefined input parameters.
1218 for (i = 0; i < TypeFunc::Parms; i++) {
1219 map()->init_req(i, inmap->in(i));
1220 }
1221
1222 if (depth() == 1) {
1223 assert(map()->memory()->Opcode() == Op_Parm, "");
1224 // Insert the memory aliasing node
1225 set_all_memory(reset_memory());
1226 }
1227 assert(merged_memory(), "");
1228
1229 // Now add the locals which are initially bound to arguments:
1230 uint arg_size = tf()->domain_sig()->cnt();
1231 ensure_stack(arg_size - TypeFunc::Parms); // OSR methods have funny args
1232 for (i = TypeFunc::Parms; i < arg_size; i++) {
1233 map()->init_req(i, inmap->argument(_caller, i - TypeFunc::Parms));
1234 }
1235
1236 // Clear out the rest of the map (locals and stack)
1237 for (i = arg_size; i < len; i++) {
1238 map()->init_req(i, top());
1239 }
1240
1241 SafePointNode* entry_map = stop();
1242 return entry_map;
1243 }
1244
1245 //-----------------------------do_method_entry--------------------------------
1246 // Emit any code needed in the pseudo-block before BCI zero.
1247 // The main thing to do is lock the receiver of a synchronized method.
1248 void Parse::do_method_entry() {
1249 set_parse_bci(InvocationEntryBci); // Pseudo-BCP
1250 set_sp(0); // Java Stack Pointer
|