  int lsize = MAX_locs_size / 3;
  buf.consts()->initialize_shared_locs(&locs_buf[lsize * 0], lsize);
  buf.insts()->initialize_shared_locs( &locs_buf[lsize * 1], lsize);
  buf.stubs()->initialize_shared_locs( &locs_buf[lsize * 2], lsize);
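  // Each code section (consts, insts, stubs) receives an equal third of the
  // shared relocation scratch space initialized above.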

  // Do the emission.

  Label fakeL; // Fake label for branch instructions.
  Label* saveL = NULL;
  uint save_bnum = 0;
  bool is_branch = n->is_MachBranch();
  if (is_branch) {
    MacroAssembler masm(&buf);
    masm.bind(fakeL);
    n->as_MachBranch()->save_label(&saveL, &save_bnum);
    n->as_MachBranch()->label_set(&fakeL, 0);
  }
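  // With the branch temporarily bound to fakeL above, the emission below can
  // encode a fully resolved branch; the node's original label and block
  // number were saved and are restored right after.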
  n->emit(buf, this->regalloc());

  // Emitting into the scratch buffer should not fail
  assert(!failing(), err_msg_res("Must not have pending failure. Reason is: %s", failure_reason()));

  if (is_branch) // Restore label.
    n->as_MachBranch()->label_set(saveL, save_bnum);

  // End scratch_emit_size section.
  set_in_scratch_emit_size(false);

  // Only the emitted size matters; the scratch buffer contents are discarded.
  return buf.insts_size();
}


// ============================================================================
//------------------------------Compile standard-------------------------------
debug_only( int Compile::_debug_idx = 100000; )

// Compile a method.  entry_bci is -1 for normal compilations; for on-stack
// replacement it is the continuation bci.


Compile::Compile( ciEnv* ci_env, C2Compiler* compiler, ciMethod* target, int osr_bci,
// ... (remaining parameters, the constructor body, and several hundred lines
// elided; the excerpt resumes at what appears to be the tail of Compile::Init) ...
  // Worklists for the macro-expansion, predicate-opaque cleanup, and
  // expensive-node passes; each is arena-allocated with an initial capacity
  // of 8, zero length, and NULL as the default element.
  _macro_nodes = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8, 0, NULL);
  _predicate_opaqs = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8, 0, NULL);
  _expensive_nodes = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8, 0, NULL);
  register_library_intrinsics();
}

//---------------------------init_start----------------------------------------
// Install the StartNode on this compile object.
void Compile::init_start(StartNode* s) {
  if (failing())
    return; // already failing
  assert(s == start(), "");
}
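
// Note: init_start() does not cache the StartNode; start() below recovers it
// from the outputs of the root node on every call.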

/**
 * Return the 'StartNode'. We must not have a pending failure, since the ideal graph
 * can be in an inconsistent state, i.e., we can get segmentation faults when traversing
 * the ideal graph.
 */
StartNode* Compile::start() const {
  assert(!failing(), err_msg_res("Must not have pending failure. Reason is: %s", failure_reason()));
  for (DUIterator_Fast imax, i = root()->fast_outs(imax); i < imax; i++) {
    Node* start = root()->fast_out(i);
    if (start->is_Start()) {
      return start->as_Start();
    }
  }
  fatal("Did not find Start node!");
  return NULL;
}

//-------------------------------immutable_memory-------------------------------------
// Access immutable memory
Node* Compile::immutable_memory() {
  if (_immutable_memory != NULL) {
    return _immutable_memory;
  }
  StartNode* s = start();
  for (DUIterator_Fast imax, i = s->fast_outs(imax); true; i++) {
    Node *p = s->fast_out(i);
    if (p != s && p->as_Proj()->_con == TypeFunc::Memory) {
      // (function tail reconstructed from the caching pattern above)
      _immutable_memory = p;
      return _immutable_memory;
    }
  }
  ShouldNotReachHere();
  return NULL;
}

// ... (many lines elided; the excerpt resumes inside
// Compile::ConstantTable::emit, in the switch over constant types) ...
    // (case header reconstructed from context)
    case T_VOID: {
      // A jump table: add_jump_table() stored the MachConstantNode pointer
      // as the constant's value, which identifies the table here.
      MachConstantNode* n = (MachConstantNode*) con.get_jobject();
      // Write a dummy word.  The real value is
      // filled in later in fill_jump_table.
      address dummy = (address) n;
      constant_addr = _masm.address_constant(dummy);
      // Expand jump-table: after this loop the constant section holds
      // outcnt() consecutive slots containing n, n+1, n+2, ...
      for (uint i = 1; i < n->outcnt(); i++) {
        address temp_addr = _masm.address_constant(dummy + i);
        assert(temp_addr, "consts section too small");
      }
      break;
    }
    case T_METADATA: {
      Metadata* obj = con.get_metadata();
      int metadata_index = _masm.oop_recorder()->find_index(obj);
      constant_addr = _masm.address_constant((address) obj, metadata_Relocation::spec(metadata_index));
      break;
    }
    default: ShouldNotReachHere();
    }
    assert(constant_addr, "consts section too small");
    assert((constant_addr - _masm.code()->consts()->start()) == con.offset(),
           err_msg_res("must be: %d == %d", (int) (constant_addr - _masm.code()->consts()->start()), (int)(con.offset())));
  }
}
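
// Note: the assert above cross-checks each emitted constant's address against
// the offset recorded at layout time, guarding against drift between the
// layout and emission passes.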

int Compile::ConstantTable::find_offset(Constant& con) const {
  int idx = _constants.find(con);
  assert(idx != -1, "constant must be in constant table");
  int offset = _constants.at(idx).offset();
  assert(offset != -1, "constant table not emitted yet?");
  return offset;
}
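
// Offsets are assigned once the table layout is computed before emission;
// until then each Constant still has its initial offset of -1, which the
// assert above catches.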

void Compile::ConstantTable::add(Constant& con) {
  if (con.can_be_reused()) {
    int idx = _constants.find(con);
    if (idx != -1 && _constants.at(idx).can_be_reused()) {
      _constants.adr_at(idx)->inc_freq(con.freq()); // increase the frequency by the current value
      return;
    }
  }
  (void) _constants.append(con);
}

// (lines reconstructed from context: the overload taking an explicit type and
// jvalue, called by add(MachConstantNode*, MachOper*) below)
Compile::Constant Compile::ConstantTable::add(MachConstantNode* n, BasicType type, jvalue value) {
  Block* b = Compile::current()->cfg()->get_block_for_node(n);
  Constant con(type, value, b->_freq);
  add(con);
  return con;
}

Compile::Constant Compile::ConstantTable::add(Metadata* metadata) {
  Constant con(metadata);
  add(con);
  return con;
}

Compile::Constant Compile::ConstantTable::add(MachConstantNode* n, MachOper* oper) {
  jvalue value;
  BasicType type = oper->type()->basic_type();
  switch (type) {
  case T_LONG:   value.j = oper->constantL(); break;
  case T_FLOAT:  value.f = oper->constantF(); break;
  case T_DOUBLE: value.d = oper->constantD(); break;
  case T_OBJECT:
  case T_ADDRESS: value.l = (jobject) oper->constant(); break;
  case T_METADATA: return add((Metadata*)oper->constant());
  default: guarantee(false, err_msg_res("unhandled type: %s", type2name(type)));
  }
  return add(n, type, value);
}
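
// The jvalue union lets a single entry point cover all scalar constant kinds;
// T_METADATA takes a separate path because metadata constants also need a
// metadata_Relocation recorded when they are emitted (see emit() above).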

Compile::Constant Compile::ConstantTable::add_jump_table(MachConstantNode* n) {
  jvalue value;
  // We can use the node pointer here to identify the right jump-table, as
  // this method is called from Compile::Fill_buffer right before the
  // MachNodes are emitted and the jump-table is filled (meaning the MachNode
  // pointers no longer change).
  value.l = (jobject) n;
  Constant con(T_VOID, value, next_jump_table_freq(), false); // Labels of a jump-table cannot be reused.
  add(con);
  return con;
}
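
// T_VOID serves as the sentinel type for jump tables, and the constant is
// marked non-reusable because each table's slots are patched individually by
// fill_jump_table() below.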

void Compile::ConstantTable::fill_jump_table(CodeBuffer& cb, MachConstantNode* n, GrowableArray<Label*> labels) const {
  // If called from Compile::scratch_emit_size do nothing.
  if (Compile::current()->in_scratch_emit_size()) return;

  assert(labels.is_nonempty(), "must be");
  assert((uint) labels.length() == n->outcnt(), err_msg_res("must be equal: %d == %d", labels.length(), n->outcnt()));

  // Since MachConstantNode::constant_offset() also contains
  // table_base_offset() we need to subtract the table_base_offset()
  // to get the plain offset into the constant table.
  int offset = n->constant_offset() - table_base_offset();

  MacroAssembler _masm(&cb);
  address* jump_table_base = (address*) (_masm.code()->consts()->start() + offset);

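  // Patch each of the outcnt() slots reserved in emit() with the resolved
  // address of its target label; the internal_word relocation keeps the entry
  // valid if the code blob is later moved.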
  for (uint i = 0; i < n->outcnt(); i++) {
    address* constant_addr = &jump_table_base[i];
    assert(*constant_addr == (((address) n) + i), err_msg_res("all jump-table entries must contain adjusted node pointer: " INTPTR_FORMAT " == " INTPTR_FORMAT, p2i(*constant_addr), p2i(((address) n) + i)));
    *constant_addr = cb.consts()->target(*labels.at(i), (address) constant_addr);
    cb.consts()->relocate((address) constant_addr, relocInfo::internal_word_type);
  }
}

//----------------------------static_subtype_check-----------------------------
// Shortcut important common cases when superklass is exact:
// (0) superklass is java.lang.Object (can occur in reflective code)
// (1) subklass is already limited to a subtype of superklass => always ok
// (2) subklass does not overlap with superklass => always fail
// (3) superklass has NO subtypes and we can check with a simple compare.
int Compile::static_subtype_check(ciKlass* superk, ciKlass* subk) {
  if (StressReflectiveCode) {
    return SSC_full_test;       // Let caller generate the general case.
  }

  if (superk == env()->Object_klass()) {
    return SSC_always_true;     // (0) this test cannot fail
  }

// ... (cases (1)-(3) and several later sections elided) ...
// (signature reconstructed from context)
void Compile::log_inline_failure(const char* msg) {
  if (C->log() != NULL) {
    C->log()->inline_fail(msg);
  }
}


// Dump inlining replay data to the stream.
// Don't change thread state or acquire any locks.
void Compile::dump_inline_data(outputStream* out) {
  InlineTree* inl_tree = ilt();
  if (inl_tree != NULL) {
    out->print(" inline %d", inl_tree->count());
    inl_tree->dump_replay_data(out);
  }
}

int Compile::cmp_expensive_nodes(Node* n1, Node* n2) {
  if (n1->Opcode() < n2->Opcode())      return -1;
  else if (n1->Opcode() > n2->Opcode()) return 1;

  assert(n1->req() == n2->req(), err_msg_res("can't compare %s nodes: n1->req() = %d, n2->req() = %d", NodeClassNames[n1->Opcode()], n1->req(), n2->req()));
  for (uint i = 1; i < n1->req(); i++) {
    if (n1->in(i) < n2->in(i))      return -1;
    else if (n1->in(i) > n2->in(i)) return 1;
  }

  return 0;
}
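
// Sorting with this comparator places structurally identical expensive nodes
// (same opcode, same inputs) next to each other, so duplicates can be found
// and commoned with a single linear pass over the sorted array.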

int Compile::cmp_expensive_nodes(Node** n1p, Node** n2p) {
  Node* n1 = *n1p;
  Node* n2 = *n2p;

  return cmp_expensive_nodes(n1, n2);
}
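
// Adapter with the element-pointer signature expected by
// GrowableArray::sort(); it unwraps the pointers and delegates to the
// two-node comparator above.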

void Compile::sort_expensive_nodes() {
  if (!expensive_nodes_sorted()) {
    _expensive_nodes->sort(cmp_expensive_nodes);
  }
}