< prev index next >

src/hotspot/share/c1/c1_GraphBuilder.cpp

Print this page




  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_CFGPrinter.hpp"
  27 #include "c1/c1_Canonicalizer.hpp"
  28 #include "c1/c1_Compilation.hpp"
  29 #include "c1/c1_GraphBuilder.hpp"
  30 #include "c1/c1_InstructionPrinter.hpp"
  31 #include "ci/ciCallSite.hpp"
  32 #include "ci/ciField.hpp"
  33 #include "ci/ciKlass.hpp"
  34 #include "ci/ciMemberName.hpp"
  35 #include "ci/ciUtilities.inline.hpp"

  36 #include "compiler/compileBroker.hpp"
  37 #include "interpreter/bytecode.hpp"
  38 #include "jfr/jfrEvents.hpp"
  39 #include "memory/resourceArea.hpp"
  40 #include "oops/oop.inline.hpp"
  41 #include "runtime/sharedRuntime.hpp"
  42 #include "runtime/compilationPolicy.hpp"
  43 #include "runtime/vm_version.hpp"
  44 #include "utilities/bitMap.inline.hpp"
  45 
  46 class BlockListBuilder {
  47  private:
  48   Compilation* _compilation;
  49   IRScope*     _scope;
  50 
  51   BlockList    _blocks;                // internal list of all blocks
  52   BlockList*   _bci2block;             // mapping from bci to blocks for GraphBuilder
  53 
  54   // fields used by mark_loops
  55   ResourceBitMap _active;              // for iteration of control flow graph


 631         }
 632 #endif
 633         assert(result->type()->tag() == load->type()->tag(), "wrong types");
 634         return result;
 635       }
 636     }
 637     return load;
 638   }
 639 
 640   // Record this newly allocated object
 641   void new_instance(NewInstance* object) {
 642     int index = _newobjects.length();
 643     _newobjects.append(object);
 644     if (_fields.at_grow(index, NULL) == NULL) {
 645       _fields.at_put(index, new FieldBuffer());
 646     } else {
 647       _fields.at(index)->kill();
 648     }
 649   }
 650 











  // Called when 'value' is stored somewhere in memory. If it is one of the
  // newly allocated objects we are tracking, it may now alias other memory,
  // so we must stop treating it as a private, unaliased slice of memory.
  void store_value(Value value) {
    int index = _newobjects.find(value);
    if (index != -1) {
      // stored a newly allocated object into another object.
      // Assume we've lost track of it as separate slice of memory.
      // We could do better by keeping track of whether individual
      // fields could alias each other.
      _newobjects.remove_at(index);
      // pull out the field info and store it at the end of the
      // field info list to be reused later.
      _fields.append(_fields.at(index));
      _fields.remove_at(index);
    }
  }
 665 
  // Forget everything this buffer knows: tracked new objects, their
  // per-object field states, and the cached field values.
  void kill() {
    _newobjects.trunc_to(0);
    _objects.trunc_to(0);
    _values.kill();
  }


 962       scope_data()->set_jsr_return_address_local(-1);
 963     }
 964   }
 965 
 966   state->store_local(index, round_fp(x));
 967 }
 968 
 969 
// Build IR for an array load bytecode (iaload, aaload, ...): pops index and
// array from the stack and pushes the loaded element.
void GraphBuilder::load_indexed(BasicType type) {
  // In case of in block code motion in range check elimination
  ValueStack* state_before = copy_state_indexed_access();
  compilation()->set_has_access_indexed(true);
  Value index = ipop();
  Value array = apop();
  Value length = NULL;
  // Materialize the array length eagerly when requested globally
  // (CSEArrayLength) or when it is likely constant-foldable: the array
  // comes from a constant field, or from a NewArray with constant length.
  if (CSEArrayLength ||
      (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
      (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
    length = append(new ArrayLength(array, state_before));
  }
  push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
}
 984 
 985 
// Build IR for an array store bytecode (iastore, aastore, ...): pops value,
// index and array from the stack and appends the store instruction.
void GraphBuilder::store_indexed(BasicType type) {
  // In case of in block code motion in range check elimination
  ValueStack* state_before = copy_state_indexed_access();
  compilation()->set_has_access_indexed(true);
  Value value = pop(as_ValueType(type));
  Value index = ipop();
  Value array = apop();
  Value length = NULL;
  // Materialize the array length eagerly when requested globally
  // (CSEArrayLength) or when it is likely constant-foldable: the array
  // comes from a constant field, or from a NewArray with constant length.
  if (CSEArrayLength ||
      (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
      (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
    length = append(new ArrayLength(array, state_before));
  }
  ciType* array_type = array->declared_type();
  bool check_boolean = false;
  if (array_type != NULL) {
    if (array_type->is_loaded() &&
      array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
      assert(type == T_BYTE, "boolean store uses bastore");
      // Statically known boolean[]: normalize the stored value to 0/1
      // by masking with 1 before the store.
      Value mask = append(new Constant(new IntConstant(1)));
      value = append(new LogicOp(Bytecodes::_iand, value, mask));
    }
  } else if (type == T_BYTE) {
    // Array type unknown at compile time: a bastore may hit either a
    // byte[] or a boolean[], so let the store check at runtime.
    check_boolean = true;
  }

  StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
  append(result);
  // The stored value may now be reachable through the array; tell the
  // memory buffer to stop tracking it as unaliased.
  _memory->store_value(value);

  if (type == T_OBJECT && is_profiling()) {
    // Note that we'd collect profile data in this method if we wanted it.
    compilation()->set_would_profile(true);

    if (profile_checkcasts()) {
      result->set_profiled_method(method());
      result->set_profiled_bci(bci());
      result->set_should_profile(true);
    }
  }
}
1026 
1027 
1028 void GraphBuilder::stack_op(Bytecodes::Code code) {
1029   switch (code) {
1030     case Bytecodes::_pop:


1602   // Attach dimension info to stable arrays.
1603   if (FoldStableValues &&
1604       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1605     ciArray* array = field_value.as_object()->as_array();
1606     jint dimension = field->type()->as_array_klass()->dimension();
1607     value = new StableArrayConstant(array, dimension);
1608   }
1609 
1610   switch (field_type) {
1611     case T_ARRAY:
1612     case T_OBJECT:
1613       if (field_value.as_object()->should_be_constant()) {
1614         return new Constant(value);
1615       }
1616       return NULL; // Not a constant.
1617     default:
1618       return new Constant(value);
1619   }
1620 }
1621 













1622 void GraphBuilder::access_field(Bytecodes::Code code) {
1623   bool will_link;
1624   ciField* field = stream()->get_field(will_link);
1625   ciInstanceKlass* holder = field->holder();
1626   BasicType field_type = field->type()->basic_type();
1627   ValueType* type = as_ValueType(field_type);




























1628   // call will_link again to determine if the field is valid.
1629   const bool needs_patching = !holder->is_loaded() ||
1630                               !field->will_link(method(), code) ||

1631                               PatchALot;
1632 
1633   ValueStack* state_before = NULL;
1634   if (!holder->is_initialized() || needs_patching) {
1635     // save state before instruction for debug info when
1636     // deoptimization happens during patching
1637     state_before = copy_state_before();
1638   }
1639 
1640   Value obj = NULL;
1641   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1642     if (state_before != NULL) {
1643       // build a patching constant
1644       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1645     } else {
1646       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1647     }
1648   }
1649 
1650   if (field->is_final() && (code == Bytecodes::_putfield)) {


1658     }
1659   }
1660 
1661   const int offset = !needs_patching ? field->offset() : -1;
1662   switch (code) {
1663     case Bytecodes::_getstatic: {
1664       // check for compile-time constants, i.e., initialized static final fields
1665       Value constant = NULL;
1666       if (field->is_static_constant() && !PatchALot) {
1667         ciConstant field_value = field->constant_value();
1668         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1669                "stable static w/ default value shouldn't be a constant");
1670         constant = make_constant(field_value, field);
1671       }
1672       if (constant != NULL) {
1673         push(type, append(constant));
1674       } else {
1675         if (state_before == NULL) {
1676           state_before = copy_state_for_exception();
1677         }
1678         push(type, append(new LoadField(append(obj), offset, field, true,
1679                                         state_before, needs_patching)));





1680       }
1681       break;
1682     }
1683     case Bytecodes::_putstatic: {
1684       Value val = pop(type);
1685       if (state_before == NULL) {
1686         state_before = copy_state_for_exception();
1687       }
1688       if (field->type()->basic_type() == T_BOOLEAN) {
1689         Value mask = append(new Constant(new IntConstant(1)));
1690         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1691       }
1692       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1693       break;
1694     }
1695     case Bytecodes::_getfield: {
1696       // Check for compile-time constants, i.e., trusted final non-static fields.
1697       Value constant = NULL;
1698       obj = apop();
1699       ObjectType* obj_type = obj->type()->as_ObjectType();
1700       if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
1701         ciObject* const_oop = obj_type->constant_value();
1702         if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1703           ciConstant field_value = field->constant_value_of(const_oop);
1704           if (field_value.is_valid()) {
1705             constant = make_constant(field_value, field);
1706             // For CallSite objects add a dependency for invalidation of the optimization.
1707             if (field->is_call_site_target()) {
1708               ciCallSite* call_site = const_oop->as_call_site();
1709               if (!call_site->is_constant_call_site()) {
1710                 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1711                 dependency_recorder()->assert_call_site_target_value(call_site, target);
1712               }
1713             }
1714           }
1715         }
1716       }
1717       if (constant != NULL) {
1718         push(type, append(constant));
1719       } else {
1720         if (state_before == NULL) {
1721           state_before = copy_state_for_exception();
1722         }


1723         LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1724         Value replacement = !needs_patching ? _memory->load(load) : load;
1725         if (replacement != load) {
1726           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already by linked");
1727           push(type, replacement);
1728         } else {
1729           push(type, append(load));
1730         }













1731       }
1732       break;
1733     }
1734     case Bytecodes::_putfield: {
1735       Value val = pop(type);
1736       obj = apop();
1737       if (state_before == NULL) {
1738         state_before = copy_state_for_exception();
1739       }
1740       if (field->type()->basic_type() == T_BOOLEAN) {
1741         Value mask = append(new Constant(new IntConstant(1)));
1742         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1743       }


1744       StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1745       if (!needs_patching) store = _memory->store(store);
1746       if (store != NULL) {
1747         append(store);
1748       }







1749       break;
1750     }
1751     default:
1752       ShouldNotReachHere();
1753       break;
1754   }
1755 }
1756 































































1757 
// Returns the recorder used to register compilation assumptions (e.g. leaf
// types, call site targets). Only valid when deoptimization is available
// (DeoptC1), since a violated assumption must be able to invalidate the code.
Dependencies* GraphBuilder::dependency_recorder() const {
  assert(DeoptC1, "need debug information");
  return compilation()->dependency_recorder();
}
1762 
1763 // How many arguments do we want to profile?
1764 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1765   int n = 0;
1766   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1767   start = has_receiver ? 1 : 0;
1768   if (profile_arguments()) {
1769     ciProfileData* data = method()->method_data()->bci_to_data(bci());
1770     if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1771       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1772     }
1773   }
1774   // If we are inlining then we need to collect arguments to profile parameters for the target
1775   if (profile_parameters() && target != NULL) {
1776     if (target->method_data() != NULL && target->method_data()->parameters_type_data() != NULL) {


2094       null_check(recv);
2095     }
2096 
2097     if (is_profiling()) {
2098       // Note that we'd collect profile data in this method if we wanted it.
2099       compilation()->set_would_profile(true);
2100 
2101       if (profile_calls()) {
2102         assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
2103         ciKlass* target_klass = NULL;
2104         if (cha_monomorphic_target != NULL) {
2105           target_klass = cha_monomorphic_target->holder();
2106         } else if (exact_target != NULL) {
2107           target_klass = exact_target->holder();
2108         }
2109         profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
2110       }
2111     }
2112   }
2113 
2114   Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before);

2115   // push result
2116   append_split(result);
2117 
2118   if (result_type != voidType) {
2119     if (method()->is_strict()) {
2120       push(result_type, round_fp(result));
2121     } else {
2122       push(result_type, result);
2123     }
2124   }
2125   if (profile_return() && result_type->is_object_kind()) {
2126     profile_return_type(result, target);
2127   }
2128 }
2129 
2130 
// Build IR for a 'new' bytecode: allocate an instance of the klass at the
// current bytecode. Note: klass_index is unused here — the klass is taken
// from the bytecode stream directly.
void GraphBuilder::new_instance(int klass_index) {
  ValueStack* state_before = copy_state_exhandling();
  bool will_link;
  ciKlass* klass = stream()->get_klass(will_link);
  assert(klass->is_instance_klass(), "must be an instance klass");
  NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
  // Register the allocation with the memory buffer so field accesses on
  // the fresh object can be tracked and redundant loads eliminated.
  _memory->new_instance(new_instance);
  apush(append_split(new_instance));
}
2140 










2141 
// Build IR for a 'newarray' bytecode: pops the length and pushes a new
// primitive array whose element type is encoded in the bytecode operand.
void GraphBuilder::new_type_array() {
  ValueStack* state_before = copy_state_exhandling();
  apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
}
2146 
2147 
// Build IR for an 'anewarray' bytecode: pops the length and pushes a new
// object array of the given element klass.
void GraphBuilder::new_object_array() {
  bool will_link;
  ciKlass* klass = stream()->get_klass(will_link);
  // If the klass is unresolved (or PatchALot forces patching), keep the
  // full state before the instruction for deoptimization during patching.
  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
  NewArray* n = new NewObjectArray(klass, ipop(), state_before);
  apush(append_split(n));
}
2155 
2156 
2157 bool GraphBuilder::direct_compare(ciKlass* k) {
2158   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2159     ciInstanceKlass* ik = k->as_instance_klass();
2160     if (ik->is_final()) {
2161       return true;
2162     } else {
2163       if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2164         // test class is leaf class
2165         dependency_recorder()->assert_leaf_type(ik);
2166         return true;
2167       }
2168     }
2169   }
2170   return false;
2171 }
2172 
2173 
// Build IR for a 'checkcast' bytecode: pops an object, appends a CheckCast
// and pushes the (type-narrowed) object back. Note: klass_index is unused
// here — the klass is taken from the bytecode stream directly.
void GraphBuilder::check_cast(int klass_index) {
  bool will_link;
  ciKlass* klass = stream()->get_klass(will_link);
  // Unresolved klass (or PatchALot) needs the full pre-instruction state
  // for deoptimization during patching.
  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
  CheckCast* c = new CheckCast(klass, apop(), state_before);
  apush(append_split(c));
  // Mark whether the check can be done with a plain klass-pointer compare.
  c->set_direct_compare(direct_compare(klass));

  if (is_profiling()) {
    // Note that we'd collect profile data in this method if we wanted it.
    compilation()->set_would_profile(true);

    if (profile_checkcasts()) {
      c->set_profiled_method(method());
      c->set_profiled_bci(bci());
      c->set_should_profile(true);
    }
  }
}
2193 
2194 
// Build IR for an 'instanceof' bytecode: pops an object and pushes the int
// result of the type test. Mirrors check_cast() above. Note: klass_index is
// unused here — the klass is taken from the bytecode stream directly.
void GraphBuilder::instance_of(int klass_index) {
  bool will_link;
  ciKlass* klass = stream()->get_klass(will_link);
  // Unresolved klass (or PatchALot) needs the full pre-instruction state
  // for deoptimization during patching.
  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
  InstanceOf* i = new InstanceOf(klass, apop(), state_before);
  ipush(append_split(i));
  // Mark whether the check can be done with a plain klass-pointer compare.
  i->set_direct_compare(direct_compare(klass));

  if (is_profiling()) {
    // Note that we'd collect profile data in this method if we wanted it.
    compilation()->set_would_profile(true);

    if (profile_checkcasts()) {
      i->set_profiled_method(method());
      i->set_profiled_bci(bci());
      i->set_should_profile(true);
    }
  }
}
2214 
2215 
// Build IR for a 'monitorenter' bytecode: locks object x at the given bci.
void GraphBuilder::monitorenter(Value x, int bci) {
  // save state before locking in case of deoptimization after a NullPointerException
  ValueStack* state_before = copy_state_for_exception_with_bci(bci);
  append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
  // Another thread may mutate fields once it can contend on the monitor,
  // so invalidate all cached memory state.
  kill_all();
}
2222 
2223 
// Build IR for a 'monitorexit' bytecode: unlocks the innermost monitor.
void GraphBuilder::monitorexit(Value x, int bci) {
  append_with_bci(new MonitorExit(x, state()->unlock()), bci);
  // Releasing the lock publishes our writes; conservatively drop all
  // cached memory state.
  kill_all();
}
2228 
2229 
2230 void GraphBuilder::new_multi_array(int dimensions) {
2231   bool will_link;
2232   ciKlass* klass = stream()->get_klass(will_link);
2233   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2234 
2235   Values* dims = new Values(dimensions, dimensions, NULL);
2236   // fill in all dimensions
2237   int i = dimensions;
2238   while (i-- > 0) dims->at_put(i, ipop());
2239   // create array


2855       case Bytecodes::_invokevirtual  : // fall through
2856       case Bytecodes::_invokespecial  : // fall through
2857       case Bytecodes::_invokestatic   : // fall through
2858       case Bytecodes::_invokedynamic  : // fall through
2859       case Bytecodes::_invokeinterface: invoke(code); break;
2860       case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
2861       case Bytecodes::_newarray       : new_type_array(); break;
2862       case Bytecodes::_anewarray      : new_object_array(); break;
2863       case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
2864       case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
2865       case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
2866       case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
2867       case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
2868       case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
2869       case Bytecodes::_wide           : ShouldNotReachHere(); break;
2870       case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
2871       case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
2872       case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
2873       case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
2874       case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;


2875       case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", NULL);
2876       default                         : ShouldNotReachHere(); break;
2877     }
2878 
2879     if (log != NULL)
2880       log->clear_context(); // skip marker if nothing was printed
2881 
2882     // save current bci to setup Goto at the end
2883     prev_bci = s.cur_bci();
2884 
2885   }
2886   CHECK_BAILOUT_(NULL);
2887   // stop processing of this block (see try_inline_full)
2888   if (_skip_block) {
2889     _skip_block = false;
2890     assert(_last && _last->as_BlockEnd(), "");
2891     return _last->as_BlockEnd();
2892   }
2893   // if there are any, check if last instruction is a BlockEnd instruction
2894   BlockEnd* end = last()->as_BlockEnd();


3140   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3141 
3142   assert(state->caller_state() == NULL, "should be top scope");
3143   state->clear_locals();
3144   Goto* g = new Goto(target, false);
3145   append(g);
3146   _osr_entry->set_end(g);
3147   target->merge(_osr_entry->end()->state());
3148 
3149   scope_data()->set_stream(NULL);
3150 }
3151 
3152 
// Construct the ValueStack describing the method's state on entry:
// receiver (if any) and incoming arguments stored as locals, plus the
// implicit lock slot for synchronized methods.
ValueStack* GraphBuilder::state_at_entry() {
  ValueStack* state = new ValueStack(scope(), NULL);

  // Set up locals for receiver
  int idx = 0;
  if (!method()->is_static()) {
    // we should always see the receiver
    state->store_local(idx, new Local(method()->holder(), objectType, idx, true));
    idx = 1;
  }

  // Set up locals for incoming arguments
  ciSignature* sig = method()->signature();
  for (int i = 0; i < sig->count(); i++) {
    ciType* type = sig->type_at(i);
    BasicType basic_type = type->basic_type();
    // don't allow T_ARRAY to propagate into locals types
    if (basic_type == T_ARRAY) basic_type = T_OBJECT;
    ValueType* vt = as_ValueType(basic_type);
    state->store_local(idx, new Local(type, vt, idx, false));
    // long/double arguments occupy two local slots
    idx += type->size();
  }

  // lock synchronized method
  if (method()->is_synchronized()) {
    state->lock(NULL);
  }

  return state;
}
3183 
3184 
3185 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3186   : _scope_data(NULL)
3187   , _compilation(compilation)
3188   , _memory(new MemoryBuffer())
3189   , _inline_bailout_msg(NULL)
3190   , _instruction_count(0)
3191   , _osr_entry(NULL)
3192 {




  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_CFGPrinter.hpp"
  27 #include "c1/c1_Canonicalizer.hpp"
  28 #include "c1/c1_Compilation.hpp"
  29 #include "c1/c1_GraphBuilder.hpp"
  30 #include "c1/c1_InstructionPrinter.hpp"
  31 #include "ci/ciCallSite.hpp"
  32 #include "ci/ciField.hpp"
  33 #include "ci/ciKlass.hpp"
  34 #include "ci/ciMemberName.hpp"
  35 #include "ci/ciUtilities.inline.hpp"
  36 #include "ci/ciValueKlass.hpp"
  37 #include "compiler/compileBroker.hpp"
  38 #include "interpreter/bytecode.hpp"
  39 #include "jfr/jfrEvents.hpp"
  40 #include "memory/resourceArea.hpp"
  41 #include "oops/oop.inline.hpp"
  42 #include "runtime/sharedRuntime.hpp"
  43 #include "runtime/compilationPolicy.hpp"
  44 #include "runtime/vm_version.hpp"
  45 #include "utilities/bitMap.inline.hpp"
  46 
  47 class BlockListBuilder {
  48  private:
  49   Compilation* _compilation;
  50   IRScope*     _scope;
  51 
  52   BlockList    _blocks;                // internal list of all blocks
  53   BlockList*   _bci2block;             // mapping from bci to blocks for GraphBuilder
  54 
  55   // fields used by mark_loops
  56   ResourceBitMap _active;              // for iteration of control flow graph


 632         }
 633 #endif
 634         assert(result->type()->tag() == load->type()->tag(), "wrong types");
 635         return result;
 636       }
 637     }
 638     return load;
 639   }
 640 
 641   // Record this newly allocated object
 642   void new_instance(NewInstance* object) {
 643     int index = _newobjects.length();
 644     _newobjects.append(object);
 645     if (_fields.at_grow(index, NULL) == NULL) {
 646       _fields.at_put(index, new FieldBuffer());
 647     } else {
 648       _fields.at(index)->kill();
 649     }
 650   }
 651 
  // Record a newly allocated value type instance. Same tracking logic as
  // the NewInstance overload above: the fresh object gets a (possibly
  // recycled) FieldBuffer for redundant-load elimination.
  void new_instance(NewValueTypeInstance* object) {
    int index = _newobjects.length();
    _newobjects.append(object);
    if (_fields.at_grow(index, NULL) == NULL) {
      _fields.at_put(index, new FieldBuffer());
    } else {
      _fields.at(index)->kill();
    }
  }
 662 
  // Called when 'value' is stored somewhere in memory. If it is one of the
  // newly allocated objects we are tracking, it may now alias other memory,
  // so we must stop treating it as a private, unaliased slice of memory.
  void store_value(Value value) {
    int index = _newobjects.find(value);
    if (index != -1) {
      // stored a newly allocated object into another object.
      // Assume we've lost track of it as separate slice of memory.
      // We could do better by keeping track of whether individual
      // fields could alias each other.
      _newobjects.remove_at(index);
      // pull out the field info and store it at the end of the
      // field info list to be reused later.
      _fields.append(_fields.at(index));
      _fields.remove_at(index);
    }
  }
 677 
  // Forget everything this buffer knows: tracked new objects, their
  // per-object field states, and the cached field values.
  void kill() {
    _newobjects.trunc_to(0);
    _objects.trunc_to(0);
    _values.kill();
  }


 974       scope_data()->set_jsr_return_address_local(-1);
 975     }
 976   }
 977 
 978   state->store_local(index, round_fp(x));
 979 }
 980 
 981 
// Build IR for an array load bytecode (iaload, aaload, ...): pops index and
// array from the stack and pushes the loaded element. For loaded flattened
// value-type arrays, the element is copied into a fresh buffered instance.
void GraphBuilder::load_indexed(BasicType type) {
  // In case of in block code motion in range check elimination
  ValueStack* state_before = copy_state_indexed_access();
  compilation()->set_has_access_indexed(true);
  Value index = ipop();
  Value array = apop();
  Value length = NULL;
  // Materialize the array length eagerly when requested globally
  // (CSEArrayLength) or when it is likely constant-foldable: the array
  // comes from a constant field, or from a NewArray with constant length.
  if (CSEArrayLength ||
      (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
      (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
    length = append(new ArrayLength(array, state_before));
  }

  if (array->is_loaded_flattened_array()) {
    // Flattened value-type array: the element is not a reference, so
    // allocate a new value type instance and have the LoadIndexed fill it
    // in (via set_vt); the new instance is what ends up on the stack.
    ciType* array_type = array->declared_type();
    ciValueKlass* elem_klass = array_type->as_value_array_klass()->element_klass()->as_value_klass();
    NewValueTypeInstance* new_instance = new NewValueTypeInstance(elem_klass, state_before, false);
    _memory->new_instance(new_instance);
    apush(append_split(new_instance));
    LoadIndexed* load_indexed = new LoadIndexed(array, index, length, type, state_before);
    load_indexed->set_vt(new_instance);
    append(load_indexed);
  } else {
    push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
  }
}
1008 
1009 
// Build IR for an array store bytecode (iastore, aastore, ...): pops value,
// index and array from the stack and appends the store instruction.
void GraphBuilder::store_indexed(BasicType type) {
  // In case of in block code motion in range check elimination
  ValueStack* state_before = copy_state_indexed_access();
  compilation()->set_has_access_indexed(true);
  Value value = pop(as_ValueType(type));
  Value index = ipop();
  Value array = apop();
  Value length = NULL;
  // Materialize the array length eagerly when requested globally
  // (CSEArrayLength) or when it is likely constant-foldable: the array
  // comes from a constant field, or from a NewArray with constant length.
  if (CSEArrayLength ||
      (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
      (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
    length = append(new ArrayLength(array, state_before));
  }
  ciType* array_type = array->declared_type();
  bool check_boolean = false;
  if (array_type != NULL) {
    if (array_type->is_loaded() &&
      array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
      assert(type == T_BYTE, "boolean store uses bastore");
      // Statically known boolean[]: normalize the stored value to 0/1
      // by masking with 1 before the store.
      Value mask = append(new Constant(new IntConstant(1)));
      value = append(new LogicOp(Bytecodes::_iand, value, mask));
    }
  } else if (type == T_BYTE) {
    // Array type unknown at compile time: a bastore may hit either a
    // byte[] or a boolean[], so let the store check at runtime.
    check_boolean = true;
  }

  StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
  append(result);
  // The stored value may now be reachable through the array; tell the
  // memory buffer to stop tracking it as unaliased.
  _memory->store_value(value);

  if (type == T_OBJECT && is_profiling()) {
    // Note that we'd collect profile data in this method if we wanted it.
    compilation()->set_would_profile(true);

    if (profile_checkcasts()) {
      result->set_profiled_method(method());
      result->set_profiled_bci(bci());
      result->set_should_profile(true);
    }
  }
}
1051 
1052 
1053 void GraphBuilder::stack_op(Bytecodes::Code code) {
1054   switch (code) {
1055     case Bytecodes::_pop:


1627   // Attach dimension info to stable arrays.
1628   if (FoldStableValues &&
1629       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1630     ciArray* array = field_value.as_object()->as_array();
1631     jint dimension = field->type()->as_array_klass()->dimension();
1632     value = new StableArrayConstant(array, dimension);
1633   }
1634 
1635   switch (field_type) {
1636     case T_ARRAY:
1637     case T_OBJECT:
1638       if (field_value.as_object()->should_be_constant()) {
1639         return new Constant(value);
1640       }
1641       return NULL; // Not a constant.
1642     default:
1643       return new Constant(value);
1644   }
1645 }
1646 
// Copy all non-static fields of a value type from 'src' (starting at
// src_off) to 'dest' (starting at dest_off), one field-sized load/store
// pair at a time. Offsets within the value klass are rebased against
// first_field_offset so they can be applied at an arbitrary placement.
void GraphBuilder::copy_value_content(ciValueKlass* vk, Value src, int src_off, Value dest, int dest_off,
    ValueStack* state_before, bool needs_patching) {
  for (int i = 0; i < vk->nof_nonstatic_fields(); i++) {
    ciField* inner_field = vk->nonstatic_field_at(i);
    assert(!inner_field->is_flattened(), "the iteration over nested fields is handled by the loop itself");
    // Offset of this field relative to the start of the value's payload.
    int off = inner_field->offset() - vk->first_field_offset();
    LoadField* load = new LoadField(src, src_off + off, inner_field, false, state_before, needs_patching);
    Value replacement = append(load);
    StoreField* store = new StoreField(dest, dest_off + off, inner_field, replacement, false, state_before, needs_patching);
    append(store);
  }
}
1659 
1660 void GraphBuilder::access_field(Bytecodes::Code code) {
1661   bool will_link;
1662   ciField* field = stream()->get_field(will_link);
1663   ciInstanceKlass* holder = field->holder();
1664   BasicType field_type = field->type()->basic_type();
1665   ValueType* type = as_ValueType(field_type);
1666 
1667   // Null check and deopt for getting static value field
1668   ciValueKlass* value_klass = NULL;
1669   Value default_value = NULL;
1670   bool needs_deopt = false;
1671   if (code == Bytecodes::_getstatic && !field->is_static_constant() &&
1672       field->layout_type() == T_VALUETYPE && field->is_flattenable()) {
1673     value_klass = field->type()->as_value_klass();
1674     if (holder->is_loaded()) {
1675       ciInstance* mirror = field->holder()->java_mirror();
1676       ciObject* val = mirror->field_value(field).as_object();
1677       if (val->is_null_object()) {
1678         // This is a non-nullable static field, but it's not initialized.
1679         // We need to do a null check, and replace it with the default value.
1680       } else {
1681         // No need to perform null check on this static field
1682         value_klass = NULL;
1683       }
1684     }
1685     if (value_klass != NULL) {
1686       if (value_klass->is_loaded()) {
1687         default_value = new Constant(new InstanceConstant(value_klass->default_value_instance()));
1688       } else {
1689         needs_deopt = true;
1690       }
1691     }
1692   }
1693 
1694   // call will_link again to determine if the field is valid.
1695   const bool needs_patching = !holder->is_loaded() ||
1696                               !field->will_link(method(), code) ||
1697                               needs_deopt ||
1698                               PatchALot;
1699 
1700   ValueStack* state_before = NULL;
1701   if (!holder->is_initialized() || needs_patching) {
1702     // save state before instruction for debug info when
1703     // deoptimization happens during patching
1704     state_before = copy_state_before();
1705   }
1706 
1707   Value obj = NULL;
1708   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1709     if (state_before != NULL) {
1710       // build a patching constant
1711       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1712     } else {
1713       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1714     }
1715   }
1716 
1717   if (field->is_final() && (code == Bytecodes::_putfield)) {


1725     }
1726   }
1727 
1728   const int offset = !needs_patching ? field->offset() : -1;
1729   switch (code) {
1730     case Bytecodes::_getstatic: {
1731       // check for compile-time constants, i.e., initialized static final fields
1732       Value constant = NULL;
1733       if (field->is_static_constant() && !PatchALot) {
1734         ciConstant field_value = field->constant_value();
1735         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1736                "stable static w/ default value shouldn't be a constant");
1737         constant = make_constant(field_value, field);
1738       }
1739       if (constant != NULL) {
1740         push(type, append(constant));
1741       } else {
1742         if (state_before == NULL) {
1743           state_before = copy_state_for_exception();
1744         }
1745         LoadField* load_field = new LoadField(append(obj), offset, field, true,
1746                                         state_before, needs_patching,
1747                                         value_klass, default_value);
1748         if (field->layout_type() == T_VALUETYPE && field->is_flattenable()) {
1749           load_field->set_never_null(true);
1750         }
1751         push(type, append(load_field));
1752       }
1753       break;
1754     }
1755     case Bytecodes::_putstatic: {
1756       Value val = pop(type);
1757       if (state_before == NULL) {
1758         state_before = copy_state_for_exception();
1759       }
1760       if (field->type()->basic_type() == T_BOOLEAN) {
1761         Value mask = append(new Constant(new IntConstant(1)));
1762         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1763       }
1764       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1765       break;
1766     }
1767     case Bytecodes::_getfield: {
1768       // Check for compile-time constants, i.e., trusted final non-static fields.
1769       Value constant = NULL;
1770       obj = apop();
1771       ObjectType* obj_type = obj->type()->as_ObjectType();
1772       if (field->is_constant() && !field->is_flattened() && obj_type->is_constant() && !PatchALot) {
1773         ciObject* const_oop = obj_type->constant_value();
1774         if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1775           ciConstant field_value = field->constant_value_of(const_oop);
1776           if (field_value.is_valid()) {
1777             constant = make_constant(field_value, field);
1778             // For CallSite objects add a dependency for invalidation of the optimization.
1779             if (field->is_call_site_target()) {
1780               ciCallSite* call_site = const_oop->as_call_site();
1781               if (!call_site->is_constant_call_site()) {
1782                 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1783                 dependency_recorder()->assert_call_site_target_value(call_site, target);
1784               }
1785             }
1786           }
1787         }
1788       }
1789       if (constant != NULL) {
1790         push(type, append(constant));
1791       } else {
1792         if (state_before == NULL) {
1793           state_before = copy_state_for_exception();
1794         }
1795 
1796         if (!field->is_flattened()) {
1797           LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1798           Value replacement = !needs_patching ? _memory->load(load) : load;
1799           if (replacement != load) {
1800             assert(replacement->is_linked() || !replacement->can_be_linked(), "should already by linked");
1801             push(type, replacement);
1802           } else {
1803             push(type, append(load));
1804           }
1805         } else { // flattened field, not optimized solution: re-instantiate the flattened value
1806           assert(field->type()->is_valuetype(), "Sanity check");
1807           ciValueKlass* value_klass = field->type()->as_value_klass();
1808           int flattening_offset = field->offset() - value_klass->first_field_offset();
1809           assert(field->type()->is_valuetype(), "Sanity check");
1810           scope()->set_wrote_final();
1811           scope()->set_wrote_fields();
1812           NewValueTypeInstance* new_instance = new NewValueTypeInstance(value_klass, state_before, false);
1813           _memory->new_instance(new_instance);
1814           apush(append_split(new_instance));
1815           copy_value_content(value_klass, obj, field->offset() , new_instance, value_klass->first_field_offset(),
1816                        state_before, needs_patching);
1817         }
1818       }
1819       break;
1820     }
1821     case Bytecodes::_putfield: {
1822       Value val = pop(type);
1823       obj = apop();
1824       if (state_before == NULL) {
1825         state_before = copy_state_for_exception();
1826       }
1827       if (field->type()->basic_type() == T_BOOLEAN) {
1828         Value mask = append(new Constant(new IntConstant(1)));
1829         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1830       }
1831 
1832       if (!field->is_flattened()) {
1833         StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1834         if (!needs_patching) store = _memory->store(store);
1835         if (store != NULL) {
1836           append(store);
1837         }
1838       } else {
1839         assert(field->type()->is_valuetype(), "Sanity check");
1840         ciValueKlass* value_klass = field->type()->as_value_klass();
1841         int flattening_offset = field->offset() - value_klass->first_field_offset();
1842         copy_value_content(value_klass, val, value_klass->first_field_offset(), obj, field->offset(),
1843                    state_before, needs_patching);
1844       }
1845       break;
1846     }
1847     default:
1848       ShouldNotReachHere();
1849       break;
1850   }
1851 }
1852 
1853 // Baseline version of withfield, allocate every time
1854 void GraphBuilder::withfield(int field_index)
1855 {
1856   bool will_link;
1857   ciField* field_modify = stream()->get_field(will_link);
1858   ciInstanceKlass* holder = field_modify->holder();
1859   assert(holder->is_valuetype(), "must be a value klass");
1860   BasicType field_type = field_modify->type()->basic_type();
1861   ValueType* type = as_ValueType(field_type);
1862 
1863   // call will_link again to determine if the field is valid.
1864   const bool needs_patching = !holder->is_loaded() ||
1865                               !field_modify->will_link(method(), Bytecodes::_withfield) ||
1866                               PatchALot;
1867 
1868 
1869   scope()->set_wrote_final();
1870   scope()->set_wrote_fields();
1871 
1872   const int offset = !needs_patching ? field_modify->offset() : -1;
1873   Value val = pop(type);
1874   Value obj = apop();
1875 
1876   ValueStack* state_before = copy_state_for_exception();
1877 
1878   NewValueTypeInstance* new_instance = new NewValueTypeInstance(holder->as_value_klass(), state_before, false);
1879   _memory->new_instance(new_instance);
1880   apush(append_split(new_instance));
1881 
1882   for (int i = 0; i < holder->nof_nonstatic_fields(); i++) {
1883     ciField* field = holder->nonstatic_field_at(i);
1884     int off = field->offset();
1885 
1886     if (field->offset() != offset) {
1887       if (field->is_flattened()) {
1888         assert(field->type()->is_valuetype(), "Sanity check");
1889         assert(field->type()->is_valuetype(), "Only value types can be flattened");
1890         ciValueKlass* vk = field->type()->as_value_klass();
1891         copy_value_content(vk, obj, off, new_instance, vk->first_field_offset(), state_before, needs_patching);
1892       } else {
1893         // Only load those fields who are not modified
1894         LoadField* load = new LoadField(obj, off, field, false, state_before, needs_patching);
1895         Value replacement = append(load);
1896         StoreField* store = new StoreField(new_instance, off, field, replacement, false, state_before, needs_patching);
1897         append(store);
1898       }
1899     }
1900   }
1901 
1902   // Field to modify
1903   if (field_modify->type()->basic_type() == T_BOOLEAN) {
1904     Value mask = append(new Constant(new IntConstant(1)));
1905     val = append(new LogicOp(Bytecodes::_iand, val, mask));
1906   }
1907   if (field_modify->is_flattened()) {
1908     assert(field_modify->type()->is_valuetype(), "Only value types can be flattened");
1909     ciValueKlass* vk = field_modify->type()->as_value_klass();
1910     copy_value_content(vk, val, vk->first_field_offset(), new_instance, field_modify->offset(), state_before, needs_patching);
1911   } else {
1912     StoreField* store = new StoreField(new_instance, offset, field_modify, val, false, state_before, needs_patching);
1913     append(store);
1914   }
1915 }
1916 
1917 Dependencies* GraphBuilder::dependency_recorder() const {
1918   assert(DeoptC1, "need debug information");
1919   return compilation()->dependency_recorder();
1920 }
1921 
1922 // How many arguments do we want to profile?
1923 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1924   int n = 0;
1925   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1926   start = has_receiver ? 1 : 0;
1927   if (profile_arguments()) {
1928     ciProfileData* data = method()->method_data()->bci_to_data(bci());
1929     if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1930       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1931     }
1932   }
1933   // If we are inlining then we need to collect arguments to profile parameters for the target
1934   if (profile_parameters() && target != NULL) {
1935     if (target->method_data() != NULL && target->method_data()->parameters_type_data() != NULL) {


2253       null_check(recv);
2254     }
2255 
2256     if (is_profiling()) {
2257       // Note that we'd collect profile data in this method if we wanted it.
2258       compilation()->set_would_profile(true);
2259 
2260       if (profile_calls()) {
2261         assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
2262         ciKlass* target_klass = NULL;
2263         if (cha_monomorphic_target != NULL) {
2264           target_klass = cha_monomorphic_target->holder();
2265         } else if (exact_target != NULL) {
2266           target_klass = exact_target->holder();
2267         }
2268         profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
2269       }
2270     }
2271   }
2272 
2273   Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before,
2274                               declared_signature->returns_never_null());
2275   // push result
2276   append_split(result);
2277 
2278   if (result_type != voidType) {
2279     if (method()->is_strict()) {
2280       push(result_type, round_fp(result));
2281     } else {
2282       push(result_type, result);
2283     }
2284   }
2285   if (profile_return() && result_type->is_object_kind()) {
2286     profile_return_type(result, target);
2287   }
2288 }
2289 
2290 
2291 void GraphBuilder::new_instance(int klass_index) {
2292   ValueStack* state_before = copy_state_exhandling();
2293   bool will_link;
2294   ciKlass* klass = stream()->get_klass(will_link);
2295   assert(klass->is_instance_klass(), "must be an instance klass");
2296   assert(!klass->is_valuetype(), "must not be a value klass");
2297   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2298   _memory->new_instance(new_instance);
2299   apush(append_split(new_instance));
2300 }
2301 
2302 void GraphBuilder::new_value_type_instance(int klass_index) {
2303   ValueStack* state_before = copy_state_exhandling();
2304   bool will_link;
2305   ciKlass* klass = stream()->get_klass(will_link);
2306   assert(klass->is_valuetype(), "must be a value klass");
2307   NewValueTypeInstance* new_instance = new NewValueTypeInstance(klass->as_value_klass(),
2308       state_before, stream()->is_unresolved_klass());
2309   _memory->new_instance(new_instance);
2310   apush(append_split(new_instance));
2311 }
2312 
2313 void GraphBuilder::new_type_array() {
2314   ValueStack* state_before = copy_state_exhandling();
2315   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2316 }
2317 
2318 
2319 void GraphBuilder::new_object_array() {
2320   bool will_link;
2321   ciKlass* klass = stream()->get_klass(will_link);
2322   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2323   NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2324   if (stream()->is_klass_never_null()) {
2325     n->set_never_null(true);
2326   }
2327   apush(append_split(n));
2328 }
2329 
2330 
2331 bool GraphBuilder::direct_compare(ciKlass* k) {
2332   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2333     ciInstanceKlass* ik = k->as_instance_klass();
2334     if (ik->is_final()) {
2335       return true;
2336     } else {
2337       if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2338         // test class is leaf class
2339         dependency_recorder()->assert_leaf_type(ik);
2340         return true;
2341       }
2342     }
2343   }
2344   return false;
2345 }
2346 
2347 
2348 void GraphBuilder::check_cast(int klass_index) {
2349   bool will_link;
2350   ciKlass* klass = stream()->get_klass(will_link);
2351   bool never_null = stream()->is_klass_never_null();
2352   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
2353   CheckCast* c = new CheckCast(klass, apop(), state_before, never_null);
2354   apush(append_split(c));
2355   c->set_direct_compare(direct_compare(klass));
2356 
2357   if (is_profiling()) {
2358     // Note that we'd collect profile data in this method if we wanted it.
2359     compilation()->set_would_profile(true);
2360 
2361     if (profile_checkcasts()) {
2362       c->set_profiled_method(method());
2363       c->set_profiled_bci(bci());
2364       c->set_should_profile(true);
2365     }
2366   }
2367 }
2368 
2369 
2370 void GraphBuilder::instance_of(int klass_index) {
2371   bool will_link;
2372   ciKlass* klass = stream()->get_klass(will_link);
2373   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2374   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2375   ipush(append_split(i));
2376   i->set_direct_compare(direct_compare(klass));
2377 
2378   if (is_profiling()) {
2379     // Note that we'd collect profile data in this method if we wanted it.
2380     compilation()->set_would_profile(true);
2381 
2382     if (profile_checkcasts()) {
2383       i->set_profiled_method(method());
2384       i->set_profiled_bci(bci());
2385       i->set_should_profile(true);
2386     }
2387   }
2388 }
2389 
2390 
2391 void GraphBuilder::monitorenter(Value x, int bci) {
2392   bool maybe_valuetype = false;
2393   if (bci == InvocationEntryBci) {
2394     // Called by GraphBuilder::inline_sync_entry.
2395 #ifdef ASSERT
2396     ciType* obj_type = x->declared_type();
2397     assert(obj_type == NULL || !obj_type->is_valuetype(), "valuetypes cannot have synchronized methods");
2398 #endif
2399   } else {
2400     // We are compiling a monitorenter bytecode
2401     if (EnableValhalla) {
2402       ciType* obj_type = x->declared_type();
2403       if (obj_type == NULL || obj_type->is_valuetype() || obj_type->as_klass()->is_java_lang_Object()) {
2404         // If we're (possibly) locking on a valuetype, check for markOopDesc::always_locked_pattern
2405         // and throw IMSE. (obj_type is null for Phi nodes, so let's just be conservative).
2406         maybe_valuetype = true;
2407       }
2408     }
2409   }
2410 
2411   // save state before locking in case of deoptimization after a NullPointerException
2412   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2413   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_valuetype), bci);
2414   kill_all();
2415 }
2416 
2417 
2418 void GraphBuilder::monitorexit(Value x, int bci) {
2419   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2420   kill_all();
2421 }
2422 
2423 
2424 void GraphBuilder::new_multi_array(int dimensions) {
2425   bool will_link;
2426   ciKlass* klass = stream()->get_klass(will_link);
2427   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2428 
2429   Values* dims = new Values(dimensions, dimensions, NULL);
2430   // fill in all dimensions
2431   int i = dimensions;
2432   while (i-- > 0) dims->at_put(i, ipop());
2433   // create array


3049       case Bytecodes::_invokevirtual  : // fall through
3050       case Bytecodes::_invokespecial  : // fall through
3051       case Bytecodes::_invokestatic   : // fall through
3052       case Bytecodes::_invokedynamic  : // fall through
3053       case Bytecodes::_invokeinterface: invoke(code); break;
3054       case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
3055       case Bytecodes::_newarray       : new_type_array(); break;
3056       case Bytecodes::_anewarray      : new_object_array(); break;
3057       case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
3058       case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
3059       case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
3060       case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
3061       case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
3062       case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
3063       case Bytecodes::_wide           : ShouldNotReachHere(); break;
3064       case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
3065       case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
3066       case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
3067       case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
3068       case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;
3069       case Bytecodes::_defaultvalue   : new_value_type_instance(s.get_index_u2()); break;
3070       case Bytecodes::_withfield      : withfield(s.get_index_u2()); break;
3071       case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", NULL);
3072       default                         : ShouldNotReachHere(); break;
3073     }
3074 
3075     if (log != NULL)
3076       log->clear_context(); // skip marker if nothing was printed
3077 
3078     // save current bci to setup Goto at the end
3079     prev_bci = s.cur_bci();
3080 
3081   }
3082   CHECK_BAILOUT_(NULL);
3083   // stop processing of this block (see try_inline_full)
3084   if (_skip_block) {
3085     _skip_block = false;
3086     assert(_last && _last->as_BlockEnd(), "");
3087     return _last->as_BlockEnd();
3088   }
3089   // if there are any, check if last instruction is a BlockEnd instruction
3090   BlockEnd* end = last()->as_BlockEnd();


3336   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3337 
3338   assert(state->caller_state() == NULL, "should be top scope");
3339   state->clear_locals();
3340   Goto* g = new Goto(target, false);
3341   append(g);
3342   _osr_entry->set_end(g);
3343   target->merge(_osr_entry->end()->state());
3344 
3345   scope_data()->set_stream(NULL);
3346 }
3347 
3348 
3349 ValueStack* GraphBuilder::state_at_entry() {
3350   ValueStack* state = new ValueStack(scope(), NULL);
3351 
3352   // Set up locals for receiver
3353   int idx = 0;
3354   if (!method()->is_static()) {
3355     // we should always see the receiver
3356     state->store_local(idx, new Local(method()->holder(), objectType, idx,
3357              /*receiver*/ true, /*never_null*/ method()->holder()->is_value_array_klass()));
3358     idx = 1;
3359   }
3360 
3361   // Set up locals for incoming arguments
3362   ciSignature* sig = method()->signature();
3363   for (int i = 0; i < sig->count(); i++) {
3364     ciType* type = sig->type_at(i);
3365     BasicType basic_type = type->basic_type();
3366     // don't allow T_ARRAY to propagate into locals types
3367     if (basic_type == T_ARRAY || basic_type == T_VALUETYPE) basic_type = T_OBJECT;
3368     ValueType* vt = as_ValueType(basic_type);
3369     state->store_local(idx, new Local(type, vt, idx, false, sig->is_never_null_at(i)));
3370     idx += type->size();
3371   }
3372 
3373   // lock synchronized method
3374   if (method()->is_synchronized()) {
3375     state->lock(NULL);
3376   }
3377 
3378   return state;
3379 }
3380 
3381 
3382 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3383   : _scope_data(NULL)
3384   , _compilation(compilation)
3385   , _memory(new MemoryBuffer())
3386   , _inline_bailout_msg(NULL)
3387   , _instruction_count(0)
3388   , _osr_entry(NULL)
3389 {


< prev index next >