1 /*
   2  * Copyright (c) 1999, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_CFGPrinter.hpp"
  27 #include "c1/c1_Canonicalizer.hpp"
  28 #include "c1/c1_Compilation.hpp"
  29 #include "c1/c1_GraphBuilder.hpp"
  30 #include "c1/c1_InstructionPrinter.hpp"
  31 #include "ci/ciCallSite.hpp"
  32 #include "ci/ciField.hpp"
  33 #include "ci/ciKlass.hpp"
  34 #include "ci/ciMemberName.hpp"
  35 #include "ci/ciUtilities.inline.hpp"
  36 #include "compiler/compileBroker.hpp"
  37 #include "interpreter/bytecode.hpp"
  38 #include "jfr/jfrEvents.hpp"
  39 #include "memory/resourceArea.hpp"
  40 #include "oops/oop.inline.hpp"
  41 #include "runtime/sharedRuntime.hpp"
  42 #include "runtime/compilationPolicy.hpp"
  43 #include "runtime/vm_version.hpp"
  44 #include "utilities/bitMap.inline.hpp"
  45 
  46 class BlockListBuilder {
  47  private:
  48   Compilation* _compilation;
  49   IRScope*     _scope;
  50 
  51   BlockList    _blocks;                // internal list of all blocks
  52   BlockList*   _bci2block;             // mapping from bci to blocks for GraphBuilder
  53 
  54   // fields used by mark_loops
  55   ResourceBitMap _active;              // for iteration of control flow graph
  56   ResourceBitMap _visited;             // for iteration of control flow graph
  57   intArray       _loop_map;            // caches the information if a block is contained in a loop
  58   int            _next_loop_index;     // next free loop number
  59   int            _next_block_number;   // for reverse postorder numbering of blocks
  60 
  61   // accessors
  62   Compilation*  compilation() const              { return _compilation; }
  63   IRScope*      scope() const                    { return _scope; }
  64   ciMethod*     method() const                   { return scope()->method(); }
  65   XHandlers*    xhandlers() const                { return scope()->xhandlers(); }
  66 
  67   // unified bailout support
  68   void          bailout(const char* msg) const   { compilation()->bailout(msg); }
  69   bool          bailed_out() const               { return compilation()->bailed_out(); }
  70 
  71   // helper functions
  72   BlockBegin* make_block_at(int bci, BlockBegin* predecessor);
  73   void handle_exceptions(BlockBegin* current, int cur_bci);
  74   void handle_jsr(BlockBegin* current, int sr_bci, int next_bci);
  75   void store_one(BlockBegin* current, int local);
  76   void store_two(BlockBegin* current, int local);
  77   void set_entries(int osr_bci);
  78   void set_leaders();
  79 
  80   void make_loop_header(BlockBegin* block);
  81   void mark_loops();
  82   int  mark_loops(BlockBegin* b, bool in_subroutine);
  83 
  84   // debugging
  85 #ifndef PRODUCT
  86   void print();
  87 #endif
  88 
  89  public:
  90   // creation
  91   BlockListBuilder(Compilation* compilation, IRScope* scope, int osr_bci);
  92 
  93   // accessors for GraphBuilder
  94   BlockList*    bci2block() const                { return _bci2block; }
  95 };
  96 
  97 
  98 // Implementation of BlockListBuilder
  99 
 100 BlockListBuilder::BlockListBuilder(Compilation* compilation, IRScope* scope, int osr_bci)
 101  : _compilation(compilation)
 102  , _scope(scope)
 103  , _blocks(16)
 104  , _bci2block(new BlockList(scope->method()->code_size(), NULL))
 105  , _active()         // size not known yet
 106  , _visited()        // size not known yet
 107  , _loop_map() // size not known yet
 108  , _next_loop_index(0)
 109  , _next_block_number(0)
 110 {
 111   set_entries(osr_bci);
 112   set_leaders();
 113   CHECK_BAILOUT();
 114 
 115   mark_loops();
 116   NOT_PRODUCT(if (PrintInitialBlockList) print());
 117 
 118 #ifndef PRODUCT
 119   if (PrintCFGToFile) {
 120     stringStream title;
 121     title.print("BlockListBuilder ");
 122     scope->method()->print_name(&title);
 123     CFGPrinter::print_cfg(_bci2block, title.as_string(), false, false);
 124   }
 125 #endif
 126 }
 127 
 128 
 129 void BlockListBuilder::set_entries(int osr_bci) {
 130   // generate start blocks
 131   BlockBegin* std_entry = make_block_at(0, NULL);
 132   if (scope()->caller() == NULL) {
 133     std_entry->set(BlockBegin::std_entry_flag);
 134   }
 135   if (osr_bci != -1) {
 136     BlockBegin* osr_entry = make_block_at(osr_bci, NULL);
 137     osr_entry->set(BlockBegin::osr_entry_flag);
 138   }
 139 
 140   // generate exception entry blocks
 141   XHandlers* list = xhandlers();
 142   const int n = list->length();
 143   for (int i = 0; i < n; i++) {
 144     XHandler* h = list->handler_at(i);
 145     BlockBegin* entry = make_block_at(h->handler_bci(), NULL);
 146     entry->set(BlockBegin::exception_entry_flag);
 147     h->set_entry_block(entry);
 148   }
 149 }
 150 
 151 
 152 BlockBegin* BlockListBuilder::make_block_at(int cur_bci, BlockBegin* predecessor) {
 153   assert(method()->bci_block_start().at(cur_bci), "wrong block starts of MethodLivenessAnalyzer");
 154 
 155   BlockBegin* block = _bci2block->at(cur_bci);
 156   if (block == NULL) {
 157     block = new BlockBegin(cur_bci);
 158     block->init_stores_to_locals(method()->max_locals());
 159     _bci2block->at_put(cur_bci, block);
 160     _blocks.append(block);
 161 
 162     assert(predecessor == NULL || predecessor->bci() < cur_bci, "targets for backward branches must already exist");
 163   }
 164 
 165   if (predecessor != NULL) {
 166     if (block->is_set(BlockBegin::exception_entry_flag)) {
 167       BAILOUT_("Exception handler can be reached by both normal and exceptional control flow", block);
 168     }
 169 
 170     predecessor->add_successor(block);
 171     block->increment_total_preds();
 172   }
 173 
 174   return block;
 175 }
 176 
 177 
 178 inline void BlockListBuilder::store_one(BlockBegin* current, int local) {
 179   current->stores_to_locals().set_bit(local);
 180 }
 181 inline void BlockListBuilder::store_two(BlockBegin* current, int local) {
 182   store_one(current, local);
 183   store_one(current, local + 1);
 184 }
 185 
 186 
 187 void BlockListBuilder::handle_exceptions(BlockBegin* current, int cur_bci) {
 188   // Draws edges from a block to its exception handlers
 189   XHandlers* list = xhandlers();
 190   const int n = list->length();
 191 
 192   for (int i = 0; i < n; i++) {
 193     XHandler* h = list->handler_at(i);
 194 
 195     if (h->covers(cur_bci)) {
 196       BlockBegin* entry = h->entry_block();
 197       assert(entry != NULL && entry == _bci2block->at(h->handler_bci()), "entry must be set");
 198       assert(entry->is_set(BlockBegin::exception_entry_flag), "flag must be set");
 199 
 200       // add each exception handler only once
 201       if (!current->is_successor(entry)) {
 202         current->add_successor(entry);
 203         entry->increment_total_preds();
 204       }
 205 
 206       // stop when reaching catchall
 207       if (h->catch_type() == 0) break;
 208     }
 209   }
 210 }
 211 
 212 void BlockListBuilder::handle_jsr(BlockBegin* current, int sr_bci, int next_bci) {
 213   // start a new block after jsr-bytecode and link this block into cfg
 214   make_block_at(next_bci, current);
 215 
 216   // start a new block at the subroutine entry at mark it with special flag
 217   BlockBegin* sr_block = make_block_at(sr_bci, current);
 218   if (!sr_block->is_set(BlockBegin::subroutine_entry_flag)) {
 219     sr_block->set(BlockBegin::subroutine_entry_flag);
 220   }
 221 }
 222 
 223 
 224 void BlockListBuilder::set_leaders() {
 225   bool has_xhandlers = xhandlers()->has_handlers();
 226   BlockBegin* current = NULL;
 227 
 228   // The information which bci starts a new block simplifies the analysis
 229   // Without it, backward branches could jump to a bci where no block was created
 230   // during bytecode iteration. This would require the creation of a new block at the
 231   // branch target and a modification of the successor lists.
 232   const BitMap& bci_block_start = method()->bci_block_start();
 233 
 234   ciBytecodeStream s(method());
 235   while (s.next() != ciBytecodeStream::EOBC()) {
 236     int cur_bci = s.cur_bci();
 237 
 238     if (bci_block_start.at(cur_bci)) {
 239       current = make_block_at(cur_bci, current);
 240     }
 241     assert(current != NULL, "must have current block");
 242 
 243     if (has_xhandlers && GraphBuilder::can_trap(method(), s.cur_bc())) {
 244       handle_exceptions(current, cur_bci);
 245     }
 246 
 247     switch (s.cur_bc()) {
 248       // track stores to local variables for selective creation of phi functions
 249       case Bytecodes::_iinc:     store_one(current, s.get_index()); break;
 250       case Bytecodes::_istore:   store_one(current, s.get_index()); break;
 251       case Bytecodes::_lstore:   store_two(current, s.get_index()); break;
 252       case Bytecodes::_fstore:   store_one(current, s.get_index()); break;
 253       case Bytecodes::_dstore:   store_two(current, s.get_index()); break;
 254       case Bytecodes::_astore:   store_one(current, s.get_index()); break;
 255       case Bytecodes::_istore_0: store_one(current, 0); break;
 256       case Bytecodes::_istore_1: store_one(current, 1); break;
 257       case Bytecodes::_istore_2: store_one(current, 2); break;
 258       case Bytecodes::_istore_3: store_one(current, 3); break;
 259       case Bytecodes::_lstore_0: store_two(current, 0); break;
 260       case Bytecodes::_lstore_1: store_two(current, 1); break;
 261       case Bytecodes::_lstore_2: store_two(current, 2); break;
 262       case Bytecodes::_lstore_3: store_two(current, 3); break;
 263       case Bytecodes::_fstore_0: store_one(current, 0); break;
 264       case Bytecodes::_fstore_1: store_one(current, 1); break;
 265       case Bytecodes::_fstore_2: store_one(current, 2); break;
 266       case Bytecodes::_fstore_3: store_one(current, 3); break;
 267       case Bytecodes::_dstore_0: store_two(current, 0); break;
 268       case Bytecodes::_dstore_1: store_two(current, 1); break;
 269       case Bytecodes::_dstore_2: store_two(current, 2); break;
 270       case Bytecodes::_dstore_3: store_two(current, 3); break;
 271       case Bytecodes::_astore_0: store_one(current, 0); break;
 272       case Bytecodes::_astore_1: store_one(current, 1); break;
 273       case Bytecodes::_astore_2: store_one(current, 2); break;
 274       case Bytecodes::_astore_3: store_one(current, 3); break;
 275 
 276       // track bytecodes that affect the control flow
 277       case Bytecodes::_athrow:  // fall through
 278       case Bytecodes::_ret:     // fall through
 279       case Bytecodes::_ireturn: // fall through
 280       case Bytecodes::_lreturn: // fall through
 281       case Bytecodes::_freturn: // fall through
 282       case Bytecodes::_dreturn: // fall through
 283       case Bytecodes::_areturn: // fall through
 284       case Bytecodes::_return:
 285         current = NULL;
 286         break;
 287 
 288       case Bytecodes::_ifeq:      // fall through
 289       case Bytecodes::_ifne:      // fall through
 290       case Bytecodes::_iflt:      // fall through
 291       case Bytecodes::_ifge:      // fall through
 292       case Bytecodes::_ifgt:      // fall through
 293       case Bytecodes::_ifle:      // fall through
 294       case Bytecodes::_if_icmpeq: // fall through
 295       case Bytecodes::_if_icmpne: // fall through
 296       case Bytecodes::_if_icmplt: // fall through
 297       case Bytecodes::_if_icmpge: // fall through
 298       case Bytecodes::_if_icmpgt: // fall through
 299       case Bytecodes::_if_icmple: // fall through
 300       case Bytecodes::_if_acmpeq: // fall through
 301       case Bytecodes::_if_acmpne: // fall through
 302       case Bytecodes::_ifnull:    // fall through
 303       case Bytecodes::_ifnonnull:
 304         make_block_at(s.next_bci(), current);
 305         make_block_at(s.get_dest(), current);
 306         current = NULL;
 307         break;
 308 
 309       case Bytecodes::_goto:
 310         make_block_at(s.get_dest(), current);
 311         current = NULL;
 312         break;
 313 
 314       case Bytecodes::_goto_w:
 315         make_block_at(s.get_far_dest(), current);
 316         current = NULL;
 317         break;
 318 
 319       case Bytecodes::_jsr:
 320         handle_jsr(current, s.get_dest(), s.next_bci());
 321         current = NULL;
 322         break;
 323 
 324       case Bytecodes::_jsr_w:
 325         handle_jsr(current, s.get_far_dest(), s.next_bci());
 326         current = NULL;
 327         break;
 328 
 329       case Bytecodes::_tableswitch: {
 330         // set block for each case
 331         Bytecode_tableswitch sw(&s);
 332         int l = sw.length();
 333         for (int i = 0; i < l; i++) {
 334           make_block_at(cur_bci + sw.dest_offset_at(i), current);
 335         }
 336         make_block_at(cur_bci + sw.default_offset(), current);
 337         current = NULL;
 338         break;
 339       }
 340 
 341       case Bytecodes::_lookupswitch: {
 342         // set block for each case
 343         Bytecode_lookupswitch sw(&s);
 344         int l = sw.number_of_pairs();
 345         for (int i = 0; i < l; i++) {
 346           make_block_at(cur_bci + sw.pair_at(i).offset(), current);
 347         }
 348         make_block_at(cur_bci + sw.default_offset(), current);
 349         current = NULL;
 350         break;
 351       }
 352 
 353       default:
 354         break;
 355     }
 356   }
 357 }
 358 
 359 
 360 void BlockListBuilder::mark_loops() {
 361   ResourceMark rm;
 362 
 363   _active.initialize(BlockBegin::number_of_blocks());
 364   _visited.initialize(BlockBegin::number_of_blocks());
 365   _loop_map = intArray(BlockBegin::number_of_blocks(), BlockBegin::number_of_blocks(), 0);
 366   _next_loop_index = 0;
 367   _next_block_number = _blocks.length();
 368 
 369   // recursively iterate the control flow graph
 370   mark_loops(_bci2block->at(0), false);
 371   assert(_next_block_number >= 0, "invalid block numbers");
 372 
 373   // Remove dangling Resource pointers before the ResourceMark goes out-of-scope.
 374   _active.resize(0);
 375   _visited.resize(0);
 376 }
 377 
 378 void BlockListBuilder::make_loop_header(BlockBegin* block) {
 379   if (block->is_set(BlockBegin::exception_entry_flag)) {
 380     // exception edges may look like loops but don't mark them as such
 381     // since it screws up block ordering.
 382     return;
 383   }
 384   if (!block->is_set(BlockBegin::parser_loop_header_flag)) {
 385     block->set(BlockBegin::parser_loop_header_flag);
 386 
 387     assert(_loop_map.at(block->block_id()) == 0, "must not be set yet");
 388     assert(0 <= _next_loop_index && _next_loop_index < BitsPerInt, "_next_loop_index is used as a bit-index in integer");
 389     _loop_map.at_put(block->block_id(), 1 << _next_loop_index);
 390     if (_next_loop_index < 31) _next_loop_index++;
 391   } else {
 392     // block already marked as loop header
 393     assert(is_power_of_2((unsigned int)_loop_map.at(block->block_id())), "exactly one bit must be set");
 394   }
 395 }
 396 
 397 int BlockListBuilder::mark_loops(BlockBegin* block, bool in_subroutine) {
 398   int block_id = block->block_id();
 399 
 400   if (_visited.at(block_id)) {
 401     if (_active.at(block_id)) {
 402       // reached block via backward branch
 403       make_loop_header(block);
 404     }
 405     // return cached loop information for this block
 406     return _loop_map.at(block_id);
 407   }
 408 
 409   if (block->is_set(BlockBegin::subroutine_entry_flag)) {
 410     in_subroutine = true;
 411   }
 412 
 413   // set active and visited bits before successors are processed
 414   _visited.set_bit(block_id);
 415   _active.set_bit(block_id);
 416 
 417   intptr_t loop_state = 0;
 418   for (int i = block->number_of_sux() - 1; i >= 0; i--) {
 419     // recursively process all successors
 420     loop_state |= mark_loops(block->sux_at(i), in_subroutine);
 421   }
 422 
 423   // clear active-bit after all successors are processed
 424   _active.clear_bit(block_id);
 425 
 426   // reverse-post-order numbering of all blocks
 427   block->set_depth_first_number(_next_block_number);
 428   _next_block_number--;
 429 
 430   if (loop_state != 0 || in_subroutine ) {
 431     // block is contained at least in one loop, so phi functions are necessary
 432     // phi functions are also necessary for all locals stored in a subroutine
 433     scope()->requires_phi_function().set_union(block->stores_to_locals());
 434   }
 435 
 436   if (block->is_set(BlockBegin::parser_loop_header_flag)) {
 437     int header_loop_state = _loop_map.at(block_id);
 438     assert(is_power_of_2((unsigned)header_loop_state), "exactly one bit must be set");
 439 
 440     // If the highest bit is set (i.e. when integer value is negative), the method
 441     // has 32 or more loops. This bit is never cleared because it is used for multiple loops
 442     if (header_loop_state >= 0) {
 443       clear_bits(loop_state, header_loop_state);
 444     }
 445   }
 446 
 447   // cache and return loop information for this block
 448   _loop_map.at_put(block_id, loop_state);
 449   return loop_state;
 450 }
 451 
 452 
 453 #ifndef PRODUCT
 454 
 455 int compare_depth_first(BlockBegin** a, BlockBegin** b) {
 456   return (*a)->depth_first_number() - (*b)->depth_first_number();
 457 }
 458 
 459 void BlockListBuilder::print() {
 460   tty->print("----- initial block list of BlockListBuilder for method ");
 461   method()->print_short_name();
 462   tty->cr();
 463 
 464   // better readability if blocks are sorted in processing order
 465   _blocks.sort(compare_depth_first);
 466 
 467   for (int i = 0; i < _blocks.length(); i++) {
 468     BlockBegin* cur = _blocks.at(i);
 469     tty->print("%4d: B%-4d bci: %-4d  preds: %-4d ", cur->depth_first_number(), cur->block_id(), cur->bci(), cur->total_preds());
 470 
 471     tty->print(cur->is_set(BlockBegin::std_entry_flag)               ? " std" : "    ");
 472     tty->print(cur->is_set(BlockBegin::osr_entry_flag)               ? " osr" : "    ");
 473     tty->print(cur->is_set(BlockBegin::exception_entry_flag)         ? " ex" : "   ");
 474     tty->print(cur->is_set(BlockBegin::subroutine_entry_flag)        ? " sr" : "   ");
 475     tty->print(cur->is_set(BlockBegin::parser_loop_header_flag)      ? " lh" : "   ");
 476 
 477     if (cur->number_of_sux() > 0) {
 478       tty->print("    sux: ");
 479       for (int j = 0; j < cur->number_of_sux(); j++) {
 480         BlockBegin* sux = cur->sux_at(j);
 481         tty->print("B%d ", sux->block_id());
 482       }
 483     }
 484     tty->cr();
 485   }
 486 }
 487 
 488 #endif
 489 
 490 
 491 // A simple growable array of Values indexed by ciFields
 492 class FieldBuffer: public CompilationResourceObj {
 493  private:
 494   GrowableArray<Value> _values;
 495 
 496  public:
 497   FieldBuffer() {}
 498 
 499   void kill() {
 500     _values.trunc_to(0);
 501   }
 502 
 503   Value at(ciField* field) {
 504     assert(field->holder()->is_loaded(), "must be a loaded field");
 505     int offset = field->offset();
 506     if (offset < _values.length()) {
 507       return _values.at(offset);
 508     } else {
 509       return NULL;
 510     }
 511   }
 512 
 513   void at_put(ciField* field, Value value) {
 514     assert(field->holder()->is_loaded(), "must be a loaded field");
 515     int offset = field->offset();
 516     _values.at_put_grow(offset, value, NULL);
 517   }
 518 
 519 };
 520 
 521 
 522 // MemoryBuffer is fairly simple model of the current state of memory.
 523 // It partitions memory into several pieces.  The first piece is
 524 // generic memory where little is known about the owner of the memory.
 525 // This is conceptually represented by the tuple <O, F, V> which says
 526 // that the field F of object O has value V.  This is flattened so
 527 // that F is represented by the offset of the field and the parallel
 528 // arrays _objects and _values are used for O and V.  Loads of O.F can
 529 // simply use V.  Newly allocated objects are kept in a separate list
 530 // along with a parallel array for each object which represents the
 531 // current value of its fields.  Stores of the default value to fields
 532 // which have never been stored to before are eliminated since they
 533 // are redundant.  Once newly allocated objects are stored into
 534 // another object or they are passed out of the current compile they
 535 // are treated like generic memory.
 536 
 537 class MemoryBuffer: public CompilationResourceObj {
 538  private:
 539   FieldBuffer                 _values;
 540   GrowableArray<Value>        _objects;
 541   GrowableArray<Value>        _newobjects;
 542   GrowableArray<FieldBuffer*> _fields;
 543 
 544  public:
 545   MemoryBuffer() {}
 546 
 547   StoreField* store(StoreField* st) {
 548     if (!EliminateFieldAccess) {
 549       return st;
 550     }
 551 
 552     Value object = st->obj();
 553     Value value = st->value();
 554     ciField* field = st->field();
 555     if (field->holder()->is_loaded()) {
 556       int offset = field->offset();
 557       int index = _newobjects.find(object);
 558       if (index != -1) {
 559         // newly allocated object with no other stores performed on this field
 560         FieldBuffer* buf = _fields.at(index);
 561         if (buf->at(field) == NULL && is_default_value(value)) {
 562 #ifndef PRODUCT
 563           if (PrintIRDuringConstruction && Verbose) {
 564             tty->print_cr("Eliminated store for object %d:", index);
 565             st->print_line();
 566           }
 567 #endif
 568           return NULL;
 569         } else {
 570           buf->at_put(field, value);
 571         }
 572       } else {
 573         _objects.at_put_grow(offset, object, NULL);
 574         _values.at_put(field, value);
 575       }
 576 
 577       store_value(value);
 578     } else {
 579       // if we held onto field names we could alias based on names but
 580       // we don't know what's being stored to so kill it all.
 581       kill();
 582     }
 583     return st;
 584   }
 585 
 586 
 587   // return true if this value correspond to the default value of a field.
 588   bool is_default_value(Value value) {
 589     Constant* con = value->as_Constant();
 590     if (con) {
 591       switch (con->type()->tag()) {
 592         case intTag:    return con->type()->as_IntConstant()->value() == 0;
 593         case longTag:   return con->type()->as_LongConstant()->value() == 0;
 594         case floatTag:  return jint_cast(con->type()->as_FloatConstant()->value()) == 0;
 595         case doubleTag: return jlong_cast(con->type()->as_DoubleConstant()->value()) == jlong_cast(0);
 596         case objectTag: return con->type() == objectNull;
 597         default:  ShouldNotReachHere();
 598       }
 599     }
 600     return false;
 601   }
 602 
 603 
 604   // return either the actual value of a load or the load itself
 605   Value load(LoadField* load) {
 606     if (!EliminateFieldAccess) {
 607       return load;
 608     }
 609 
 610     if (RoundFPResults && UseSSE < 2 && load->type()->is_float_kind()) {
 611       // can't skip load since value might get rounded as a side effect
 612       return load;
 613     }
 614 
 615     ciField* field = load->field();
 616     Value object   = load->obj();
 617     if (field->holder()->is_loaded() && !field->is_volatile()) {
 618       int offset = field->offset();
 619       Value result = NULL;
 620       int index = _newobjects.find(object);
 621       if (index != -1) {
 622         result = _fields.at(index)->at(field);
 623       } else if (_objects.at_grow(offset, NULL) == object) {
 624         result = _values.at(field);
 625       }
 626       if (result != NULL) {
 627 #ifndef PRODUCT
 628         if (PrintIRDuringConstruction && Verbose) {
 629           tty->print_cr("Eliminated load: ");
 630           load->print_line();
 631         }
 632 #endif
 633         assert(result->type()->tag() == load->type()->tag(), "wrong types");
 634         return result;
 635       }
 636     }
 637     return load;
 638   }
 639 
 640   // Record this newly allocated object
 641   void new_instance(NewInstance* object) {
 642     int index = _newobjects.length();
 643     _newobjects.append(object);
 644     if (_fields.at_grow(index, NULL) == NULL) {
 645       _fields.at_put(index, new FieldBuffer());
 646     } else {
 647       _fields.at(index)->kill();
 648     }
 649   }
 650 
 651   void store_value(Value value) {
 652     int index = _newobjects.find(value);
 653     if (index != -1) {
 654       // stored a newly allocated object into another object.
 655       // Assume we've lost track of it as separate slice of memory.
 656       // We could do better by keeping track of whether individual
 657       // fields could alias each other.
 658       _newobjects.remove_at(index);
 659       // pull out the field info and store it at the end up the list
 660       // of field info list to be reused later.
 661       _fields.append(_fields.at(index));
 662       _fields.remove_at(index);
 663     }
 664   }
 665 
 666   void kill() {
 667     _newobjects.trunc_to(0);
 668     _objects.trunc_to(0);
 669     _values.kill();
 670   }
 671 };
 672 
 673 
 674 // Implementation of GraphBuilder's ScopeData
 675 
 676 GraphBuilder::ScopeData::ScopeData(ScopeData* parent)
 677   : _parent(parent)
 678   , _bci2block(NULL)
 679   , _scope(NULL)
 680   , _has_handler(false)
 681   , _stream(NULL)
 682   , _work_list(NULL)
 683   , _caller_stack_size(-1)
 684   , _continuation(NULL)
 685   , _parsing_jsr(false)
 686   , _jsr_xhandlers(NULL)
 687   , _num_returns(0)
 688   , _cleanup_block(NULL)
 689   , _cleanup_return_prev(NULL)
 690   , _cleanup_state(NULL)
 691   , _ignore_return(false)
 692 {
 693   if (parent != NULL) {
 694     _max_inline_size = (intx) ((float) NestedInliningSizeRatio * (float) parent->max_inline_size() / 100.0f);
 695   } else {
 696     _max_inline_size = MaxInlineSize;
 697   }
 698   if (_max_inline_size < MaxTrivialSize) {
 699     _max_inline_size = MaxTrivialSize;
 700   }
 701 }
 702 
 703 
 704 void GraphBuilder::kill_all() {
 705   if (UseLocalValueNumbering) {
 706     vmap()->kill_all();
 707   }
 708   _memory->kill();
 709 }
 710 
 711 
 712 BlockBegin* GraphBuilder::ScopeData::block_at(int bci) {
 713   if (parsing_jsr()) {
 714     // It is necessary to clone all blocks associated with a
 715     // subroutine, including those for exception handlers in the scope
 716     // of the method containing the jsr (because those exception
 717     // handlers may contain ret instructions in some cases).
 718     BlockBegin* block = bci2block()->at(bci);
 719     if (block != NULL && block == parent()->bci2block()->at(bci)) {
 720       BlockBegin* new_block = new BlockBegin(block->bci());
 721       if (PrintInitialBlockList) {
 722         tty->print_cr("CFG: cloned block %d (bci %d) as block %d for jsr",
 723                       block->block_id(), block->bci(), new_block->block_id());
 724       }
 725       // copy data from cloned blocked
 726       new_block->set_depth_first_number(block->depth_first_number());
 727       if (block->is_set(BlockBegin::parser_loop_header_flag)) new_block->set(BlockBegin::parser_loop_header_flag);
 728       // Preserve certain flags for assertion checking
 729       if (block->is_set(BlockBegin::subroutine_entry_flag)) new_block->set(BlockBegin::subroutine_entry_flag);
 730       if (block->is_set(BlockBegin::exception_entry_flag))  new_block->set(BlockBegin::exception_entry_flag);
 731 
 732       // copy was_visited_flag to allow early detection of bailouts
 733       // if a block that is used in a jsr has already been visited before,
 734       // it is shared between the normal control flow and a subroutine
 735       // BlockBegin::try_merge returns false when the flag is set, this leads
 736       // to a compilation bailout
 737       if (block->is_set(BlockBegin::was_visited_flag))  new_block->set(BlockBegin::was_visited_flag);
 738 
 739       bci2block()->at_put(bci, new_block);
 740       block = new_block;
 741     }
 742     return block;
 743   } else {
 744     return bci2block()->at(bci);
 745   }
 746 }
 747 
 748 
 749 XHandlers* GraphBuilder::ScopeData::xhandlers() const {
 750   if (_jsr_xhandlers == NULL) {
 751     assert(!parsing_jsr(), "");
 752     return scope()->xhandlers();
 753   }
 754   assert(parsing_jsr(), "");
 755   return _jsr_xhandlers;
 756 }
 757 
 758 
 759 void GraphBuilder::ScopeData::set_scope(IRScope* scope) {
 760   _scope = scope;
 761   bool parent_has_handler = false;
 762   if (parent() != NULL) {
 763     parent_has_handler = parent()->has_handler();
 764   }
 765   _has_handler = parent_has_handler || scope->xhandlers()->has_handlers();
 766 }
 767 
 768 
 769 void GraphBuilder::ScopeData::set_inline_cleanup_info(BlockBegin* block,
 770                                                       Instruction* return_prev,
 771                                                       ValueStack* return_state) {
 772   _cleanup_block       = block;
 773   _cleanup_return_prev = return_prev;
 774   _cleanup_state       = return_state;
 775 }
 776 
 777 
 778 void GraphBuilder::ScopeData::add_to_work_list(BlockBegin* block) {
 779   if (_work_list == NULL) {
 780     _work_list = new BlockList();
 781   }
 782 
 783   if (!block->is_set(BlockBegin::is_on_work_list_flag)) {
 784     // Do not start parsing the continuation block while in a
 785     // sub-scope
 786     if (parsing_jsr()) {
 787       if (block == jsr_continuation()) {
 788         return;
 789       }
 790     } else {
 791       if (block == continuation()) {
 792         return;
 793       }
 794     }
 795     block->set(BlockBegin::is_on_work_list_flag);
 796     _work_list->push(block);
 797 
 798     sort_top_into_worklist(_work_list, block);
 799   }
 800 }
 801 
 802 
 803 void GraphBuilder::sort_top_into_worklist(BlockList* worklist, BlockBegin* top) {
 804   assert(worklist->top() == top, "");
 805   // sort block descending into work list
 806   const int dfn = top->depth_first_number();
 807   assert(dfn != -1, "unknown depth first number");
 808   int i = worklist->length()-2;
 809   while (i >= 0) {
 810     BlockBegin* b = worklist->at(i);
 811     if (b->depth_first_number() < dfn) {
 812       worklist->at_put(i+1, b);
 813     } else {
 814       break;
 815     }
 816     i --;
 817   }
 818   if (i >= -1) worklist->at_put(i + 1, top);
 819 }
 820 
 821 
 822 BlockBegin* GraphBuilder::ScopeData::remove_from_work_list() {
 823   if (is_work_list_empty()) {
 824     return NULL;
 825   }
 826   return _work_list->pop();
 827 }
 828 
 829 
 830 bool GraphBuilder::ScopeData::is_work_list_empty() const {
 831   return (_work_list == NULL || _work_list->length() == 0);
 832 }
 833 
 834 
 835 void GraphBuilder::ScopeData::setup_jsr_xhandlers() {
 836   assert(parsing_jsr(), "");
 837   // clone all the exception handlers from the scope
 838   XHandlers* handlers = new XHandlers(scope()->xhandlers());
 839   const int n = handlers->length();
 840   for (int i = 0; i < n; i++) {
 841     // The XHandlers need to be adjusted to dispatch to the cloned
 842     // handler block instead of the default one but the synthetic
 843     // unlocker needs to be handled specially.  The synthetic unlocker
 844     // should be left alone since there can be only one and all code
 845     // should dispatch to the same one.
 846     XHandler* h = handlers->handler_at(i);
 847     assert(h->handler_bci() != SynchronizationEntryBCI, "must be real");
 848     h->set_entry_block(block_at(h->handler_bci()));
 849   }
 850   _jsr_xhandlers = handlers;
 851 }
 852 
 853 
 854 int GraphBuilder::ScopeData::num_returns() {
 855   if (parsing_jsr()) {
 856     return parent()->num_returns();
 857   }
 858   return _num_returns;
 859 }
 860 
 861 
 862 void GraphBuilder::ScopeData::incr_num_returns() {
 863   if (parsing_jsr()) {
 864     parent()->incr_num_returns();
 865   } else {
 866     ++_num_returns;
 867   }
 868 }
 869 
 870 
 871 // Implementation of GraphBuilder
 872 
 873 #define INLINE_BAILOUT(msg)        { inline_bailout(msg); return false; }
 874 
 875 
 876 void GraphBuilder::load_constant() {
 877   ciConstant con = stream()->get_constant();
 878   if (con.basic_type() == T_ILLEGAL) {
 879     // FIXME: an unresolved Dynamic constant can get here,
 880     // and that should not terminate the whole compilation.
 881     BAILOUT("could not resolve a constant");
 882   } else {
 883     ValueType* t = illegalType;
 884     ValueStack* patch_state = NULL;
 885     switch (con.basic_type()) {
 886       case T_BOOLEAN: t = new IntConstant     (con.as_boolean()); break;
 887       case T_BYTE   : t = new IntConstant     (con.as_byte   ()); break;
 888       case T_CHAR   : t = new IntConstant     (con.as_char   ()); break;
 889       case T_SHORT  : t = new IntConstant     (con.as_short  ()); break;
 890       case T_INT    : t = new IntConstant     (con.as_int    ()); break;
 891       case T_LONG   : t = new LongConstant    (con.as_long   ()); break;
 892       case T_FLOAT  : t = new FloatConstant   (con.as_float  ()); break;
 893       case T_DOUBLE : t = new DoubleConstant  (con.as_double ()); break;
 894       case T_ARRAY  : t = new ArrayConstant   (con.as_object ()->as_array   ()); break;
 895       case T_OBJECT :
 896        {
 897         ciObject* obj = con.as_object();
 898         if (!obj->is_loaded()
 899             || (PatchALot && obj->klass() != ciEnv::current()->String_klass())) {
 900           // A Class, MethodType, MethodHandle, or String.
 901           // Unloaded condy nodes show up as T_ILLEGAL, above.
 902           patch_state = copy_state_before();
 903           t = new ObjectConstant(obj);
 904         } else {
 905           // Might be a Class, MethodType, MethodHandle, or Dynamic constant
 906           // result, which might turn out to be an array.
 907           if (obj->is_null_object())
 908             t = objectNull;
 909           else if (obj->is_array())
 910             t = new ArrayConstant(obj->as_array());
 911           else
 912             t = new InstanceConstant(obj->as_instance());
 913         }
 914         break;
 915        }
 916       default       : ShouldNotReachHere();
 917     }
 918     Value x;
 919     if (patch_state != NULL) {
 920       x = new Constant(t, patch_state);
 921     } else {
 922       x = new Constant(t);
 923     }
 924     push(t, append(x));
 925   }
 926 }
 927 
 928 
 929 void GraphBuilder::load_local(ValueType* type, int index) {
 930   Value x = state()->local_at(index);
 931   assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");
 932   push(type, x);
 933 }
 934 
 935 
 936 void GraphBuilder::store_local(ValueType* type, int index) {
 937   Value x = pop(type);
 938   store_local(state(), x, index);
 939 }
 940 
 941 
 942 void GraphBuilder::store_local(ValueStack* state, Value x, int index) {
 943   if (parsing_jsr()) {
 944     // We need to do additional tracking of the location of the return
 945     // address for jsrs since we don't handle arbitrary jsr/ret
 946     // constructs. Here we are figuring out in which circumstances we
 947     // need to bail out.
 948     if (x->type()->is_address()) {
 949       scope_data()->set_jsr_return_address_local(index);
 950 
 951       // Also check parent jsrs (if any) at this time to see whether
 952       // they are using this local. We don't handle skipping over a
 953       // ret.
 954       for (ScopeData* cur_scope_data = scope_data()->parent();
 955            cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
 956            cur_scope_data = cur_scope_data->parent()) {
 957         if (cur_scope_data->jsr_return_address_local() == index) {
 958           BAILOUT("subroutine overwrites return address from previous subroutine");
 959         }
 960       }
 961     } else if (index == scope_data()->jsr_return_address_local()) {
 962       scope_data()->set_jsr_return_address_local(-1);
 963     }
 964   }
 965 
 966   state->store_local(index, round_fp(x));
 967 }
 968 
 969 
 970 void GraphBuilder::load_indexed(BasicType type) {
 971   // In case of in block code motion in range check elimination
 972   ValueStack* state_before = copy_state_indexed_access();
 973   compilation()->set_has_access_indexed(true);
 974   Value index = ipop();
 975   Value array = apop();
 976   Value length = NULL;
 977   if (CSEArrayLength ||
 978       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
 979       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
 980     length = append(new ArrayLength(array, state_before));
 981   }
 982   push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
 983 }
 984 
 985 
 986 void GraphBuilder::store_indexed(BasicType type) {
 987   // In case of in block code motion in range check elimination
 988   ValueStack* state_before = copy_state_indexed_access();
 989   compilation()->set_has_access_indexed(true);
 990   Value value = pop(as_ValueType(type));
 991   Value index = ipop();
 992   Value array = apop();
 993   Value length = NULL;
 994   if (CSEArrayLength ||
 995       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
 996       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
 997     length = append(new ArrayLength(array, state_before));
 998   }
 999   ciType* array_type = array->declared_type();
1000   bool check_boolean = false;
1001   if (array_type != NULL) {
1002     if (array_type->is_loaded() &&
1003       array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1004       assert(type == T_BYTE, "boolean store uses bastore");
1005       Value mask = append(new Constant(new IntConstant(1)));
1006       value = append(new LogicOp(Bytecodes::_iand, value, mask));
1007     }
1008   } else if (type == T_BYTE) {
1009     check_boolean = true;
1010   }
1011   StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1012   append(result);
1013   _memory->store_value(value);
1014 
1015   if (type == T_OBJECT && is_profiling()) {
1016     // Note that we'd collect profile data in this method if we wanted it.
1017     compilation()->set_would_profile(true);
1018 
1019     if (profile_checkcasts()) {
1020       result->set_profiled_method(method());
1021       result->set_profiled_bci(bci());
1022       result->set_should_profile(true);
1023     }
1024   }
1025 }
1026 
1027 
1028 void GraphBuilder::stack_op(Bytecodes::Code code) {
1029   switch (code) {
1030     case Bytecodes::_pop:
1031       { state()->raw_pop();
1032       }
1033       break;
1034     case Bytecodes::_pop2:
1035       { state()->raw_pop();
1036         state()->raw_pop();
1037       }
1038       break;
1039     case Bytecodes::_dup:
1040       { Value w = state()->raw_pop();
1041         state()->raw_push(w);
1042         state()->raw_push(w);
1043       }
1044       break;
1045     case Bytecodes::_dup_x1:
1046       { Value w1 = state()->raw_pop();
1047         Value w2 = state()->raw_pop();
1048         state()->raw_push(w1);
1049         state()->raw_push(w2);
1050         state()->raw_push(w1);
1051       }
1052       break;
1053     case Bytecodes::_dup_x2:
1054       { Value w1 = state()->raw_pop();
1055         Value w2 = state()->raw_pop();
1056         Value w3 = state()->raw_pop();
1057         state()->raw_push(w1);
1058         state()->raw_push(w3);
1059         state()->raw_push(w2);
1060         state()->raw_push(w1);
1061       }
1062       break;
1063     case Bytecodes::_dup2:
1064       { Value w1 = state()->raw_pop();
1065         Value w2 = state()->raw_pop();
1066         state()->raw_push(w2);
1067         state()->raw_push(w1);
1068         state()->raw_push(w2);
1069         state()->raw_push(w1);
1070       }
1071       break;
1072     case Bytecodes::_dup2_x1:
1073       { Value w1 = state()->raw_pop();
1074         Value w2 = state()->raw_pop();
1075         Value w3 = state()->raw_pop();
1076         state()->raw_push(w2);
1077         state()->raw_push(w1);
1078         state()->raw_push(w3);
1079         state()->raw_push(w2);
1080         state()->raw_push(w1);
1081       }
1082       break;
1083     case Bytecodes::_dup2_x2:
1084       { Value w1 = state()->raw_pop();
1085         Value w2 = state()->raw_pop();
1086         Value w3 = state()->raw_pop();
1087         Value w4 = state()->raw_pop();
1088         state()->raw_push(w2);
1089         state()->raw_push(w1);
1090         state()->raw_push(w4);
1091         state()->raw_push(w3);
1092         state()->raw_push(w2);
1093         state()->raw_push(w1);
1094       }
1095       break;
1096     case Bytecodes::_swap:
1097       { Value w1 = state()->raw_pop();
1098         Value w2 = state()->raw_pop();
1099         state()->raw_push(w1);
1100         state()->raw_push(w2);
1101       }
1102       break;
1103     default:
1104       ShouldNotReachHere();
1105       break;
1106   }
1107 }
1108 
1109 
1110 void GraphBuilder::arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* state_before) {
1111   Value y = pop(type);
1112   Value x = pop(type);
1113   // NOTE: strictfp can be queried from current method since we don't
1114   // inline methods with differing strictfp bits
1115   Value res = new ArithmeticOp(code, x, y, method()->is_strict(), state_before);
1116   // Note: currently single-precision floating-point rounding on Intel is handled at the LIRGenerator level
1117   res = append(res);
1118   if (method()->is_strict()) {
1119     res = round_fp(res);
1120   }
1121   push(type, res);
1122 }
1123 
1124 
1125 void GraphBuilder::negate_op(ValueType* type) {
1126   push(type, append(new NegateOp(pop(type))));
1127 }
1128 
1129 
1130 void GraphBuilder::shift_op(ValueType* type, Bytecodes::Code code) {
1131   Value s = ipop();
1132   Value x = pop(type);
1133   // try to simplify
1134   // Note: This code should go into the canonicalizer as soon as it can
1135   //       can handle canonicalized forms that contain more than one node.
1136   if (CanonicalizeNodes && code == Bytecodes::_iushr) {
1137     // pattern: x >>> s
1138     IntConstant* s1 = s->type()->as_IntConstant();
1139     if (s1 != NULL) {
1140       // pattern: x >>> s1, with s1 constant
1141       ShiftOp* l = x->as_ShiftOp();
1142       if (l != NULL && l->op() == Bytecodes::_ishl) {
1143         // pattern: (a << b) >>> s1
1144         IntConstant* s0 = l->y()->type()->as_IntConstant();
1145         if (s0 != NULL) {
1146           // pattern: (a << s0) >>> s1
1147           const int s0c = s0->value() & 0x1F; // only the low 5 bits are significant for shifts
1148           const int s1c = s1->value() & 0x1F; // only the low 5 bits are significant for shifts
1149           if (s0c == s1c) {
1150             if (s0c == 0) {
1151               // pattern: (a << 0) >>> 0 => simplify to: a
1152               ipush(l->x());
1153             } else {
1154               // pattern: (a << s0c) >>> s0c => simplify to: a & m, with m constant
1155               assert(0 < s0c && s0c < BitsPerInt, "adjust code below to handle corner cases");
1156               const int m = (1 << (BitsPerInt - s0c)) - 1;
1157               Value s = append(new Constant(new IntConstant(m)));
1158               ipush(append(new LogicOp(Bytecodes::_iand, l->x(), s)));
1159             }
1160             return;
1161           }
1162         }
1163       }
1164     }
1165   }
1166   // could not simplify
1167   push(type, append(new ShiftOp(code, x, s)));
1168 }
1169 
1170 
1171 void GraphBuilder::logic_op(ValueType* type, Bytecodes::Code code) {
1172   Value y = pop(type);
1173   Value x = pop(type);
1174   push(type, append(new LogicOp(code, x, y)));
1175 }
1176 
1177 
1178 void GraphBuilder::compare_op(ValueType* type, Bytecodes::Code code) {
1179   ValueStack* state_before = copy_state_before();
1180   Value y = pop(type);
1181   Value x = pop(type);
1182   ipush(append(new CompareOp(code, x, y, state_before)));
1183 }
1184 
1185 
1186 void GraphBuilder::convert(Bytecodes::Code op, BasicType from, BasicType to) {
1187   push(as_ValueType(to), append(new Convert(op, pop(as_ValueType(from)), as_ValueType(to))));
1188 }
1189 
1190 
1191 void GraphBuilder::increment() {
1192   int index = stream()->get_index();
1193   int delta = stream()->is_wide() ? (signed short)Bytes::get_Java_u2(stream()->cur_bcp() + 4) : (signed char)(stream()->cur_bcp()[2]);
1194   load_local(intType, index);
1195   ipush(append(new Constant(new IntConstant(delta))));
1196   arithmetic_op(intType, Bytecodes::_iadd);
1197   store_local(intType, index);
1198 }
1199 
1200 
1201 void GraphBuilder::_goto(int from_bci, int to_bci) {
1202   Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1203   if (is_profiling()) {
1204     compilation()->set_would_profile(true);
1205     x->set_profiled_bci(bci());
1206     if (profile_branches()) {
1207       x->set_profiled_method(method());
1208       x->set_should_profile(true);
1209     }
1210   }
1211   append(x);
1212 }
1213 
1214 
1215 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1216   BlockBegin* tsux = block_at(stream()->get_dest());
1217   BlockBegin* fsux = block_at(stream()->next_bci());
1218   bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1219   // In case of loop invariant code motion or predicate insertion
1220   // before the body of a loop the state is needed
1221   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : NULL, is_bb));
1222 
1223   assert(i->as_Goto() == NULL ||
1224          (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1225          (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1226          "safepoint state of Goto returned by canonicalizer incorrect");
1227 
1228   if (is_profiling()) {
1229     If* if_node = i->as_If();
1230     if (if_node != NULL) {
1231       // Note that we'd collect profile data in this method if we wanted it.
1232       compilation()->set_would_profile(true);
1233       // At level 2 we need the proper bci to count backedges
1234       if_node->set_profiled_bci(bci());
1235       if (profile_branches()) {
1236         // Successors can be rotated by the canonicalizer, check for this case.
1237         if_node->set_profiled_method(method());
1238         if_node->set_should_profile(true);
1239         if (if_node->tsux() == fsux) {
1240           if_node->set_swapped(true);
1241         }
1242       }
1243       return;
1244     }
1245 
1246     // Check if this If was reduced to Goto.
1247     Goto *goto_node = i->as_Goto();
1248     if (goto_node != NULL) {
1249       compilation()->set_would_profile(true);
1250       goto_node->set_profiled_bci(bci());
1251       if (profile_branches()) {
1252         goto_node->set_profiled_method(method());
1253         goto_node->set_should_profile(true);
1254         // Find out which successor is used.
1255         if (goto_node->default_sux() == tsux) {
1256           goto_node->set_direction(Goto::taken);
1257         } else if (goto_node->default_sux() == fsux) {
1258           goto_node->set_direction(Goto::not_taken);
1259         } else {
1260           ShouldNotReachHere();
1261         }
1262       }
1263       return;
1264     }
1265   }
1266 }
1267 
1268 
1269 void GraphBuilder::if_zero(ValueType* type, If::Condition cond) {
1270   Value y = append(new Constant(intZero));
1271   ValueStack* state_before = copy_state_before();
1272   Value x = ipop();
1273   if_node(x, cond, y, state_before);
1274 }
1275 
1276 
1277 void GraphBuilder::if_null(ValueType* type, If::Condition cond) {
1278   Value y = append(new Constant(objectNull));
1279   ValueStack* state_before = copy_state_before();
1280   Value x = apop();
1281   if_node(x, cond, y, state_before);
1282 }
1283 
1284 
1285 void GraphBuilder::if_same(ValueType* type, If::Condition cond) {
1286   ValueStack* state_before = copy_state_before();
1287   Value y = pop(type);
1288   Value x = pop(type);
1289   if_node(x, cond, y, state_before);
1290 }
1291 
1292 
1293 void GraphBuilder::jsr(int dest) {
1294   // We only handle well-formed jsrs (those which are "block-structured").
1295   // If the bytecodes are strange (jumping out of a jsr block) then we
1296   // might end up trying to re-parse a block containing a jsr which
1297   // has already been activated. Watch for this case and bail out.
1298   for (ScopeData* cur_scope_data = scope_data();
1299        cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1300        cur_scope_data = cur_scope_data->parent()) {
1301     if (cur_scope_data->jsr_entry_bci() == dest) {
1302       BAILOUT("too-complicated jsr/ret structure");
1303     }
1304   }
1305 
1306   push(addressType, append(new Constant(new AddressConstant(next_bci()))));
1307   if (!try_inline_jsr(dest)) {
1308     return; // bailed out while parsing and inlining subroutine
1309   }
1310 }
1311 
1312 
1313 void GraphBuilder::ret(int local_index) {
1314   if (!parsing_jsr()) BAILOUT("ret encountered while not parsing subroutine");
1315 
1316   if (local_index != scope_data()->jsr_return_address_local()) {
1317     BAILOUT("can not handle complicated jsr/ret constructs");
1318   }
1319 
1320   // Rets simply become (NON-SAFEPOINT) gotos to the jsr continuation
1321   append(new Goto(scope_data()->jsr_continuation(), false));
1322 }
1323 
1324 
1325 void GraphBuilder::table_switch() {
1326   Bytecode_tableswitch sw(stream());
1327   const int l = sw.length();
1328   if (CanonicalizeNodes && l == 1 && compilation()->env()->comp_level() != CompLevel_full_profile) {
1329     // total of 2 successors => use If instead of switch
1330     // Note: This code should go into the canonicalizer as soon as it can
1331     //       can handle canonicalized forms that contain more than one node.
1332     Value key = append(new Constant(new IntConstant(sw.low_key())));
1333     BlockBegin* tsux = block_at(bci() + sw.dest_offset_at(0));
1334     BlockBegin* fsux = block_at(bci() + sw.default_offset());
1335     bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
1336     // In case of loop invariant code motion or predicate insertion
1337     // before the body of a loop the state is needed
1338     ValueStack* state_before = copy_state_if_bb(is_bb);
1339     append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
1340   } else {
1341     // collect successors
1342     BlockList* sux = new BlockList(l + 1, NULL);
1343     int i;
1344     bool has_bb = false;
1345     for (i = 0; i < l; i++) {
1346       sux->at_put(i, block_at(bci() + sw.dest_offset_at(i)));
1347       if (sw.dest_offset_at(i) < 0) has_bb = true;
1348     }
1349     // add default successor
1350     if (sw.default_offset() < 0) has_bb = true;
1351     sux->at_put(i, block_at(bci() + sw.default_offset()));
1352     // In case of loop invariant code motion or predicate insertion
1353     // before the body of a loop the state is needed
1354     ValueStack* state_before = copy_state_if_bb(has_bb);
1355     Instruction* res = append(new TableSwitch(ipop(), sux, sw.low_key(), state_before, has_bb));
1356 #ifdef ASSERT
1357     if (res->as_Goto()) {
1358       for (i = 0; i < l; i++) {
1359         if (sux->at(i) == res->as_Goto()->sux_at(0)) {
1360           assert(res->as_Goto()->is_safepoint() == sw.dest_offset_at(i) < 0, "safepoint state of Goto returned by canonicalizer incorrect");
1361         }
1362       }
1363     }
1364 #endif
1365   }
1366 }
1367 
1368 
1369 void GraphBuilder::lookup_switch() {
1370   Bytecode_lookupswitch sw(stream());
1371   const int l = sw.number_of_pairs();
1372   if (CanonicalizeNodes && l == 1 && compilation()->env()->comp_level() != CompLevel_full_profile) {
1373     // total of 2 successors => use If instead of switch
1374     // Note: This code should go into the canonicalizer as soon as it can
1375     //       can handle canonicalized forms that contain more than one node.
1376     // simplify to If
1377     LookupswitchPair pair = sw.pair_at(0);
1378     Value key = append(new Constant(new IntConstant(pair.match())));
1379     BlockBegin* tsux = block_at(bci() + pair.offset());
1380     BlockBegin* fsux = block_at(bci() + sw.default_offset());
1381     bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
1382     // In case of loop invariant code motion or predicate insertion
1383     // before the body of a loop the state is needed
1384     ValueStack* state_before = copy_state_if_bb(is_bb);;
1385     append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
1386   } else {
1387     // collect successors & keys
1388     BlockList* sux = new BlockList(l + 1, NULL);
1389     intArray* keys = new intArray(l, l, 0);
1390     int i;
1391     bool has_bb = false;
1392     for (i = 0; i < l; i++) {
1393       LookupswitchPair pair = sw.pair_at(i);
1394       if (pair.offset() < 0) has_bb = true;
1395       sux->at_put(i, block_at(bci() + pair.offset()));
1396       keys->at_put(i, pair.match());
1397     }
1398     // add default successor
1399     if (sw.default_offset() < 0) has_bb = true;
1400     sux->at_put(i, block_at(bci() + sw.default_offset()));
1401     // In case of loop invariant code motion or predicate insertion
1402     // before the body of a loop the state is needed
1403     ValueStack* state_before = copy_state_if_bb(has_bb);
1404     Instruction* res = append(new LookupSwitch(ipop(), sux, keys, state_before, has_bb));
1405 #ifdef ASSERT
1406     if (res->as_Goto()) {
1407       for (i = 0; i < l; i++) {
1408         if (sux->at(i) == res->as_Goto()->sux_at(0)) {
1409           assert(res->as_Goto()->is_safepoint() == sw.pair_at(i).offset() < 0, "safepoint state of Goto returned by canonicalizer incorrect");
1410         }
1411       }
1412     }
1413 #endif
1414   }
1415 }
1416 
1417 void GraphBuilder::call_register_finalizer() {
1418   // If the receiver requires finalization then emit code to perform
1419   // the registration on return.
1420 
1421   // Gather some type information about the receiver
1422   Value receiver = state()->local_at(0);
1423   assert(receiver != NULL, "must have a receiver");
1424   ciType* declared_type = receiver->declared_type();
1425   ciType* exact_type = receiver->exact_type();
1426   if (exact_type == NULL &&
1427       receiver->as_Local() &&
1428       receiver->as_Local()->java_index() == 0) {
1429     ciInstanceKlass* ik = compilation()->method()->holder();
1430     if (ik->is_final()) {
1431       exact_type = ik;
1432     } else if (UseCHA && !(ik->has_subklass() || ik->is_interface())) {
1433       // test class is leaf class
1434       compilation()->dependency_recorder()->assert_leaf_type(ik);
1435       exact_type = ik;
1436     } else {
1437       declared_type = ik;
1438     }
1439   }
1440 
1441   // see if we know statically that registration isn't required
1442   bool needs_check = true;
1443   if (exact_type != NULL) {
1444     needs_check = exact_type->as_instance_klass()->has_finalizer();
1445   } else if (declared_type != NULL) {
1446     ciInstanceKlass* ik = declared_type->as_instance_klass();
1447     if (!Dependencies::has_finalizable_subclass(ik)) {
1448       compilation()->dependency_recorder()->assert_has_no_finalizable_subclasses(ik);
1449       needs_check = false;
1450     }
1451   }
1452 
1453   if (needs_check) {
1454     // Not a trivial method because C2 can do better with inlined check.
1455     compilation()->set_would_profile(true);
1456 
1457     // Perform the registration of finalizable objects.
1458     ValueStack* state_before = copy_state_for_exception();
1459     load_local(objectType, 0);
1460     append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1461                                state()->pop_arguments(1),
1462                                true, state_before, true));
1463   }
1464 }
1465 
1466 
1467 void GraphBuilder::method_return(Value x, bool ignore_return) {
1468   if (RegisterFinalizersAtInit &&
1469       method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1470     call_register_finalizer();
1471   }
1472 
1473   bool need_mem_bar = false;
1474   if (method()->name() == ciSymbol::object_initializer_name() &&
1475       (scope()->wrote_final() || (AlwaysSafeConstructors && scope()->wrote_fields())
1476                               || (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile())
1477      )){
1478     need_mem_bar = true;
1479   }
1480 
1481   BasicType bt = method()->return_type()->basic_type();
1482   switch (bt) {
1483     case T_BYTE:
1484     {
1485       Value shift = append(new Constant(new IntConstant(24)));
1486       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1487       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1488       break;
1489     }
1490     case T_SHORT:
1491     {
1492       Value shift = append(new Constant(new IntConstant(16)));
1493       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1494       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1495       break;
1496     }
1497     case T_CHAR:
1498     {
1499       Value mask = append(new Constant(new IntConstant(0xFFFF)));
1500       x = append(new LogicOp(Bytecodes::_iand, x, mask));
1501       break;
1502     }
1503     case T_BOOLEAN:
1504     {
1505       Value mask = append(new Constant(new IntConstant(1)));
1506       x = append(new LogicOp(Bytecodes::_iand, x, mask));
1507       break;
1508     }
1509     default:
1510       break;
1511   }
1512 
1513   // Check to see whether we are inlining. If so, Return
1514   // instructions become Gotos to the continuation point.
1515   if (continuation() != NULL) {
1516 
1517     int invoke_bci = state()->caller_state()->bci();
1518 
1519     if (x != NULL  && !ignore_return) {
1520       ciMethod* caller = state()->scope()->caller()->method();
1521       Bytecodes::Code invoke_raw_bc = caller->raw_code_at_bci(invoke_bci);
1522       if (invoke_raw_bc == Bytecodes::_invokehandle || invoke_raw_bc == Bytecodes::_invokedynamic) {
1523         ciType* declared_ret_type = caller->get_declared_signature_at_bci(invoke_bci)->return_type();
1524         if (declared_ret_type->is_klass() && x->exact_type() == NULL &&
1525             x->declared_type() != declared_ret_type && declared_ret_type != compilation()->env()->Object_klass()) {
1526           x = append(new TypeCast(declared_ret_type->as_klass(), x, copy_state_before()));
1527         }
1528       }
1529     }
1530 
1531     assert(!method()->is_synchronized() || InlineSynchronizedMethods, "can not inline synchronized methods yet");
1532 
1533     if (compilation()->env()->dtrace_method_probes()) {
1534       // Report exit from inline methods
1535       Values* args = new Values(1);
1536       args->push(append(new Constant(new MethodConstant(method()))));
1537       append(new RuntimeCall(voidType, "dtrace_method_exit", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), args));
1538     }
1539 
1540     // If the inlined method is synchronized, the monitor must be
1541     // released before we jump to the continuation block.
1542     if (method()->is_synchronized()) {
1543       assert(state()->locks_size() == 1, "receiver must be locked here");
1544       monitorexit(state()->lock_at(0), SynchronizationEntryBCI);
1545     }
1546 
1547     if (need_mem_bar) {
1548       append(new MemBar(lir_membar_storestore));
1549     }
1550 
1551     // State at end of inlined method is the state of the caller
1552     // without the method parameters on stack, including the
1553     // return value, if any, of the inlined method on operand stack.
1554     set_state(state()->caller_state()->copy_for_parsing());
1555     if (x != NULL) {
1556       if (!ignore_return) {
1557         state()->push(x->type(), x);
1558       }
1559       if (profile_return() && x->type()->is_object_kind()) {
1560         ciMethod* caller = state()->scope()->method();
1561         profile_return_type(x, method(), caller, invoke_bci);
1562       }
1563     }
1564     Goto* goto_callee = new Goto(continuation(), false);
1565 
1566     // See whether this is the first return; if so, store off some
1567     // of the state for later examination
1568     if (num_returns() == 0) {
1569       set_inline_cleanup_info();
1570     }
1571 
1572     // The current bci() is in the wrong scope, so use the bci() of
1573     // the continuation point.
1574     append_with_bci(goto_callee, scope_data()->continuation()->bci());
1575     incr_num_returns();
1576     return;
1577   }
1578 
1579   state()->truncate_stack(0);
1580   if (method()->is_synchronized()) {
1581     // perform the unlocking before exiting the method
1582     Value receiver;
1583     if (!method()->is_static()) {
1584       receiver = _initial_state->local_at(0);
1585     } else {
1586       receiver = append(new Constant(new ClassConstant(method()->holder())));
1587     }
1588     append_split(new MonitorExit(receiver, state()->unlock()));
1589   }
1590 
1591   if (need_mem_bar) {
1592       append(new MemBar(lir_membar_storestore));
1593   }
1594 
1595   assert(!ignore_return, "Ignoring return value works only for inlining");
1596   append(new Return(x));
1597 }
1598 
1599 Value GraphBuilder::make_constant(ciConstant field_value, ciField* field) {
1600   if (!field_value.is_valid())  return NULL;
1601 
1602   BasicType field_type = field_value.basic_type();
1603   ValueType* value = as_ValueType(field_value);
1604 
1605   // Attach dimension info to stable arrays.
1606   if (FoldStableValues &&
1607       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1608     ciArray* array = field_value.as_object()->as_array();
1609     jint dimension = field->type()->as_array_klass()->dimension();
1610     value = new StableArrayConstant(array, dimension);
1611   }
1612 
1613   switch (field_type) {
1614     case T_ARRAY:
1615     case T_OBJECT:
1616       if (field_value.as_object()->should_be_constant()) {
1617         return new Constant(value);
1618       }
1619       return NULL; // Not a constant.
1620     default:
1621       return new Constant(value);
1622   }
1623 }
1624 
1625 void GraphBuilder::access_field(Bytecodes::Code code) {
1626   bool will_link;
1627   ciField* field = stream()->get_field(will_link);
1628   ciInstanceKlass* holder = field->holder();
1629   BasicType field_type = field->type()->basic_type();
1630   ValueType* type = as_ValueType(field_type);
1631   // call will_link again to determine if the field is valid.
1632   const bool needs_patching = !holder->is_loaded() ||
1633                               !field->will_link(method(), code) ||
1634                               PatchALot;
1635 
1636   ValueStack* state_before = NULL;
1637   if (!holder->is_initialized() || needs_patching) {
1638     // save state before instruction for debug info when
1639     // deoptimization happens during patching
1640     state_before = copy_state_before();
1641   }
1642 
1643   Value obj = NULL;
1644   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1645     if (state_before != NULL) {
1646       // build a patching constant
1647       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1648     } else {
1649       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1650     }
1651   }
1652 
1653   if (field->is_final() && (code == Bytecodes::_putfield)) {
1654     scope()->set_wrote_final();
1655   }
1656 
1657   if (code == Bytecodes::_putfield) {
1658     scope()->set_wrote_fields();
1659     if (field->is_volatile()) {
1660       scope()->set_wrote_volatile();
1661     }
1662   }
1663 
1664   const int offset = !needs_patching ? field->offset() : -1;
1665   switch (code) {
1666     case Bytecodes::_getstatic: {
1667       // check for compile-time constants, i.e., initialized static final fields
1668       Value constant = NULL;
1669       if (field->is_static_constant() && !PatchALot) {
1670         ciConstant field_value = field->constant_value();
1671         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1672                "stable static w/ default value shouldn't be a constant");
1673         constant = make_constant(field_value, field);
1674       }
1675       if (constant != NULL) {
1676         push(type, append(constant));
1677       } else {
1678         if (state_before == NULL) {
1679           state_before = copy_state_for_exception();
1680         }
1681         push(type, append(new LoadField(append(obj), offset, field, true,
1682                                         state_before, needs_patching)));
1683       }
1684       break;
1685     }
1686     case Bytecodes::_putstatic: {
1687       Value val = pop(type);
1688       if (state_before == NULL) {
1689         state_before = copy_state_for_exception();
1690       }
1691       if (field->type()->basic_type() == T_BOOLEAN) {
1692         Value mask = append(new Constant(new IntConstant(1)));
1693         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1694       }
1695       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1696       break;
1697     }
1698     case Bytecodes::_getfield: {
1699       // Check for compile-time constants, i.e., trusted final non-static fields.
1700       Value constant = NULL;
1701       obj = apop();
1702       ObjectType* obj_type = obj->type()->as_ObjectType();
1703       if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
1704         ciObject* const_oop = obj_type->constant_value();
1705         if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1706           ciConstant field_value = field->constant_value_of(const_oop);
1707           if (field_value.is_valid()) {
1708             constant = make_constant(field_value, field);
1709             // For CallSite objects add a dependency for invalidation of the optimization.
1710             if (field->is_call_site_target()) {
1711               ciCallSite* call_site = const_oop->as_call_site();
1712               if (!call_site->is_constant_call_site()) {
1713                 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1714                 dependency_recorder()->assert_call_site_target_value(call_site, target);
1715               }
1716             }
1717           }
1718         }
1719       }
1720       if (constant != NULL) {
1721         push(type, append(constant));
1722       } else {
1723         if (state_before == NULL) {
1724           state_before = copy_state_for_exception();
1725         }
1726         LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1727         Value replacement = !needs_patching ? _memory->load(load) : load;
1728         if (replacement != load) {
1729           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already by linked");
1730           push(type, replacement);
1731         } else {
1732           push(type, append(load));
1733         }
1734       }
1735       break;
1736     }
1737     case Bytecodes::_putfield: {
1738       Value val = pop(type);
1739       obj = apop();
1740       if (state_before == NULL) {
1741         state_before = copy_state_for_exception();
1742       }
1743       if (field->type()->basic_type() == T_BOOLEAN) {
1744         Value mask = append(new Constant(new IntConstant(1)));
1745         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1746       }
1747       StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1748       if (!needs_patching) store = _memory->store(store);
1749       if (store != NULL) {
1750         append(store);
1751       }
1752       break;
1753     }
1754     default:
1755       ShouldNotReachHere();
1756       break;
1757   }
1758 }
1759 
1760 
1761 Dependencies* GraphBuilder::dependency_recorder() const {
1762   assert(DeoptC1, "need debug information");
1763   return compilation()->dependency_recorder();
1764 }
1765 
1766 // How many arguments do we want to profile?
1767 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1768   int n = 0;
1769   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1770   start = has_receiver ? 1 : 0;
1771   if (profile_arguments()) {
1772     ciProfileData* data = method()->method_data()->bci_to_data(bci());
1773     if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1774       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1775     }
1776   }
1777   // If we are inlining then we need to collect arguments to profile parameters for the target
1778   if (profile_parameters() && target != NULL) {
1779     if (target->method_data() != NULL && target->method_data()->parameters_type_data() != NULL) {
1780       // The receiver is profiled on method entry so it's included in
1781       // the number of parameters but here we're only interested in
1782       // actual arguments.
1783       n = MAX2(n, target->method_data()->parameters_type_data()->number_of_parameters() - start);
1784     }
1785   }
1786   if (n > 0) {
1787     return new Values(n);
1788   }
1789   return NULL;
1790 }
1791 
1792 void GraphBuilder::check_args_for_profiling(Values* obj_args, int expected) {
1793 #ifdef ASSERT
1794   bool ignored_will_link;
1795   ciSignature* declared_signature = NULL;
1796   ciMethod* real_target = method()->get_method_at_bci(bci(), ignored_will_link, &declared_signature);
1797   assert(expected == obj_args->max_length() || real_target->is_method_handle_intrinsic(), "missed on arg?");
1798 #endif
1799 }
1800 
1801 // Collect arguments that we want to profile in a list
1802 Values* GraphBuilder::collect_args_for_profiling(Values* args, ciMethod* target, bool may_have_receiver) {
1803   int start = 0;
1804   Values* obj_args = args_list_for_profiling(target, start, may_have_receiver);
1805   if (obj_args == NULL) {
1806     return NULL;
1807   }
1808   int s = obj_args->max_length();
1809   // if called through method handle invoke, some arguments may have been popped
1810   for (int i = start, j = 0; j < s && i < args->length(); i++) {
1811     if (args->at(i)->type()->is_object_kind()) {
1812       obj_args->push(args->at(i));
1813       j++;
1814     }
1815   }
1816   check_args_for_profiling(obj_args, s);
1817   return obj_args;
1818 }
1819 
1820 
1821 void GraphBuilder::invoke(Bytecodes::Code code) {
1822   bool will_link;
1823   ciSignature* declared_signature = NULL;
1824   ciMethod*             target = stream()->get_method(will_link, &declared_signature);
1825   ciKlass*              holder = stream()->get_declared_method_holder();
1826   const Bytecodes::Code bc_raw = stream()->cur_bc_raw();
1827   assert(declared_signature != NULL, "cannot be null");
1828   assert(will_link == target->is_loaded(), "");
1829 
1830   ciInstanceKlass* klass = target->holder();
1831   assert(!target->is_loaded() || klass->is_loaded(), "loaded target must imply loaded klass");
1832 
1833   // check if CHA possible: if so, change the code to invoke_special
1834   ciInstanceKlass* calling_klass = method()->holder();
1835   ciInstanceKlass* callee_holder = ciEnv::get_instance_klass_for_declared_method_holder(holder);
1836   ciInstanceKlass* actual_recv = callee_holder;
1837 
1838   CompileLog* log = compilation()->log();
1839   if (log != NULL)
1840       log->elem("call method='%d' instr='%s'",
1841                 log->identify(target),
1842                 Bytecodes::name(code));
1843 
1844   // invoke-special-super
1845   if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer()) {
1846     ciInstanceKlass* sender_klass =
1847           calling_klass->is_unsafe_anonymous() ? calling_klass->unsafe_anonymous_host() :
1848                                                  calling_klass;
1849     if (sender_klass->is_interface()) {
1850       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1851       Value receiver = state()->stack_at(index);
1852       CheckCast* c = new CheckCast(sender_klass, receiver, copy_state_before());
1853       c->set_invokespecial_receiver_check();
1854       state()->stack_at_put(index, append_split(c));
1855     }
1856   }
1857 
1858   // Some methods are obviously bindable without any type checks so
1859   // convert them directly to an invokespecial or invokestatic.
1860   if (target->is_loaded() && !target->is_abstract() && target->can_be_statically_bound()) {
1861     switch (bc_raw) {
1862     case Bytecodes::_invokevirtual:
1863       code = Bytecodes::_invokespecial;
1864       break;
1865     case Bytecodes::_invokehandle:
1866       code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
1867       break;
1868     default:
1869       break;
1870     }
1871   } else {
1872     if (bc_raw == Bytecodes::_invokehandle) {
1873       assert(!will_link, "should come here only for unlinked call");
1874       code = Bytecodes::_invokespecial;
1875     }
1876   }
1877 
1878   // Push appendix argument (MethodType, CallSite, etc.), if one.
1879   bool patch_for_appendix = false;
1880   int patching_appendix_arg = 0;
1881   if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
1882     Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
1883     apush(arg);
1884     patch_for_appendix = true;
1885     patching_appendix_arg = (will_link && stream()->has_appendix()) ? 0 : 1;
1886   } else if (stream()->has_appendix()) {
1887     ciObject* appendix = stream()->get_appendix();
1888     Value arg = append(new Constant(new ObjectConstant(appendix)));
1889     apush(arg);
1890   }
1891 
1892   ciMethod* cha_monomorphic_target = NULL;
1893   ciMethod* exact_target = NULL;
1894   Value better_receiver = NULL;
1895   if (UseCHA && DeoptC1 && target->is_loaded() &&
1896       !(// %%% FIXME: Are both of these relevant?
1897         target->is_method_handle_intrinsic() ||
1898         target->is_compiled_lambda_form()) &&
1899       !patch_for_appendix) {
1900     Value receiver = NULL;
1901     ciInstanceKlass* receiver_klass = NULL;
1902     bool type_is_exact = false;
1903     // try to find a precise receiver type
1904     if (will_link && !target->is_static()) {
1905       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1906       receiver = state()->stack_at(index);
1907       ciType* type = receiver->exact_type();
1908       if (type != NULL && type->is_loaded() &&
1909           type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
1910         receiver_klass = (ciInstanceKlass*) type;
1911         type_is_exact = true;
1912       }
1913       if (type == NULL) {
1914         type = receiver->declared_type();
1915         if (type != NULL && type->is_loaded() &&
1916             type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
1917           receiver_klass = (ciInstanceKlass*) type;
1918           if (receiver_klass->is_leaf_type() && !receiver_klass->is_final()) {
1919             // Insert a dependency on this type since
1920             // find_monomorphic_target may assume it's already done.
1921             dependency_recorder()->assert_leaf_type(receiver_klass);
1922             type_is_exact = true;
1923           }
1924         }
1925       }
1926     }
1927     if (receiver_klass != NULL && type_is_exact &&
1928         receiver_klass->is_loaded() && code != Bytecodes::_invokespecial) {
1929       // If we have the exact receiver type we can bind directly to
1930       // the method to call.
1931       exact_target = target->resolve_invoke(calling_klass, receiver_klass);
1932       if (exact_target != NULL) {
1933         target = exact_target;
1934         code = Bytecodes::_invokespecial;
1935       }
1936     }
1937     if (receiver_klass != NULL &&
1938         receiver_klass->is_subtype_of(actual_recv) &&
1939         actual_recv->is_initialized()) {
1940       actual_recv = receiver_klass;
1941     }
1942 
1943     if ((code == Bytecodes::_invokevirtual && callee_holder->is_initialized()) ||
1944         (code == Bytecodes::_invokeinterface && callee_holder->is_initialized() && !actual_recv->is_interface())) {
1945       // Use CHA on the receiver to select a more precise method.
1946       cha_monomorphic_target = target->find_monomorphic_target(calling_klass, callee_holder, actual_recv);
1947     } else if (code == Bytecodes::_invokeinterface && callee_holder->is_loaded() && receiver != NULL) {
1948       // if there is only one implementor of this interface then we
1949       // may be able bind this invoke directly to the implementing
1950       // klass but we need both a dependence on the single interface
1951       // and on the method we bind to.  Additionally since all we know
1952       // about the receiver type is the it's supposed to implement the
1953       // interface we have to insert a check that it's the class we
1954       // expect.  Interface types are not checked by the verifier so
1955       // they are roughly equivalent to Object.
1956       ciInstanceKlass* singleton = NULL;
1957       if (target->holder()->nof_implementors() == 1) {
1958         singleton = target->holder()->implementor();
1959         assert(singleton != NULL && singleton != target->holder(),
1960                "just checking");
1961 
1962         assert(holder->is_interface(), "invokeinterface to non interface?");
1963         ciInstanceKlass* decl_interface = (ciInstanceKlass*)holder;
1964         // the number of implementors for decl_interface is less or
1965         // equal to the number of implementors for target->holder() so
1966         // if number of implementors of target->holder() == 1 then
1967         // number of implementors for decl_interface is 0 or 1. If
1968         // it's 0 then no class implements decl_interface and there's
1969         // no point in inlining.
1970         if (!holder->is_loaded() || decl_interface->nof_implementors() != 1 || decl_interface->has_nonstatic_concrete_methods()) {
1971           singleton = NULL;
1972         }
1973       }
1974       if (singleton) {
1975         cha_monomorphic_target = target->find_monomorphic_target(calling_klass, target->holder(), singleton);
1976         if (cha_monomorphic_target != NULL) {
1977           // If CHA is able to bind this invoke then update the class
1978           // to match that class, otherwise klass will refer to the
1979           // interface.
1980           klass = cha_monomorphic_target->holder();
1981           actual_recv = target->holder();
1982 
1983           // insert a check it's really the expected class.
1984           CheckCast* c = new CheckCast(klass, receiver, copy_state_for_exception());
1985           c->set_incompatible_class_change_check();
1986           c->set_direct_compare(klass->is_final());
1987           // pass the result of the checkcast so that the compiler has
1988           // more accurate type info in the inlinee
1989           better_receiver = append_split(c);
1990         }
1991       }
1992     }
1993   }
1994 
1995   if (cha_monomorphic_target != NULL) {
1996     if (cha_monomorphic_target->is_abstract()) {
1997       // Do not optimize for abstract methods
1998       cha_monomorphic_target = NULL;
1999     }
2000   }
2001 
2002   if (cha_monomorphic_target != NULL) {
2003     if (!(target->is_final_method())) {
2004       // If we inlined because CHA revealed only a single target method,
2005       // then we are dependent on that target method not getting overridden
2006       // by dynamic class loading.  Be sure to test the "static" receiver
2007       // dest_method here, as opposed to the actual receiver, which may
2008       // falsely lead us to believe that the receiver is final or private.
2009       dependency_recorder()->assert_unique_concrete_method(actual_recv, cha_monomorphic_target);
2010     }
2011     code = Bytecodes::_invokespecial;
2012   }
2013 
2014   // check if we could do inlining
2015   if (!PatchALot && Inline && target->is_loaded() &&
2016       (klass->is_initialized() || (klass->is_interface() && target->holder()->is_initialized()))
2017       && !patch_for_appendix) {
2018     // callee is known => check if we have static binding
2019     if (code == Bytecodes::_invokestatic  ||
2020         code == Bytecodes::_invokespecial ||
2021         (code == Bytecodes::_invokevirtual && target->is_final_method()) ||
2022         code == Bytecodes::_invokedynamic) {
2023       ciMethod* inline_target = (cha_monomorphic_target != NULL) ? cha_monomorphic_target : target;
2024       // static binding => check if callee is ok
2025       bool success = try_inline(inline_target, (cha_monomorphic_target != NULL) || (exact_target != NULL), false, code, better_receiver);
2026 
2027       CHECK_BAILOUT();
2028       clear_inline_bailout();
2029 
2030       if (success) {
2031         // Register dependence if JVMTI has either breakpoint
2032         // setting or hotswapping of methods capabilities since they may
2033         // cause deoptimization.
2034         if (compilation()->env()->jvmti_can_hotswap_or_post_breakpoint()) {
2035           dependency_recorder()->assert_evol_method(inline_target);
2036         }
2037         return;
2038       }
2039     } else {
2040       print_inlining(target, "no static binding", /*success*/ false);
2041     }
2042   } else {
2043     print_inlining(target, "not inlineable", /*success*/ false);
2044   }
2045 
2046   // If we attempted an inline which did not succeed because of a
2047   // bailout during construction of the callee graph, the entire
2048   // compilation has to be aborted. This is fairly rare and currently
2049   // seems to only occur for jasm-generated classes which contain
2050   // jsr/ret pairs which are not associated with finally clauses and
2051   // do not have exception handlers in the containing method, and are
2052   // therefore not caught early enough to abort the inlining without
2053   // corrupting the graph. (We currently bail out with a non-empty
2054   // stack at a ret in these situations.)
2055   CHECK_BAILOUT();
2056 
2057   // inlining not successful => standard invoke
2058   ValueType* result_type = as_ValueType(declared_signature->return_type());
2059   ValueStack* state_before = copy_state_exhandling();
2060 
2061   // The bytecode (code) might change in this method so we are checking this very late.
2062   const bool has_receiver =
2063     code == Bytecodes::_invokespecial   ||
2064     code == Bytecodes::_invokevirtual   ||
2065     code == Bytecodes::_invokeinterface;
2066   Values* args = state()->pop_arguments(target->arg_size_no_receiver() + patching_appendix_arg);
2067   Value recv = has_receiver ? apop() : NULL;
2068   int vtable_index = Method::invalid_vtable_index;
2069 
2070 #ifdef SPARC
2071   // Currently only supported on Sparc.
2072   // The UseInlineCaches only controls dispatch to invokevirtuals for
2073   // loaded classes which we weren't able to statically bind.
2074   if (!UseInlineCaches && target->is_loaded() && code == Bytecodes::_invokevirtual
2075       && !target->can_be_statically_bound()) {
2076     // Find a vtable index if one is available
2077     // For arrays, callee_holder is Object. Resolving the call with
2078     // Object would allow an illegal call to finalize() on an
2079     // array. We use holder instead: illegal calls to finalize() won't
2080     // be compiled as vtable calls (IC call resolution will catch the
2081     // illegal call) and the few legal calls on array types won't be
2082     // either.
2083     vtable_index = target->resolve_vtable_index(calling_klass, holder);
2084   }
2085 #endif
2086 
2087   // A null check is required here (when there is a receiver) for any of the following cases
2088   // - invokespecial, always need a null check.
2089   // - invokevirtual, when the target is final and loaded. Calls to final targets will become optimized
2090   //   and require null checking. If the target is loaded a null check is emitted here.
2091   //   If the target isn't loaded the null check must happen after the call resolution. We achieve that
2092   //   by using the target methods unverified entry point (see CompiledIC::compute_monomorphic_entry).
2093   //   (The JVM specification requires that LinkageError must be thrown before a NPE. An unloaded target may
2094   //   potentially fail, and can't have the null check before the resolution.)
2095   // - A call that will be profiled. (But we can't add a null check when the target is unloaded, by the same
2096   //   reason as above, so calls with a receiver to unloaded targets can't be profiled.)
2097   //
2098   // Normal invokevirtual will perform the null check during lookup
2099 
2100   bool need_null_check = (code == Bytecodes::_invokespecial) ||
2101       (target->is_loaded() && (target->is_final_method() || (is_profiling() && profile_calls())));
2102 
2103   if (need_null_check) {
2104     if (recv != NULL) {
2105       null_check(recv);
2106     }
2107 
2108     if (is_profiling()) {
2109       // Note that we'd collect profile data in this method if we wanted it.
2110       compilation()->set_would_profile(true);
2111 
2112       if (profile_calls()) {
2113         assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
2114         ciKlass* target_klass = NULL;
2115         if (cha_monomorphic_target != NULL) {
2116           target_klass = cha_monomorphic_target->holder();
2117         } else if (exact_target != NULL) {
2118           target_klass = exact_target->holder();
2119         }
2120         profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
2121       }
2122     }
2123   }
2124 
2125   Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before);
2126   // push result
2127   append_split(result);
2128 
2129   if (result_type != voidType) {
2130     if (method()->is_strict()) {
2131       push(result_type, round_fp(result));
2132     } else {
2133       push(result_type, result);
2134     }
2135   }
2136   if (profile_return() && result_type->is_object_kind()) {
2137     profile_return_type(result, target);
2138   }
2139 }
2140 
2141 
2142 void GraphBuilder::new_instance(int klass_index) {
2143   ValueStack* state_before = copy_state_exhandling();
2144   bool will_link;
2145   ciKlass* klass = stream()->get_klass(will_link);
2146   assert(klass->is_instance_klass(), "must be an instance klass");
2147   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2148   _memory->new_instance(new_instance);
2149   apush(append_split(new_instance));
2150 }
2151 
2152 
2153 void GraphBuilder::new_type_array() {
2154   ValueStack* state_before = copy_state_exhandling();
2155   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2156 }
2157 
2158 
2159 void GraphBuilder::new_object_array() {
2160   bool will_link;
2161   ciKlass* klass = stream()->get_klass(will_link);
2162   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2163   NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2164   apush(append_split(n));
2165 }
2166 
2167 
2168 bool GraphBuilder::direct_compare(ciKlass* k) {
2169   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2170     ciInstanceKlass* ik = k->as_instance_klass();
2171     if (ik->is_final()) {
2172       return true;
2173     } else {
2174       if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2175         // test class is leaf class
2176         dependency_recorder()->assert_leaf_type(ik);
2177         return true;
2178       }
2179     }
2180   }
2181   return false;
2182 }
2183 
2184 
2185 void GraphBuilder::check_cast(int klass_index) {
2186   bool will_link;
2187   ciKlass* klass = stream()->get_klass(will_link);
2188   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
2189   CheckCast* c = new CheckCast(klass, apop(), state_before);
2190   apush(append_split(c));
2191   c->set_direct_compare(direct_compare(klass));
2192 
2193   if (is_profiling()) {
2194     // Note that we'd collect profile data in this method if we wanted it.
2195     compilation()->set_would_profile(true);
2196 
2197     if (profile_checkcasts()) {
2198       c->set_profiled_method(method());
2199       c->set_profiled_bci(bci());
2200       c->set_should_profile(true);
2201     }
2202   }
2203 }
2204 
2205 
2206 void GraphBuilder::instance_of(int klass_index) {
2207   bool will_link;
2208   ciKlass* klass = stream()->get_klass(will_link);
2209   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2210   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2211   ipush(append_split(i));
2212   i->set_direct_compare(direct_compare(klass));
2213 
2214   if (is_profiling()) {
2215     // Note that we'd collect profile data in this method if we wanted it.
2216     compilation()->set_would_profile(true);
2217 
2218     if (profile_checkcasts()) {
2219       i->set_profiled_method(method());
2220       i->set_profiled_bci(bci());
2221       i->set_should_profile(true);
2222     }
2223   }
2224 }
2225 
2226 
2227 void GraphBuilder::monitorenter(Value x, int bci) {
2228   // save state before locking in case of deoptimization after a NullPointerException
2229   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2230   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
2231   kill_all();
2232 }
2233 
2234 
2235 void GraphBuilder::monitorexit(Value x, int bci) {
2236   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2237   kill_all();
2238 }
2239 
2240 
2241 void GraphBuilder::new_multi_array(int dimensions) {
2242   bool will_link;
2243   ciKlass* klass = stream()->get_klass(will_link);
2244   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2245 
2246   Values* dims = new Values(dimensions, dimensions, NULL);
2247   // fill in all dimensions
2248   int i = dimensions;
2249   while (i-- > 0) dims->at_put(i, ipop());
2250   // create array
2251   NewArray* n = new NewMultiArray(klass, dims, state_before);
2252   apush(append_split(n));
2253 }
2254 
2255 
2256 void GraphBuilder::throw_op(int bci) {
2257   // We require that the debug info for a Throw be the "state before"
2258   // the Throw (i.e., exception oop is still on TOS)
2259   ValueStack* state_before = copy_state_before_with_bci(bci);
2260   Throw* t = new Throw(apop(), state_before);
2261   // operand stack not needed after a throw
2262   state()->truncate_stack(0);
2263   append_with_bci(t, bci);
2264 }
2265 
2266 
2267 Value GraphBuilder::round_fp(Value fp_value) {
2268   // no rounding needed if SSE2 is used
2269   if (RoundFPResults && UseSSE < 2) {
2270     // Must currently insert rounding node for doubleword values that
2271     // are results of expressions (i.e., not loads from memory or
2272     // constants)
2273     if (fp_value->type()->tag() == doubleTag &&
2274         fp_value->as_Constant() == NULL &&
2275         fp_value->as_Local() == NULL &&       // method parameters need no rounding
2276         fp_value->as_RoundFP() == NULL) {
2277       return append(new RoundFP(fp_value));
2278     }
2279   }
2280   return fp_value;
2281 }
2282 
2283 
2284 Instruction* GraphBuilder::append_with_bci(Instruction* instr, int bci) {
2285   Canonicalizer canon(compilation(), instr, bci);
2286   Instruction* i1 = canon.canonical();
2287   if (i1->is_linked() || !i1->can_be_linked()) {
2288     // Canonicalizer returned an instruction which was already
2289     // appended so simply return it.
2290     return i1;
2291   }
2292 
2293   if (UseLocalValueNumbering) {
2294     // Lookup the instruction in the ValueMap and add it to the map if
2295     // it's not found.
2296     Instruction* i2 = vmap()->find_insert(i1);
2297     if (i2 != i1) {
2298       // found an entry in the value map, so just return it.
2299       assert(i2->is_linked(), "should already be linked");
2300       return i2;
2301     }
2302     ValueNumberingEffects vne(vmap());
2303     i1->visit(&vne);
2304   }
2305 
2306   // i1 was not eliminated => append it
2307   assert(i1->next() == NULL, "shouldn't already be linked");
2308   _last = _last->set_next(i1, canon.bci());
2309 
2310   if (++_instruction_count >= InstructionCountCutoff && !bailed_out()) {
2311     // set the bailout state but complete normal processing.  We
2312     // might do a little more work before noticing the bailout so we
2313     // want processing to continue normally until it's noticed.
2314     bailout("Method and/or inlining is too large");
2315   }
2316 
2317 #ifndef PRODUCT
2318   if (PrintIRDuringConstruction) {
2319     InstructionPrinter ip;
2320     ip.print_line(i1);
2321     if (Verbose) {
2322       state()->print();
2323     }
2324   }
2325 #endif
2326 
2327   // save state after modification of operand stack for StateSplit instructions
2328   StateSplit* s = i1->as_StateSplit();
2329   if (s != NULL) {
2330     if (EliminateFieldAccess) {
2331       Intrinsic* intrinsic = s->as_Intrinsic();
2332       if (s->as_Invoke() != NULL || (intrinsic && !intrinsic->preserves_state())) {
2333         _memory->kill();
2334       }
2335     }
2336     s->set_state(state()->copy(ValueStack::StateAfter, canon.bci()));
2337   }
2338 
2339   // set up exception handlers for this instruction if necessary
2340   if (i1->can_trap()) {
2341     i1->set_exception_handlers(handle_exception(i1));
2342     assert(i1->exception_state() != NULL || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
2343   }
2344   return i1;
2345 }
2346 
2347 
2348 Instruction* GraphBuilder::append(Instruction* instr) {
2349   assert(instr->as_StateSplit() == NULL || instr->as_BlockEnd() != NULL, "wrong append used");
2350   return append_with_bci(instr, bci());
2351 }
2352 
2353 
2354 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2355   return append_with_bci(instr, bci());
2356 }
2357 
2358 
2359 void GraphBuilder::null_check(Value value) {
2360   if (value->as_NewArray() != NULL || value->as_NewInstance() != NULL) {
2361     return;
2362   } else {
2363     Constant* con = value->as_Constant();
2364     if (con) {
2365       ObjectType* c = con->type()->as_ObjectType();
2366       if (c && c->is_loaded()) {
2367         ObjectConstant* oc = c->as_ObjectConstant();
2368         if (!oc || !oc->value()->is_null_object()) {
2369           return;
2370         }
2371       }
2372     }
2373   }
2374   append(new NullCheck(value, copy_state_for_exception()));
2375 }
2376 
2377 
2378 
2379 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2380   if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) {
2381     assert(instruction->exception_state() == NULL
2382            || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2383            || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2384            "exception_state should be of exception kind");
2385     return new XHandlers();
2386   }
2387 
2388   XHandlers*  exception_handlers = new XHandlers();
2389   ScopeData*  cur_scope_data = scope_data();
2390   ValueStack* cur_state = instruction->state_before();
2391   ValueStack* prev_state = NULL;
2392   int scope_count = 0;
2393 
2394   assert(cur_state != NULL, "state_before must be set");
2395   do {
2396     int cur_bci = cur_state->bci();
2397     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2398     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
2399 
2400     // join with all potential exception handlers
2401     XHandlers* list = cur_scope_data->xhandlers();
2402     const int n = list->length();
2403     for (int i = 0; i < n; i++) {
2404       XHandler* h = list->handler_at(i);
2405       if (h->covers(cur_bci)) {
2406         // h is a potential exception handler => join it
2407         compilation()->set_has_exception_handlers(true);
2408 
2409         BlockBegin* entry = h->entry_block();
2410         if (entry == block()) {
2411           // It's acceptable for an exception handler to cover itself
2412           // but we don't handle that in the parser currently.  It's
2413           // very rare so we bailout instead of trying to handle it.
2414           BAILOUT_("exception handler covers itself", exception_handlers);
2415         }
2416         assert(entry->bci() == h->handler_bci(), "must match");
2417         assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2418 
2419         // previously this was a BAILOUT, but this is not necessary
2420         // now because asynchronous exceptions are not handled this way.
2421         assert(entry->state() == NULL || cur_state->total_locks_size() == entry->state()->total_locks_size(), "locks do not match");
2422 
2423         // xhandler start with an empty expression stack
2424         if (cur_state->stack_size() != 0) {
2425           cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
2426         }
2427         if (instruction->exception_state() == NULL) {
2428           instruction->set_exception_state(cur_state);
2429         }
2430 
2431         // Note: Usually this join must work. However, very
2432         // complicated jsr-ret structures where we don't ret from
2433         // the subroutine can cause the objects on the monitor
2434         // stacks to not match because blocks can be parsed twice.
2435         // The only test case we've seen so far which exhibits this
2436         // problem is caught by the infinite recursion test in
2437         // GraphBuilder::jsr() if the join doesn't work.
2438         if (!entry->try_merge(cur_state)) {
2439           BAILOUT_("error while joining with exception handler, prob. due to complicated jsr/rets", exception_handlers);
2440         }
2441 
2442         // add current state for correct handling of phi functions at begin of xhandler
2443         int phi_operand = entry->add_exception_state(cur_state);
2444 
2445         // add entry to the list of xhandlers of this block
2446         _block->add_exception_handler(entry);
2447 
2448         // add back-edge from xhandler entry to this block
2449         if (!entry->is_predecessor(_block)) {
2450           entry->add_predecessor(_block);
2451         }
2452 
2453         // clone XHandler because phi_operand and scope_count can not be shared
2454         XHandler* new_xhandler = new XHandler(h);
2455         new_xhandler->set_phi_operand(phi_operand);
2456         new_xhandler->set_scope_count(scope_count);
2457         exception_handlers->append(new_xhandler);
2458 
2459         // fill in exception handler subgraph lazily
2460         assert(!entry->is_set(BlockBegin::was_visited_flag), "entry must not be visited yet");
2461         cur_scope_data->add_to_work_list(entry);
2462 
2463         // stop when reaching catchall
2464         if (h->catch_type() == 0) {
2465           return exception_handlers;
2466         }
2467       }
2468     }
2469 
2470     if (exception_handlers->length() == 0) {
2471       // This scope and all callees do not handle exceptions, so the local
2472       // variables of this scope are not needed. However, the scope itself is
2473       // required for a correct exception stack trace -> clear out the locals.
2474       if (_compilation->env()->should_retain_local_variables()) {
2475         cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
2476       } else {
2477         cur_state = cur_state->copy(ValueStack::EmptyExceptionState, cur_state->bci());
2478       }
2479       if (prev_state != NULL) {
2480         prev_state->set_caller_state(cur_state);
2481       }
2482       if (instruction->exception_state() == NULL) {
2483         instruction->set_exception_state(cur_state);
2484       }
2485     }
2486 
2487     // Set up iteration for next time.
2488     // If parsing a jsr, do not grab exception handlers from the
2489     // parent scopes for this method (already got them, and they
2490     // needed to be cloned)
2491 
2492     while (cur_scope_data->parsing_jsr()) {
2493       cur_scope_data = cur_scope_data->parent();
2494     }
2495 
2496     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2497     assert(cur_state->locks_size() == 0 || cur_state->locks_size() == 1, "unlocking must be done in a catchall exception handler");
2498 
2499     prev_state = cur_state;
2500     cur_state = cur_state->caller_state();
2501     cur_scope_data = cur_scope_data->parent();
2502     scope_count++;
2503   } while (cur_scope_data != NULL);
2504 
2505   return exception_handlers;
2506 }
2507 
2508 
2509 // Helper class for simplifying Phis.
2510 class PhiSimplifier : public BlockClosure {
2511  private:
2512   bool _has_substitutions;
2513   Value simplify(Value v);
2514 
2515  public:
2516   PhiSimplifier(BlockBegin* start) : _has_substitutions(false) {
2517     start->iterate_preorder(this);
2518     if (_has_substitutions) {
2519       SubstitutionResolver sr(start);
2520     }
2521   }
2522   void block_do(BlockBegin* b);
2523   bool has_substitutions() const { return _has_substitutions; }
2524 };
2525 
2526 
2527 Value PhiSimplifier::simplify(Value v) {
2528   Phi* phi = v->as_Phi();
2529 
2530   if (phi == NULL) {
2531     // no phi function
2532     return v;
2533   } else if (v->has_subst()) {
2534     // already substituted; subst can be phi itself -> simplify
2535     return simplify(v->subst());
2536   } else if (phi->is_set(Phi::cannot_simplify)) {
2537     // already tried to simplify phi before
2538     return phi;
2539   } else if (phi->is_set(Phi::visited)) {
2540     // break cycles in phi functions
2541     return phi;
2542   } else if (phi->type()->is_illegal()) {
2543     // illegal phi functions are ignored anyway
2544     return phi;
2545 
2546   } else {
2547     // mark phi function as processed to break cycles in phi functions
2548     phi->set(Phi::visited);
2549 
2550     // simplify x = [y, x] and x = [y, y] to y
2551     Value subst = NULL;
2552     int opd_count = phi->operand_count();
2553     for (int i = 0; i < opd_count; i++) {
2554       Value opd = phi->operand_at(i);
2555       assert(opd != NULL, "Operand must exist!");
2556 
2557       if (opd->type()->is_illegal()) {
2558         // if one operand is illegal, the entire phi function is illegal
2559         phi->make_illegal();
2560         phi->clear(Phi::visited);
2561         return phi;
2562       }
2563 
2564       Value new_opd = simplify(opd);
2565       assert(new_opd != NULL, "Simplified operand must exist!");
2566 
2567       if (new_opd != phi && new_opd != subst) {
2568         if (subst == NULL) {
2569           subst = new_opd;
2570         } else {
2571           // no simplification possible
2572           phi->set(Phi::cannot_simplify);
2573           phi->clear(Phi::visited);
2574           return phi;
2575         }
2576       }
2577     }
2578 
2579     // sucessfully simplified phi function
2580     assert(subst != NULL, "illegal phi function");
2581     _has_substitutions = true;
2582     phi->clear(Phi::visited);
2583     phi->set_subst(subst);
2584 
2585 #ifndef PRODUCT
2586     if (PrintPhiFunctions) {
2587       tty->print_cr("simplified phi function %c%d to %c%d (Block B%d)", phi->type()->tchar(), phi->id(), subst->type()->tchar(), subst->id(), phi->block()->block_id());
2588     }
2589 #endif
2590 
2591     return subst;
2592   }
2593 }
2594 
2595 
2596 void PhiSimplifier::block_do(BlockBegin* b) {
2597   for_each_phi_fun(b, phi,
2598     simplify(phi);
2599   );
2600 
2601 #ifdef ASSERT
2602   for_each_phi_fun(b, phi,
2603                    assert(phi->operand_count() != 1 || phi->subst() != phi, "missed trivial simplification");
2604   );
2605 
2606   ValueStack* state = b->state()->caller_state();
2607   for_each_state_value(state, value,
2608     Phi* phi = value->as_Phi();
2609     assert(phi == NULL || phi->block() != b, "must not have phi function to simplify in caller state");
2610   );
2611 #endif
2612 }
2613 
2614 // This method is called after all blocks are filled with HIR instructions
2615 // It eliminates all Phi functions of the form x = [y, y] and x = [y, x]
2616 void GraphBuilder::eliminate_redundant_phis(BlockBegin* start) {
2617   PhiSimplifier simplifier(start);
2618 }
2619 
2620 
2621 void GraphBuilder::connect_to_end(BlockBegin* beg) {
2622   // setup iteration
2623   kill_all();
2624   _block = beg;
2625   _state = beg->state()->copy_for_parsing();
2626   _last  = beg;
2627   iterate_bytecodes_for_block(beg->bci());
2628 }
2629 
2630 
2631 BlockEnd* GraphBuilder::iterate_bytecodes_for_block(int bci) {
2632 #ifndef PRODUCT
2633   if (PrintIRDuringConstruction) {
2634     tty->cr();
2635     InstructionPrinter ip;
2636     ip.print_instr(_block); tty->cr();
2637     ip.print_stack(_block->state()); tty->cr();
2638     ip.print_inline_level(_block);
2639     ip.print_head();
2640     tty->print_cr("locals size: %d stack size: %d", state()->locals_size(), state()->stack_size());
2641   }
2642 #endif
2643   _skip_block = false;
2644   assert(state() != NULL, "ValueStack missing!");
2645   CompileLog* log = compilation()->log();
2646   ciBytecodeStream s(method());
2647   s.reset_to_bci(bci);
2648   int prev_bci = bci;
2649   scope_data()->set_stream(&s);
2650   // iterate
2651   Bytecodes::Code code = Bytecodes::_illegal;
2652   bool push_exception = false;
2653 
2654   if (block()->is_set(BlockBegin::exception_entry_flag) && block()->next() == NULL) {
2655     // first thing in the exception entry block should be the exception object.
2656     push_exception = true;
2657   }
2658 
2659   bool ignore_return = scope_data()->ignore_return();
2660 
2661   while (!bailed_out() && last()->as_BlockEnd() == NULL &&
2662          (code = stream()->next()) != ciBytecodeStream::EOBC() &&
2663          (block_at(s.cur_bci()) == NULL || block_at(s.cur_bci()) == block())) {
2664     assert(state()->kind() == ValueStack::Parsing, "invalid state kind");
2665 
2666     if (log != NULL)
2667       log->set_context("bc code='%d' bci='%d'", (int)code, s.cur_bci());
2668 
2669     // Check for active jsr during OSR compilation
2670     if (compilation()->is_osr_compile()
2671         && scope()->is_top_scope()
2672         && parsing_jsr()
2673         && s.cur_bci() == compilation()->osr_bci()) {
2674       bailout("OSR not supported while a jsr is active");
2675     }
2676 
2677     if (push_exception) {
2678       apush(append(new ExceptionObject()));
2679       push_exception = false;
2680     }
2681 
2682     // handle bytecode
2683     switch (code) {
2684       case Bytecodes::_nop            : /* nothing to do */ break;
2685       case Bytecodes::_aconst_null    : apush(append(new Constant(objectNull            ))); break;
2686       case Bytecodes::_iconst_m1      : ipush(append(new Constant(new IntConstant   (-1)))); break;
2687       case Bytecodes::_iconst_0       : ipush(append(new Constant(intZero               ))); break;
2688       case Bytecodes::_iconst_1       : ipush(append(new Constant(intOne                ))); break;
2689       case Bytecodes::_iconst_2       : ipush(append(new Constant(new IntConstant   ( 2)))); break;
2690       case Bytecodes::_iconst_3       : ipush(append(new Constant(new IntConstant   ( 3)))); break;
2691       case Bytecodes::_iconst_4       : ipush(append(new Constant(new IntConstant   ( 4)))); break;
2692       case Bytecodes::_iconst_5       : ipush(append(new Constant(new IntConstant   ( 5)))); break;
2693       case Bytecodes::_lconst_0       : lpush(append(new Constant(new LongConstant  ( 0)))); break;
2694       case Bytecodes::_lconst_1       : lpush(append(new Constant(new LongConstant  ( 1)))); break;
2695       case Bytecodes::_fconst_0       : fpush(append(new Constant(new FloatConstant ( 0)))); break;
2696       case Bytecodes::_fconst_1       : fpush(append(new Constant(new FloatConstant ( 1)))); break;
2697       case Bytecodes::_fconst_2       : fpush(append(new Constant(new FloatConstant ( 2)))); break;
2698       case Bytecodes::_dconst_0       : dpush(append(new Constant(new DoubleConstant( 0)))); break;
2699       case Bytecodes::_dconst_1       : dpush(append(new Constant(new DoubleConstant( 1)))); break;
2700       case Bytecodes::_bipush         : ipush(append(new Constant(new IntConstant(((signed char*)s.cur_bcp())[1])))); break;
2701       case Bytecodes::_sipush         : ipush(append(new Constant(new IntConstant((short)Bytes::get_Java_u2(s.cur_bcp()+1))))); break;
2702       case Bytecodes::_ldc            : // fall through
2703       case Bytecodes::_ldc_w          : // fall through
2704       case Bytecodes::_ldc2_w         : load_constant(); break;
2705       case Bytecodes::_iload          : load_local(intType     , s.get_index()); break;
2706       case Bytecodes::_lload          : load_local(longType    , s.get_index()); break;
2707       case Bytecodes::_fload          : load_local(floatType   , s.get_index()); break;
2708       case Bytecodes::_dload          : load_local(doubleType  , s.get_index()); break;
2709       case Bytecodes::_aload          : load_local(instanceType, s.get_index()); break;
2710       case Bytecodes::_iload_0        : load_local(intType   , 0); break;
2711       case Bytecodes::_iload_1        : load_local(intType   , 1); break;
2712       case Bytecodes::_iload_2        : load_local(intType   , 2); break;
2713       case Bytecodes::_iload_3        : load_local(intType   , 3); break;
2714       case Bytecodes::_lload_0        : load_local(longType  , 0); break;
2715       case Bytecodes::_lload_1        : load_local(longType  , 1); break;
2716       case Bytecodes::_lload_2        : load_local(longType  , 2); break;
2717       case Bytecodes::_lload_3        : load_local(longType  , 3); break;
2718       case Bytecodes::_fload_0        : load_local(floatType , 0); break;
2719       case Bytecodes::_fload_1        : load_local(floatType , 1); break;
2720       case Bytecodes::_fload_2        : load_local(floatType , 2); break;
2721       case Bytecodes::_fload_3        : load_local(floatType , 3); break;
2722       case Bytecodes::_dload_0        : load_local(doubleType, 0); break;
2723       case Bytecodes::_dload_1        : load_local(doubleType, 1); break;
2724       case Bytecodes::_dload_2        : load_local(doubleType, 2); break;
2725       case Bytecodes::_dload_3        : load_local(doubleType, 3); break;
2726       case Bytecodes::_aload_0        : load_local(objectType, 0); break;
2727       case Bytecodes::_aload_1        : load_local(objectType, 1); break;
2728       case Bytecodes::_aload_2        : load_local(objectType, 2); break;
2729       case Bytecodes::_aload_3        : load_local(objectType, 3); break;
2730       case Bytecodes::_iaload         : load_indexed(T_INT   ); break;
2731       case Bytecodes::_laload         : load_indexed(T_LONG  ); break;
2732       case Bytecodes::_faload         : load_indexed(T_FLOAT ); break;
2733       case Bytecodes::_daload         : load_indexed(T_DOUBLE); break;
2734       case Bytecodes::_aaload         : load_indexed(T_OBJECT); break;
2735       case Bytecodes::_baload         : load_indexed(T_BYTE  ); break;
2736       case Bytecodes::_caload         : load_indexed(T_CHAR  ); break;
2737       case Bytecodes::_saload         : load_indexed(T_SHORT ); break;
2738       case Bytecodes::_istore         : store_local(intType   , s.get_index()); break;
2739       case Bytecodes::_lstore         : store_local(longType  , s.get_index()); break;
2740       case Bytecodes::_fstore         : store_local(floatType , s.get_index()); break;
2741       case Bytecodes::_dstore         : store_local(doubleType, s.get_index()); break;
2742       case Bytecodes::_astore         : store_local(objectType, s.get_index()); break;
2743       case Bytecodes::_istore_0       : store_local(intType   , 0); break;
2744       case Bytecodes::_istore_1       : store_local(intType   , 1); break;
2745       case Bytecodes::_istore_2       : store_local(intType   , 2); break;
2746       case Bytecodes::_istore_3       : store_local(intType   , 3); break;
2747       case Bytecodes::_lstore_0       : store_local(longType  , 0); break;
2748       case Bytecodes::_lstore_1       : store_local(longType  , 1); break;
2749       case Bytecodes::_lstore_2       : store_local(longType  , 2); break;
2750       case Bytecodes::_lstore_3       : store_local(longType  , 3); break;
2751       case Bytecodes::_fstore_0       : store_local(floatType , 0); break;
2752       case Bytecodes::_fstore_1       : store_local(floatType , 1); break;
2753       case Bytecodes::_fstore_2       : store_local(floatType , 2); break;
2754       case Bytecodes::_fstore_3       : store_local(floatType , 3); break;
2755       case Bytecodes::_dstore_0       : store_local(doubleType, 0); break;
2756       case Bytecodes::_dstore_1       : store_local(doubleType, 1); break;
2757       case Bytecodes::_dstore_2       : store_local(doubleType, 2); break;
2758       case Bytecodes::_dstore_3       : store_local(doubleType, 3); break;
2759       case Bytecodes::_astore_0       : store_local(objectType, 0); break;
2760       case Bytecodes::_astore_1       : store_local(objectType, 1); break;
2761       case Bytecodes::_astore_2       : store_local(objectType, 2); break;
2762       case Bytecodes::_astore_3       : store_local(objectType, 3); break;
2763       case Bytecodes::_iastore        : store_indexed(T_INT   ); break;
2764       case Bytecodes::_lastore        : store_indexed(T_LONG  ); break;
2765       case Bytecodes::_fastore        : store_indexed(T_FLOAT ); break;
2766       case Bytecodes::_dastore        : store_indexed(T_DOUBLE); break;
2767       case Bytecodes::_aastore        : store_indexed(T_OBJECT); break;
2768       case Bytecodes::_bastore        : store_indexed(T_BYTE  ); break;
2769       case Bytecodes::_castore        : store_indexed(T_CHAR  ); break;
2770       case Bytecodes::_sastore        : store_indexed(T_SHORT ); break;
2771       case Bytecodes::_pop            : // fall through
2772       case Bytecodes::_pop2           : // fall through
2773       case Bytecodes::_dup            : // fall through
2774       case Bytecodes::_dup_x1         : // fall through
2775       case Bytecodes::_dup_x2         : // fall through
2776       case Bytecodes::_dup2           : // fall through
2777       case Bytecodes::_dup2_x1        : // fall through
2778       case Bytecodes::_dup2_x2        : // fall through
2779       case Bytecodes::_swap           : stack_op(code); break;
2780       case Bytecodes::_iadd           : arithmetic_op(intType   , code); break;
2781       case Bytecodes::_ladd           : arithmetic_op(longType  , code); break;
2782       case Bytecodes::_fadd           : arithmetic_op(floatType , code); break;
2783       case Bytecodes::_dadd           : arithmetic_op(doubleType, code); break;
2784       case Bytecodes::_isub           : arithmetic_op(intType   , code); break;
2785       case Bytecodes::_lsub           : arithmetic_op(longType  , code); break;
2786       case Bytecodes::_fsub           : arithmetic_op(floatType , code); break;
2787       case Bytecodes::_dsub           : arithmetic_op(doubleType, code); break;
2788       case Bytecodes::_imul           : arithmetic_op(intType   , code); break;
2789       case Bytecodes::_lmul           : arithmetic_op(longType  , code); break;
2790       case Bytecodes::_fmul           : arithmetic_op(floatType , code); break;
2791       case Bytecodes::_dmul           : arithmetic_op(doubleType, code); break;
2792       case Bytecodes::_idiv           : arithmetic_op(intType   , code, copy_state_for_exception()); break;
2793       case Bytecodes::_ldiv           : arithmetic_op(longType  , code, copy_state_for_exception()); break;
2794       case Bytecodes::_fdiv           : arithmetic_op(floatType , code); break;
2795       case Bytecodes::_ddiv           : arithmetic_op(doubleType, code); break;
2796       case Bytecodes::_irem           : arithmetic_op(intType   , code, copy_state_for_exception()); break;
2797       case Bytecodes::_lrem           : arithmetic_op(longType  , code, copy_state_for_exception()); break;
2798       case Bytecodes::_frem           : arithmetic_op(floatType , code); break;
2799       case Bytecodes::_drem           : arithmetic_op(doubleType, code); break;
2800       case Bytecodes::_ineg           : negate_op(intType   ); break;
2801       case Bytecodes::_lneg           : negate_op(longType  ); break;
2802       case Bytecodes::_fneg           : negate_op(floatType ); break;
2803       case Bytecodes::_dneg           : negate_op(doubleType); break;
2804       case Bytecodes::_ishl           : shift_op(intType , code); break;
2805       case Bytecodes::_lshl           : shift_op(longType, code); break;
2806       case Bytecodes::_ishr           : shift_op(intType , code); break;
2807       case Bytecodes::_lshr           : shift_op(longType, code); break;
2808       case Bytecodes::_iushr          : shift_op(intType , code); break;
2809       case Bytecodes::_lushr          : shift_op(longType, code); break;
2810       case Bytecodes::_iand           : logic_op(intType , code); break;
2811       case Bytecodes::_land           : logic_op(longType, code); break;
2812       case Bytecodes::_ior            : logic_op(intType , code); break;
2813       case Bytecodes::_lor            : logic_op(longType, code); break;
2814       case Bytecodes::_ixor           : logic_op(intType , code); break;
2815       case Bytecodes::_lxor           : logic_op(longType, code); break;
2816       case Bytecodes::_iinc           : increment(); break;
2817       case Bytecodes::_i2l            : convert(code, T_INT   , T_LONG  ); break;
2818       case Bytecodes::_i2f            : convert(code, T_INT   , T_FLOAT ); break;
2819       case Bytecodes::_i2d            : convert(code, T_INT   , T_DOUBLE); break;
2820       case Bytecodes::_l2i            : convert(code, T_LONG  , T_INT   ); break;
2821       case Bytecodes::_l2f            : convert(code, T_LONG  , T_FLOAT ); break;
2822       case Bytecodes::_l2d            : convert(code, T_LONG  , T_DOUBLE); break;
2823       case Bytecodes::_f2i            : convert(code, T_FLOAT , T_INT   ); break;
2824       case Bytecodes::_f2l            : convert(code, T_FLOAT , T_LONG  ); break;
2825       case Bytecodes::_f2d            : convert(code, T_FLOAT , T_DOUBLE); break;
2826       case Bytecodes::_d2i            : convert(code, T_DOUBLE, T_INT   ); break;
2827       case Bytecodes::_d2l            : convert(code, T_DOUBLE, T_LONG  ); break;
2828       case Bytecodes::_d2f            : convert(code, T_DOUBLE, T_FLOAT ); break;
2829       case Bytecodes::_i2b            : convert(code, T_INT   , T_BYTE  ); break;
2830       case Bytecodes::_i2c            : convert(code, T_INT   , T_CHAR  ); break;
2831       case Bytecodes::_i2s            : convert(code, T_INT   , T_SHORT ); break;
2832       case Bytecodes::_lcmp           : compare_op(longType  , code); break;
2833       case Bytecodes::_fcmpl          : compare_op(floatType , code); break;
2834       case Bytecodes::_fcmpg          : compare_op(floatType , code); break;
2835       case Bytecodes::_dcmpl          : compare_op(doubleType, code); break;
2836       case Bytecodes::_dcmpg          : compare_op(doubleType, code); break;
2837       case Bytecodes::_ifeq           : if_zero(intType   , If::eql); break;
2838       case Bytecodes::_ifne           : if_zero(intType   , If::neq); break;
2839       case Bytecodes::_iflt           : if_zero(intType   , If::lss); break;
2840       case Bytecodes::_ifge           : if_zero(intType   , If::geq); break;
2841       case Bytecodes::_ifgt           : if_zero(intType   , If::gtr); break;
2842       case Bytecodes::_ifle           : if_zero(intType   , If::leq); break;
2843       case Bytecodes::_if_icmpeq      : if_same(intType   , If::eql); break;
2844       case Bytecodes::_if_icmpne      : if_same(intType   , If::neq); break;
2845       case Bytecodes::_if_icmplt      : if_same(intType   , If::lss); break;
2846       case Bytecodes::_if_icmpge      : if_same(intType   , If::geq); break;
2847       case Bytecodes::_if_icmpgt      : if_same(intType   , If::gtr); break;
2848       case Bytecodes::_if_icmple      : if_same(intType   , If::leq); break;
2849       case Bytecodes::_if_acmpeq      : if_same(objectType, If::eql); break;
2850       case Bytecodes::_if_acmpne      : if_same(objectType, If::neq); break;
2851       case Bytecodes::_goto           : _goto(s.cur_bci(), s.get_dest()); break;
2852       case Bytecodes::_jsr            : jsr(s.get_dest()); break;
2853       case Bytecodes::_ret            : ret(s.get_index()); break;
2854       case Bytecodes::_tableswitch    : table_switch(); break;
2855       case Bytecodes::_lookupswitch   : lookup_switch(); break;
2856       case Bytecodes::_ireturn        : method_return(ipop(), ignore_return); break;
2857       case Bytecodes::_lreturn        : method_return(lpop(), ignore_return); break;
2858       case Bytecodes::_freturn        : method_return(fpop(), ignore_return); break;
2859       case Bytecodes::_dreturn        : method_return(dpop(), ignore_return); break;
2860       case Bytecodes::_areturn        : method_return(apop(), ignore_return); break;
2861       case Bytecodes::_return         : method_return(NULL  , ignore_return); break;
2862       case Bytecodes::_getstatic      : // fall through
2863       case Bytecodes::_putstatic      : // fall through
2864       case Bytecodes::_getfield       : // fall through
2865       case Bytecodes::_putfield       : access_field(code); break;
2866       case Bytecodes::_invokevirtual  : // fall through
2867       case Bytecodes::_invokespecial  : // fall through
2868       case Bytecodes::_invokestatic   : // fall through
2869       case Bytecodes::_invokedynamic  : // fall through
2870       case Bytecodes::_invokeinterface: invoke(code); break;
2871       case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
2872       case Bytecodes::_newarray       : new_type_array(); break;
2873       case Bytecodes::_anewarray      : new_object_array(); break;
2874       case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
2875       case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
2876       case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
2877       case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
2878       case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
2879       case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
2880       case Bytecodes::_wide           : ShouldNotReachHere(); break;
2881       case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
2882       case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
2883       case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
2884       case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
2885       case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;
2886       case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", NULL);
2887       default                         : ShouldNotReachHere(); break;
2888     }
2889 
2890     if (log != NULL)
2891       log->clear_context(); // skip marker if nothing was printed
2892 
2893     // save current bci to setup Goto at the end
2894     prev_bci = s.cur_bci();
2895 
2896   }
2897   CHECK_BAILOUT_(NULL);
2898   // stop processing of this block (see try_inline_full)
2899   if (_skip_block) {
2900     _skip_block = false;
2901     assert(_last && _last->as_BlockEnd(), "");
2902     return _last->as_BlockEnd();
2903   }
2904   // if there are any, check if last instruction is a BlockEnd instruction
2905   BlockEnd* end = last()->as_BlockEnd();
2906   if (end == NULL) {
2907     // all blocks must end with a BlockEnd instruction => add a Goto
2908     end = new Goto(block_at(s.cur_bci()), false);
2909     append(end);
2910   }
2911   assert(end == last()->as_BlockEnd(), "inconsistency");
2912 
2913   assert(end->state() != NULL, "state must already be present");
2914   assert(end->as_Return() == NULL || end->as_Throw() == NULL || end->state()->stack_size() == 0, "stack not needed for return and throw");
2915 
2916   // connect to begin & set state
2917   // NOTE that inlining may have changed the block we are parsing
2918   block()->set_end(end);
2919   // propagate state
2920   for (int i = end->number_of_sux() - 1; i >= 0; i--) {
2921     BlockBegin* sux = end->sux_at(i);
2922     assert(sux->is_predecessor(block()), "predecessor missing");
2923     // be careful, bailout if bytecodes are strange
2924     if (!sux->try_merge(end->state())) BAILOUT_("block join failed", NULL);
2925     scope_data()->add_to_work_list(end->sux_at(i));
2926   }
2927 
2928   scope_data()->set_stream(NULL);
2929 
2930   // done
2931   return end;
2932 }
2933 
2934 
2935 void GraphBuilder::iterate_all_blocks(bool start_in_current_block_for_inlining) {
2936   do {
2937     if (start_in_current_block_for_inlining && !bailed_out()) {
2938       iterate_bytecodes_for_block(0);
2939       start_in_current_block_for_inlining = false;
2940     } else {
2941       BlockBegin* b;
2942       while ((b = scope_data()->remove_from_work_list()) != NULL) {
2943         if (!b->is_set(BlockBegin::was_visited_flag)) {
2944           if (b->is_set(BlockBegin::osr_entry_flag)) {
2945             // we're about to parse the osr entry block, so make sure
2946             // we setup the OSR edge leading into this block so that
2947             // Phis get setup correctly.
2948             setup_osr_entry_block();
2949             // this is no longer the osr entry block, so clear it.
2950             b->clear(BlockBegin::osr_entry_flag);
2951           }
2952           b->set(BlockBegin::was_visited_flag);
2953           connect_to_end(b);
2954         }
2955       }
2956     }
2957   } while (!bailed_out() && !scope_data()->is_work_list_empty());
2958 }
2959 
2960 
2961 bool GraphBuilder::_can_trap      [Bytecodes::number_of_java_codes];
2962 
2963 void GraphBuilder::initialize() {
2964   // the following bytecodes are assumed to potentially
2965   // throw exceptions in compiled code - note that e.g.
2966   // monitorexit & the return bytecodes do not throw
2967   // exceptions since monitor pairing proved that they
2968   // succeed (if monitor pairing succeeded)
2969   Bytecodes::Code can_trap_list[] =
2970     { Bytecodes::_ldc
2971     , Bytecodes::_ldc_w
2972     , Bytecodes::_ldc2_w
2973     , Bytecodes::_iaload
2974     , Bytecodes::_laload
2975     , Bytecodes::_faload
2976     , Bytecodes::_daload
2977     , Bytecodes::_aaload
2978     , Bytecodes::_baload
2979     , Bytecodes::_caload
2980     , Bytecodes::_saload
2981     , Bytecodes::_iastore
2982     , Bytecodes::_lastore
2983     , Bytecodes::_fastore
2984     , Bytecodes::_dastore
2985     , Bytecodes::_aastore
2986     , Bytecodes::_bastore
2987     , Bytecodes::_castore
2988     , Bytecodes::_sastore
2989     , Bytecodes::_idiv
2990     , Bytecodes::_ldiv
2991     , Bytecodes::_irem
2992     , Bytecodes::_lrem
2993     , Bytecodes::_getstatic
2994     , Bytecodes::_putstatic
2995     , Bytecodes::_getfield
2996     , Bytecodes::_putfield
2997     , Bytecodes::_invokevirtual
2998     , Bytecodes::_invokespecial
2999     , Bytecodes::_invokestatic
3000     , Bytecodes::_invokedynamic
3001     , Bytecodes::_invokeinterface
3002     , Bytecodes::_new
3003     , Bytecodes::_newarray
3004     , Bytecodes::_anewarray
3005     , Bytecodes::_arraylength
3006     , Bytecodes::_athrow
3007     , Bytecodes::_checkcast
3008     , Bytecodes::_instanceof
3009     , Bytecodes::_monitorenter
3010     , Bytecodes::_multianewarray
3011     };
3012 
3013   // inititialize trap tables
3014   for (int i = 0; i < Bytecodes::number_of_java_codes; i++) {
3015     _can_trap[i] = false;
3016   }
3017   // set standard trap info
3018   for (uint j = 0; j < ARRAY_SIZE(can_trap_list); j++) {
3019     _can_trap[can_trap_list[j]] = true;
3020   }
3021 }
3022 
3023 
3024 BlockBegin* GraphBuilder::header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state) {
3025   assert(entry->is_set(f), "entry/flag mismatch");
3026   // create header block
3027   BlockBegin* h = new BlockBegin(entry->bci());
3028   h->set_depth_first_number(0);
3029 
3030   Value l = h;
3031   BlockEnd* g = new Goto(entry, false);
3032   l->set_next(g, entry->bci());
3033   h->set_end(g);
3034   h->set(f);
3035   // setup header block end state
3036   ValueStack* s = state->copy(ValueStack::StateAfter, entry->bci()); // can use copy since stack is empty (=> no phis)
3037   assert(s->stack_is_empty(), "must have empty stack at entry point");
3038   g->set_state(s);
3039   return h;
3040 }
3041 
3042 
3043 
3044 BlockBegin* GraphBuilder::setup_start_block(int osr_bci, BlockBegin* std_entry, BlockBegin* osr_entry, ValueStack* state) {
3045   BlockBegin* start = new BlockBegin(0);
3046 
3047   // This code eliminates the empty start block at the beginning of
3048   // each method.  Previously, each method started with the
3049   // start-block created below, and this block was followed by the
3050   // header block that was always empty.  This header block is only
3051   // necesary if std_entry is also a backward branch target because
3052   // then phi functions may be necessary in the header block.  It's
3053   // also necessary when profiling so that there's a single block that
3054   // can increment the interpreter_invocation_count.
3055   BlockBegin* new_header_block;
3056   if (std_entry->number_of_preds() > 0 || count_invocations() || count_backedges()) {
3057     new_header_block = header_block(std_entry, BlockBegin::std_entry_flag, state);
3058   } else {
3059     new_header_block = std_entry;
3060   }
3061 
3062   // setup start block (root for the IR graph)
3063   Base* base =
3064     new Base(
3065       new_header_block,
3066       osr_entry
3067     );
3068   start->set_next(base, 0);
3069   start->set_end(base);
3070   // create & setup state for start block
3071   start->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
3072   base->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
3073 
3074   if (base->std_entry()->state() == NULL) {
3075     // setup states for header blocks
3076     base->std_entry()->merge(state);
3077   }
3078 
3079   assert(base->std_entry()->state() != NULL, "");
3080   return start;
3081 }
3082 
3083 
3084 void GraphBuilder::setup_osr_entry_block() {
3085   assert(compilation()->is_osr_compile(), "only for osrs");
3086 
3087   int osr_bci = compilation()->osr_bci();
3088   ciBytecodeStream s(method());
3089   s.reset_to_bci(osr_bci);
3090   s.next();
3091   scope_data()->set_stream(&s);
3092 
3093   // create a new block to be the osr setup code
3094   _osr_entry = new BlockBegin(osr_bci);
3095   _osr_entry->set(BlockBegin::osr_entry_flag);
3096   _osr_entry->set_depth_first_number(0);
3097   BlockBegin* target = bci2block()->at(osr_bci);
3098   assert(target != NULL && target->is_set(BlockBegin::osr_entry_flag), "must be there");
3099   // the osr entry has no values for locals
3100   ValueStack* state = target->state()->copy();
3101   _osr_entry->set_state(state);
3102 
3103   kill_all();
3104   _block = _osr_entry;
3105   _state = _osr_entry->state()->copy();
3106   assert(_state->bci() == osr_bci, "mismatch");
3107   _last  = _osr_entry;
3108   Value e = append(new OsrEntry());
3109   e->set_needs_null_check(false);
3110 
3111   // OSR buffer is
3112   //
3113   // locals[nlocals-1..0]
3114   // monitors[number_of_locks-1..0]
3115   //
3116   // locals is a direct copy of the interpreter frame so in the osr buffer
3117   // so first slot in the local array is the last local from the interpreter
3118   // and last slot is local[0] (receiver) from the interpreter
3119   //
3120   // Similarly with locks. The first lock slot in the osr buffer is the nth lock
3121   // from the interpreter frame, the nth lock slot in the osr buffer is 0th lock
3122   // in the interpreter frame (the method lock if a sync method)
3123 
3124   // Initialize monitors in the compiled activation.
3125 
3126   int index;
3127   Value local;
3128 
3129   // find all the locals that the interpreter thinks contain live oops
3130   const ResourceBitMap live_oops = method()->live_local_oops_at_bci(osr_bci);
3131 
3132   // compute the offset into the locals so that we can treat the buffer
3133   // as if the locals were still in the interpreter frame
3134   int locals_offset = BytesPerWord * (method()->max_locals() - 1);
3135   for_each_local_value(state, index, local) {
3136     int offset = locals_offset - (index + local->type()->size() - 1) * BytesPerWord;
3137     Value get;
3138     if (local->type()->is_object_kind() && !live_oops.at(index)) {
3139       // The interpreter thinks this local is dead but the compiler
3140       // doesn't so pretend that the interpreter passed in null.
3141       get = append(new Constant(objectNull));
3142     } else {
3143       get = append(new UnsafeGetRaw(as_BasicType(local->type()), e,
3144                                     append(new Constant(new IntConstant(offset))),
3145                                     0,
3146                                     true /*unaligned*/, true /*wide*/));
3147     }
3148     _state->store_local(index, get);
3149   }
3150 
3151   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3152 
3153   assert(state->caller_state() == NULL, "should be top scope");
3154   state->clear_locals();
3155   Goto* g = new Goto(target, false);
3156   append(g);
3157   _osr_entry->set_end(g);
3158   target->merge(_osr_entry->end()->state());
3159 
3160   scope_data()->set_stream(NULL);
3161 }
3162 
3163 
3164 ValueStack* GraphBuilder::state_at_entry() {
3165   ValueStack* state = new ValueStack(scope(), NULL);
3166 
3167   // Set up locals for receiver
3168   int idx = 0;
3169   if (!method()->is_static()) {
3170     // we should always see the receiver
3171     state->store_local(idx, new Local(method()->holder(), objectType, idx, true));
3172     idx = 1;
3173   }
3174 
3175   // Set up locals for incoming arguments
3176   ciSignature* sig = method()->signature();
3177   for (int i = 0; i < sig->count(); i++) {
3178     ciType* type = sig->type_at(i);
3179     BasicType basic_type = type->basic_type();
3180     // don't allow T_ARRAY to propagate into locals types
3181     if (basic_type == T_ARRAY) basic_type = T_OBJECT;
3182     ValueType* vt = as_ValueType(basic_type);
3183     state->store_local(idx, new Local(type, vt, idx, false));
3184     idx += type->size();
3185   }
3186 
3187   // lock synchronized method
3188   if (method()->is_synchronized()) {
3189     state->lock(NULL);
3190   }
3191 
3192   return state;
3193 }
3194 
3195 
3196 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3197   : _scope_data(NULL)
3198   , _compilation(compilation)
3199   , _memory(new MemoryBuffer())
3200   , _inline_bailout_msg(NULL)
3201   , _instruction_count(0)
3202   , _osr_entry(NULL)
3203 {
3204   int osr_bci = compilation->osr_bci();
3205 
3206   // determine entry points and bci2block mapping
3207   BlockListBuilder blm(compilation, scope, osr_bci);
3208   CHECK_BAILOUT();
3209 
3210   BlockList* bci2block = blm.bci2block();
3211   BlockBegin* start_block = bci2block->at(0);
3212 
3213   push_root_scope(scope, bci2block, start_block);
3214 
3215   // setup state for std entry
3216   _initial_state = state_at_entry();
3217   start_block->merge(_initial_state);
3218 
3219   // complete graph
3220   _vmap        = new ValueMap();
3221   switch (scope->method()->intrinsic_id()) {
3222   case vmIntrinsics::_dabs          : // fall through
3223   case vmIntrinsics::_dsqrt         : // fall through
3224   case vmIntrinsics::_dsin          : // fall through
3225   case vmIntrinsics::_dcos          : // fall through
3226   case vmIntrinsics::_dtan          : // fall through
3227   case vmIntrinsics::_dlog          : // fall through
3228   case vmIntrinsics::_dlog10        : // fall through
3229   case vmIntrinsics::_dexp          : // fall through
3230   case vmIntrinsics::_dpow          : // fall through
3231     {
3232       // Compiles where the root method is an intrinsic need a special
3233       // compilation environment because the bytecodes for the method
3234       // shouldn't be parsed during the compilation, only the special
3235       // Intrinsic node should be emitted.  If this isn't done the the
3236       // code for the inlined version will be different than the root
3237       // compiled version which could lead to monotonicity problems on
3238       // intel.
3239       if (CheckIntrinsics && !scope->method()->intrinsic_candidate()) {
3240         BAILOUT("failed to inline intrinsic, method not annotated");
3241       }
3242 
3243       // Set up a stream so that appending instructions works properly.
3244       ciBytecodeStream s(scope->method());
3245       s.reset_to_bci(0);
3246       scope_data()->set_stream(&s);
3247       s.next();
3248 
3249       // setup the initial block state
3250       _block = start_block;
3251       _state = start_block->state()->copy_for_parsing();
3252       _last  = start_block;
3253       load_local(doubleType, 0);
3254       if (scope->method()->intrinsic_id() == vmIntrinsics::_dpow) {
3255         load_local(doubleType, 2);
3256       }
3257 
3258       // Emit the intrinsic node.
3259       bool result = try_inline_intrinsics(scope->method());
3260       if (!result) BAILOUT("failed to inline intrinsic");
3261       method_return(dpop());
3262 
3263       // connect the begin and end blocks and we're all done.
3264       BlockEnd* end = last()->as_BlockEnd();
3265       block()->set_end(end);
3266       break;
3267     }
3268 
3269   case vmIntrinsics::_Reference_get:
3270     {
3271       {
3272         // With java.lang.ref.reference.get() we must go through the
3273         // intrinsic - when G1 is enabled - even when get() is the root
3274         // method of the compile so that, if necessary, the value in
3275         // the referent field of the reference object gets recorded by
3276         // the pre-barrier code.
3277         // Specifically, if G1 is enabled, the value in the referent
3278         // field is recorded by the G1 SATB pre barrier. This will
3279         // result in the referent being marked live and the reference
3280         // object removed from the list of discovered references during
3281         // reference processing.
3282         if (CheckIntrinsics && !scope->method()->intrinsic_candidate()) {
3283           BAILOUT("failed to inline intrinsic, method not annotated");
3284         }
3285 
3286         // Also we need intrinsic to prevent commoning reads from this field
3287         // across safepoint since GC can change its value.
3288 
3289         // Set up a stream so that appending instructions works properly.
3290         ciBytecodeStream s(scope->method());
3291         s.reset_to_bci(0);
3292         scope_data()->set_stream(&s);
3293         s.next();
3294 
3295         // setup the initial block state
3296         _block = start_block;
3297         _state = start_block->state()->copy_for_parsing();
3298         _last  = start_block;
3299         load_local(objectType, 0);
3300 
3301         // Emit the intrinsic node.
3302         bool result = try_inline_intrinsics(scope->method());
3303         if (!result) BAILOUT("failed to inline intrinsic");
3304         method_return(apop());
3305 
3306         // connect the begin and end blocks and we're all done.
3307         BlockEnd* end = last()->as_BlockEnd();
3308         block()->set_end(end);
3309         break;
3310       }
3311       // Otherwise, fall thru
3312     }
3313 
3314   default:
3315     scope_data()->add_to_work_list(start_block);
3316     iterate_all_blocks();
3317     break;
3318   }
3319   CHECK_BAILOUT();
3320 
3321   _start = setup_start_block(osr_bci, start_block, _osr_entry, _initial_state);
3322 
3323   eliminate_redundant_phis(_start);
3324 
3325   NOT_PRODUCT(if (PrintValueNumbering && Verbose) print_stats());
3326   // for osr compile, bailout if some requirements are not fulfilled
3327   if (osr_bci != -1) {
3328     BlockBegin* osr_block = blm.bci2block()->at(osr_bci);
3329     if (!osr_block->is_set(BlockBegin::was_visited_flag)) {
3330       BAILOUT("osr entry must have been visited for osr compile");
3331     }
3332 
3333     // check if osr entry point has empty stack - we cannot handle non-empty stacks at osr entry points
3334     if (!osr_block->state()->stack_is_empty()) {
3335       BAILOUT("stack not empty at OSR entry point");
3336     }
3337   }
3338 #ifndef PRODUCT
3339   if (PrintCompilation && Verbose) tty->print_cr("Created %d Instructions", _instruction_count);
3340 #endif
3341 }
3342 
3343 
3344 ValueStack* GraphBuilder::copy_state_before() {
3345   return copy_state_before_with_bci(bci());
3346 }
3347 
3348 ValueStack* GraphBuilder::copy_state_exhandling() {
3349   return copy_state_exhandling_with_bci(bci());
3350 }
3351 
3352 ValueStack* GraphBuilder::copy_state_for_exception() {
3353   return copy_state_for_exception_with_bci(bci());
3354 }
3355 
3356 ValueStack* GraphBuilder::copy_state_before_with_bci(int bci) {
3357   return state()->copy(ValueStack::StateBefore, bci);
3358 }
3359 
3360 ValueStack* GraphBuilder::copy_state_exhandling_with_bci(int bci) {
3361   if (!has_handler()) return NULL;
3362   return state()->copy(ValueStack::StateBefore, bci);
3363 }
3364 
3365 ValueStack* GraphBuilder::copy_state_for_exception_with_bci(int bci) {
3366   ValueStack* s = copy_state_exhandling_with_bci(bci);
3367   if (s == NULL) {
3368     if (_compilation->env()->should_retain_local_variables()) {
3369       s = state()->copy(ValueStack::ExceptionState, bci);
3370     } else {
3371       s = state()->copy(ValueStack::EmptyExceptionState, bci);
3372     }
3373   }
3374   return s;
3375 }
3376 
3377 int GraphBuilder::recursive_inline_level(ciMethod* cur_callee) const {
3378   int recur_level = 0;
3379   for (IRScope* s = scope(); s != NULL; s = s->caller()) {
3380     if (s->method() == cur_callee) {
3381       ++recur_level;
3382     }
3383   }
3384   return recur_level;
3385 }
3386 
3387 
3388 bool GraphBuilder::try_inline(ciMethod* callee, bool holder_known, bool ignore_return, Bytecodes::Code bc, Value receiver) {
3389   const char* msg = NULL;
3390 
3391   // clear out any existing inline bailout condition
3392   clear_inline_bailout();
3393 
3394   // exclude methods we don't want to inline
3395   msg = should_not_inline(callee);
3396   if (msg != NULL) {
3397     print_inlining(callee, msg, /*success*/ false);
3398     return false;
3399   }
3400 
3401   // method handle invokes
3402   if (callee->is_method_handle_intrinsic()) {
3403     if (try_method_handle_inline(callee, ignore_return)) {
3404       if (callee->has_reserved_stack_access()) {
3405         compilation()->set_has_reserved_stack_access(true);
3406       }
3407       return true;
3408     }
3409     return false;
3410   }
3411 
3412   // handle intrinsics
3413   if (callee->intrinsic_id() != vmIntrinsics::_none &&
3414       (CheckIntrinsics ? callee->intrinsic_candidate() : true)) {
3415     if (try_inline_intrinsics(callee, ignore_return)) {
3416       print_inlining(callee, "intrinsic");
3417       if (callee->has_reserved_stack_access()) {
3418         compilation()->set_has_reserved_stack_access(true);
3419       }
3420       return true;
3421     }
3422     // try normal inlining
3423   }
3424 
3425   // certain methods cannot be parsed at all
3426   msg = check_can_parse(callee);
3427   if (msg != NULL) {
3428     print_inlining(callee, msg, /*success*/ false);
3429     return false;
3430   }
3431 
3432   // If bytecode not set use the current one.
3433   if (bc == Bytecodes::_illegal) {
3434     bc = code();
3435   }
3436   if (try_inline_full(callee, holder_known, ignore_return, bc, receiver)) {
3437     if (callee->has_reserved_stack_access()) {
3438       compilation()->set_has_reserved_stack_access(true);
3439     }
3440     return true;
3441   }
3442 
3443   // Entire compilation could fail during try_inline_full call.
3444   // In that case printing inlining decision info is useless.
3445   if (!bailed_out())
3446     print_inlining(callee, _inline_bailout_msg, /*success*/ false);
3447 
3448   return false;
3449 }
3450 
3451 
3452 const char* GraphBuilder::check_can_parse(ciMethod* callee) const {
3453   // Certain methods cannot be parsed at all:
3454   if ( callee->is_native())            return "native method";
3455   if ( callee->is_abstract())          return "abstract method";
3456   if (!callee->can_be_compiled())      return "not compilable (disabled)";
3457   if (!callee->can_be_parsed())        return "cannot be parsed";
3458   return NULL;
3459 }
3460 
3461 // negative filter: should callee NOT be inlined?  returns NULL, ok to inline, or rejection msg
3462 const char* GraphBuilder::should_not_inline(ciMethod* callee) const {
3463   if ( compilation()->directive()->should_not_inline(callee)) return "disallowed by CompileCommand";
3464   if ( callee->dont_inline())          return "don't inline by annotation";
3465   return NULL;
3466 }
3467 
3468 void GraphBuilder::build_graph_for_intrinsic(ciMethod* callee, bool ignore_return) {
3469   vmIntrinsics::ID id = callee->intrinsic_id();
3470   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
3471 
3472   // Some intrinsics need special IR nodes.
3473   switch(id) {
3474   case vmIntrinsics::_getObject          : append_unsafe_get_obj(callee, T_OBJECT,  false); return;
3475   case vmIntrinsics::_getBoolean         : append_unsafe_get_obj(callee, T_BOOLEAN, false); return;
3476   case vmIntrinsics::_getByte            : append_unsafe_get_obj(callee, T_BYTE,    false); return;
3477   case vmIntrinsics::_getShort           : append_unsafe_get_obj(callee, T_SHORT,   false); return;
3478   case vmIntrinsics::_getChar            : append_unsafe_get_obj(callee, T_CHAR,    false); return;
3479   case vmIntrinsics::_getInt             : append_unsafe_get_obj(callee, T_INT,     false); return;
3480   case vmIntrinsics::_getLong            : append_unsafe_get_obj(callee, T_LONG,    false); return;
3481   case vmIntrinsics::_getFloat           : append_unsafe_get_obj(callee, T_FLOAT,   false); return;
3482   case vmIntrinsics::_getDouble          : append_unsafe_get_obj(callee, T_DOUBLE,  false); return;
3483   case vmIntrinsics::_putObject          : append_unsafe_put_obj(callee, T_OBJECT,  false); return;
3484   case vmIntrinsics::_putBoolean         : append_unsafe_put_obj(callee, T_BOOLEAN, false); return;
3485   case vmIntrinsics::_putByte            : append_unsafe_put_obj(callee, T_BYTE,    false); return;
3486   case vmIntrinsics::_putShort           : append_unsafe_put_obj(callee, T_SHORT,   false); return;
3487   case vmIntrinsics::_putChar            : append_unsafe_put_obj(callee, T_CHAR,    false); return;
3488   case vmIntrinsics::_putInt             : append_unsafe_put_obj(callee, T_INT,     false); return;
3489   case vmIntrinsics::_putLong            : append_unsafe_put_obj(callee, T_LONG,    false); return;
3490   case vmIntrinsics::_putFloat           : append_unsafe_put_obj(callee, T_FLOAT,   false); return;
3491   case vmIntrinsics::_putDouble          : append_unsafe_put_obj(callee, T_DOUBLE,  false); return;
3492   case vmIntrinsics::_getShortUnaligned  : append_unsafe_get_obj(callee, T_SHORT,   false); return;
3493   case vmIntrinsics::_getCharUnaligned   : append_unsafe_get_obj(callee, T_CHAR,    false); return;
3494   case vmIntrinsics::_getIntUnaligned    : append_unsafe_get_obj(callee, T_INT,     false); return;
3495   case vmIntrinsics::_getLongUnaligned   : append_unsafe_get_obj(callee, T_LONG,    false); return;
3496   case vmIntrinsics::_putShortUnaligned  : append_unsafe_put_obj(callee, T_SHORT,   false); return;
3497   case vmIntrinsics::_putCharUnaligned   : append_unsafe_put_obj(callee, T_CHAR,    false); return;
3498   case vmIntrinsics::_putIntUnaligned    : append_unsafe_put_obj(callee, T_INT,     false); return;
3499   case vmIntrinsics::_putLongUnaligned   : append_unsafe_put_obj(callee, T_LONG,    false); return;
3500   case vmIntrinsics::_getObjectVolatile  : append_unsafe_get_obj(callee, T_OBJECT,  true); return;
3501   case vmIntrinsics::_getBooleanVolatile : append_unsafe_get_obj(callee, T_BOOLEAN, true); return;
3502   case vmIntrinsics::_getByteVolatile    : append_unsafe_get_obj(callee, T_BYTE,    true); return;
3503   case vmIntrinsics::_getShortVolatile   : append_unsafe_get_obj(callee, T_SHORT,   true); return;
3504   case vmIntrinsics::_getCharVolatile    : append_unsafe_get_obj(callee, T_CHAR,    true); return;
3505   case vmIntrinsics::_getIntVolatile     : append_unsafe_get_obj(callee, T_INT,     true); return;
3506   case vmIntrinsics::_getLongVolatile    : append_unsafe_get_obj(callee, T_LONG,    true); return;
3507   case vmIntrinsics::_getFloatVolatile   : append_unsafe_get_obj(callee, T_FLOAT,   true); return;
3508   case vmIntrinsics::_getDoubleVolatile  : append_unsafe_get_obj(callee, T_DOUBLE,  true); return;
3509   case vmIntrinsics::_putObjectVolatile  : append_unsafe_put_obj(callee, T_OBJECT,  true); return;
3510   case vmIntrinsics::_putBooleanVolatile : append_unsafe_put_obj(callee, T_BOOLEAN, true); return;
3511   case vmIntrinsics::_putByteVolatile    : append_unsafe_put_obj(callee, T_BYTE,    true); return;
3512   case vmIntrinsics::_putShortVolatile   : append_unsafe_put_obj(callee, T_SHORT,   true); return;
3513   case vmIntrinsics::_putCharVolatile    : append_unsafe_put_obj(callee, T_CHAR,    true); return;
3514   case vmIntrinsics::_putIntVolatile     : append_unsafe_put_obj(callee, T_INT,     true); return;
3515   case vmIntrinsics::_putLongVolatile    : append_unsafe_put_obj(callee, T_LONG,    true); return;
3516   case vmIntrinsics::_putFloatVolatile   : append_unsafe_put_obj(callee, T_FLOAT,   true); return;
3517   case vmIntrinsics::_putDoubleVolatile  : append_unsafe_put_obj(callee, T_DOUBLE,  true); return;
3518   case vmIntrinsics::_compareAndSetLong:
3519   case vmIntrinsics::_compareAndSetInt:
3520   case vmIntrinsics::_compareAndSetObject: append_unsafe_CAS(callee); return;
3521   case vmIntrinsics::_getAndAddInt:
3522   case vmIntrinsics::_getAndAddLong      : append_unsafe_get_and_set_obj(callee, true); return;
3523   case vmIntrinsics::_getAndSetInt       :
3524   case vmIntrinsics::_getAndSetLong      :
3525   case vmIntrinsics::_getAndSetObject    : append_unsafe_get_and_set_obj(callee, false); return;
3526   case vmIntrinsics::_getCharStringU     : append_char_access(callee, false); return;
3527   case vmIntrinsics::_putCharStringU     : append_char_access(callee, true); return;
3528   default:
3529     break;
3530   }
3531 
3532   // create intrinsic node
3533   const bool has_receiver = !callee->is_static();
3534   ValueType* result_type = as_ValueType(callee->return_type());
3535   ValueStack* state_before = copy_state_for_exception();
3536 
3537   Values* args = state()->pop_arguments(callee->arg_size());
3538 
3539   if (is_profiling()) {
3540     // Don't profile in the special case where the root method
3541     // is the intrinsic
3542     if (callee != method()) {
3543       // Note that we'd collect profile data in this method if we wanted it.
3544       compilation()->set_would_profile(true);
3545       if (profile_calls()) {
3546         Value recv = NULL;
3547         if (has_receiver) {
3548           recv = args->at(0);
3549           null_check(recv);
3550         }
3551         profile_call(callee, recv, NULL, collect_args_for_profiling(args, callee, true), true);
3552       }
3553     }
3554   }
3555 
3556   Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(),
3557                                     args, has_receiver, state_before,
3558                                     vmIntrinsics::preserves_state(id),
3559                                     vmIntrinsics::can_trap(id));
3560   // append instruction & push result
3561   Value value = append_split(result);
3562   if (result_type != voidType && !ignore_return) {
3563     push(result_type, value);
3564   }
3565 
3566   if (callee != method() && profile_return() && result_type->is_object_kind()) {
3567     profile_return_type(result, callee);
3568   }
3569 }
3570 
3571 bool GraphBuilder::try_inline_intrinsics(ciMethod* callee, bool ignore_return) {
3572   // Not a trivial method because C2 may do intrinsics better.
3573   compilation()->set_would_profile(true);
3574 
3575   // For calling is_intrinsic_available we need to transition to
3576   // the '_thread_in_vm' state because is_intrinsic_available()
3577   // accesses critical VM-internal data.
3578   bool is_available = false;
3579   {
3580     VM_ENTRY_MARK;
3581     methodHandle mh(THREAD, callee->get_Method());
3582     is_available = _compilation->compiler()->is_intrinsic_available(mh, _compilation->directive());
3583   }
3584 
3585   if (!is_available) {
3586     if (!InlineNatives) {
3587       // Return false and also set message that the inlining of
3588       // intrinsics has been disabled in general.
3589       INLINE_BAILOUT("intrinsic method inlining disabled");
3590     } else {
3591       return false;
3592     }
3593   }
3594   build_graph_for_intrinsic(callee, ignore_return);
3595   return true;
3596 }
3597 
3598 
3599 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
3600   // Introduce a new callee continuation point - all Ret instructions
3601   // will be replaced with Gotos to this point.
3602   BlockBegin* cont = block_at(next_bci());
3603   assert(cont != NULL, "continuation must exist (BlockListBuilder starts a new block after a jsr");
3604 
3605   // Note: can not assign state to continuation yet, as we have to
3606   // pick up the state from the Ret instructions.
3607 
3608   // Push callee scope
3609   push_scope_for_jsr(cont, jsr_dest_bci);
3610 
3611   // Temporarily set up bytecode stream so we can append instructions
3612   // (only using the bci of this stream)
3613   scope_data()->set_stream(scope_data()->parent()->stream());
3614 
3615   BlockBegin* jsr_start_block = block_at(jsr_dest_bci);
3616   assert(jsr_start_block != NULL, "jsr start block must exist");
3617   assert(!jsr_start_block->is_set(BlockBegin::was_visited_flag), "should not have visited jsr yet");
3618   Goto* goto_sub = new Goto(jsr_start_block, false);
3619   // Must copy state to avoid wrong sharing when parsing bytecodes
3620   assert(jsr_start_block->state() == NULL, "should have fresh jsr starting block");
3621   jsr_start_block->set_state(copy_state_before_with_bci(jsr_dest_bci));
3622   append(goto_sub);
3623   _block->set_end(goto_sub);
3624   _last = _block = jsr_start_block;
3625 
3626   // Clear out bytecode stream
3627   scope_data()->set_stream(NULL);
3628 
3629   scope_data()->add_to_work_list(jsr_start_block);
3630 
3631   // Ready to resume parsing in subroutine
3632   iterate_all_blocks();
3633 
3634   // If we bailed out during parsing, return immediately (this is bad news)
3635   CHECK_BAILOUT_(false);
3636 
3637   // Detect whether the continuation can actually be reached. If not,
3638   // it has not had state set by the join() operations in
3639   // iterate_bytecodes_for_block()/ret() and we should not touch the
3640   // iteration state. The calling activation of
3641   // iterate_bytecodes_for_block will then complete normally.
3642   if (cont->state() != NULL) {
3643     if (!cont->is_set(BlockBegin::was_visited_flag)) {
3644       // add continuation to work list instead of parsing it immediately
3645       scope_data()->parent()->add_to_work_list(cont);
3646     }
3647   }
3648 
3649   assert(jsr_continuation() == cont, "continuation must not have changed");
3650   assert(!jsr_continuation()->is_set(BlockBegin::was_visited_flag) ||
3651          jsr_continuation()->is_set(BlockBegin::parser_loop_header_flag),
3652          "continuation can only be visited in case of backward branches");
3653   assert(_last && _last->as_BlockEnd(), "block must have end");
3654 
3655   // continuation is in work list, so end iteration of current block
3656   _skip_block = true;
3657   pop_scope_for_jsr();
3658 
3659   return true;
3660 }
3661 
3662 
3663 // Inline the entry of a synchronized method as a monitor enter and
3664 // register the exception handler which releases the monitor if an
3665 // exception is thrown within the callee. Note that the monitor enter
3666 // cannot throw an exception itself, because the receiver is
3667 // guaranteed to be non-null by the explicit null check at the
3668 // beginning of inlining.
3669 void GraphBuilder::inline_sync_entry(Value lock, BlockBegin* sync_handler) {
3670   assert(lock != NULL && sync_handler != NULL, "lock or handler missing");
3671 
3672   monitorenter(lock, SynchronizationEntryBCI);
3673   assert(_last->as_MonitorEnter() != NULL, "monitor enter expected");
3674   _last->set_needs_null_check(false);
3675 
3676   sync_handler->set(BlockBegin::exception_entry_flag);
3677   sync_handler->set(BlockBegin::is_on_work_list_flag);
3678 
3679   ciExceptionHandler* desc = new ciExceptionHandler(method()->holder(), 0, method()->code_size(), -1, 0);
3680   XHandler* h = new XHandler(desc);
3681   h->set_entry_block(sync_handler);
3682   scope_data()->xhandlers()->append(h);
3683   scope_data()->set_has_handler();
3684 }
3685 
3686 
3687 // If an exception is thrown and not handled within an inlined
3688 // synchronized method, the monitor must be released before the
3689 // exception is rethrown in the outer scope. Generate the appropriate
3690 // instructions here.
3691 void GraphBuilder::fill_sync_handler(Value lock, BlockBegin* sync_handler, bool default_handler) {
3692   BlockBegin* orig_block = _block;
3693   ValueStack* orig_state = _state;
3694   Instruction* orig_last = _last;
3695   _last = _block = sync_handler;
3696   _state = sync_handler->state()->copy();
3697 
3698   assert(sync_handler != NULL, "handler missing");
3699   assert(!sync_handler->is_set(BlockBegin::was_visited_flag), "is visited here");
3700 
3701   assert(lock != NULL || default_handler, "lock or handler missing");
3702 
3703   XHandler* h = scope_data()->xhandlers()->remove_last();
3704   assert(h->entry_block() == sync_handler, "corrupt list of handlers");
3705 
3706   block()->set(BlockBegin::was_visited_flag);
3707   Value exception = append_with_bci(new ExceptionObject(), SynchronizationEntryBCI);
3708   assert(exception->is_pinned(), "must be");
3709 
3710   int bci = SynchronizationEntryBCI;
3711   if (compilation()->env()->dtrace_method_probes()) {
3712     // Report exit from inline methods.  We don't have a stream here
3713     // so pass an explicit bci of SynchronizationEntryBCI.
3714     Values* args = new Values(1);
3715     args->push(append_with_bci(new Constant(new MethodConstant(method())), bci));
3716     append_with_bci(new RuntimeCall(voidType, "dtrace_method_exit", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), args), bci);
3717   }
3718 
3719   if (lock) {
3720     assert(state()->locks_size() > 0 && state()->lock_at(state()->locks_size() - 1) == lock, "lock is missing");
3721     if (!lock->is_linked()) {
3722       lock = append_with_bci(lock, bci);
3723     }
3724 
3725     // exit the monitor in the context of the synchronized method
3726     monitorexit(lock, bci);
3727 
3728     // exit the context of the synchronized method
3729     if (!default_handler) {
3730       pop_scope();
3731       bci = _state->caller_state()->bci();
3732       _state = _state->caller_state()->copy_for_parsing();
3733     }
3734   }
3735 
3736   // perform the throw as if at the the call site
3737   apush(exception);
3738   throw_op(bci);
3739 
3740   BlockEnd* end = last()->as_BlockEnd();
3741   block()->set_end(end);
3742 
3743   _block = orig_block;
3744   _state = orig_state;
3745   _last = orig_last;
3746 }
3747 
3748 
3749 bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known, bool ignore_return, Bytecodes::Code bc, Value receiver) {
3750   assert(!callee->is_native(), "callee must not be native");
3751   if (CompilationPolicy::policy()->should_not_inline(compilation()->env(), callee)) {
3752     INLINE_BAILOUT("inlining prohibited by policy");
3753   }
3754   // first perform tests of things it's not possible to inline
3755   if (callee->has_exception_handlers() &&
3756       !InlineMethodsWithExceptionHandlers) INLINE_BAILOUT("callee has exception handlers");
3757   if (callee->is_synchronized() &&
3758       !InlineSynchronizedMethods         ) INLINE_BAILOUT("callee is synchronized");
3759   if (!callee->holder()->is_initialized()) INLINE_BAILOUT("callee's klass not initialized yet");
3760   if (!callee->has_balanced_monitors())    INLINE_BAILOUT("callee's monitors do not match");
3761 
3762   // Proper inlining of methods with jsrs requires a little more work.
3763   if (callee->has_jsrs()                 ) INLINE_BAILOUT("jsrs not handled properly by inliner yet");
3764 
3765   // When SSE2 is used on intel, then no special handling is needed
3766   // for strictfp because the enum-constant is fixed at compile time,
3767   // the check for UseSSE2 is needed here
3768   if (strict_fp_requires_explicit_rounding && UseSSE < 2 && method()->is_strict() != callee->is_strict()) {
3769     INLINE_BAILOUT("caller and callee have different strict fp requirements");
3770   }
3771 
3772   if (is_profiling() && !callee->ensure_method_data()) {
3773     INLINE_BAILOUT("mdo allocation failed");
3774   }
3775 
3776   // now perform tests that are based on flag settings
3777   bool inlinee_by_directive = compilation()->directive()->should_inline(callee);
3778   if (callee->force_inline() || inlinee_by_directive) {
3779     if (inline_level() > MaxForceInlineLevel                    ) INLINE_BAILOUT("MaxForceInlineLevel");
3780     if (recursive_inline_level(callee) > MaxRecursiveInlineLevel) INLINE_BAILOUT("recursive inlining too deep");
3781 
3782     const char* msg = "";
3783     if (callee->force_inline())  msg = "force inline by annotation";
3784     if (inlinee_by_directive)    msg = "force inline by CompileCommand";
3785     print_inlining(callee, msg);
3786   } else {
3787     // use heuristic controls on inlining
3788     if (inline_level() > MaxInlineLevel                         ) INLINE_BAILOUT("inlining too deep");
3789     if (recursive_inline_level(callee) > MaxRecursiveInlineLevel) INLINE_BAILOUT("recursive inlining too deep");
3790     if (callee->code_size_for_inlining() > max_inline_size()    ) INLINE_BAILOUT("callee is too large");
3791 
3792     // don't inline throwable methods unless the inlining tree is rooted in a throwable class
3793     if (callee->name() == ciSymbol::object_initializer_name() &&
3794         callee->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {
3795       // Throwable constructor call
3796       IRScope* top = scope();
3797       while (top->caller() != NULL) {
3798         top = top->caller();
3799       }
3800       if (!top->method()->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {
3801         INLINE_BAILOUT("don't inline Throwable constructors");
3802       }
3803     }
3804 
3805     if (compilation()->env()->num_inlined_bytecodes() > DesiredMethodLimit) {
3806       INLINE_BAILOUT("total inlining greater than DesiredMethodLimit");
3807     }
3808     // printing
3809     print_inlining(callee);
3810   }
3811 
3812   // NOTE: Bailouts from this point on, which occur at the
3813   // GraphBuilder level, do not cause bailout just of the inlining but
3814   // in fact of the entire compilation.
3815 
3816   BlockBegin* orig_block = block();
3817 
3818   const bool is_invokedynamic = bc == Bytecodes::_invokedynamic;
3819   const bool has_receiver = (bc != Bytecodes::_invokestatic && !is_invokedynamic);
3820 
3821   const int args_base = state()->stack_size() - callee->arg_size();
3822   assert(args_base >= 0, "stack underflow during inlining");
3823 
3824   // Insert null check if necessary
3825   Value recv = NULL;
3826   if (has_receiver) {
3827     // note: null check must happen even if first instruction of callee does
3828     //       an implicit null check since the callee is in a different scope
3829     //       and we must make sure exception handling does the right thing
3830     assert(!callee->is_static(), "callee must not be static");
3831     assert(callee->arg_size() > 0, "must have at least a receiver");
3832     recv = state()->stack_at(args_base);
3833     null_check(recv);
3834   }
3835 
3836   if (is_profiling()) {
3837     // Note that we'd collect profile data in this method if we wanted it.
3838     // this may be redundant here...
3839     compilation()->set_would_profile(true);
3840 
3841     if (profile_calls()) {
3842       int start = 0;
3843       Values* obj_args = args_list_for_profiling(callee, start, has_receiver);
3844       if (obj_args != NULL) {
3845         int s = obj_args->max_length();
3846         // if called through method handle invoke, some arguments may have been popped
3847         for (int i = args_base+start, j = 0; j < obj_args->max_length() && i < state()->stack_size(); ) {
3848           Value v = state()->stack_at_inc(i);
3849           if (v->type()->is_object_kind()) {
3850             obj_args->push(v);
3851             j++;
3852           }
3853         }
3854         check_args_for_profiling(obj_args, s);
3855       }
3856       profile_call(callee, recv, holder_known ? callee->holder() : NULL, obj_args, true);
3857     }
3858   }
3859 
3860   // Introduce a new callee continuation point - if the callee has
3861   // more than one return instruction or the return does not allow
3862   // fall-through of control flow, all return instructions of the
3863   // callee will need to be replaced by Goto's pointing to this
3864   // continuation point.
3865   BlockBegin* cont = block_at(next_bci());
3866   bool continuation_existed = true;
3867   if (cont == NULL) {
3868     cont = new BlockBegin(next_bci());
3869     // low number so that continuation gets parsed as early as possible
3870     cont->set_depth_first_number(0);
3871     if (PrintInitialBlockList) {
3872       tty->print_cr("CFG: created block %d (bci %d) as continuation for inline at bci %d",
3873                     cont->block_id(), cont->bci(), bci());
3874     }
3875     continuation_existed = false;
3876   }
3877   // Record number of predecessors of continuation block before
3878   // inlining, to detect if inlined method has edges to its
3879   // continuation after inlining.
3880   int continuation_preds = cont->number_of_preds();
3881 
3882   // Push callee scope
3883   push_scope(callee, cont);
3884 
3885   // the BlockListBuilder for the callee could have bailed out
3886   if (bailed_out())
3887       return false;
3888 
3889   // Temporarily set up bytecode stream so we can append instructions
3890   // (only using the bci of this stream)
3891   scope_data()->set_stream(scope_data()->parent()->stream());
3892 
3893   // Pass parameters into callee state: add assignments
3894   // note: this will also ensure that all arguments are computed before being passed
3895   ValueStack* callee_state = state();
3896   ValueStack* caller_state = state()->caller_state();
3897   for (int i = args_base; i < caller_state->stack_size(); ) {
3898     const int arg_no = i - args_base;
3899     Value arg = caller_state->stack_at_inc(i);
3900     store_local(callee_state, arg, arg_no);
3901   }
3902 
3903   // Remove args from stack.
3904   // Note that we preserve locals state in case we can use it later
3905   // (see use of pop_scope() below)
3906   caller_state->truncate_stack(args_base);
3907   assert(callee_state->stack_size() == 0, "callee stack must be empty");
3908 
3909   Value lock = NULL;
3910   BlockBegin* sync_handler = NULL;
3911 
3912   // Inline the locking of the receiver if the callee is synchronized
3913   if (callee->is_synchronized()) {
3914     lock = callee->is_static() ? append(new Constant(new InstanceConstant(callee->holder()->java_mirror())))
3915                                : state()->local_at(0);
3916     sync_handler = new BlockBegin(SynchronizationEntryBCI);
3917     inline_sync_entry(lock, sync_handler);
3918   }
3919 
3920   if (compilation()->env()->dtrace_method_probes()) {
3921     Values* args = new Values(1);
3922     args->push(append(new Constant(new MethodConstant(method()))));
3923     append(new RuntimeCall(voidType, "dtrace_method_entry", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), args));
3924   }
3925 
3926   if (profile_inlined_calls()) {
3927     profile_invocation(callee, copy_state_before_with_bci(SynchronizationEntryBCI));
3928   }
3929 
3930   BlockBegin* callee_start_block = block_at(0);
3931   if (callee_start_block != NULL) {
3932     assert(callee_start_block->is_set(BlockBegin::parser_loop_header_flag), "must be loop header");
3933     Goto* goto_callee = new Goto(callee_start_block, false);
3934     // The state for this goto is in the scope of the callee, so use
3935     // the entry bci for the callee instead of the call site bci.
3936     append_with_bci(goto_callee, 0);
3937     _block->set_end(goto_callee);
3938     callee_start_block->merge(callee_state);
3939 
3940     _last = _block = callee_start_block;
3941 
3942     scope_data()->add_to_work_list(callee_start_block);
3943   }
3944 
3945   // Clear out bytecode stream
3946   scope_data()->set_stream(NULL);
3947   scope_data()->set_ignore_return(ignore_return);
3948 
3949   CompileLog* log = compilation()->log();
3950   if (log != NULL) log->head("parse method='%d'", log->identify(callee));
3951 
3952   // Ready to resume parsing in callee (either in the same block we
3953   // were in before or in the callee's start block)
3954   iterate_all_blocks(callee_start_block == NULL);
3955 
3956   if (log != NULL) log->done("parse");
3957 
3958   // If we bailed out during parsing, return immediately (this is bad news)
3959   if (bailed_out())
3960       return false;
3961 
3962   // iterate_all_blocks theoretically traverses in random order; in
3963   // practice, we have only traversed the continuation if we are
3964   // inlining into a subroutine
3965   assert(continuation_existed ||
3966          !continuation()->is_set(BlockBegin::was_visited_flag),
3967          "continuation should not have been parsed yet if we created it");
3968 
3969   // At this point we are almost ready to return and resume parsing of
3970   // the caller back in the GraphBuilder. The only thing we want to do
3971   // first is an optimization: during parsing of the callee we
3972   // generated at least one Goto to the continuation block. If we
3973   // generated exactly one, and if the inlined method spanned exactly
3974   // one block (and we didn't have to Goto its entry), then we snip
3975   // off the Goto to the continuation, allowing control to fall
3976   // through back into the caller block and effectively performing
3977   // block merging. This allows load elimination and CSE to take place
3978   // across multiple callee scopes if they are relatively simple, and
3979   // is currently essential to making inlining profitable.
3980   if (num_returns() == 1
3981       && block() == orig_block
3982       && block() == inline_cleanup_block()) {
3983     _last  = inline_cleanup_return_prev();
3984     _state = inline_cleanup_state();
3985   } else if (continuation_preds == cont->number_of_preds()) {
3986     // Inlining caused that the instructions after the invoke in the
3987     // caller are not reachable any more. So skip filling this block
3988     // with instructions!
3989     assert(cont == continuation(), "");
3990     assert(_last && _last->as_BlockEnd(), "");
3991     _skip_block = true;
3992   } else {
3993     // Resume parsing in continuation block unless it was already parsed.
3994     // Note that if we don't change _last here, iteration in
3995     // iterate_bytecodes_for_block will stop when we return.
3996     if (!continuation()->is_set(BlockBegin::was_visited_flag)) {
3997       // add continuation to work list instead of parsing it immediately
3998       assert(_last && _last->as_BlockEnd(), "");
3999       scope_data()->parent()->add_to_work_list(continuation());
4000       _skip_block = true;
4001     }
4002   }
4003 
4004   // Fill the exception handler for synchronized methods with instructions
4005   if (callee->is_synchronized() && sync_handler->state() != NULL) {
4006     fill_sync_handler(lock, sync_handler);
4007   } else {
4008     pop_scope();
4009   }
4010 
4011   compilation()->notice_inlined_method(callee);
4012 
4013   return true;
4014 }
4015 
4016 
4017 bool GraphBuilder::try_method_handle_inline(ciMethod* callee, bool ignore_return) {
4018   ValueStack* state_before = copy_state_before();
4019   vmIntrinsics::ID iid = callee->intrinsic_id();
4020   switch (iid) {
4021   case vmIntrinsics::_invokeBasic:
4022     {
4023       // get MethodHandle receiver
4024       const int args_base = state()->stack_size() - callee->arg_size();
4025       ValueType* type = state()->stack_at(args_base)->type();
4026       if (type->is_constant()) {
4027         ciMethod* target = type->as_ObjectType()->constant_value()->as_method_handle()->get_vmtarget();
4028         // We don't do CHA here so only inline static and statically bindable methods.
4029         if (target->is_static() || target->can_be_statically_bound()) {
4030           if (ciMethod::is_consistent_info(callee, target)) {
4031             Bytecodes::Code bc = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokevirtual;
4032             ignore_return = ignore_return || (callee->return_type()->is_void() && !target->return_type()->is_void());
4033             if (try_inline(target, /*holder_known*/ true, ignore_return, bc)) {
4034               return true;
4035             }
4036           } else {
4037             print_inlining(target, "signatures mismatch", /*success*/ false);
4038           }
4039         } else {
4040           print_inlining(target, "not static or statically bindable", /*success*/ false);
4041         }
4042       } else {
4043         print_inlining(callee, "receiver not constant", /*success*/ false);
4044       }
4045     }
4046     break;
4047 
4048   case vmIntrinsics::_linkToVirtual:
4049   case vmIntrinsics::_linkToStatic:
4050   case vmIntrinsics::_linkToSpecial:
4051   case vmIntrinsics::_linkToInterface:
4052     {
4053       // pop MemberName argument
4054       const int args_base = state()->stack_size() - callee->arg_size();
4055       ValueType* type = apop()->type();
4056       if (type->is_constant()) {
4057         ciMethod* target = type->as_ObjectType()->constant_value()->as_member_name()->get_vmtarget();
4058         ignore_return = ignore_return || (callee->return_type()->is_void() && !target->return_type()->is_void());
4059         // If the target is another method handle invoke, try to recursively get
4060         // a better target.
4061         if (target->is_method_handle_intrinsic()) {
4062           if (try_method_handle_inline(target, ignore_return)) {
4063             return true;
4064           }
4065         } else if (!ciMethod::is_consistent_info(callee, target)) {
4066           print_inlining(target, "signatures mismatch", /*success*/ false);
4067         } else {
4068           ciSignature* signature = target->signature();
4069           const int receiver_skip = target->is_static() ? 0 : 1;
4070           // Cast receiver to its type.
4071           if (!target->is_static()) {
4072             ciKlass* tk = signature->accessing_klass();
4073             Value obj = state()->stack_at(args_base);
4074             if (obj->exact_type() == NULL &&
4075                 obj->declared_type() != tk && tk != compilation()->env()->Object_klass()) {
4076               TypeCast* c = new TypeCast(tk, obj, state_before);
4077               append(c);
4078               state()->stack_at_put(args_base, c);
4079             }
4080           }
4081           // Cast reference arguments to its type.
4082           for (int i = 0, j = 0; i < signature->count(); i++) {
4083             ciType* t = signature->type_at(i);
4084             if (t->is_klass()) {
4085               ciKlass* tk = t->as_klass();
4086               Value obj = state()->stack_at(args_base + receiver_skip + j);
4087               if (obj->exact_type() == NULL &&
4088                   obj->declared_type() != tk && tk != compilation()->env()->Object_klass()) {
4089                 TypeCast* c = new TypeCast(t, obj, state_before);
4090                 append(c);
4091                 state()->stack_at_put(args_base + receiver_skip + j, c);
4092               }
4093             }
4094             j += t->size();  // long and double take two slots
4095           }
4096           // We don't do CHA here so only inline static and statically bindable methods.
4097           if (target->is_static() || target->can_be_statically_bound()) {
4098             Bytecodes::Code bc = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokevirtual;
4099             if (try_inline(target, /*holder_known*/ true, ignore_return, bc)) {
4100               return true;
4101             }
4102           } else {
4103             print_inlining(target, "not static or statically bindable", /*success*/ false);
4104           }
4105         }
4106       } else {
4107         print_inlining(callee, "MemberName not constant", /*success*/ false);
4108       }
4109     }
4110     break;
4111 
4112   default:
4113     fatal("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid));
4114     break;
4115   }
4116   set_state(state_before->copy_for_parsing());
4117   return false;
4118 }
4119 
4120 
4121 void GraphBuilder::inline_bailout(const char* msg) {
4122   assert(msg != NULL, "inline bailout msg must exist");
4123   _inline_bailout_msg = msg;
4124 }
4125 
4126 
4127 void GraphBuilder::clear_inline_bailout() {
4128   _inline_bailout_msg = NULL;
4129 }
4130 
4131 
4132 void GraphBuilder::push_root_scope(IRScope* scope, BlockList* bci2block, BlockBegin* start) {
4133   ScopeData* data = new ScopeData(NULL);
4134   data->set_scope(scope);
4135   data->set_bci2block(bci2block);
4136   _scope_data = data;
4137   _block = start;
4138 }
4139 
4140 
4141 void GraphBuilder::push_scope(ciMethod* callee, BlockBegin* continuation) {
4142   IRScope* callee_scope = new IRScope(compilation(), scope(), bci(), callee, -1, false);
4143   scope()->add_callee(callee_scope);
4144 
4145   BlockListBuilder blb(compilation(), callee_scope, -1);
4146   CHECK_BAILOUT();
4147 
4148   if (!blb.bci2block()->at(0)->is_set(BlockBegin::parser_loop_header_flag)) {
4149     // this scope can be inlined directly into the caller so remove
4150     // the block at bci 0.
4151     blb.bci2block()->at_put(0, NULL);
4152   }
4153 
4154   set_state(new ValueStack(callee_scope, state()->copy(ValueStack::CallerState, bci())));
4155 
4156   ScopeData* data = new ScopeData(scope_data());
4157   data->set_scope(callee_scope);
4158   data->set_bci2block(blb.bci2block());
4159   data->set_continuation(continuation);
4160   _scope_data = data;
4161 }
4162 
4163 
4164 void GraphBuilder::push_scope_for_jsr(BlockBegin* jsr_continuation, int jsr_dest_bci) {
4165   ScopeData* data = new ScopeData(scope_data());
4166   data->set_parsing_jsr();
4167   data->set_jsr_entry_bci(jsr_dest_bci);
4168   data->set_jsr_return_address_local(-1);
4169   // Must clone bci2block list as we will be mutating it in order to
4170   // properly clone all blocks in jsr region as well as exception
4171   // handlers containing rets
4172   BlockList* new_bci2block = new BlockList(bci2block()->length());
4173   new_bci2block->appendAll(bci2block());
4174   data->set_bci2block(new_bci2block);
4175   data->set_scope(scope());
4176   data->setup_jsr_xhandlers();
4177   data->set_continuation(continuation());
4178   data->set_jsr_continuation(jsr_continuation);
4179   _scope_data = data;
4180 }
4181 
4182 
4183 void GraphBuilder::pop_scope() {
4184   int number_of_locks = scope()->number_of_locks();
4185   _scope_data = scope_data()->parent();
4186   // accumulate minimum number of monitor slots to be reserved
4187   scope()->set_min_number_of_locks(number_of_locks);
4188 }
4189 
4190 
4191 void GraphBuilder::pop_scope_for_jsr() {
4192   _scope_data = scope_data()->parent();
4193 }
4194 
4195 void GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
4196   Values* args = state()->pop_arguments(callee->arg_size());
4197   null_check(args->at(0));
4198   Instruction* offset = args->at(2);
4199 #ifndef _LP64
4200   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4201 #endif
4202   Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
4203   push(op->type(), op);
4204   compilation()->set_has_unsafe_access(true);
4205 }
4206 
4207 
4208 void GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
4209   Values* args = state()->pop_arguments(callee->arg_size());
4210   null_check(args->at(0));
4211   Instruction* offset = args->at(2);
4212 #ifndef _LP64
4213   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4214 #endif
4215   Value val = args->at(3);
4216   if (t == T_BOOLEAN) {
4217     Value mask = append(new Constant(new IntConstant(1)));
4218     val = append(new LogicOp(Bytecodes::_iand, val, mask));
4219   }
4220   Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, val, is_volatile));
4221   compilation()->set_has_unsafe_access(true);
4222   kill_all();
4223 }
4224 
4225 
4226 void GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
4227   Values* args = state()->pop_arguments(callee->arg_size());
4228   null_check(args->at(0));
4229   Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
4230   push(op->type(), op);
4231   compilation()->set_has_unsafe_access(true);
4232 }
4233 
4234 
4235 void GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
4236   Values* args = state()->pop_arguments(callee->arg_size());
4237   null_check(args->at(0));
4238   Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
4239   compilation()->set_has_unsafe_access(true);
4240 }
4241 
4242 
4243 void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
4244   ValueStack* state_before = copy_state_for_exception();
4245   ValueType* result_type = as_ValueType(callee->return_type());
4246   assert(result_type->is_int(), "int result");
4247   Values* args = state()->pop_arguments(callee->arg_size());
4248 
4249   // Pop off some args to specially handle, then push back
4250   Value newval = args->pop();
4251   Value cmpval = args->pop();
4252   Value offset = args->pop();
4253   Value src = args->pop();
4254   Value unsafe_obj = args->pop();
4255 
4256   // Separately handle the unsafe arg. It is not needed for code
4257   // generation, but must be null checked
4258   null_check(unsafe_obj);
4259 
4260 #ifndef _LP64
4261   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4262 #endif
4263 
4264   args->push(src);
4265   args->push(offset);
4266   args->push(cmpval);
4267   args->push(newval);
4268 
4269   // An unsafe CAS can alias with other field accesses, but we don't
4270   // know which ones so mark the state as no preserved.  This will
4271   // cause CSE to invalidate memory across it.
4272   bool preserves_state = false;
4273   Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(), args, false, state_before, preserves_state);
4274   append_split(result);
4275   push(result_type, result);
4276   compilation()->set_has_unsafe_access(true);
4277 }
4278 
4279 void GraphBuilder::append_char_access(ciMethod* callee, bool is_store) {
4280   // This intrinsic accesses byte[] array as char[] array. Computing the offsets
4281   // correctly requires matched array shapes.
4282   assert (arrayOopDesc::base_offset_in_bytes(T_CHAR) == arrayOopDesc::base_offset_in_bytes(T_BYTE),
4283           "sanity: byte[] and char[] bases agree");
4284   assert (type2aelembytes(T_CHAR) == type2aelembytes(T_BYTE)*2,
4285           "sanity: byte[] and char[] scales agree");
4286 
4287   ValueStack* state_before = copy_state_indexed_access();
4288   compilation()->set_has_access_indexed(true);
4289   Values* args = state()->pop_arguments(callee->arg_size());
4290   Value array = args->at(0);
4291   Value index = args->at(1);
4292   if (is_store) {
4293     Value value = args->at(2);
4294     Instruction* store = append(new StoreIndexed(array, index, NULL, T_CHAR, value, state_before, false, true));
4295     store->set_flag(Instruction::NeedsRangeCheckFlag, false);
4296     _memory->store_value(value);
4297   } else {
4298     Instruction* load = append(new LoadIndexed(array, index, NULL, T_CHAR, state_before, true));
4299     load->set_flag(Instruction::NeedsRangeCheckFlag, false);
4300     push(load->type(), load);
4301   }
4302 }
4303 
4304 static void post_inlining_event(EventCompilerInlining* event,
4305                                 int compile_id,
4306                                 const char* msg,
4307                                 bool success,
4308                                 int bci,
4309                                 ciMethod* caller,
4310                                 ciMethod* callee) {
4311   assert(caller != NULL, "invariant");
4312   assert(callee != NULL, "invariant");
4313   assert(event != NULL, "invariant");
4314   assert(event->should_commit(), "invariant");
4315   JfrStructCalleeMethod callee_struct;
4316   callee_struct.set_type(callee->holder()->name()->as_utf8());
4317   callee_struct.set_name(callee->name()->as_utf8());
4318   callee_struct.set_descriptor(callee->signature()->as_symbol()->as_utf8());
4319   event->set_compileId(compile_id);
4320   event->set_message(msg);
4321   event->set_succeeded(success);
4322   event->set_bci(bci);
4323   event->set_caller(caller->get_Method());
4324   event->set_callee(callee_struct);
4325   event->commit();
4326 }
4327 
4328 void GraphBuilder::print_inlining(ciMethod* callee, const char* msg, bool success) {
4329   CompileLog* log = compilation()->log();
4330   if (log != NULL) {
4331     if (success) {
4332       if (msg != NULL)
4333         log->inline_success(msg);
4334       else
4335         log->inline_success("receiver is statically known");
4336     } else {
4337       if (msg != NULL)
4338         log->inline_fail(msg);
4339       else
4340         log->inline_fail("reason unknown");
4341     }
4342   }
4343   EventCompilerInlining event;
4344   if (event.should_commit()) {
4345     post_inlining_event(&event, compilation()->env()->task()->compile_id(), msg, success, bci(), method(), callee);
4346   }
4347 
4348   CompileTask::print_inlining_ul(callee, scope()->level(), bci(), msg);
4349 
4350   if (!compilation()->directive()->PrintInliningOption) {
4351     return;
4352   }
4353   CompileTask::print_inlining_tty(callee, scope()->level(), bci(), msg);
4354   if (success && CIPrintMethodCodes) {
4355     callee->print_codes();
4356   }
4357 }
4358 
4359 void GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
4360   Values* args = state()->pop_arguments(callee->arg_size());
4361   BasicType t = callee->return_type()->basic_type();
4362   null_check(args->at(0));
4363   Instruction* offset = args->at(2);
4364 #ifndef _LP64
4365   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4366 #endif
4367   Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
4368   compilation()->set_has_unsafe_access(true);
4369   kill_all();
4370   push(op->type(), op);
4371 }
4372 
4373 #ifndef PRODUCT
4374 void GraphBuilder::print_stats() {
4375   vmap()->print();
4376 }
4377 #endif // PRODUCT
4378 
4379 void GraphBuilder::profile_call(ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) {
4380   assert(known_holder == NULL || (known_holder->is_instance_klass() &&
4381                                   (!known_holder->is_interface() ||
4382                                    ((ciInstanceKlass*)known_holder)->has_nonstatic_concrete_methods())), "should be non-static concrete method");
4383   if (known_holder != NULL) {
4384     if (known_holder->exact_klass() == NULL) {
4385       known_holder = compilation()->cha_exact_type(known_holder);
4386     }
4387   }
4388 
4389   append(new ProfileCall(method(), bci(), callee, recv, known_holder, obj_args, inlined));
4390 }
4391 
4392 void GraphBuilder::profile_return_type(Value ret, ciMethod* callee, ciMethod* m, int invoke_bci) {
4393   assert((m == NULL) == (invoke_bci < 0), "invalid method and invalid bci together");
4394   if (m == NULL) {
4395     m = method();
4396   }
4397   if (invoke_bci < 0) {
4398     invoke_bci = bci();
4399   }
4400   ciMethodData* md = m->method_data_or_null();
4401   ciProfileData* data = md->bci_to_data(invoke_bci);
4402   if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
4403     bool has_return = data->is_CallTypeData() ? ((ciCallTypeData*)data)->has_return() : ((ciVirtualCallTypeData*)data)->has_return();
4404     if (has_return) {
4405       append(new ProfileReturnType(m , invoke_bci, callee, ret));
4406     }
4407   }
4408 }
4409 
4410 void GraphBuilder::profile_invocation(ciMethod* callee, ValueStack* state) {
4411   append(new ProfileInvoke(callee, state));
4412 }