/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "compiler/disassembler.hpp"
#include "interpreter/bytecodeHistogram.hpp"
#include "interpreter/bytecodeInterpreter.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "interpreter/templateTable.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/resourceArea.hpp"
#include "oops/arrayOop.hpp"
#include "oops/methodData.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
#include "prims/forte.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/timer.hpp"

# define __ _masm->


//------------------------------------------------------------------------------------------------------------------------
// Implementation of InterpreterCodelet

void InterpreterCodelet::initialize(const char* description, Bytecodes::Code bytecode) {
  _description       = description;
  _bytecode          = bytecode;
}


void InterpreterCodelet::verify() {
}


void InterpreterCodelet::print_on(outputStream* st) const {
  ttyLocker ttyl;

  if (PrintInterpreter) {
    st->cr();
    st->print_cr("----------------------------------------------------------------------");
  }

  if (description() != NULL) st->print("%s  ", description());
  if (bytecode()    >= 0   ) st->print("%d %s  ", bytecode(), Bytecodes::name(bytecode()));
  st->print_cr("[" INTPTR_FORMAT ", " INTPTR_FORMAT "]  %d bytes",
               code_begin(), code_end(), code_size());

  if (PrintInterpreter) {
    st->cr();
    Disassembler::decode(code_begin(), code_end(), st, DEBUG_ONLY(_strings) NOT_DEBUG(CodeStrings()));
  }
}


//------------------------------------------------------------------------------------------------------------------------
// Implementation of platform independent aspects of Interpreter

void AbstractInterpreter::initialize() {
  if (_code != NULL) return;

  // make sure 'imported' classes are initialized
  if (CountBytecodes || TraceBytecodes || StopInterpreterAt) BytecodeCounter::reset();
  if (PrintBytecodeHistogram)                                BytecodeHistogram::reset();
  if (PrintBytecodePairHistogram)                            BytecodePairHistogram::reset();

  InvocationCounter::reinitialize(DelayCompilationDuringStartup);

}

void AbstractInterpreter::print() {
  tty->cr();
  tty->print_cr("----------------------------------------------------------------------");
  tty->print_cr("Interpreter");
  tty->cr();
  tty->print_cr("code size        = %6dK bytes", (int)_code->used_space()/1024);
  tty->print_cr("total space      = %6dK bytes", (int)_code->total_space()/1024);
  tty->print_cr("wasted space     = %6dK bytes", (int)_code->available_space()/1024);
  tty->cr();
  tty->print_cr("# of codelets    = %6d"      , _code->number_of_stubs());
  tty->print_cr("avg codelet size = %6d bytes", _code->used_space() / _code->number_of_stubs());
  tty->cr();
  _code->print();
  tty->print_cr("----------------------------------------------------------------------");
  tty->cr();
}


void interpreter_init() {
  Interpreter::initialize();
#ifndef PRODUCT
  if (TraceBytecodes) BytecodeTracer::set_closure(BytecodeTracer::std_closure());
#endif // PRODUCT
  // need to hit every safepoint in order to call zapping routine
  // register the interpreter
  Forte::register_stub(
    "Interpreter",
    AbstractInterpreter::code()->code_start(),
    AbstractInterpreter::code()->code_end()
  );

  // notify JVMTI profiler
  if (JvmtiExport::should_post_dynamic_code_generated()) {
    JvmtiExport::post_dynamic_code_generated("Interpreter",
                                             AbstractInterpreter::code()->code_start(),
                                             AbstractInterpreter::code()->code_end());
  }
}

//------------------------------------------------------------------------------------------------------------------------
// Implementation of interpreter

StubQueue* AbstractInterpreter::_code                                       = NULL;
bool       AbstractInterpreter::_notice_safepoints                          = false;
address    AbstractInterpreter::_rethrow_exception_entry                    = NULL;

address    AbstractInterpreter::_native_entry_begin                         = NULL;
address    AbstractInterpreter::_native_entry_end                           = NULL;
address    AbstractInterpreter::_slow_signature_handler;
address    AbstractInterpreter::_entry_table            [AbstractInterpreter::number_of_method_entries];
address    AbstractInterpreter::_native_abi_to_tosca    [AbstractInterpreter::number_of_result_handlers];

//------------------------------------------------------------------------------------------------------------------------
// Generation of complete interpreter

AbstractInterpreterGenerator::AbstractInterpreterGenerator(StubQueue* _code) {
  _masm                      = NULL;
}


static const BasicType types[Interpreter::number_of_result_handlers] = {
  T_BOOLEAN,
  T_CHAR   ,
  T_BYTE   ,
  T_SHORT  ,
  T_INT    ,
  T_LONG   ,
  T_VOID   ,
  T_FLOAT  ,
  T_DOUBLE ,
  T_OBJECT
};

void AbstractInterpreterGenerator::generate_all() {


  { CodeletMark cm(_masm, "slow signature handler");
    Interpreter::_slow_signature_handler = generate_slow_signature_handler();
  }

}

//------------------------------------------------------------------------------------------------------------------------
// Entry points

AbstractInterpreter::MethodKind AbstractInterpreter::method_kind(methodHandle m) {
  // Abstract method?
  if (m->is_abstract()) return abstract;

  // Method handle primitive?
  if (m->is_method_handle_intrinsic()) {
    vmIntrinsics::ID id = m->intrinsic_id();
    assert(MethodHandles::is_signature_polymorphic(id), "must match an intrinsic");
    MethodKind kind = (MethodKind)( method_handle_invoke_FIRST +
                                    ((int)id - vmIntrinsics::FIRST_MH_SIG_POLY) );
    assert(kind <= method_handle_invoke_LAST, "parallel enum ranges");
    return kind;
  }

#ifndef CC_INTERP
  if (UseCRC32Intrinsics && m->is_native()) {
    // Use optimized stub code for CRC32 native methods.
    switch (m->intrinsic_id()) {
      case vmIntrinsics::_updateCRC32            : return java_util_zip_CRC32_update;
      case vmIntrinsics::_updateBytesCRC32       : return java_util_zip_CRC32_updateBytes;
      case vmIntrinsics::_updateByteBufferCRC32  : return java_util_zip_CRC32_updateByteBuffer;
    }
  }
#endif

  // Native method?
  // Note: This test must come _before_ the test for intrinsic
  //       methods. See also comments below.
  if (m->is_native()) {
    assert(!m->is_method_handle_intrinsic(), "overlapping bits here, watch out");
    return m->is_synchronized() ? native_synchronized : native;
  }

  // Synchronized?
  if (m->is_synchronized()) {
    return zerolocals_synchronized;
  }

  if (RegisterFinalizersAtInit && m->code_size() == 1 &&
      m->intrinsic_id() == vmIntrinsics::_Object_init) {
    // We need to execute the special return bytecode to check for
    // finalizer registration so create a normal frame.
    return zerolocals;
  }

  // Empty method?
  if (m->is_empty_method()) {
    return empty;
  }

  // Special intrinsic method?
  // Note: This test must come _after_ the test for native methods,
  //       otherwise we will run into problems with JDK 1.2, see also
  //       AbstractInterpreterGenerator::generate_method_entry()
  //       for details.
  switch (m->intrinsic_id()) {
    case vmIntrinsics::_dsin  : return java_lang_math_sin  ;
    case vmIntrinsics::_dcos  : return java_lang_math_cos  ;
    case vmIntrinsics::_dtan  : return java_lang_math_tan  ;
    case vmIntrinsics::_dabs  : return java_lang_math_abs  ;
    case vmIntrinsics::_dsqrt : return java_lang_math_sqrt ;
    case vmIntrinsics::_dlog  : return java_lang_math_log  ;
    case vmIntrinsics::_dlog10: return java_lang_math_log10;
    case vmIntrinsics::_dpow  : return java_lang_math_pow  ;
    case vmIntrinsics::_dexp  : return java_lang_math_exp  ;

    case vmIntrinsics::_Reference_get:
                                return java_lang_ref_reference_get;
  }

  // Accessor method?
  if (m->is_accessor()) {
    assert(m->size_of_parameters() == 1, "fast code for accessors assumes parameter size = 1");
    return accessor;
  }

  // Note: for now: zero locals for all non-empty methods
  return zerolocals;
}
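
// Illustrative usage sketch (comment only, not part of this file's code):
// callers elsewhere in the runtime typically combine method_kind() with
// entry_for_kind() to pick the interpreted entry point for a method, roughly:
//
//   AbstractInterpreter::MethodKind kind = AbstractInterpreter::method_kind(mh);
//   address entry = AbstractInterpreter::entry_for_kind(kind);
//
// where entry_for_kind() simply indexes _entry_table (see set_entry_for_kind()
// below). The variable names above are assumptions for illustration; the real
// call sites live outside this file.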


void AbstractInterpreter::set_entry_for_kind(AbstractInterpreter::MethodKind kind, address entry) {
  assert(kind >= method_handle_invoke_FIRST &&
         kind <= method_handle_invoke_LAST, "late initialization only for MH entry points");
  assert(_entry_table[kind] == _entry_table[abstract], "previous value must be AME entry");
  _entry_table[kind] = entry;
}


// Return true if the interpreter can prove that the given bytecode has
// not yet been executed (in Java semantics, not in actual operation).
bool AbstractInterpreter::is_not_reached(methodHandle method, int bci) {
  Bytecodes::Code code = method()->code_at(bci);

  if (!Bytecodes::must_rewrite(code)) {
    // might have been reached
    return false;
  }

  // the bytecode might not be rewritten if the method is an accessor, etc.
  address ientry = method->interpreter_entry();
  if (ientry != entry_for_kind(AbstractInterpreter::zerolocals) &&
      ientry != entry_for_kind(AbstractInterpreter::zerolocals_synchronized))
    return false;  // interpreter does not run this method!

  // otherwise, we can be sure this bytecode has never been executed
  return true;
}
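
// Illustrative example of the reasoning above (comment only): if code_at(bci)
// is a bytecode that the interpreter always rewrites in place on its first
// execution (Bytecodes::must_rewrite(code)), and that bytecode is still in its
// original, un-rewritten form, and the method can only have been dispatched
// through the normal zerolocals entries (which actually interpret bytecodes),
// then the bytecode provably has not been executed yet. Special entries such
// as the accessor entry bypass bytecode interpretation entirely, which is why
// they force the conservative 'false' answer above.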


#ifndef PRODUCT
void AbstractInterpreter::print_method_kind(MethodKind kind) {
  switch (kind) {
    case zerolocals             : tty->print("zerolocals"             ); break;
    case zerolocals_synchronized: tty->print("zerolocals_synchronized"); break;
    case native                 : tty->print("native"                 ); break;
    case native_synchronized    : tty->print("native_synchronized"    ); break;
    case empty                  : tty->print("empty"                  ); break;
    case accessor               : tty->print("accessor"               ); break;
    case abstract               : tty->print("abstract"               ); break;
    case java_lang_math_sin     : tty->print("java_lang_math_sin"     ); break;
    case java_lang_math_cos     : tty->print("java_lang_math_cos"     ); break;
    case java_lang_math_tan     : tty->print("java_lang_math_tan"     ); break;
    case java_lang_math_abs     : tty->print("java_lang_math_abs"     ); break;
    case java_lang_math_sqrt    : tty->print("java_lang_math_sqrt"    ); break;
    case java_lang_math_log     : tty->print("java_lang_math_log"     ); break;
    case java_lang_math_log10   : tty->print("java_lang_math_log10"   ); break;
    case java_util_zip_CRC32_update           : tty->print("java_util_zip_CRC32_update"); break;
    case java_util_zip_CRC32_updateBytes      : tty->print("java_util_zip_CRC32_updateBytes"); break;
    case java_util_zip_CRC32_updateByteBuffer : tty->print("java_util_zip_CRC32_updateByteBuffer"); break;
    default:
      if (kind >= method_handle_invoke_FIRST &&
          kind <= method_handle_invoke_LAST) {
        const char* kind_name = vmIntrinsics::name_at(method_handle_intrinsic(kind));
        if (kind_name[0] == '_')  kind_name = &kind_name[1];  // '_invokeExact' => 'invokeExact'
        tty->print("method_handle_%s", kind_name);
        break;
      }
      ShouldNotReachHere();
      break;
  }
}
#endif // PRODUCT


//------------------------------------------------------------------------------------------------------------------------
// Deoptimization support

// If deoptimization happens, this function returns the point of next bytecode to continue execution
address AbstractInterpreter::deopt_continue_after_entry(Method* method, address bcp, int callee_parameters, bool is_top_frame) {
  assert(method->contains(bcp), "just checkin'");
  Bytecodes::Code code   = Bytecodes::java_code_at(method, bcp);
  assert(!Interpreter::bytecode_should_reexecute(code), "should not reexecute");
  int             bci    = method->bci_from(bcp);
  int             length = -1; // initial value for debugging
  // compute continuation length
  length = Bytecodes::length_at(method, bcp);
  // compute result type
  BasicType type = T_ILLEGAL;

  switch (code) {
    case Bytecodes::_invokevirtual  :
    case Bytecodes::_invokespecial  :
    case Bytecodes::_invokestatic   :
    case Bytecodes::_invokeinterface: {
      Thread *thread = Thread::current();
      ResourceMark rm(thread);
      methodHandle mh(thread, method);
      type = Bytecode_invoke(mh, bci).result_type();
      // since the cache entry might not be initialized:
      // (NOT needed for the old calling convention)
      if (!is_top_frame) {
        int index = Bytes::get_native_u2(bcp+1);
        method->constants()->cache()->entry_at(index)->set_parameter_size(callee_parameters);
      }
      break;
    }

    case Bytecodes::_invokedynamic: {
      Thread *thread = Thread::current();
      ResourceMark rm(thread);
      methodHandle mh(thread, method);
      type = Bytecode_invoke(mh, bci).result_type();
      // since the cache entry might not be initialized:
      // (NOT needed for the old calling convention)
      if (!is_top_frame) {
        int index = Bytes::get_native_u4(bcp+1);
        method->constants()->invokedynamic_cp_cache_entry_at(index)->set_parameter_size(callee_parameters);
      }
      break;
    }

    case Bytecodes::_ldc   :
    case Bytecodes::_ldc_w : // fall through
    case Bytecodes::_ldc2_w:
      {
        Thread *thread = Thread::current();
        ResourceMark rm(thread);
        methodHandle mh(thread, method);
        type = Bytecode_loadconstant(mh, bci).result_type();
        break;
      }

    default:
      type = Bytecodes::result_type(code);
      break;
  }

  // return entry point for computed continuation state & bytecode length
  return
    is_top_frame
    ? Interpreter::deopt_entry (as_TosState(type), length)
    : Interpreter::return_entry(as_TosState(type), length);
}

// If deoptimization happens, this function returns the point where the interpreter reexecutes
// the bytecode.
// Note: Bytecodes::_athrow is a special case in that it does not return
//       Interpreter::deopt_entry(vtos, 0) like others
address AbstractInterpreter::deopt_reexecute_entry(Method* method, address bcp) {
  assert(method->contains(bcp), "just checkin'");
  Bytecodes::Code code   = Bytecodes::java_code_at(method, bcp);
#ifdef COMPILER1
  if (code == Bytecodes::_athrow) {
    return Interpreter::rethrow_exception_entry();
  }
#endif /* COMPILER1 */
  return Interpreter::deopt_entry(vtos, 0);
}

// If deoptimization happens, the interpreter should reexecute these bytecodes.
// This function mainly helps the compilers to set up the reexecute bit.
bool AbstractInterpreter::bytecode_should_reexecute(Bytecodes::Code code) {
  switch (code) {
    case Bytecodes::_lookupswitch:
    case Bytecodes::_tableswitch:
    case Bytecodes::_fast_binaryswitch:
    case Bytecodes::_fast_linearswitch:
    // recompute conditional expression folded into _if<cond>
    case Bytecodes::_lcmp      :
    case Bytecodes::_fcmpl     :
    case Bytecodes::_fcmpg     :
    case Bytecodes::_dcmpl     :
    case Bytecodes::_dcmpg     :
    case Bytecodes::_ifnull    :
    case Bytecodes::_ifnonnull :
    case Bytecodes::_goto      :
    case Bytecodes::_goto_w    :
    case Bytecodes::_ifeq      :
    case Bytecodes::_ifne      :
    case Bytecodes::_iflt      :
    case Bytecodes::_ifge      :
    case Bytecodes::_ifgt      :
    case Bytecodes::_ifle      :
    case Bytecodes::_if_icmpeq :
    case Bytecodes::_if_icmpne :
    case Bytecodes::_if_icmplt :
    case Bytecodes::_if_icmpge :
    case Bytecodes::_if_icmpgt :
    case Bytecodes::_if_icmple :
    case Bytecodes::_if_acmpeq :
    case Bytecodes::_if_acmpne :
    // special cases
    case Bytecodes::_getfield  :
    case Bytecodes::_putfield  :
    case Bytecodes::_getstatic :
    case Bytecodes::_putstatic :
    case Bytecodes::_aastore   :
#ifdef COMPILER1
    // special case of reexecution
    case Bytecodes::_athrow    :
#endif
      return true;

    default:
      return false;
  }
}

void AbstractInterpreterGenerator::bang_stack_shadow_pages(bool native_call) {
  // Quick & dirty stack overflow checking: bang the stack & handle trap.
  // Note that we do the banging after the frame is set up, since the exception
  // handling code expects to find a valid interpreter frame on the stack.
  // Doing the banging earlier fails if the caller frame is not an interpreter
  // frame.
  // (Also, the exception throwing code expects to unlock any synchronized
  // method receiver, so do the banging after locking the receiver.)

  // Bang each page in the shadow zone. We can't assume it's been done for
  // an interpreter frame with greater than a page of locals, so each page
  // needs to be checked.  Only true for non-native.
  if (UseStackBanging) {
    const int start_page = native_call ? StackShadowPages : 1;
    const int page_size = os::vm_page_size();
    for (int pages = start_page; pages <= StackShadowPages; pages++) {
      __ bang_stack_with_offset(pages*page_size);
    }
  }
}
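
// Worked example (comment only, values assumed for illustration): with a 4K
// page size and StackShadowPages == 20, a non-native entry bangs offsets
// 4K, 8K, ..., 80K below the current stack pointer (every page of the shadow
// zone), while a native entry bangs only the outermost page at offset 80K.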

void AbstractInterpreterGenerator::initialize_method_handle_entries() {
  // method handle entry kinds are generated later in MethodHandlesAdapterGenerator::generate:
  for (int i = Interpreter::method_handle_invoke_FIRST; i <= Interpreter::method_handle_invoke_LAST; i++) {
    Interpreter::MethodKind kind = (Interpreter::MethodKind) i;
    Interpreter::_entry_table[kind] = Interpreter::_entry_table[Interpreter::abstract];
  }
}