/*
 * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_RUNTIME_VFRAME_HPP
#define SHARE_VM_RUNTIME_VFRAME_HPP

#include "code/debugInfo.hpp"
#include "code/debugInfoRec.hpp"
#include "code/location.hpp"
#include "oops/oop.hpp"
#include "runtime/frame.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/stackValue.hpp"
#include "runtime/stackValueCollection.hpp"
#include "utilities/growableArray.hpp"

// vframes are virtual stack frames representing source level activations.
// A single frame may hold several source level activations in the case of
// optimized code. The debugging information stored with the optimized code
// enables us to unfold a frame as a stack of vframes.
// A cVFrame represents an activation of a non-Java method.

// The vframe inheritance hierarchy:
// - vframe
//   - javaVFrame
//     - interpretedVFrame
//     - compiledVFrame     ; (used for both compiled Java methods and native stubs)
//   - externalVFrame
//     - entryVFrame        ; special frame created when calling Java from C

// - BasicLock

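// Usage sketch (illustrative only, not part of this interface): unfolding the
// physical frames of a thread into source-level activations.  This assumes the
// thread has a last Java frame; the calls used are the ones declared below
// (new_vframe, sender, is_java_frame, javaVFrame::cast).
//
//   RegisterMap reg_map(thread);
//   frame f = thread->last_frame();
//   for (vframe* vf = vframe::new_vframe(&f, &reg_map, thread);
//        vf != NULL;
//        vf = vf->sender()) {
//     if (vf->is_java_frame()) {
//       javaVFrame* jvf = javaVFrame::cast(vf);
//       // jvf->method() and jvf->bci() identify one source-level activation,
//       // including activations of methods inlined by the compiler.
//     }
//   }
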
class vframe: public ResourceObj {
 protected:
  frame        _fr;      // Raw frame behind the virtual frame.
  RegisterMap  _reg_map; // Register map for the raw frame (used to handle callee-saved registers).
  JavaThread*  _thread;  // The thread owning the raw frame.

  vframe(const frame* fr, const RegisterMap* reg_map, JavaThread* thread);
  vframe(const frame* fr, JavaThread* thread);
 public:
  // Factory method for creating vframes
  static vframe* new_vframe(const frame* f, const RegisterMap *reg_map, JavaThread* thread);

  // Accessors
  frame              fr()           const { return _fr;       }
  CodeBlob*          cb()           const { return _fr.cb();  }
  nmethod*           nm()           const {
    assert(cb() != NULL && cb()->is_nmethod(), "usage");
    return (nmethod*) cb();
  }

  // ???? Does this need to be a copy?
  frame*             frame_pointer() { return &_fr;       }
  const RegisterMap* register_map() const { return &_reg_map; }
  JavaThread*        thread()       const { return _thread;   }

  // Returns the sender vframe
  virtual vframe* sender() const;

  // Returns the next javaVFrame on the stack (skipping all other kinds of frame)
  javaVFrame *java_sender() const;

  // Answers whether this is the top vframe in the frame, i.e., whether the
  // sender vframe is in the caller frame
  virtual bool is_top() const { return true; }

  // Returns top vframe within same frame (see is_top())
  virtual vframe* top() const;

  // Type testing operations
  virtual bool is_entry_frame()       const { return false; }
  virtual bool is_java_frame()        const { return false; }
  virtual bool is_interpreted_frame() const { return false; }
  virtual bool is_compiled_frame()    const { return false; }

#ifndef PRODUCT
  // printing operations
  virtual void print_value() const;
  virtual void print();
#endif
};


class javaVFrame: public vframe {
 public:
  // JVM state
  virtual methodOop                    method()         const = 0;
  virtual int                          bci()            const = 0;
  virtual StackValueCollection*        locals()         const = 0;
  virtual StackValueCollection*        expressions()    const = 0;
  // the order returned by monitors() is from oldest -> youngest (see 4418568)
  virtual GrowableArray<MonitorInfo*>* monitors()       const = 0;

  // Debugging support via JVMTI.
  // NOTE that this is not guaranteed to give correct results for compiled vframes.
  // Deoptimize first if necessary.
  virtual void set_locals(StackValueCollection* values) const = 0;

  // Test operation
  bool is_java_frame() const { return true; }

 protected:
  javaVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread) : vframe(fr, reg_map, thread) {}
  javaVFrame(const frame* fr, JavaThread* thread) : vframe(fr, thread) {}

 public:
  // casting
  static javaVFrame* cast(vframe* vf) {
    assert(vf == NULL || vf->is_java_frame(), "must be java frame");
    return (javaVFrame*) vf;
  }

  // Return an array of monitors locked by this frame in the youngest to oldest order
  GrowableArray<MonitorInfo*>* locked_monitors();

  // printing used during stack dumps
  void print_lock_info_on(outputStream* st, int frame_count);
  void print_lock_info(int frame_count) { print_lock_info_on(tty, frame_count); }

#ifndef PRODUCT
 public:
  // printing operations
  void print();
  void print_value() const;
  void print_activation(int index) const;

  // verify operations
  virtual void verify() const;

  // Structural compare
  bool structural_compare(javaVFrame* other);
#endif
  friend class vframe;
};
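
// Usage sketch (illustrative only): inspecting the JVM state of a Java
// activation through the javaVFrame interface declared above.  The printing
// call is purely illustrative; the accessors are the ones declared in this class.
//
//   javaVFrame* jvf = ...;   // e.g. obtained via javaVFrame::cast(vf)
//   methodOop m   = jvf->method();
//   int       bci = jvf->bci();
//   StackValueCollection* locals = jvf->locals();
//   GrowableArray<MonitorInfo*>* mons = jvf->monitors();  // oldest -> youngest
//   tty->print_cr("%s @ bci %d: %d locals, %d monitors",
//                 m->name_and_sig_as_C_string(), bci, locals->size(), mons->length());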

class interpretedVFrame: public javaVFrame {
 public:
  // JVM state
  methodOop                    method()         const;
  int                          bci()            const;
  StackValueCollection*        locals()         const;
  StackValueCollection*        expressions()    const;
  GrowableArray<MonitorInfo*>* monitors()       const;

  void set_locals(StackValueCollection* values) const;

  // Test operation
  bool is_interpreted_frame() const { return true; }

 protected:
  interpretedVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread) : javaVFrame(fr, reg_map, thread) {}

 public:
  // Accessors for Byte Code Pointer
  u_char* bcp() const;
  void set_bcp(u_char* bcp);

  // casting
  static interpretedVFrame* cast(vframe* vf) {
    assert(vf == NULL || vf->is_interpreted_frame(), "must be interpreted frame");
    return (interpretedVFrame*) vf;
  }

 private:
  static const int bcp_offset;
  intptr_t* locals_addr_at(int offset) const;

  // returns where the parameters start relative to the frame pointer
  int start_of_parameters() const;

#ifndef PRODUCT
 public:
  // verify operations
  void verify() const;
#endif
  friend class vframe;
};


class externalVFrame: public vframe {
 protected:
  externalVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread) : vframe(fr, reg_map, thread) {}

#ifndef PRODUCT
 public:
  // printing operations
  void print_value() const;
  void print();
#endif
  friend class vframe;
};

class entryVFrame: public externalVFrame {
 public:
  bool is_entry_frame() const { return true; }

 protected:
  entryVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread);

 public:
  // casting
  static entryVFrame* cast(vframe* vf) {
    assert(vf == NULL || vf->is_entry_frame(), "must be entry frame");
    return (entryVFrame*) vf;
  }

#ifndef PRODUCT
 public:
  // printing
  void print_value() const;
  void print();
#endif
  friend class vframe;
};


// A MonitorInfo is a ResourceObj that describes the pair:
// 1) the owner of the monitor
// 2) the monitor lock
class MonitorInfo : public ResourceObj {
 private:
  oop        _owner; // the object owning the monitor
  BasicLock* _lock;
  oop        _owner_klass; // klass if owner was scalar replaced
  bool       _eliminated;
  bool       _owner_is_scalar_replaced;
 public:
  // Constructor
  MonitorInfo(oop owner, BasicLock* lock, bool eliminated, bool owner_is_scalar_replaced) {
    if (!owner_is_scalar_replaced) {
      _owner = owner;
      _owner_klass = NULL;
    } else {
      assert(eliminated, "monitor should be eliminated for scalar replaced object");
      _owner = NULL;
      _owner_klass = owner;
    }
    _lock  = lock;
    _eliminated = eliminated;
    _owner_is_scalar_replaced = owner_is_scalar_replaced;
  }
  // Accessors
  oop        owner() const {
    assert(!_owner_is_scalar_replaced, "should not be called for scalar replaced object");
    return _owner;
  }
  klassOop   owner_klass() const {
    assert(_owner_is_scalar_replaced, "should only be called for scalar replaced object");
    return (klassOop)_owner_klass;
  }
  BasicLock* lock()  const { return _lock;  }
  bool eliminated()  const { return _eliminated; }
  bool owner_is_scalar_replaced()  const { return _owner_is_scalar_replaced; }
};
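
// Usage sketch (illustrative only): a MonitorInfo must be queried according to
// its owner_is_scalar_replaced() flag, since owner() and owner_klass() each
// assert the opposite state.
//
//   MonitorInfo* info = ...;   // e.g. an element of javaVFrame::monitors()
//   if (info->owner_is_scalar_replaced()) {
//     klassOop k = info->owner_klass();   // the owner object was scalar replaced
//   } else if (info->owner() != NULL && !info->eliminated()) {
//     oop obj = info->owner();            // a real, still-locked owner object
//     BasicLock* lock = info->lock();
//   }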

class vframeStreamCommon : StackObj {
 protected:
  // common
  frame        _frame;
  JavaThread*  _thread;
  RegisterMap  _reg_map;
  enum { interpreted_mode, compiled_mode, at_end_mode } _mode;

  int _sender_decode_offset;

  // Cached information
  methodOop _method;
  int       _bci;

  // Should VM activations be ignored or not
  bool _stop_at_java_call_stub;

  bool fill_in_compiled_inlined_sender();
  void fill_from_compiled_frame(int decode_offset);
  void fill_from_compiled_native_frame();

  void found_bad_method_frame();

  void fill_from_interpreter_frame();
  bool fill_from_frame();

  // Helper routine for security_get_caller_frame
  void skip_prefixed_method_and_wrappers();

 public:
  // Constructor
  vframeStreamCommon(JavaThread* thread) : _reg_map(thread, false) {
    _thread = thread;
  }

  // Accessors
  methodOop method() const { return _method; }
  int bci() const { return _bci; }
  intptr_t* frame_id() const { return _frame.id(); }
  address frame_pc() const { return _frame.pc(); }

  CodeBlob*          cb()         const { return _frame.cb();  }
  nmethod*           nm()         const {
    assert(cb() != NULL && cb()->is_nmethod(), "usage");
    return (nmethod*) cb();
  }

  // Frame type
  bool is_interpreted_frame() const { return _frame.is_interpreted_frame(); }
  bool is_entry_frame() const       { return _frame.is_entry_frame(); }

  // Iteration
  void next() {
    // handle frames with inlining
    if (_mode == compiled_mode && fill_in_compiled_inlined_sender()) return;

    // handle general case
    do {
      _frame = _frame.sender(&_reg_map);
    } while (!fill_from_frame());
  }

  bool at_end() const { return _mode == at_end_mode; }

  // Implements security traversal. Skips 'depth' frames, including
  // special security frames and prefixed native methods.
  void security_get_caller_frame(int depth);

  // Helper routine for JVM_LatestUserDefinedLoader -- needed for 1.4
  // reflection implementation
  void skip_reflection_related_frames();
};

class vframeStream : public vframeStreamCommon {
 public:
  // Constructors
  vframeStream(JavaThread* thread, bool stop_at_java_call_stub = false)
    : vframeStreamCommon(thread) {
    _stop_at_java_call_stub = stop_at_java_call_stub;

    if (!thread->has_last_Java_frame()) {
      _mode = at_end_mode;
      return;
    }

    _frame = _thread->last_frame();
    while (!fill_from_frame()) {
      _frame = _frame.sender(&_reg_map);
    }
  }

  // top_frame may not be at a safepoint; start with its sender
  vframeStream(JavaThread* thread, frame top_frame, bool stop_at_java_call_stub = false);
};
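
// Usage sketch (illustrative only): the typical way to walk the Java
// activations of a thread with a vframeStream.  Inlined activations are
// reported individually, each with its own method and bci.
//
//   for (vframeStream vfst(thread); !vfst.at_end(); vfst.next()) {
//     methodOop m   = vfst.method();
//     int       bci = vfst.bci();
//     // process one source-level activation
//   }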


inline bool vframeStreamCommon::fill_in_compiled_inlined_sender() {
  if (_sender_decode_offset == DebugInformationRecorder::serialized_null) {
    return false;
  }
  fill_from_compiled_frame(_sender_decode_offset);
  return true;
}


inline void vframeStreamCommon::fill_from_compiled_frame(int decode_offset) {
  _mode = compiled_mode;

  // Range check to detect ridiculous offsets.
  if (decode_offset == DebugInformationRecorder::serialized_null ||
      decode_offset < 0 ||
      decode_offset >= nm()->scopes_data_size()) {
    // 6379830 AsyncGetCallTrace sometimes feeds us wild frames.
    // If we attempt to read nmethod::scopes_data at serialized_null (== 0),
    // or if we read it at some other invalid offset,
    // we will decode garbage and make wild references into the heap,
    // leading to crashes in product mode.
    // (This isn't airtight, of course, since there are internal
    // offsets which are also invalid.)
#ifdef ASSERT
    if (WizardMode) {
      tty->print_cr("Error in fill_from_frame: pc_desc for "
                    INTPTR_FORMAT " not found or invalid at %d",
                    _frame.pc(), decode_offset);
      nm()->print();
      nm()->method()->print_codes();
      nm()->print_code();
      nm()->print_pcs();
    }
#endif
    // Provide a cheap fallback in product mode.  (See comment above.)
    found_bad_method_frame();
    fill_from_compiled_native_frame();
    return;
  }

  // Decode first part of scopeDesc
  DebugInfoReadStream buffer(nm(), decode_offset);
  _sender_decode_offset = buffer.read_int();
  _method               = methodOop(buffer.read_oop());
  _bci                  = buffer.read_bci();

  assert(_method->is_method(), "checking type of decoded method");
}

// The native frames are handled specially. We do not rely on ScopeDesc info
// since the pc might not be exact due to the _last_native_pc trick.
inline void vframeStreamCommon::fill_from_compiled_native_frame() {
  _mode = compiled_mode;
  _sender_decode_offset = DebugInformationRecorder::serialized_null;
  _method = nm()->method();
  _bci = 0;
}

inline bool vframeStreamCommon::fill_from_frame() {
  // Interpreted frame
  if (_frame.is_interpreted_frame()) {
    fill_from_interpreter_frame();
    return true;
  }

  // Compiled frame

  if (cb() != NULL && cb()->is_nmethod()) {
    if (nm()->is_native_method()) {
      // Do not rely on scopeDesc since the pc might be imprecise due to the _last_native_pc trick.
      fill_from_compiled_native_frame();
    } else {
      PcDesc* pc_desc = nm()->pc_desc_at(_frame.pc());
      int decode_offset;
      if (pc_desc == NULL) {
        // Should not happen, but let fill_from_compiled_frame handle it.

        // If we are trying to walk the stack of a thread that is not
        // at a safepoint (like AsyncGetCallTrace would do) then this is an
        // acceptable result. [ This is assuming that safe_for_sender
        // is so bullet proof that we can trust the frames it produces. ]
        //
        // So if we see that the thread is not safepoint safe
        // then simply produce the method and a bci of zero
        // and skip the possibility of decoding any inlining that
        // may be present. That is far better than simply stopping (or
        // asserting). If, however, the thread is safepoint safe, this
        // is the sign of a compiler bug and we'll let
        // fill_from_compiled_frame handle it.


        JavaThreadState state = _thread->thread_state();

        // _thread_in_Java should be good enough to test safepoint safety.
        // If the state were, say, in_Java_trans, then we'd expect the pc
        // to have already been slightly adjusted to one that would
        // produce a pcDesc, since the trans state is one that might in
        // fact anticipate a safepoint.

        if (state == _thread_in_Java) {
          // This will give us a method and a bci of zero, with no inlining.
          // It might be nice to have a unique bci to signify this
          // particular case, but for now zero will do.

          fill_from_compiled_native_frame();

          // There is something to be said for setting the mode to
          // at_end_mode to prevent trying to walk further up the
          // stack. There is evidence that if we walk any further
          // we could produce a bad stack chain. However, until
          // we see evidence that allowing this causes us to find
          // frames bad enough to cause segvs or assertion failures,
          // we don't do it: while we may get a bad call chain, the
          // probability is much higher (by several orders of magnitude)
          // that we get good data.

          return true;
        }
        decode_offset = DebugInformationRecorder::serialized_null;
      } else {
        decode_offset = pc_desc->scope_decode_offset();
      }
      fill_from_compiled_frame(decode_offset);
    }
    return true;
  }

  // End of stack?
  if (_frame.is_first_frame() || (_stop_at_java_call_stub && _frame.is_entry_frame())) {
    _mode = at_end_mode;
    return true;
  }

  return false;
}


inline void vframeStreamCommon::fill_from_interpreter_frame() {
  methodOop method = _frame.interpreter_frame_method();
  intptr_t  bcx    = _frame.interpreter_frame_bcx();
  int       bci    = method->validate_bci_from_bcx(bcx);
  // 6379830 AsyncGetCallTrace sometimes feeds us wild frames.
  if (bci < 0) {
    found_bad_method_frame();
    bci = 0;  // pretend it's on the point of entering
  }
  _mode   = interpreted_mode;
  _method = method;
  _bci    = bci;
}

#endif // SHARE_VM_RUNTIME_VFRAME_HPP