src/share/vm/c1/c1_Compilation.hpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/c1

src/share/vm/c1/c1_Compilation.hpp

Print this page
rev 6086 : 8032410: compiler/uncommontrap/TestStackBangRbp.java times out on Solaris-Sparc V9
Summary: make compiled code bang the stack by the worst case size of the interpreter frame at deoptimization points.
Reviewed-by:


  // --- Compilation state (data members of the C1 Compilation object) ---
  71   CompileLog*        _log;
  72   ciMethod*          _method;                      // the method being compiled (see method() uses below)
  73   int                _osr_bci;                     // OSR entry bci — presumably a sentinel for a normal compile; confirm
  74   IR*                _hir;                         // high-level IR (see build_hir() below)
  75   int                _max_spills;                  // NOTE(review): presumably spill-slot count from register allocation — confirm
  76   FrameMap*          _frame_map;
  77   C1_MacroAssembler* _masm;
  78   bool               _has_exception_handlers;
  79   bool               _has_fpu_code;
  80   bool               _has_unsafe_access;
  81   bool               _would_profile;
  82   bool               _has_method_handle_invokes;  // True if this method has MethodHandle invokes.
  83   const char*        _bailout_msg;                 // reason recorded by bailout(); see BAILOUT macros below
  84   ExceptionInfoList* _exception_info_list;         // see exception_info_list() accessor
  85   ExceptionHandlerTable _exception_handler_table;  // filled in by generate_exception_handler_table()? — confirm
  86   ImplicitExceptionTable _implicit_exception_table;
  87   LinearScan*        _allocator;                   // register allocator (see allocator()/set_allocator())
  88   CodeOffsets        _offsets;
  89   CodeBuffer         _code;
  90   bool               _has_access_indexed;

  91 
  92   // compilation helpers
  93   void initialize();
  94   void build_hir();
  95   void emit_lir();
  96 
  97   void emit_code_epilog(LIR_Assembler* assembler);
  98   int  emit_code_body();
  99 
 100   int  compile_java_method();
 101   void install_code(int frame_size);
 102   void compile_method();
 103 
 104   void generate_exception_handler_table();
 105 
  // Accessors for the exception bookkeeping built up during compilation.
 106   ExceptionInfoList* exception_info_list() const { return _exception_info_list; }
 107   ExceptionHandlerTable* exception_handler_table() { return &_exception_handler_table; }
 108 
  // Register-allocator accessors.
 109   LinearScan* allocator()                          { return _allocator;      }
 110   void        set_allocator(LinearScan* allocator) { _allocator = allocator; }


  // Should this compile emit argument-type profiling?  Only at the
  // full-profile tier, and only when both the C1UpdateMethodData flag
  // and MethodData::profile_arguments() allow it.
 245   bool profile_arguments() {
 246     return env()->comp_level() == CompLevel_full_profile &&
 247       C1UpdateMethodData && MethodData::profile_arguments();
 248   }
  // Same gating as profile_arguments(), but for return-type profiling.
 249   bool profile_return() {
 250     return env()->comp_level() == CompLevel_full_profile &&
 251       C1UpdateMethodData && MethodData::profile_return();
 252   }
 253   // will compilation make optimistic assumptions that might lead to
 254   // deoptimization and that the runtime will account for?
  // NOTE(review): dereferences method()->method_data() without a NULL
  // check — confirm callers guarantee profiling data exists here.
 255   bool is_optimistic() const                             {
 256     return !TieredCompilation &&
 257       (RangeCheckElimination || UseLoopInvariantCodeMotion) &&
 258       method()->method_data()->trap_count(Deoptimization::Reason_none) == 0;
 259   }
 260 
 261   ciKlass* cha_exact_type(ciType* type);
 262 
 263   // Dump inlining replay data to the stream.
 264   void dump_inline_data(outputStream* out) { /* do nothing now */ }












 265 };
 266 
 267 
 268 // Macro definitions for unified bailout-support
 269 // The methods bailout() and bailed_out() are present in all classes
 270 // that might bailout, but forward all calls to Compilation
// The underscore-suffixed variants are for use inside functions that
// return a value: they 'return res' instead of a plain 'return'.
 271 #define BAILOUT(msg)               { bailout(msg); return;              }
 272 #define BAILOUT_(msg, res)         { bailout(msg); return res;          }
 273 
 274 #define CHECK_BAILOUT()            { if (bailed_out()) return;          }
 275 #define CHECK_BAILOUT_(res)        { if (bailed_out()) return res;      }
 276 
 277 
 278 class InstructionMark: public StackObj {
 279  private:
 280   Compilation* _compilation;
 281   Instruction*  _previous;
 282 
 283  public:
 284   InstructionMark(Compilation* compilation, Instruction* instr) {




  // --- Compilation state (data members of the C1 Compilation object) ---
  71   CompileLog*        _log;
  72   ciMethod*          _method;                      // the method being compiled (see method() uses below)
  73   int                _osr_bci;                     // OSR entry bci — presumably a sentinel for a normal compile; confirm
  74   IR*                _hir;                         // high-level IR (see build_hir() below)
  75   int                _max_spills;                  // NOTE(review): presumably spill-slot count from register allocation — confirm
  76   FrameMap*          _frame_map;
  77   C1_MacroAssembler* _masm;
  78   bool               _has_exception_handlers;
  79   bool               _has_fpu_code;
  80   bool               _has_unsafe_access;
  81   bool               _would_profile;
  82   bool               _has_method_handle_invokes;  // True if this method has MethodHandle invokes.
  83   const char*        _bailout_msg;                 // reason recorded by bailout(); see BAILOUT macros below
  84   ExceptionInfoList* _exception_info_list;         // see exception_info_list() accessor
  85   ExceptionHandlerTable _exception_handler_table;  // filled in by generate_exception_handler_table()? — confirm
  86   ImplicitExceptionTable _implicit_exception_table;
  87   LinearScan*        _allocator;                   // register allocator (see allocator()/set_allocator())
  88   CodeOffsets        _offsets;
  89   CodeBuffer         _code;
  90   bool               _has_access_indexed;
  91   int                _interpreter_frame_size; // Stack space needed in case of a deoptimization (worst case; see update_interpreter_frame_size())
  92 
  93   // compilation helpers
  94   void initialize();
  95   void build_hir();
  96   void emit_lir();
  97 
  98   void emit_code_epilog(LIR_Assembler* assembler);
  99   int  emit_code_body();
 100 
 101   int  compile_java_method();
 102   void install_code(int frame_size);
 103   void compile_method();
 104 
 105   void generate_exception_handler_table();
 106 
  // Accessors for the exception bookkeeping built up during compilation.
 107   ExceptionInfoList* exception_info_list() const { return _exception_info_list; }
 108   ExceptionHandlerTable* exception_handler_table() { return &_exception_handler_table; }
 109 
  // Register-allocator accessors.
 110   LinearScan* allocator()                          { return _allocator;      }
 111   void        set_allocator(LinearScan* allocator) { _allocator = allocator; }


 246   bool profile_arguments() {
 247     return env()->comp_level() == CompLevel_full_profile &&
 248       C1UpdateMethodData && MethodData::profile_arguments();
 249   }
 250   bool profile_return() {
 251     return env()->comp_level() == CompLevel_full_profile &&
 252       C1UpdateMethodData && MethodData::profile_return();
 253   }
 254   // will compilation make optimistic assumptions that might lead to
 255   // deoptimization and that the runtime will account for?
 256   bool is_optimistic() const                             {
 257     return !TieredCompilation &&
 258       (RangeCheckElimination || UseLoopInvariantCodeMotion) &&
 259       method()->method_data()->trap_count(Deoptimization::Reason_none) == 0;
 260   }
 261 
 262   ciKlass* cha_exact_type(ciType* type);
 263 
 264   // Dump inlining replay data to the stream.
  // (Intentionally a no-op in this version, per the body comment.)
 265   void dump_inline_data(outputStream* out) { /* do nothing now */ }
 266 
 267   // How much stack space would the interpreter need in case of a
 268   // deoptimization (worst case)
 269   void update_interpreter_frame_size(int size) {
 270     if (_interpreter_frame_size < size) {
 271       _interpreter_frame_size = size;
 272     }
 273   }
 274 
  // Worst-case interpreter frame size recorded so far via
  // update_interpreter_frame_size().
 275   int interpreter_frame_size() const {
 276     return _interpreter_frame_size;
 277   }
 278 };
 279 
 280 
 281 // Macro definitions for unified bailout-support
 282 // The methods bailout() and bailed_out() are present in all classes
 283 // that might bailout, but forward all calls to Compilation
// The underscore-suffixed variants are for use inside functions that
// return a value: they 'return res' instead of a plain 'return'.
 284 #define BAILOUT(msg)               { bailout(msg); return;              }
 285 #define BAILOUT_(msg, res)         { bailout(msg); return res;          }
 286 
 287 #define CHECK_BAILOUT()            { if (bailed_out()) return;          }
 288 #define CHECK_BAILOUT_(res)        { if (bailed_out()) return res;      }
 289 
 290 
 291 class InstructionMark: public StackObj {
 292  private:
 293   Compilation* _compilation;
 294   Instruction*  _previous;
 295 
 296  public:
 297   InstructionMark(Compilation* compilation, Instruction* instr) {


src/share/vm/c1/c1_Compilation.hpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File