172 GrowableArray<CallGenerator*>* _intrinsics; // List of intrinsics.
173 GrowableArray<Node*>* _macro_nodes; // List of nodes which need to be expanded before matching.
174 ConnectionGraph* _congraph; // NOTE(review): presumably the escape-analysis connection graph — confirm
175 #ifndef PRODUCT
176 IdealGraphPrinter* _printer; // Debug-build only (excluded from PRODUCT builds)
177 #endif
178
179 // Node management
180 uint _unique; // Counter for unique Node indices
181 debug_only(static int _debug_idx;) // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
182 Arena _node_arena; // Arena for new-space Nodes
183 Arena _old_arena; // Arena for old-space Nodes, lifetime during xform
184 RootNode* _root; // Unique root of compilation, or NULL after bail-out.
185 Node* _top; // Unique top node. (Reset by various phases.)
186
187 Node* _immutable_memory; // Initial memory state
188
189 Node* _recent_alloc_obj; // Most recently recorded allocation result (see set_recent_alloc)
190 Node* _recent_alloc_ctl; // Control node paired with _recent_alloc_obj (see set_recent_alloc)
191
192 // Blocked array of debugging and profiling information,
193 // tracked per node.
194 enum { _log2_node_notes_block_size = 8,
195 _node_notes_block_size = (1<<_log2_node_notes_block_size)
196 };
197 GrowableArray<Node_Notes*>* _node_note_array; // Blocks of notes; block size given by the enum above
198 Node_Notes* _default_node_notes; // default notes for new nodes
199
200 // After parsing and every bulk phase we hang onto the Root instruction.
201 // The RootNode instruction is where the whole program begins. It produces
202 // the initial Control and BOTTOM for everybody else.
203
204 // Type management
205 Arena _Compile_types; // Arena for all types
206 Arena* _type_arena; // Alias for _Compile_types except in Initialize_shared()
207 Dict* _type_dict; // Intern table
208 void* _type_hwm; // Last allocation (see Type::operator new/delete)
209 size_t _type_last_size; // Last allocation size (see Type::operator new/delete)
210 ciMethod* _last_tf_m; // Cache for
211 const TypeFunc* _last_tf; // TypeFunc::make
361 // Compilation environment.
362 Arena* comp_arena() { return &_comp_arena; }
363 ciEnv* env() const { return _env; }
364 CompileLog* log() const { return _log; }
365 bool failing() const { return _env->failing() || _failure_reason != NULL; } // True once a failure is recorded here or in the ciEnv
366 const char* failure_reason() { return _failure_reason; } // NULL while no failure has been recorded (see failing())
367 bool failure_reason_is(const char* r) { return (r==_failure_reason) || (r!=NULL && _failure_reason!=NULL && strcmp(r, _failure_reason)==0); } // Matches by pointer identity or by strcmp; NULL only matches NULL
368
369 void record_failure(const char* reason); // Out-of-line; presumably stores 'reason' into _failure_reason — confirm
370 void record_method_not_compilable(const char* reason, bool all_tiers = false) { // Bail out: mark the method not compilable in the ciEnv and record the failure locally
371 // All bailouts cover "all_tiers" when TieredCompilation is off.
372 if (!TieredCompilation) all_tiers = true;
373 env()->record_method_not_compilable(reason, all_tiers);
374 // Record failure reason.
375 record_failure(reason);
376 }
377 void record_method_not_compilable_all_tiers(const char* reason) { // Convenience wrapper: always covers all tiers
378 record_method_not_compilable(reason, true);
379 }
380 bool check_node_count(uint margin, const char* reason) { // Returns true (and bails out the compile) if adding 'margin' nodes would exceed MaxNodeLimit
381 if (unique() + margin > (uint)MaxNodeLimit) {
382 record_method_not_compilable(reason);
383 return true;
384 } else {
385 return false;
386 }
387 }
388
389 // Node management
390 uint unique() const { return _unique; } // Number of node indices handed out so far
391 uint next_unique() { return _unique++; } // Post-increment: returns the current index, then bumps the counter
392 void set_unique(uint i) { _unique = i; }
393 static int debug_idx() { return debug_only(_debug_idx)+0; } // debug_only() expands to nothing in product builds, leaving just 0
394 static void set_debug_idx(int i) { debug_only(_debug_idx = i); } // No-op in product builds
395 Arena* node_arena() { return &_node_arena; }
396 Arena* old_arena() { return &_old_arena; }
397 RootNode* root() const { return _root; }
398 void set_root(RootNode* r) { _root = r; }
399 StartNode* start() const; // (Derived from root.)
400 void init_start(StartNode* s);
401 Node* immutable_memory(); // Out-of-line accessor for _immutable_memory
402
403 Node* recent_alloc_ctl() const { return _recent_alloc_ctl; }
404 Node* recent_alloc_obj() const { return _recent_alloc_obj; }
405 void set_recent_alloc(Node* ctl, Node* obj) {
406 _recent_alloc_ctl = ctl;
407 _recent_alloc_obj = obj;
|
172 GrowableArray<CallGenerator*>* _intrinsics; // List of intrinsics.
173 GrowableArray<Node*>* _macro_nodes; // List of nodes which need to be expanded before matching.
174 ConnectionGraph* _congraph; // NOTE(review): presumably the escape-analysis connection graph — confirm
175 #ifndef PRODUCT
176 IdealGraphPrinter* _printer; // Debug-build only (excluded from PRODUCT builds)
177 #endif
178
179 // Node management
180 uint _unique; // Counter for unique Node indices
181 debug_only(static int _debug_idx;) // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
182 Arena _node_arena; // Arena for new-space Nodes
183 Arena _old_arena; // Arena for old-space Nodes, lifetime during xform
184 RootNode* _root; // Unique root of compilation, or NULL after bail-out.
185 Node* _top; // Unique top node. (Reset by various phases.)
186
187 Node* _immutable_memory; // Initial memory state
188
189 Node* _recent_alloc_obj; // Most recently recorded allocation result (see set_recent_alloc)
190 Node* _recent_alloc_ctl; // Control node paired with _recent_alloc_obj (see set_recent_alloc)
191
192 uint _nodes_limit; // Current node-count budget used by check_node_count(); can grow via increase_nodes_limit(). Presumably initialized from MaxNodeLimit — confirm
193
194 // Blocked array of debugging and profiling information,
195 // tracked per node.
196 enum { _log2_node_notes_block_size = 8,
197 _node_notes_block_size = (1<<_log2_node_notes_block_size)
198 };
199 GrowableArray<Node_Notes*>* _node_note_array; // Blocks of notes; block size given by the enum above
200 Node_Notes* _default_node_notes; // default notes for new nodes
201
202 // After parsing and every bulk phase we hang onto the Root instruction.
203 // The RootNode instruction is where the whole program begins. It produces
204 // the initial Control and BOTTOM for everybody else.
205
206 // Type management
207 Arena _Compile_types; // Arena for all types
208 Arena* _type_arena; // Alias for _Compile_types except in Initialize_shared()
209 Dict* _type_dict; // Intern table
210 void* _type_hwm; // Last allocation (see Type::operator new/delete)
211 size_t _type_last_size; // Last allocation size (see Type::operator new/delete)
212 ciMethod* _last_tf_m; // Cache for
213 const TypeFunc* _last_tf; // TypeFunc::make
363 // Compilation environment.
364 Arena* comp_arena() { return &_comp_arena; }
365 ciEnv* env() const { return _env; }
366 CompileLog* log() const { return _log; }
367 bool failing() const { return _env->failing() || _failure_reason != NULL; } // True once a failure is recorded here or in the ciEnv
368 const char* failure_reason() { return _failure_reason; } // NULL while no failure has been recorded (see failing())
369 bool failure_reason_is(const char* r) { return (r==_failure_reason) || (r!=NULL && _failure_reason!=NULL && strcmp(r, _failure_reason)==0); } // Matches by pointer identity or by strcmp; NULL only matches NULL
370
371 void record_failure(const char* reason); // Out-of-line; presumably stores 'reason' into _failure_reason — confirm
372 void record_method_not_compilable(const char* reason, bool all_tiers = false) { // Bail out: mark the method not compilable in the ciEnv and record the failure locally
373 // All bailouts cover "all_tiers" when TieredCompilation is off.
374 if (!TieredCompilation) all_tiers = true;
375 env()->record_method_not_compilable(reason, all_tiers);
376 // Record failure reason.
377 record_failure(reason);
378 }
379 void record_method_not_compilable_all_tiers(const char* reason) { // Convenience wrapper: always covers all tiers
380 record_method_not_compilable(reason, true);
381 }
382 bool check_node_count(uint margin, const char* reason) { // Returns true (and bails out the compile) if adding 'margin' nodes would exceed the current budget
383 if (unique() + margin > _nodes_limit) { // budget is _nodes_limit, not MaxNodeLimit — it may have been grown by increase_nodes_limit()
384 record_method_not_compilable(reason);
385 return true;
386 } else {
387 return false;
388 }
389 }
390 uint nodes_limit() const { return _nodes_limit; } // Current node-count budget
391 void increase_nodes_limit() { _nodes_limit = _nodes_limit*2; } // double MaxNodeLimit
392
393 // Node management
394 uint unique() const { return _unique; } // Number of node indices handed out so far
395 uint next_unique() { return _unique++; } // Post-increment: returns the current index, then bumps the counter
396 void set_unique(uint i) { _unique = i; }
397 static int debug_idx() { return debug_only(_debug_idx)+0; } // debug_only() expands to nothing in product builds, leaving just 0
398 static void set_debug_idx(int i) { debug_only(_debug_idx = i); } // No-op in product builds
399 Arena* node_arena() { return &_node_arena; }
400 Arena* old_arena() { return &_old_arena; }
401 RootNode* root() const { return _root; }
402 void set_root(RootNode* r) { _root = r; }
403 StartNode* start() const; // (Derived from root.)
404 void init_start(StartNode* s);
405 Node* immutable_memory(); // Out-of-line accessor for _immutable_memory
406
407 Node* recent_alloc_ctl() const { return _recent_alloc_ctl; }
408 Node* recent_alloc_obj() const { return _recent_alloc_obj; }
409 void set_recent_alloc(Node* ctl, Node* obj) {
410 _recent_alloc_ctl = ctl;
411 _recent_alloc_obj = obj;
|