326
327 JVMState* _caller; // JVMS which carries incoming args & state.
328 float _expected_uses; // expected number of calls to this code
329 float _prof_factor; // discount applied to my profile counts
330 int _depth; // Inline tree depth, for debug printouts
331 const TypeFunc*_tf; // My kind of function type
332 int _entry_bci; // the osr bci or InvocationEntryBci
333
334 ciTypeFlow* _flow; // Results of previous flow pass.
335 Block* _blocks; // Array of basic-block structs.
336 int _block_count; // Number of elements in _blocks.
337
338 GraphKit _exits; // Record all normal returns and throws here.
339 bool _wrote_final; // Did we write a final field?
340 bool _wrote_volatile; // Did we write a volatile field?
341 bool _wrote_stable; // Did we write a @Stable field?
342 bool _wrote_fields; // Did we write any field?
343 bool _count_invocations; // update and test invocation counter
344 bool _method_data_update; // update method data oop
345 Node* _alloc_with_final; // An allocation node with final field
346
347 // Variables which track Java semantics during bytecode parsing:
348
349 Block* _block; // block currently getting parsed
350 ciBytecodeStream _iter; // stream of this method's bytecodes
351
352 int _blocks_merged; // Progress meter: state merges from BB preds
353 int _blocks_parsed; // Progress meter: BBs actually parsed
354
355 const FastLockNode* _synch_lock; // FastLockNode for synchronized method
356
357 #ifndef PRODUCT
358 int _max_switch_depth; // Debugging SwitchRanges.
359 int _est_switch_depth; // Debugging SwitchRanges.
360 #endif
361
362 bool _first_return; // true if return is the first to be parsed
363 bool _replaced_nodes_for_exceptions; // needs processing of replaced nodes in exception paths?
364 uint _new_idx; // any node with _idx above were new during this parsing. Used to trim the replaced nodes list.
365
379
380 ciTypeFlow* flow() const { return _flow; }
381 // blocks() -- see rpo_at, start_block, etc.
382 int block_count() const { return _block_count; }
383
// Simple getters/setters over the write-tracking flags declared above.
384 GraphKit& exits() { return _exits; }
385 bool wrote_final() const { return _wrote_final; }
386 void set_wrote_final(bool z) { _wrote_final = z; }
387 bool wrote_volatile() const { return _wrote_volatile; }
388 void set_wrote_volatile(bool z) { _wrote_volatile = z; }
389 bool wrote_stable() const { return _wrote_stable; }
390 void set_wrote_stable(bool z) { _wrote_stable = z; }
391 bool wrote_fields() const { return _wrote_fields; }
392 void set_wrote_fields(bool z) { _wrote_fields = z; }
393 bool count_invocations() const { return _count_invocations; }
394 bool method_data_update() const { return _method_data_update; }
395 Node* alloc_with_final() const { return _alloc_with_final; }
// Record the allocation whose final field is written; the assert enforces that
// at most one distinct allocation is tracked per parse (re-setting the same
// node is allowed).
396 void set_alloc_with_final(Node* n) {
397 assert((_alloc_with_final == NULL) || (_alloc_with_final == n), "different init objects?");
398 _alloc_with_final = n;
399 }
400
401 Block* block() const { return _block; }
402 ciBytecodeStream& iter() { return _iter; }
// Current bytecode under the stream cursor.
403 Bytecodes::Code bc() const { return _iter.cur_bc(); }
404
405 void set_block(Block* b) { _block = b; }
406
407 // Derived accessors:
// A parse is either a normal method entry or an OSR entry, distinguished
// solely by whether _entry_bci equals InvocationEntryBci.
408 bool is_normal_parse() const { return _entry_bci == InvocationEntryBci; }
409 bool is_osr_parse() const { return _entry_bci != InvocationEntryBci; }
410 int osr_bci() const { assert(is_osr_parse(),""); return _entry_bci; }
411
412 void set_parse_bci(int bci);
413
414 // Must this parse be aborted?
415 bool failing() { return C->failing(); }
416
417 Block* rpo_at(int rpo) {
418 assert(0 <= rpo && rpo < _block_count, "oob");
|
326
// --- Per-parse state: inputs from the caller plus bookkeeping updated while parsing. ---
327 JVMState* _caller; // JVMS which carries incoming args & state.
328 float _expected_uses; // expected number of calls to this code
329 float _prof_factor; // discount applied to my profile counts
330 int _depth; // Inline tree depth, for debug printouts
331 const TypeFunc*_tf; // My kind of function type
332 int _entry_bci; // the osr bci or InvocationEntryBci
333
334 ciTypeFlow* _flow; // Results of previous flow pass.
335 Block* _blocks; // Array of basic-block structs.
336 int _block_count; // Number of elements in _blocks.
337
338 GraphKit _exits; // Record all normal returns and throws here.
339 bool _wrote_final; // Did we write a final field?
340 bool _wrote_volatile; // Did we write a volatile field?
341 bool _wrote_stable; // Did we write a @Stable field?
342 bool _wrote_fields; // Did we write any field?
343 bool _count_invocations; // update and test invocation counter
344 bool _method_data_update; // update method data oop
345 Node* _alloc_with_final; // An allocation node with final field
346 Node* _alloc_with_stable; // An allocation node with stable field (NodeSentinel = none recorded yet; NULL = multiple distinct candidates seen)
347
348 // Variables which track Java semantics during bytecode parsing:
349
350 Block* _block; // block currently getting parsed
351 ciBytecodeStream _iter; // stream of this method's bytecodes
352
353 int _blocks_merged; // Progress meter: state merges from BB preds
354 int _blocks_parsed; // Progress meter: BBs actually parsed
355
356 const FastLockNode* _synch_lock; // FastLockNode for synchronized method
357
358 #ifndef PRODUCT
359 int _max_switch_depth; // Debugging SwitchRanges.
360 int _est_switch_depth; // Debugging SwitchRanges.
361 #endif
362
363 bool _first_return; // true if return is the first to be parsed
364 bool _replaced_nodes_for_exceptions; // needs processing of replaced nodes in exception paths?
365 uint _new_idx; // any node with _idx above were new during this parsing. Used to trim the replaced nodes list.
366
380
381 ciTypeFlow* flow() const { return _flow; }
382 // blocks() -- see rpo_at, start_block, etc.
383 int block_count() const { return _block_count; }
384
// Simple getters/setters over the write-tracking flags declared above.
385 GraphKit& exits() { return _exits; }
386 bool wrote_final() const { return _wrote_final; }
387 void set_wrote_final(bool z) { _wrote_final = z; }
388 bool wrote_volatile() const { return _wrote_volatile; }
389 void set_wrote_volatile(bool z) { _wrote_volatile = z; }
390 bool wrote_stable() const { return _wrote_stable; }
391 void set_wrote_stable(bool z) { _wrote_stable = z; }
392 bool wrote_fields() const { return _wrote_fields; }
393 void set_wrote_fields(bool z) { _wrote_fields = z; }
394 bool count_invocations() const { return _count_invocations; }
395 bool method_data_update() const { return _method_data_update; }
396 Node* alloc_with_final() const { return _alloc_with_final; }
// Record the allocation whose final field is written; the assert enforces that
// at most one distinct allocation is tracked per parse (re-setting the same
// node is allowed).
397 void set_alloc_with_final(Node* n) {
398 assert((_alloc_with_final == NULL) || (_alloc_with_final == n), "different init objects?");
399 _alloc_with_final = n;
400 }
401
// Returns the tracked allocation with a @Stable field, or NULL when none is
// tracked. NodeSentinel is the internal "not yet initialized" marker and is
// never leaked to callers.
402 Node* alloc_with_stable() const {
403 if (_alloc_with_stable == NodeSentinel) {
404 return NULL;
405 }
406 return _alloc_with_stable;
407 }
408
// Track at most one allocation with a @Stable field: the first candidate is
// recorded; any later, different candidate collapses the tracking to NULL.
409 void set_alloc_with_stable(Node* n) {
410 // Still uninitialized (NodeSentinel): record the first candidate, which may itself be NULL or a valid node.
411 if (_alloc_with_stable == NodeSentinel) {
412 _alloc_with_stable = n;
413 }
414 // A different node was already recorded: more than one candidate exists, so track none.
415 else if (_alloc_with_stable != n) {
416 _alloc_with_stable = NULL;
417 }
418 // _alloc_with_stable already equals n: nothing to do.
419 }
420
421 Block* block() const { return _block; }
422 ciBytecodeStream& iter() { return _iter; }
// Current bytecode under the stream cursor.
423 Bytecodes::Code bc() const { return _iter.cur_bc(); }
424
425 void set_block(Block* b) { _block = b; }
426
427 // Derived accessors:
// A parse is either a normal method entry or an OSR entry, distinguished
// solely by whether _entry_bci equals InvocationEntryBci.
428 bool is_normal_parse() const { return _entry_bci == InvocationEntryBci; }
429 bool is_osr_parse() const { return _entry_bci != InvocationEntryBci; }
430 int osr_bci() const { assert(is_osr_parse(),""); return _entry_bci; }
431
432 void set_parse_bci(int bci);
433
434 // Must this parse be aborted?
435 bool failing() { return C->failing(); }
436
437 Block* rpo_at(int rpo) {
438 assert(0 <= rpo && rpo < _block_count, "oob");
|