  // Address type of this memory access (the bottom_type of the Address input).
  virtual const class TypePtr *adr_type() const; // returns bottom_type of address

  // Shared code for Ideal methods:
  Node *Ideal_common(PhaseGVN *phase, bool can_reshape);  // Return -1 for short-circuit NULL.

  // Helper function for adr_type() implementations.
  // cross_check, when non-NULL, is presumably used to validate the computed
  // type against an expected one -- confirm in the .cpp implementation.
  static const TypePtr* calculate_adr_type(const Type* t, const TypePtr* cross_check = NULL);
104 const TypePtr *raw_adr_type() const {
105 #ifdef ASSERT
106 return _adr_type;
107 #else
108 return 0;
109 #endif
110 }
111
  // Map a load or store opcode to its corresponding store opcode.
  // (Return -1 if unknown.)
  virtual int store_Opcode() const { return -1; }

  // What is the type of the value in memory?  (T_VOID mean "unspecified".)
  virtual BasicType memory_type() const = 0;
  // Size in bytes of this access, derived from memory_type().
  virtual int memory_size() const {
#ifdef ASSERT
    // Second argument presumably enables extra checking of the element type
    // in debug builds -- confirm against type2aelembytes' declaration.
    return type2aelembytes(memory_type(), true);
#else
    return type2aelembytes(memory_type());
#endif
  }

  // Search through memory states which precede this node (load or store).
  // Look for an exact match for the address, with no intervening
  // aliased stores.
  Node* find_previous_store(PhaseTransform* phase);

  // Can this node (load or store) accurately see a stored value in
  // the given memory state?  (The state may or may not be in(Memory).)
  Node* can_see_stored_value(Node* st, PhaseTransform* phase) const;
 protected:
  virtual uint cmp(const Node &n) const;
  virtual uint size_of() const; // Size is bigger
  // Should LoadNode::Ideal() attempt to remove control edges?
  virtual bool can_remove_control() const;
  const Type* const _type;      // What kind of value is loaded?

  // Look through a preceding ArrayCopy for a value this load could observe.
  virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const;
 public:

  LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency)
    : MemNode(c,mem,adr,at), _type(rt), _mo(mo), _control_dependency(control_dependency) {
    init_class_id(Class_Load);
  }
  // A load is unordered exactly when it is not an acquire (only those two
  // orderings are legal for loads, see the assert below).
  inline bool is_unordered() const { return !is_acquire(); }
  inline bool is_acquire() const {
    assert(_mo == unordered || _mo == acquire, "unexpected");
    return _mo == acquire;
  }
  // Zero-extending loads are recognized by opcode.
  inline bool is_unsigned() const {
    int lop = Opcode();
    return (lop == Op_LoadUB) || (lop == Op_LoadUS);
  }

  // Polymorphic factory method:
  static Node* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
                    const TypePtr* at, const Type *rt, BasicType bt,
                    MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
                    bool unaligned = false, bool mismatched = false);

  virtual uint hash() const;  // Check the type

  // Handle algebraic identities here.  If we have an identity, return the Node
  // we are equivalent to.  We look for Load of a Store.
  virtual Node* Identity(PhaseGVN* phase);

  // If the load is from Field memory and the pointer is non-null, it might be possible to
  // zero out the control input.
  // If the offset is constant and the base is an object allocation,
  // try to hook me up to the exact initializing store.
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  // Common methods for LoadKlass and LoadNKlass nodes.
  const Type* klass_value_common(PhaseGVN* phase) const;
  Node* klass_identity_common(PhaseGVN* phase);

  virtual uint ideal_reg() const;
  virtual const Type *bottom_type() const;
  // Following method is copied from TypeNode:
  void set_type(const Type* t) {
    assert(t != NULL, "sanity");
    debug_only(uint check_hash = (VerifyHashTableKeys && _hash_lock) ? hash() : NO_HASH);
    *(const Type**)&_type = t;   // cast away const-ness
    // If this node is in the hash table, make sure it doesn't need a rehash.
    assert(check_hash == NO_HASH || check_hash == hash(), "type change must preserve hash code");
  }
  const Type* type() const { assert(_type != NULL, "sanity"); return _type; };

  // Do not match memory edge
  virtual uint match_edge(uint idx) const;

  // Map a load opcode to its corresponding store opcode.
  virtual int store_Opcode() const = 0;

  // Check if the load's memory input is a Phi node with the same control.
  bool is_instance_field_load_with_local_phi(Node* ctrl);

  // Conversions between signed and zero-extended forms of the same load.
  Node* convert_to_unsigned_load(PhaseGVN& gvn);
  Node* convert_to_signed_load(PhaseGVN& gvn);

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
#ifdef ASSERT
  // Helper function to allow a raw load without control edge for some cases
  static bool is_immutable_value(Node* adr);
#endif
 protected:
  const Type* load_array_final_field(const TypeKlassPtr *tkls,
                                     ciKlass* klass) const;

  // Look through an ArrayCopy for the value this load would observe.
  Node* can_see_arraycopy_value(Node* st, PhaseTransform* phase) const;

  // depends_only_on_test is almost always true, and needs to be almost always
  // true to enable key hoisting & commoning optimizations.  However, for the
  // special case of RawPtr loads from TLS top & end, and other loads performed by
  // GC barriers, the control edge carries the dependence preventing hoisting past
  // a Safepoint instead of the memory edge.  (An unfortunate consequence of having
  // Safepoints not set Raw Memory; itself an unfortunate consequence of having Nodes
  // which produce results (new raw memory state) inside of loops preventing all
  // manner of other optimizations).  Basically, it's ugly but so is the alternative.
  // See comment in macro.cpp, around line 125 expand_allocate_common().
  virtual bool depends_only_on_test() const {
    return adr_type() != TypeRawPtr::BOTTOM && _control_dependency == DependsOnlyOnTest;
  }
};
287
//------------------------------LoadBNode--------------------------------------
// Load a byte (8bits signed) from memory
class LoadBNode : public LoadNode {
public:
  LoadBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }  // result is an int register
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual int store_Opcode() const { return Op_StoreB; }  // matching store opcode
  virtual BasicType memory_type() const { return T_BYTE; }
};
301
//------------------------------LoadUBNode-------------------------------------
// Load an unsigned byte (8bits unsigned) from memory
class LoadUBNode : public LoadNode {
public:
  LoadUBNode(Node* c, Node* mem, Node* adr, const TypePtr* at, const TypeInt* ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  // Stores do not distinguish signedness, so this maps to the plain byte store.
  virtual int store_Opcode() const { return Op_StoreB; }
  virtual BasicType memory_type() const { return T_BYTE; }
};
315
//------------------------------LoadUSNode-------------------------------------
// Load an unsigned short/char (16bits unsigned) from memory
class LoadUSNode : public LoadNode {
public:
  LoadUSNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  // Stores do not distinguish signedness, so this maps to the char/short store.
  virtual int store_Opcode() const { return Op_StoreC; }
  virtual BasicType memory_type() const { return T_CHAR; }
};
329
//------------------------------LoadSNode--------------------------------------
// Load a short (16bits signed) from memory
class LoadSNode : public LoadNode {
public:
  LoadSNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual int store_Opcode() const { return Op_StoreC; }  // same store as char (16 bits)
  virtual BasicType memory_type() const { return T_SHORT; }
};
343
//------------------------------LoadINode--------------------------------------
// Load an integer from memory
class LoadINode : public LoadNode {
public:
  LoadINode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual int store_Opcode() const { return Op_StoreI; }
  virtual BasicType memory_type() const { return T_INT; }
};
355
//------------------------------LoadRangeNode----------------------------------
// Load an array length from the array
class LoadRangeNode : public LoadINode {
public:
  // Array lengths are non-negative ints in the RANGE alias slice; the load
  // is always unordered.
  LoadRangeNode(Node *c, Node *mem, Node *adr, const TypeInt *ti = TypeInt::POS)
    : LoadINode(c, mem, adr, TypeAryPtr::RANGE, ti, MemNode::unordered) {}
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
};
367
//------------------------------LoadLNode--------------------------------------
// Load a long from memory
class LoadLNode : public LoadNode {
  // hash/cmp include _require_atomic_access so that atomic and non-atomic
  // loads of the same address are never commoned by GVN.
  virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((LoadLNode&)n)._require_atomic_access
      && LoadNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); }  // needed for proper cloning
  const bool _require_atomic_access;  // is piecewise load forbidden?

public:
  LoadLNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeLong *tl,
            MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest, bool require_atomic_access = false)
    : LoadNode(c, mem, adr, at, tl, mo, control_dependency), _require_atomic_access(require_atomic_access) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegL; }
  virtual int store_Opcode() const { return Op_StoreL; }
  virtual BasicType memory_type() const { return T_LONG; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a long load that may not be performed piecewise.
  static LoadLNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type,
                                const Type* rt, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
                                bool unaligned = false, bool mismatched = false);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    LoadNode::dump_spec(st);
    if (_require_atomic_access) st->print(" Atomic!");
  }
#endif
};
398
//------------------------------LoadL_unalignedNode----------------------------
// Load a long from unaligned memory
class LoadL_unalignedNode : public LoadLNode {
public:
  // Always typed TypeLong::LONG; distinguished from LoadLNode only by opcode.
  LoadL_unalignedNode(Node *c, Node *mem, Node *adr, const TypePtr* at, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadLNode(c, mem, adr, at, TypeLong::LONG, mo, control_dependency) {}
  virtual int Opcode() const;
};
407
//------------------------------LoadFNode--------------------------------------
// Load a float (32 bits) from memory
class LoadFNode : public LoadNode {
public:
  LoadFNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *t, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, t, mo, control_dependency) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegF; }
  virtual int store_Opcode() const { return Op_StoreF; }
  virtual BasicType memory_type() const { return T_FLOAT; }
};
419
//------------------------------LoadDNode--------------------------------------
// Load a double (64 bits) from memory
class LoadDNode : public LoadNode {
  // hash/cmp include _require_atomic_access so that atomic and non-atomic
  // loads of the same address are never commoned by GVN.
  virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((LoadDNode&)n)._require_atomic_access
      && LoadNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); }  // needed for proper cloning
  const bool _require_atomic_access;  // is piecewise load forbidden?

public:
  LoadDNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *t,
            MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest, bool require_atomic_access = false)
    : LoadNode(c, mem, adr, at, t, mo, control_dependency), _require_atomic_access(require_atomic_access) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegD; }
  virtual int store_Opcode() const { return Op_StoreD; }
  virtual BasicType memory_type() const { return T_DOUBLE; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a double load that may not be performed piecewise.
  static LoadDNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type,
                                const Type* rt, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
                                bool unaligned = false, bool mismatched = false);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    LoadNode::dump_spec(st);
    if (_require_atomic_access) st->print(" Atomic!");
  }
#endif
};
450
//------------------------------LoadD_unalignedNode----------------------------
// Load a double from unaligned memory
class LoadD_unalignedNode : public LoadDNode {
public:
  // Always typed Type::DOUBLE; distinguished from LoadDNode only by opcode.
  LoadD_unalignedNode(Node *c, Node *mem, Node *adr, const TypePtr* at, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadDNode(c, mem, adr, at, Type::DOUBLE, mo, control_dependency) {}
  virtual int Opcode() const;
};
459
//------------------------------LoadPNode--------------------------------------
// Load a pointer from memory (either object or array)
class LoadPNode : public LoadNode {
public:
  LoadPNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypePtr* t, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, t, mo, control_dependency) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegP; }
  virtual int store_Opcode() const { return Op_StoreP; }
  virtual BasicType memory_type() const { return T_ADDRESS; }
};
471
472
//------------------------------LoadNNode--------------------------------------
// Load a narrow oop from memory (either object or array)
class LoadNNode : public LoadNode {
public:
  LoadNNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const Type* t, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, t, mo, control_dependency) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegN; }  // narrow-oop register class
  virtual int store_Opcode() const { return Op_StoreN; }
  virtual BasicType memory_type() const { return T_NARROWOOP; }
};
484
//------------------------------LoadKlassNode----------------------------------
// Load a Klass from an object
class LoadKlassNode : public LoadPNode {
protected:
  // In most cases, LoadKlassNode does not have the control input set. If the control
  // input is set, it must not be removed (by LoadNode::Ideal()).
  virtual bool can_remove_control() const;
public:
  LoadKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeKlassPtr *tk, MemOrd mo)
    : LoadPNode(c, mem, adr, at, tk, mo) {}
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  // Klass words are immutable, so the load can always be hoisted.
  virtual bool depends_only_on_test() const { return true; }

  // Polymorphic factory method:
  static Node* make(PhaseGVN& gvn, Node* ctl, Node* mem, Node* adr, const TypePtr* at,
                    const TypeKlassPtr* tk = TypeKlassPtr::OBJECT);
};
504
//------------------------------LoadNKlassNode---------------------------------
// Load a narrow Klass from an object.
class LoadNKlassNode : public LoadNNode {
public:
  LoadNKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeNarrowKlass *tk, MemOrd mo)
    : LoadNNode(c, mem, adr, at, tk, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegN; }
  virtual int store_Opcode() const { return Op_StoreNKlass; }
  virtual BasicType memory_type() const { return T_NARROWKLASS; }

  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  // Klass words are immutable, so the load can always be hoisted.
  virtual bool depends_only_on_test() const { return true; }
};
520
521
//------------------------------StoreNode--------------------------------------
// Store value; requires Store, Address and Value
class StoreNode : public MemNode {
private:
  // On platforms with weak memory ordering (e.g., PPC, Ia64) we distinguish
  // stores that can be reordered, and such requiring release semantics to
  // adhere to the Java specification.  The required behaviour is stored in
  // this field.
  const MemOrd _mo;
  // Needed for proper cloning.
  virtual uint size_of() const { return sizeof(*this); }
protected:

  virtual uint hash() const;  // Check the type

  // If the store is to Field memory and the pointer is non-null, we can
  // zero out the control input.
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);

  // Compute a new Type for this node.  Basically we just do the pre-check,
  // then call the virtual add() to set the type.
  virtual const Type* Value(PhaseGVN* phase) const;

  // Check for identity function on memory (Load then Store at same address)
  virtual Node* Identity(PhaseGVN* phase);

  // Do not match memory edge
  virtual uint match_edge(uint idx) const;

  virtual const Type *bottom_type() const;  // returns Type::MEMORY

  // Map a store opcode to its corresponding own opcode, trivially.
  virtual int store_Opcode() const { return Opcode(); }

  // have all possible loads of the value stored been optimized away?
  bool value_never_loaded(PhaseTransform *phase) const;
};
610
//------------------------------StoreBNode-------------------------------------
// Store byte to memory
class StoreBNode : public StoreNode {
public:
  StoreBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual BasicType memory_type() const { return T_BYTE; }
};
621
//------------------------------StoreCNode-------------------------------------
// Store char/short to memory
class StoreCNode : public StoreNode {
public:
  StoreCNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual BasicType memory_type() const { return T_CHAR; }
};
632
//------------------------------StoreINode-------------------------------------
// Store int to memory
class StoreINode : public StoreNode {
public:
  StoreINode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_INT; }
};
642
//------------------------------StoreLNode-------------------------------------
// Store long to memory
class StoreLNode : public StoreNode {
  // hash/cmp include _require_atomic_access so that atomic and non-atomic
  // stores of the same value are never commoned by GVN.
  virtual uint hash() const { return StoreNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((StoreLNode&)n)._require_atomic_access
      && StoreNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); }  // needed for proper cloning
  const bool _require_atomic_access;  // is piecewise store forbidden?

public:
  StoreLNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo, bool require_atomic_access = false)
    : StoreNode(c, mem, adr, at, val, mo), _require_atomic_access(require_atomic_access) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_LONG; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a long store that may not be performed piecewise.
  static StoreLNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, Node* val, MemOrd mo);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    StoreNode::dump_spec(st);
    if (_require_atomic_access) st->print(" Atomic!");
  }
#endif
};
668
//------------------------------StoreFNode-------------------------------------
// Store float to memory
class StoreFNode : public StoreNode {
public:
  StoreFNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_FLOAT; }
};
678
//------------------------------StoreDNode-------------------------------------
// Store double to memory
class StoreDNode : public StoreNode {
  // hash/cmp include _require_atomic_access so that atomic and non-atomic
  // stores of the same value are never commoned by GVN.
  virtual uint hash() const { return StoreNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((StoreDNode&)n)._require_atomic_access
      && StoreNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); }  // needed for proper cloning
  const bool _require_atomic_access;  // is piecewise store forbidden?
public:
  StoreDNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val,
             MemOrd mo, bool require_atomic_access = false)
    : StoreNode(c, mem, adr, at, val, mo), _require_atomic_access(require_atomic_access) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_DOUBLE; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a double store that may not be performed piecewise.
  static StoreDNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, Node* val, MemOrd mo);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    StoreNode::dump_spec(st);
    if (_require_atomic_access) st->print(" Atomic!");
  }
#endif

};
705
//------------------------------StorePNode-------------------------------------
// Store pointer to memory
class StorePNode : public StoreNode {
public:
  StorePNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_ADDRESS; }
};
715
//------------------------------StoreNNode-------------------------------------
// Store narrow oop to memory
class StoreNNode : public StoreNode {
public:
  StoreNNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_NARROWOOP; }
};
725
//------------------------------StoreNKlassNode--------------------------------------
// Store narrow klass to memory
class StoreNKlassNode : public StoreNNode {
public:
  StoreNKlassNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_NARROWKLASS; }
};
735
736 //------------------------------StoreCMNode-----------------------------------
737 // Store card-mark byte to memory for CM
738 // The last StoreCM before a SafePoint must be preserved and occur after its "oop" store
739 // Preceeding equivalent StoreCMs may be eliminated.
740 class StoreCMNode : public StoreNode {
741 private:
742 virtual uint hash() const { return StoreNode::hash() + _oop_alias_idx; }
743 virtual uint cmp( const Node &n ) const {
744 return _oop_alias_idx == ((StoreCMNode&)n)._oop_alias_idx
745 && StoreNode::cmp(n);
746 }
747 virtual uint size_of() const { return sizeof(*this); }
748 int _oop_alias_idx; // The alias_idx of OopStore
749
750 public:
751 StoreCMNode( Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, Node *oop_store, int oop_alias_idx ) :
752 StoreNode(c, mem, adr, at, val, oop_store, MemNode::release),
753 _oop_alias_idx(oop_alias_idx) {
754 assert(_oop_alias_idx >= Compile::AliasIdxRaw ||
755 _oop_alias_idx == Compile::AliasIdxBot && Compile::current()->AliasLevel() == 0,
756 "bad oop alias idx");
757 }
758 virtual int Opcode() const;
759 virtual Node* Identity(PhaseGVN* phase);
760 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
761 virtual const Type* Value(PhaseGVN* phase) const;
762 virtual BasicType memory_type() const { return T_VOID; } // unspecific
763 int oop_alias_idx() const { return _oop_alias_idx; }
764 };
765
//------------------------------LoadPLockedNode---------------------------------
// Load-locked a pointer from memory (either object or array).
// On Sparc & Intel this is implemented as a normal pointer load.
// On PowerPC and friends it's a real load-locked.
class LoadPLockedNode : public LoadPNode {
public:
  // Always raw-pointer typed (TypeRawPtr::BOTTOM address and value).
  LoadPLockedNode(Node *c, Node *mem, Node *adr, MemOrd mo)
    : LoadPNode(c, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM, mo) {}
  virtual int Opcode() const;
  virtual int store_Opcode() const { return Op_StorePConditional; }  // paired conditional store
  virtual bool depends_only_on_test() const { return true; }
};
778
//------------------------------SCMemProjNode---------------------------------------
// This class defines a projection of the memory state of a store conditional node.
// These nodes return a value, but also update memory.
class SCMemProjNode : public ProjNode {
public:
  enum {SCMEMPROJCON = (uint)-2};  // sentinel projection constant for the memory output
  SCMemProjNode( Node *src) : ProjNode( src, SCMEMPROJCON) { }
  virtual int Opcode() const;
  virtual bool is_CFG() const { return false; }
  virtual const Type *bottom_type() const {return Type::MEMORY;}
  // Delegate the address type to the memory input of the projected node.
  virtual const TypePtr *adr_type() const {
    Node* ctrl = in(0);
    if (ctrl == NULL) return NULL; // node is dead
    return ctrl->in(MemNode::Memory)->adr_type();
  }
  virtual uint ideal_reg() const { return 0;} // memory projections don't have a register
  virtual const Type* Value(PhaseGVN* phase) const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {};
#endif
};
800
//------------------------------LoadStoreNode---------------------------
// Note: is_Mem() method returns 'true' for this class.
class LoadStoreNode : public Node {
private:
  const Type* const _type;      // What kind of value is loaded?
  const TypePtr* _adr_type;     // What kind of memory is being addressed?
  virtual uint ideal_reg() const;
  virtual const class TypePtr *adr_type() const { return _adr_type; }  // returns bottom_type of address

  // True if no users consume the loaded/old value (only the memory effect matters).
  bool result_not_used() const;
};
819
// A LoadStore whose effect is guarded by an extra "expected" input edge.
class LoadStoreConditionalNode : public LoadStoreNode {
public:
  enum {
    ExpectedIn = MemNode::ValueIn+1 // One more input than MemNode
  };
  LoadStoreConditionalNode(Node *c, Node *mem, Node *adr, Node *val, Node *ex);
};
827
//------------------------------StorePConditionalNode---------------------------
// Conditionally store pointer to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StorePConditionalNode : public LoadStoreConditionalNode {
public:
  StorePConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ll ) : LoadStoreConditionalNode(c, mem, adr, val, ll) { }
  virtual int Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};
838
//------------------------------StoreIConditionalNode---------------------------
// Conditionally store int to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StoreIConditionalNode : public LoadStoreConditionalNode {
public:
  StoreIConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ii ) : LoadStoreConditionalNode(c, mem, adr, val, ii) { }
  virtual int Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};
849
//------------------------------StoreLConditionalNode---------------------------
// Conditionally store long to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StoreLConditionalNode : public LoadStoreConditionalNode {
public:
  StoreLConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ll ) : LoadStoreConditionalNode(c, mem, adr, val, ll) { }
  virtual int Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};
860
// Base class for compare-and-swap nodes: swaps 'val' into the address if the
// current contents equal the expected value 'ex'.  Carries the required
// memory ordering for the operation.
class CompareAndSwapNode : public LoadStoreConditionalNode {
private:
  const MemNode::MemOrd _mem_ord;  // ordering required by this CAS
public:
  CompareAndSwapNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : LoadStoreConditionalNode(c, mem, adr, val, ex), _mem_ord(mem_ord) {}
  MemNode::MemOrd order() const {
    return _mem_ord;
  }
};
870
// Base class for compare-and-exchange nodes: like compare-and-swap, but the
// node's result is the value previously found at the address rather than a
// success flag.
class CompareAndExchangeNode : public LoadStoreNode {
private:
  const MemNode::MemOrd _mem_ord;  // ordering required by this operation
public:
  enum {
    ExpectedIn = MemNode::ValueIn+1 // One more input than MemNode
  };
  // NOTE(review): the literal 5 is presumably the required input count passed
  // to the LoadStoreNode constructor (not visible in this chunk) -- confirm.
  CompareAndExchangeNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord, const TypePtr* at, const Type* t) :
    LoadStoreNode(c, mem, adr, val, at, t, 5), _mem_ord(mem_ord) {
    init_req(ExpectedIn, ex );  // wire up the extra "expected value" edge
  }

  MemNode::MemOrd order() const {
    return _mem_ord;
  }
};
887
//------------------------------CompareAndSwapBNode---------------------------
// Byte-sized (B) variant of CompareAndSwapNode.
class CompareAndSwapBNode : public CompareAndSwapNode {
public:
  CompareAndSwapBNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
894
//------------------------------CompareAndSwapSNode---------------------------
// Short-sized (S) variant of CompareAndSwapNode.
class CompareAndSwapSNode : public CompareAndSwapNode {
public:
  CompareAndSwapSNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
901
//------------------------------CompareAndSwapINode---------------------------
// Int-sized (I) variant of CompareAndSwapNode.
class CompareAndSwapINode : public CompareAndSwapNode {
public:
  CompareAndSwapINode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
908
//------------------------------CompareAndSwapLNode---------------------------
// Long-sized (L) variant of CompareAndSwapNode.
class CompareAndSwapLNode : public CompareAndSwapNode {
public:
  CompareAndSwapLNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
915
//------------------------------CompareAndSwapPNode---------------------------
// Pointer (oop) variant of CompareAndSwapNode.
class CompareAndSwapPNode : public CompareAndSwapNode {
public:
  CompareAndSwapPNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
922
//------------------------------CompareAndSwapNNode---------------------------
// Narrow-oop (compressed pointer) variant of CompareAndSwapNode.
class CompareAndSwapNNode : public CompareAndSwapNode {
public:
  CompareAndSwapNNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
929
//------------------------------WeakCompareAndSwapBNode---------------------------
// Weak byte (8-bit) compare-and-swap (distinguished from the strong form only
// by Opcode; see the matcher for the weak semantics).
class WeakCompareAndSwapBNode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapBNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
936
//------------------------------WeakCompareAndSwapSNode---------------------------
// Weak short (16-bit) compare-and-swap.
class WeakCompareAndSwapSNode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapSNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
943
//------------------------------WeakCompareAndSwapINode---------------------------
// Weak int (32-bit) compare-and-swap.
class WeakCompareAndSwapINode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapINode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
950
//------------------------------WeakCompareAndSwapLNode---------------------------
// Weak long (64-bit) compare-and-swap.
class WeakCompareAndSwapLNode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapLNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
957
//------------------------------WeakCompareAndSwapPNode---------------------------
// Weak pointer (oop) compare-and-swap.
class WeakCompareAndSwapPNode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapPNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
964
//------------------------------WeakCompareAndSwapNNode---------------------------
// Weak narrow-oop (compressed pointer) compare-and-swap.
class WeakCompareAndSwapNNode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapNNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual int Opcode() const;
};
971
//------------------------------CompareAndExchangeBNode---------------------------
// Byte compare-and-exchange; result value typed TypeInt::BYTE.
class CompareAndExchangeBNode : public CompareAndExchangeNode {
public:
  CompareAndExchangeBNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, TypeInt::BYTE) { }
  virtual int Opcode() const;
};
978
979
//------------------------------CompareAndExchangeSNode---------------------------
// Short compare-and-exchange; result value typed TypeInt::SHORT.
class CompareAndExchangeSNode : public CompareAndExchangeNode {
public:
  CompareAndExchangeSNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, TypeInt::SHORT) { }
  virtual int Opcode() const;
};
986
//------------------------------CompareAndExchangeLNode---------------------------
// Long compare-and-exchange; result value typed TypeLong::LONG.
class CompareAndExchangeLNode : public CompareAndExchangeNode {
public:
  CompareAndExchangeLNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, TypeLong::LONG) { }
  virtual int Opcode() const;
};
993
994
//------------------------------CompareAndExchangeINode---------------------------
// Int compare-and-exchange; result value typed TypeInt::INT.
class CompareAndExchangeINode : public CompareAndExchangeNode {
public:
  CompareAndExchangeINode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, TypeInt::INT) { }
  virtual int Opcode() const;
};
1001
1002
//------------------------------CompareAndExchangePNode---------------------------
// Pointer (oop) compare-and-exchange; caller supplies the result type t.
class CompareAndExchangePNode : public CompareAndExchangeNode {
public:
  CompareAndExchangePNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, const Type* t, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, t) { }
  virtual int Opcode() const;
};
1009
//------------------------------CompareAndExchangeNNode---------------------------
// Narrow-oop compare-and-exchange; caller supplies the result type t.
class CompareAndExchangeNNode : public CompareAndExchangeNode {
public:
  CompareAndExchangeNNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, const Type* t, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, t) { }
  virtual int Opcode() const;
};
1016
//------------------------------GetAndAddBNode---------------------------
// Atomic get-and-add on a byte; old value (result) typed TypeInt::BYTE.
class GetAndAddBNode : public LoadStoreNode {
public:
  GetAndAddBNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::BYTE, 4) { }
  virtual int Opcode() const;
};
1023
//------------------------------GetAndAddSNode---------------------------
// Atomic get-and-add on a short; old value (result) typed TypeInt::SHORT.
class GetAndAddSNode : public LoadStoreNode {
public:
  GetAndAddSNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::SHORT, 4) { }
  virtual int Opcode() const;
};
1030
//------------------------------GetAndAddINode---------------------------
// Atomic get-and-add on an int; old value (result) typed TypeInt::INT.
class GetAndAddINode : public LoadStoreNode {
public:
  GetAndAddINode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::INT, 4) { }
  virtual int Opcode() const;
};
1037
//------------------------------GetAndAddLNode---------------------------
// Atomic get-and-add on a long; old value (result) typed TypeLong::LONG.
class GetAndAddLNode : public LoadStoreNode {
public:
  GetAndAddLNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeLong::LONG, 4) { }
  virtual int Opcode() const;
};
1044
//------------------------------GetAndSetBNode---------------------------
// Atomic get-and-set (exchange) on a byte; old value typed TypeInt::BYTE.
class GetAndSetBNode : public LoadStoreNode {
public:
  GetAndSetBNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::BYTE, 4) { }
  virtual int Opcode() const;
};
1051
//------------------------------GetAndSetSNode---------------------------
// Atomic get-and-set (exchange) on a short; old value typed TypeInt::SHORT.
class GetAndSetSNode : public LoadStoreNode {
public:
  GetAndSetSNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::SHORT, 4) { }
  virtual int Opcode() const;
};
1058
//------------------------------GetAndSetINode---------------------------
// Atomic get-and-set (exchange) on an int; old value typed TypeInt::INT.
class GetAndSetINode : public LoadStoreNode {
public:
  GetAndSetINode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::INT, 4) { }
  virtual int Opcode() const;
};
1065
//------------------------------GetAndSetLNode---------------------------
// Atomic get-and-set (exchange) on a long; old value typed TypeLong::LONG.
class GetAndSetLNode : public LoadStoreNode {
public:
  GetAndSetLNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeLong::LONG, 4) { }
  virtual int Opcode() const;
};
1072
//------------------------------GetAndSetPNode---------------------------
// Atomic get-and-set (exchange) on a pointer (oop); caller supplies type t.
class GetAndSetPNode : public LoadStoreNode {
public:
  GetAndSetPNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
  virtual int Opcode() const;
};
1079
//------------------------------GetAndSetNNode---------------------------
// Atomic get-and-set (exchange) on a narrow oop; caller supplies type t.
class GetAndSetNNode : public LoadStoreNode {
public:
  GetAndSetNNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
  virtual int Opcode() const;
};
1086
1087 //------------------------------ClearArray-------------------------------------
1088 class ClearArrayNode: public Node {
1089 private:
1090 bool _is_large;
1091 public:
1092 ClearArrayNode( Node *ctrl, Node *arymem, Node *word_cnt, Node *base, bool is_large)
1093 : Node(ctrl,arymem,word_cnt,base), _is_large(is_large) {
1094 init_class_id(Class_ClearArray);
1095 }
1096 virtual int Opcode() const;
1097 virtual const Type *bottom_type() const { return Type::MEMORY; }
1098 // ClearArray modifies array elements, and so affects only the
1099 // array memory addressed by the bottom_type of its base address.
1100 virtual const class TypePtr *adr_type() const;
1101 virtual Node* Identity(PhaseGVN* phase);
1102 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1103 virtual uint match_edge(uint idx) const;
1104 bool is_large() const { return _is_large; }
1105
1106 // Clear the given area of an object or array.
1107 // The start offset must always be aligned mod BytesPerInt.
1108 // The end offset must always be aligned mod BytesPerLong.
1109 // Return the new memory.
1110 static Node* clear_memory(Node* control, Node* mem, Node* dest,
1111 intptr_t start_offset,
1112 intptr_t end_offset,
1113 PhaseGVN* phase);
1114 static Node* clear_memory(Node* control, Node* mem, Node* dest,
1115 intptr_t start_offset,
1116 Node* end_offset,
// Model. Monitor-enter and volatile-load act as Acquires: no following ref
// can be moved to before them.  We insert a MemBar-Acquire after a FastLock or
// volatile-load.  Monitor-exit and volatile-store act as Release: no
// preceding ref can be moved to after them.  We insert a MemBar-Release
// before a FastUnlock or volatile-store.  All volatiles need to be
// serialized, so we follow all volatile-stores with a MemBar-Volatile to
// separate it from any following volatile-load.
class MemBarNode: public MultiNode {
  // hash/cmp are overridden (bodies in the .cpp) so membars are never
  // value-numbered together.
  virtual uint hash() const ;                  // { return NO_HASH; }
  virtual uint cmp( const Node &n ) const ;    // Always fail, except on self

  virtual uint size_of() const { return sizeof(*this); }
  // Memory type this node is serializing.  Usually either rawptr or bottom.
  const TypePtr* _adr_type;

public:
  enum {
    Precedent = TypeFunc::Parms  // optional edge to force precedence
  };
  MemBarNode(Compile* C, int alias_idx, Node* precedent);
  // Abstract: each concrete barrier flavor below supplies its own opcode.
  virtual int Opcode() const = 0;
  virtual const class TypePtr *adr_type() const { return _adr_type; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  // No inputs are matched; the membar itself is matched via match().
  virtual uint match_edge(uint idx) const { return 0; }
  virtual const Type *bottom_type() const { return TypeTuple::MEMBAR; }
  virtual Node *match( const ProjNode *proj, const Matcher *m );
  // Factory method.  Builds a wide or narrow membar.
  // Optional 'precedent' becomes an extra edge if not null.
  static MemBarNode* make(Compile* C, int opcode,
                          int alias_idx = Compile::AliasIdxBot,
                          Node* precedent = NULL);
};
1162
// "Acquire" - no following ref can move before (but earlier refs can
// follow, like an early Load stalled in cache).  Requires multi-cpu
// visibility.  Inserted after a volatile load.
class MemBarAcquireNode: public MemBarNode {
public:
  MemBarAcquireNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
1172
// "Acquire" - no following ref can move before (but earlier refs can
// follow, like an early Load stalled in cache).  Requires multi-cpu
// visibility.  Inserted independent of any load, as required
// for intrinsic Unsafe.loadFence().
class LoadFenceNode: public MemBarNode {
public:
  LoadFenceNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
1183
// "Release" - no earlier ref can move after (but later refs can move
// up, like a speculative pipelined cache-hitting Load).  Requires
// multi-cpu visibility.  Inserted before a volatile store.
class MemBarReleaseNode: public MemBarNode {
public:
  MemBarReleaseNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
1193
// "Release" - no earlier ref can move after (but later refs can move
// up, like a speculative pipelined cache-hitting Load).  Requires
// multi-cpu visibility.  Inserted independent of any store, as required
// for intrinsic Unsafe.storeFence().
class StoreFenceNode: public MemBarNode {
public:
  StoreFenceNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
1204
// "Acquire" - no following ref can move before (but earlier refs can
// follow, like an early Load stalled in cache).  Requires multi-cpu
// visibility.  Inserted after a FastLock.
class MemBarAcquireLockNode: public MemBarNode {
public:
  MemBarAcquireLockNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
1214
// "Release" - no earlier ref can move after (but later refs can move
// up, like a speculative pipelined cache-hitting Load).  Requires
// multi-cpu visibility.  Inserted before a FastUnLock.
class MemBarReleaseLockNode: public MemBarNode {
public:
  MemBarReleaseLockNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
1224
// Store-store barrier; carries its own class id (Class_MemBarStoreStore) so
// it can be recognized with is_MemBarStoreStore()-style queries.
class MemBarStoreStoreNode: public MemBarNode {
public:
  MemBarStoreStoreNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {
    init_class_id(Class_MemBarStoreStore);
  }
  virtual int Opcode() const;
};
1233
// Ordering between a volatile store and a following volatile load.
// Requires multi-CPU visibility?
class MemBarVolatileNode: public MemBarNode {
public:
  MemBarVolatileNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
1242
// Ordering within the same CPU.  Used to order unsafe memory references
// inside the compiler when we lack alias info.  Not needed "outside" the
// compiler because the CPU does all the ordering for us.
class MemBarCPUOrderNode: public MemBarNode {
public:
  MemBarCPUOrderNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return 0; } // not matched in the AD file
};
1253
// Marker barrier for the Thread.onSpinWait() intrinsic (spin-loop hint);
// modeled as a membar so it is not reordered or eliminated.
class OnSpinWaitNode: public MemBarNode {
public:
  OnSpinWaitNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
1260
1261 // Isolation of object setup after an AllocateNode and before next safepoint.
1262 // (See comment in memnode.cpp near InitializeNode::InitializeNode for semantics.)
1263 class InitializeNode: public MemBarNode {
1264 friend class AllocateNode;
1265
1266 enum {
1267 Incomplete = 0,
1268 Complete = 1,
1269 WithArraycopy = 2
1270 };
1271 int _is_complete;
1272
1273 bool _does_not_escape;
1274
1275 public:
1276 enum {
1277 Control = TypeFunc::Control,
1278 Memory = TypeFunc::Memory, // MergeMem for states affected by this op
1279 RawAddress = TypeFunc::Parms+0, // the newly-allocated raw address
1280 RawStores = TypeFunc::Parms+1 // zero or more stores (or TOP)
1281 };
1282
1283 InitializeNode(Compile* C, int adr_type, Node* rawoop);
1284 virtual int Opcode() const;
1285 virtual uint size_of() const { return sizeof(*this); }
1286 virtual uint ideal_reg() const { return 0; } // not matched in the AD file
1287 virtual const RegMask &in_RegMask(uint) const; // mask for RawAddress
1288
1289 // Manage incoming memory edges via a MergeMem on in(Memory):
1290 Node* memory(uint alias_idx);
1291
1292 // The raw memory edge coming directly from the Allocation.
1293 // The contents of this memory are *always* all-zero-bits.
1294 Node* zero_memory() { return memory(Compile::AliasIdxRaw); }
1295
1296 // Return the corresponding allocation for this initialization (or null if none).
1297 // (Note: Both InitializeNode::allocation and AllocateNode::initialization
1298 // are defined in graphKit.cpp, which sets up the bidirectional relation.)
1299 AllocateNode* allocation();
1300
1301 // Anything other than zeroing in this init?
1302 bool is_non_zero();
1303
// An InitializeNode must be completed before macro expansion is done.
1353 void coalesce_subword_stores(intptr_t header_size, Node* size_in_bytes,
1354 PhaseGVN* phase);
1355
1356 intptr_t find_next_fullword_store(uint i, PhaseGVN* phase);
1357 };
1358
1359 //------------------------------MergeMem---------------------------------------
1360 // (See comment in memnode.cpp near MergeMemNode::MergeMemNode for semantics.)
1361 class MergeMemNode: public Node {
1362 virtual uint hash() const ; // { return NO_HASH; }
1363 virtual uint cmp( const Node &n ) const ; // Always fail, except on self
1364 friend class MergeMemStream;
1365 MergeMemNode(Node* def); // clients use MergeMemNode::make
1366
1367 public:
1368 // If the input is a whole memory state, clone it with all its slices intact.
1369 // Otherwise, make a new memory state with just that base memory input.
1370 // In either case, the result is a newly created MergeMem.
1371 static MergeMemNode* make(Node* base_memory);
1372
1373 virtual int Opcode() const;
1374 virtual Node* Identity(PhaseGVN* phase);
1375 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1376 virtual uint ideal_reg() const { return NotAMachineReg; }
1377 virtual uint match_edge(uint idx) const { return 0; }
1378 virtual const RegMask &out_RegMask() const;
1379 virtual const Type *bottom_type() const { return Type::MEMORY; }
1380 virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
1381 // sparse accessors
1382 // Fetch the previously stored "set_memory_at", or else the base memory.
1383 // (Caller should clone it if it is a phi-nest.)
1384 Node* memory_at(uint alias_idx) const;
1385 // set the memory, regardless of its previous value
1386 void set_memory_at(uint alias_idx, Node* n);
1387 // the "base" is the memory that provides the non-finite support
1388 Node* base_memory() const { return in(Compile::AliasIdxBot); }
1389 // warning: setting the base can implicitly set any of the other slices too
1390 void set_base_memory(Node* def);
1391 // sentinel value which denotes a copy of the base memory:
1392 Node* empty_memory() const { return in(Compile::AliasIdxTop); }
1393 static Node* make_empty_memory(); // where the sentinel comes from
1581 bool next_non_empty(bool have_mm2) {
1582 while (next(have_mm2)) {
1583 if (!is_empty()) {
1584 // make sure _mem2 is filled in sensibly
1585 if (have_mm2 && _mem2->is_top()) _mem2 = _mm2->base_memory();
1586 return true;
1587 } else if (have_mm2 && !is_empty2()) {
1588 return true; // is_empty() == true
1589 }
1590 }
1591 return false;
1592 }
1593 };
1594
//------------------------------Prefetch---------------------------------------

// Allocation prefetch which may fault; TLAB size may have to be adjusted.
// Takes only memory and address inputs (no control edge); only the address
// (input 2) participates in matching.
class PrefetchAllocationNode : public Node {
public:
  PrefetchAllocationNode(Node *mem, Node *adr) : Node(0,mem,adr) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return NotAMachineReg; }
  virtual uint match_edge(uint idx) const { return idx==2; }
  // Produces memory when prefetch style 3 is selected, raw I/O otherwise.
  virtual const Type *bottom_type() const { return ( AllocatePrefetchStyle == 3 ) ? Type::MEMORY : Type::ABIO; }
};
1606
1607 #endif // SHARE_VM_OPTO_MEMNODE_HPP
|
94 virtual const class TypePtr *adr_type() const; // returns bottom_type of address
95
96 // Shared code for Ideal methods:
97 Node *Ideal_common(PhaseGVN *phase, bool can_reshape); // Return -1 for short-circuit NULL.
98
99 // Helper function for adr_type() implementations.
100 static const TypePtr* calculate_adr_type(const Type* t, const TypePtr* cross_check = NULL);
101
  // Raw access function, to allow copying of adr_type efficiently in
  // product builds and retain the debug info for debug builds.
  // Debug (ASSERT) builds return the stored _adr_type; product builds do not
  // keep it and return NULL (0).
  const TypePtr *raw_adr_type() const {
#ifdef ASSERT
    return _adr_type;
#else
    return 0;
#endif
  }
111
  // Map a load or store opcode to its corresponding store opcode.
  // (Return ~0 -- not a valid opcode -- if unknown.)
  virtual uint store_Opcode() const { return ~0; }
115
  // What is the type of the value in memory?  (T_VOID means "unspecified".)
  virtual BasicType memory_type() const = 0;
  // Size in bytes of the in-memory value; the extra 'true' argument in debug
  // builds presumably relaxes/checks the invalid-type case — see type2aelembytes.
  virtual int memory_size() const {
#ifdef ASSERT
    return type2aelembytes(memory_type(), true);
#else
    return type2aelembytes(memory_type());
#endif
  }
125
126 // Search through memory states which precede this node (load or store).
127 // Look for an exact match for the address, with no intervening
128 // aliased stores.
129 Node* find_previous_store(PhaseTransform* phase);
130
131 // Can this node (load or store) accurately see a stored value in
132 // the given memory state? (The state may or may not be in(Memory).)
133 Node* can_see_stored_value(Node* st, PhaseTransform* phase) const;
134
181 protected:
182 virtual uint cmp(const Node &n) const;
183 virtual uint size_of() const; // Size is bigger
184 // Should LoadNode::Ideal() attempt to remove control edges?
185 virtual bool can_remove_control() const;
186 const Type* const _type; // What kind of value is loaded?
187
188 virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const;
189 public:
190
  // c = control, mem = memory state, adr = address, at = address type,
  // rt = result type, mo = memory ordering (unordered/acquire).
  LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency)
    : MemNode(c,mem,adr,at), _type(rt), _mo(mo), _control_dependency(control_dependency) {
    init_class_id(Class_Load);
  }
  // Ordering queries: a load is exactly one of unordered or acquire
  // (asserted below), so is_unordered() is simply the negation.
  inline bool is_unordered() const { return !is_acquire(); }
  inline bool is_acquire() const {
    assert(_mo == unordered || _mo == acquire, "unexpected");
    return _mo == acquire;
  }
  // True only for the unsigned loads (LoadUB/LoadUS).
  inline bool is_unsigned() const {
    uint lop = Opcode();
    return (lop == Op_LoadUB) || (lop == Op_LoadUS);
  }
204
205 // Polymorphic factory method:
206 static Node* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
207 const TypePtr* at, const Type *rt, BasicType bt,
208 MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
209 bool unaligned = false, bool mismatched = false);
210
211 virtual uint hash() const; // Check the type
212
213 // Handle algebraic identities here. If we have an identity, return the Node
214 // we are equivalent to. We look for Load of a Store.
215 virtual Node* Identity(PhaseGVN* phase);
216
217 // If the load is from Field memory and the pointer is non-null, it might be possible to
218 // zero out the control input.
219 // If the offset is constant and the base is an object allocation,
220 // try to hook me up to the exact initializing store.
221 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
233 // Common methods for LoadKlass and LoadNKlass nodes.
234 const Type* klass_value_common(PhaseGVN* phase) const;
235 Node* klass_identity_common(PhaseGVN* phase);
236
237 virtual uint ideal_reg() const;
238 virtual const Type *bottom_type() const;
  // Following method is copied from TypeNode:
  // Replace the cached result type in place (const is cast away via a pointer)
  // while verifying the node's hash code is unchanged, since a hash change
  // would corrupt the GVN hash table the node may already be in.
  void set_type(const Type* t) {
    assert(t != NULL, "sanity");
    debug_only(uint check_hash = (VerifyHashTableKeys && _hash_lock) ? hash() : NO_HASH);
    *(const Type**)&_type = t;   // cast away const-ness
    // If this node is in the hash table, make sure it doesn't need a rehash.
    assert(check_hash == NO_HASH || check_hash == hash(), "type change must preserve hash code");
  }
  const Type* type() const { assert(_type != NULL, "sanity"); return _type; };
248
249 // Do not match memory edge
250 virtual uint match_edge(uint idx) const;
251
252 // Map a load opcode to its corresponding store opcode.
253 virtual uint store_Opcode() const = 0;
254
255 // Check if the load's memory input is a Phi node with the same control.
256 bool is_instance_field_load_with_local_phi(Node* ctrl);
257
258 Node* convert_to_unsigned_load(PhaseGVN& gvn);
259 Node* convert_to_signed_load(PhaseGVN& gvn);
260
261 #ifndef PRODUCT
262 virtual void dump_spec(outputStream *st) const;
263 #endif
264 #ifdef ASSERT
265 // Helper function to allow a raw load without control edge for some cases
266 static bool is_immutable_value(Node* adr);
267 #endif
268 protected:
269 const Type* load_array_final_field(const TypeKlassPtr *tkls,
270 ciKlass* klass) const;
271
272 Node* can_see_arraycopy_value(Node* st, PhaseTransform* phase) const;
273
274 // depends_only_on_test is almost always true, and needs to be almost always
275 // true to enable key hoisting & commoning optimizations. However, for the
276 // special case of RawPtr loads from TLS top & end, and other loads performed by
277 // GC barriers, the control edge carries the dependence preventing hoisting past
278 // a Safepoint instead of the memory edge. (An unfortunate consequence of having
279 // Safepoints not set Raw Memory; itself an unfortunate consequence of having Nodes
280 // which produce results (new raw memory state) inside of loops preventing all
281 // manner of other optimizations). Basically, it's ugly but so is the alternative.
282 // See comment in macro.cpp, around line 125 expand_allocate_common().
283 virtual bool depends_only_on_test() const {
284 return adr_type() != TypeRawPtr::BOTTOM && _control_dependency == DependsOnlyOnTest;
285 }
286 };
287
//------------------------------LoadBNode--------------------------------------
// Load a byte (8bits signed) from memory.  Pairs with Op_StoreB for
// store-to-load forwarding.
class LoadBNode : public LoadNode {
public:
  LoadBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual uint Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint store_Opcode() const { return Op_StoreB; }
  virtual BasicType memory_type() const { return T_BYTE; }
};
301
//------------------------------LoadUBNode-------------------------------------
// Load an unsigned byte (8bits unsigned) from memory.  Pairs with Op_StoreB
// for store-to-load forwarding.
class LoadUBNode : public LoadNode {
public:
  LoadUBNode(Node* c, Node* mem, Node* adr, const TypePtr* at, const TypeInt* ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual uint Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint store_Opcode() const { return Op_StoreB; }
  virtual BasicType memory_type() const { return T_BYTE; }
};
315
//------------------------------LoadUSNode-------------------------------------
// Load an unsigned short/char (16bits unsigned) from memory.  Pairs with
// Op_StoreC for store-to-load forwarding.
class LoadUSNode : public LoadNode {
public:
  LoadUSNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual uint Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint store_Opcode() const { return Op_StoreC; }
  virtual BasicType memory_type() const { return T_CHAR; }
};
329
//------------------------------LoadSNode--------------------------------------
// Load a short (16bits signed) from memory.  Pairs with Op_StoreC for
// store-to-load forwarding.
class LoadSNode : public LoadNode {
public:
  LoadSNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual uint Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint store_Opcode() const { return Op_StoreC; }
  virtual BasicType memory_type() const { return T_SHORT; }
};
343
//------------------------------LoadINode--------------------------------------
// Load an integer (32 bits) from memory.  Pairs with Op_StoreI for
// store-to-load forwarding.
class LoadINode : public LoadNode {
public:
  LoadINode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual uint Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual uint store_Opcode() const { return Op_StoreI; }
  virtual BasicType memory_type() const { return T_INT; }
};
355
//------------------------------LoadRangeNode----------------------------------
// Load an array length from the array.  Address type is the dedicated
// TypeAryPtr::RANGE slice; the length is non-negative (TypeInt::POS default).
class LoadRangeNode : public LoadINode {
public:
  LoadRangeNode(Node *c, Node *mem, Node *adr, const TypeInt *ti = TypeInt::POS)
    : LoadINode(c, mem, adr, TypeAryPtr::RANGE, ti, MemNode::unordered) {}
  virtual uint Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
};
367
//------------------------------LoadLNode--------------------------------------
// Load a long from memory
class LoadLNode : public LoadNode {
  // hash/cmp fold in _require_atomic_access so atomic and non-atomic long
  // loads are never GVN-commoned with each other.
  virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((LoadLNode&)n)._require_atomic_access
      && LoadNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); }
  const bool _require_atomic_access;  // is piecewise load forbidden?

public:
  LoadLNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeLong *tl,
            MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest, bool require_atomic_access = false)
    : LoadNode(c, mem, adr, at, tl, mo, control_dependency), _require_atomic_access(require_atomic_access) {}
  virtual uint Opcode() const;
  virtual uint ideal_reg() const { return Op_RegL; }
  virtual uint store_Opcode() const { return Op_StoreL; }
  virtual BasicType memory_type() const { return T_LONG; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a load that must be performed as a single indivisible access.
  static LoadLNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type,
                                const Type* rt, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
                                bool unaligned = false, bool mismatched = false);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    LoadNode::dump_spec(st);
    if (_require_atomic_access)  st->print(" Atomic!");
  }
#endif
};
398
//------------------------------LoadL_unalignedNode----------------------------
// Load a long from unaligned memory
class LoadL_unalignedNode : public LoadLNode {
public:
  // Result type is always TypeLong::LONG; atomicity is not requested
  // (LoadLNode's require_atomic_access defaults to false).
  LoadL_unalignedNode(Node *c, Node *mem, Node *adr, const TypePtr* at, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadLNode(c, mem, adr, at, TypeLong::LONG, mo, control_dependency) {}
  virtual uint Opcode() const;
};
407
408 //------------------------------LoadFNode--------------------------------------
409 // Load a float (64 bits) from memory
410 class LoadFNode : public LoadNode {
411 public:
412 LoadFNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *t, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
413 : LoadNode(c, mem, adr, at, t, mo, control_dependency) {}
414 virtual uint Opcode() const;
415 virtual uint ideal_reg() const { return Op_RegF; }
416 virtual uint store_Opcode() const { return Op_StoreF; }
417 virtual BasicType memory_type() const { return T_FLOAT; }
418 };
419
420 //------------------------------LoadDNode--------------------------------------
421 // Load a double (64 bits) from memory
422 class LoadDNode : public LoadNode {
423 virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
424 virtual uint cmp( const Node &n ) const {
425 return _require_atomic_access == ((LoadDNode&)n)._require_atomic_access
426 && LoadNode::cmp(n);
427 }
428 virtual uint size_of() const { return sizeof(*this); }
429 const bool _require_atomic_access; // is piecewise load forbidden?
430
431 public:
432 LoadDNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *t,
433 MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest, bool require_atomic_access = false)
434 : LoadNode(c, mem, adr, at, t, mo, control_dependency), _require_atomic_access(require_atomic_access) {}
435 virtual uint Opcode() const;
436 virtual uint ideal_reg() const { return Op_RegD; }
437 virtual uint store_Opcode() const { return Op_StoreD; }
438 virtual BasicType memory_type() const { return T_DOUBLE; }
439 bool require_atomic_access() const { return _require_atomic_access; }
440 static LoadDNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type,
441 const Type* rt, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
442 bool unaligned = false, bool mismatched = false);
443 #ifndef PRODUCT
444 virtual void dump_spec(outputStream *st) const {
445 LoadNode::dump_spec(st);
446 if (_require_atomic_access) st->print(" Atomic!");
447 }
448 #endif
449 };
450
//------------------------------LoadD_unalignedNode----------------------------
// Load a double from unaligned memory
class LoadD_unalignedNode : public LoadDNode {
public:
  // Result type is always Type::DOUBLE; atomicity is not requested
  // (LoadDNode's require_atomic_access defaults to false).
  LoadD_unalignedNode(Node *c, Node *mem, Node *adr, const TypePtr* at, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadDNode(c, mem, adr, at, Type::DOUBLE, mo, control_dependency) {}
  virtual uint Opcode() const;
};
459
460 //------------------------------LoadPNode--------------------------------------
461 // Load a pointer from memory (either object or array)
462 class LoadPNode : public LoadNode {
463 public:
464 LoadPNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypePtr* t, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
465 : LoadNode(c, mem, adr, at, t, mo, control_dependency) {}
466 virtual uint Opcode() const;
467 virtual uint ideal_reg() const { return Op_RegP; }
468 virtual uint store_Opcode() const { return Op_StoreP; }
469 virtual BasicType memory_type() const { return T_ADDRESS; }
470 };
471
472
473 //------------------------------LoadNNode--------------------------------------
474 // Load a narrow oop from memory (either object or array)
475 class LoadNNode : public LoadNode {
476 public:
477 LoadNNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const Type* t, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
478 : LoadNode(c, mem, adr, at, t, mo, control_dependency) {}
479 virtual uint Opcode() const;
480 virtual uint ideal_reg() const { return Op_RegN; }
481 virtual uint store_Opcode() const { return Op_StoreN; }
482 virtual BasicType memory_type() const { return T_NARROWOOP; }
483 };
484
//------------------------------LoadKlassNode----------------------------------
// Load a Klass from an object
class LoadKlassNode : public LoadPNode {
protected:
  // In most cases, LoadKlassNode does not have the control input set. If the control
  // input is set, it must not be removed (by LoadNode::Ideal()).
  virtual bool can_remove_control() const;
public:
  LoadKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeKlassPtr *tk, MemOrd mo)
    : LoadPNode(c, mem, adr, at, tk, mo) {}
  virtual uint Opcode() const;
  // Value/Identity are overridden out of line (e.g., to fold a klass load
  // from an object of statically known type).
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  // The loaded value is invariant for a given address, so the load may
  // float relative to memory state; only its control test pins it.
  virtual bool depends_only_on_test() const { return true; }

  // Polymorphic factory method:
  static Node* make(PhaseGVN& gvn, Node* ctl, Node* mem, Node* adr, const TypePtr* at,
                    const TypeKlassPtr* tk = TypeKlassPtr::OBJECT);
};
504
505 //------------------------------LoadNKlassNode---------------------------------
506 // Load a narrow Klass from an object.
507 class LoadNKlassNode : public LoadNNode {
508 public:
509 LoadNKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeNarrowKlass *tk, MemOrd mo)
510 : LoadNNode(c, mem, adr, at, tk, mo) {}
511 virtual uint Opcode() const;
512 virtual uint ideal_reg() const { return Op_RegN; }
513 virtual uint store_Opcode() const { return Op_StoreNKlass; }
514 virtual BasicType memory_type() const { return T_NARROWKLASS; }
515
516 virtual const Type* Value(PhaseGVN* phase) const;
517 virtual Node* Identity(PhaseGVN* phase);
518 virtual bool depends_only_on_test() const { return true; }
519 };
520
521
522 //------------------------------StoreNode--------------------------------------
523 // Store value; requires Store, Address and Value
524 class StoreNode : public MemNode {
525 private:
526 // On platforms with weak memory ordering (e.g., PPC, Ia64) we distinguish
527 // stores that can be reordered, and such requiring release semantics to
528 // adhere to the Java specification. The required behaviour is stored in
529 // this field.
530 const MemOrd _mo;
531 // Needed for proper cloning.
532 virtual uint size_of() const { return sizeof(*this); }
533 protected:
585
586 virtual uint hash() const; // Check the type
587
588 // If the store is to Field memory and the pointer is non-null, we can
589 // zero out the control input.
590 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
591
592 // Compute a new Type for this node. Basically we just do the pre-check,
593 // then call the virtual add() to set the type.
594 virtual const Type* Value(PhaseGVN* phase) const;
595
596 // Check for identity function on memory (Load then Store at same address)
597 virtual Node* Identity(PhaseGVN* phase);
598
599 // Do not match memory edge
600 virtual uint match_edge(uint idx) const;
601
602 virtual const Type *bottom_type() const; // returns Type::MEMORY
603
604 // Map a store opcode to its corresponding own opcode, trivially.
605 virtual uint store_Opcode() const { return Opcode(); }
606
607 // have all possible loads of the value stored been optimized away?
608 bool value_never_loaded(PhaseTransform *phase) const;
609 };
610
//------------------------------StoreBNode-------------------------------------
// Store byte to memory
class StoreBNode : public StoreNode {
public:
  StoreBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual uint Opcode() const;
  // Additional Ideal transform beyond StoreNode's (defined out of line).
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual BasicType memory_type() const { return T_BYTE; }
};
621
//------------------------------StoreCNode-------------------------------------
// Store char/short to memory
class StoreCNode : public StoreNode {
public:
  StoreCNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual uint Opcode() const;
  // Additional Ideal transform beyond StoreNode's (defined out of line).
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual BasicType memory_type() const { return T_CHAR; }
};
632
//------------------------------StoreINode-------------------------------------
// Store int to memory
class StoreINode : public StoreNode {
public:
  StoreINode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual uint Opcode() const;
  virtual BasicType memory_type() const { return T_INT; }
};
642
//------------------------------StoreLNode-------------------------------------
// Store long to memory
class StoreLNode : public StoreNode {
  // Atomicity is part of the node's GVN identity: hash/cmp fold in
  // _require_atomic_access so atomic and non-atomic stores never merge.
  virtual uint hash() const { return StoreNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((StoreLNode&)n)._require_atomic_access
      && StoreNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); } // needed for cloning
  const bool _require_atomic_access; // is piecewise store forbidden?

public:
  StoreLNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo, bool require_atomic_access = false)
    : StoreNode(c, mem, adr, at, val, mo), _require_atomic_access(require_atomic_access) {}
  virtual uint Opcode() const;
  virtual BasicType memory_type() const { return T_LONG; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a store whose piecewise (non-atomic) lowering is forbidden.
  static StoreLNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, Node* val, MemOrd mo);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    StoreNode::dump_spec(st);
    if (_require_atomic_access) st->print(" Atomic!");
  }
#endif
};
668
//------------------------------StoreFNode-------------------------------------
// Store float to memory
class StoreFNode : public StoreNode {
public:
  StoreFNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual uint Opcode() const;
  virtual BasicType memory_type() const { return T_FLOAT; }
};
678
//------------------------------StoreDNode-------------------------------------
// Store double to memory
class StoreDNode : public StoreNode {
  // Atomicity is part of the node's GVN identity: hash/cmp fold in
  // _require_atomic_access so atomic and non-atomic stores never merge.
  virtual uint hash() const { return StoreNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((StoreDNode&)n)._require_atomic_access
      && StoreNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); } // needed for cloning
  const bool _require_atomic_access; // is piecewise store forbidden?
public:
  StoreDNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val,
             MemOrd mo, bool require_atomic_access = false)
    : StoreNode(c, mem, adr, at, val, mo), _require_atomic_access(require_atomic_access) {}
  virtual uint Opcode() const;
  virtual BasicType memory_type() const { return T_DOUBLE; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a store whose piecewise (non-atomic) lowering is forbidden.
  static StoreDNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, Node* val, MemOrd mo);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    StoreNode::dump_spec(st);
    if (_require_atomic_access) st->print(" Atomic!");
  }
#endif

};
705
//------------------------------StorePNode-------------------------------------
// Store pointer to memory
class StorePNode : public StoreNode {
public:
  StorePNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual uint Opcode() const;
  virtual BasicType memory_type() const { return T_ADDRESS; }
};
715
//------------------------------StoreNNode-------------------------------------
// Store narrow oop to memory
class StoreNNode : public StoreNode {
public:
  StoreNNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual uint Opcode() const;
  virtual BasicType memory_type() const { return T_NARROWOOP; }
};
725
//------------------------------StoreNKlassNode--------------------------------------
// Store narrow klass to memory
class StoreNKlassNode : public StoreNNode {
public:
  StoreNKlassNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNNode(c, mem, adr, at, val, mo) {}
  virtual uint Opcode() const;
  virtual BasicType memory_type() const { return T_NARROWKLASS; }
};
735
736 //------------------------------StoreCMNode-----------------------------------
737 // Store card-mark byte to memory for CM
738 // The last StoreCM before a SafePoint must be preserved and occur after its "oop" store
739 // Preceeding equivalent StoreCMs may be eliminated.
740 class StoreCMNode : public StoreNode {
741 private:
742 virtual uint hash() const { return StoreNode::hash() + _oop_alias_idx; }
743 virtual uint cmp( const Node &n ) const {
744 return _oop_alias_idx == ((StoreCMNode&)n)._oop_alias_idx
745 && StoreNode::cmp(n);
746 }
747 virtual uint size_of() const { return sizeof(*this); }
748 int _oop_alias_idx; // The alias_idx of OopStore
749
750 public:
751 StoreCMNode( Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, Node *oop_store, int oop_alias_idx ) :
752 StoreNode(c, mem, adr, at, val, oop_store, MemNode::release),
753 _oop_alias_idx(oop_alias_idx) {
754 assert(_oop_alias_idx >= Compile::AliasIdxRaw ||
755 _oop_alias_idx == Compile::AliasIdxBot && Compile::current()->AliasLevel() == 0,
756 "bad oop alias idx");
757 }
758 virtual uint Opcode() const;
759 virtual Node* Identity(PhaseGVN* phase);
760 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
761 virtual const Type* Value(PhaseGVN* phase) const;
762 virtual BasicType memory_type() const { return T_VOID; } // unspecific
763 int oop_alias_idx() const { return _oop_alias_idx; }
764 };
765
766 //------------------------------LoadPLockedNode---------------------------------
767 // Load-locked a pointer from memory (either object or array).
768 // On Sparc & Intel this is implemented as a normal pointer load.
769 // On PowerPC and friends it's a real load-locked.
770 class LoadPLockedNode : public LoadPNode {
771 public:
772 LoadPLockedNode(Node *c, Node *mem, Node *adr, MemOrd mo)
773 : LoadPNode(c, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM, mo) {}
774 virtual uint Opcode() const;
775 virtual uint store_Opcode() const { return Op_StorePConditional; }
776 virtual bool depends_only_on_test() const { return true; }
777 };
778
779 //------------------------------SCMemProjNode---------------------------------------
780 // This class defines a projection of the memory state of a store conditional node.
781 // These nodes return a value, but also update memory.
782 class SCMemProjNode : public ProjNode {
783 public:
784 enum {SCMEMPROJCON = (uint)-2};
785 SCMemProjNode( Node *src) : ProjNode( src, SCMEMPROJCON) { }
786 virtual uint Opcode() const;
787 virtual bool is_CFG() const { return false; }
788 virtual const Type *bottom_type() const {return Type::MEMORY;}
789 virtual const TypePtr *adr_type() const {
790 Node* ctrl = in(0);
791 if (ctrl == NULL) return NULL; // node is dead
792 return ctrl->in(MemNode::Memory)->adr_type();
793 }
794 virtual uint ideal_reg() const { return 0;} // memory projections don't have a register
795 virtual const Type* Value(PhaseGVN* phase) const;
796 #ifndef PRODUCT
797 virtual void dump_spec(outputStream *st) const {};
798 #endif
799 };
800
//------------------------------LoadStoreNode---------------------------
// Note: is_Mem() method returns 'true' for this class.
class LoadStoreNode : public Node {
private:
  const Type* const _type;      // What kind of value is loaded?
  const TypePtr* _adr_type;     // What kind of memory is being addressed?
  virtual uint ideal_reg() const;
  virtual const class TypePtr *adr_type() const { return _adr_type; }  // returns bottom_type of address

  // Returns true if no use of the produced value remains (out of line).
  bool result_not_used() const;
};
819
// A LoadStoreNode with an additional "expected value" input (ExpectedIn),
// used by the conditional/compare-and-swap variants below.
class LoadStoreConditionalNode : public LoadStoreNode {
public:
  enum {
    ExpectedIn = MemNode::ValueIn+1 // One more input than MemNode
  };
  LoadStoreConditionalNode(Node *c, Node *mem, Node *adr, Node *val, Node *ex);
};
827
//------------------------------StorePConditionalNode---------------------------
// Conditionally store pointer to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StorePConditionalNode : public LoadStoreConditionalNode {
public:
  // 'll' is the prior load-locked value fed in as ExpectedIn.
  StorePConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ll ) : LoadStoreConditionalNode(c, mem, adr, val, ll) { }
  virtual uint Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};
838
//------------------------------StoreIConditionalNode---------------------------
// Conditionally store int to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StoreIConditionalNode : public LoadStoreConditionalNode {
public:
  // 'ii' is the prior load-locked value fed in as ExpectedIn.
  StoreIConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ii ) : LoadStoreConditionalNode(c, mem, adr, val, ii) { }
  virtual uint Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};
849
//------------------------------StoreLConditionalNode---------------------------
// Conditionally store long to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StoreLConditionalNode : public LoadStoreConditionalNode {
public:
  // 'll' is the prior load-locked value fed in as ExpectedIn.
  StoreLConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ll ) : LoadStoreConditionalNode(c, mem, adr, val, ll) { }
  virtual uint Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};
860
// Base class for the compare-and-swap node family; remembers the memory
// ordering required of the operation.  'ex' is the expected old value
// (wired to LoadStoreConditionalNode::ExpectedIn).
class CompareAndSwapNode : public LoadStoreConditionalNode {
private:
  const MemNode::MemOrd _mem_ord;  // ordering constraint for this operation
public:
  CompareAndSwapNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : LoadStoreConditionalNode(c, mem, adr, val, ex), _mem_ord(mem_ord) {}
  MemNode::MemOrd order() const {
    return _mem_ord;
  }
};
870
// Base class for the compare-and-exchange node family: like compare-and-swap
// but carries a result type 't', and also remembers the required memory
// ordering.  'ex' is the expected old value, wired to the extra ExpectedIn
// edge below.
class CompareAndExchangeNode : public LoadStoreNode {
private:
  const MemNode::MemOrd _mem_ord;  // ordering constraint for this operation
public:
  enum {
    ExpectedIn = MemNode::ValueIn+1 // One more input than MemNode
  };
  // required_size 5 = ctrl, mem, adr, val plus the ExpectedIn edge.
  CompareAndExchangeNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord, const TypePtr* at, const Type* t) :
    LoadStoreNode(c, mem, adr, val, at, t, 5), _mem_ord(mem_ord) {
    init_req(ExpectedIn, ex );
  }

  MemNode::MemOrd order() const {
    return _mem_ord;
  }
};
887
//------------------------------CompareAndSwapBNode---------------------------
// Compare-and-swap of a byte in memory; 'ex' is the expected old value.
class CompareAndSwapBNode : public CompareAndSwapNode {
public:
  CompareAndSwapBNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
894
//------------------------------CompareAndSwapSNode---------------------------
// Compare-and-swap of a short in memory; 'ex' is the expected old value.
class CompareAndSwapSNode : public CompareAndSwapNode {
public:
  CompareAndSwapSNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
901
//------------------------------CompareAndSwapINode---------------------------
// Compare-and-swap of an int in memory; 'ex' is the expected old value.
class CompareAndSwapINode : public CompareAndSwapNode {
public:
  CompareAndSwapINode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
908
//------------------------------CompareAndSwapLNode---------------------------
// Compare-and-swap of a long in memory; 'ex' is the expected old value.
class CompareAndSwapLNode : public CompareAndSwapNode {
public:
  CompareAndSwapLNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
915
//------------------------------CompareAndSwapPNode---------------------------
// Compare-and-swap of a pointer in memory; 'ex' is the expected old value.
class CompareAndSwapPNode : public CompareAndSwapNode {
public:
  CompareAndSwapPNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
922
//------------------------------CompareAndSwapNNode---------------------------
// Compare-and-swap of a narrow oop in memory; 'ex' is the expected old value.
class CompareAndSwapNNode : public CompareAndSwapNode {
public:
  CompareAndSwapNNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
929
//------------------------------WeakCompareAndSwapBNode---------------------------
// Weak form of byte compare-and-swap (distinguished from the strong form
// only by opcode; see CompareAndSwapBNode).
class WeakCompareAndSwapBNode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapBNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
936
//------------------------------WeakCompareAndSwapSNode---------------------------
// Weak form of short compare-and-swap (see CompareAndSwapSNode).
class WeakCompareAndSwapSNode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapSNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
943
//------------------------------WeakCompareAndSwapINode---------------------------
// Weak form of int compare-and-swap (see CompareAndSwapINode).
class WeakCompareAndSwapINode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapINode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
950
//------------------------------WeakCompareAndSwapLNode---------------------------
// Weak form of long compare-and-swap (see CompareAndSwapLNode).
class WeakCompareAndSwapLNode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapLNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
957
//------------------------------WeakCompareAndSwapPNode---------------------------
// Weak form of pointer compare-and-swap (see CompareAndSwapPNode).
class WeakCompareAndSwapPNode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapPNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
964
//------------------------------WeakCompareAndSwapNNode---------------------------
// Weak form of narrow-oop compare-and-swap (see CompareAndSwapNNode).
class WeakCompareAndSwapNNode : public CompareAndSwapNode {
public:
  WeakCompareAndSwapNNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapNode(c, mem, adr, val, ex, mem_ord) { }
  virtual uint Opcode() const;
};
971
//------------------------------CompareAndExchangeBNode---------------------------
// Compare-and-exchange of a byte; result type is TypeInt::BYTE.
class CompareAndExchangeBNode : public CompareAndExchangeNode {
public:
  CompareAndExchangeBNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, TypeInt::BYTE) { }
  virtual uint Opcode() const;
};
978
979
//------------------------------CompareAndExchangeSNode---------------------------
// Compare-and-exchange of a short; result type is TypeInt::SHORT.
class CompareAndExchangeSNode : public CompareAndExchangeNode {
public:
  CompareAndExchangeSNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, TypeInt::SHORT) { }
  virtual uint Opcode() const;
};
986
//------------------------------CompareAndExchangeLNode---------------------------
// Compare-and-exchange of a long; result type is TypeLong::LONG.
class CompareAndExchangeLNode : public CompareAndExchangeNode {
public:
  CompareAndExchangeLNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, TypeLong::LONG) { }
  virtual uint Opcode() const;
};
993
994
//------------------------------CompareAndExchangeINode---------------------------
// Compare-and-exchange of an int; result type is TypeInt::INT.
class CompareAndExchangeINode : public CompareAndExchangeNode {
public:
  CompareAndExchangeINode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, TypeInt::INT) { }
  virtual uint Opcode() const;
};
1001
1002
//------------------------------CompareAndExchangePNode---------------------------
// Compare-and-exchange of a pointer; the caller supplies the result type 't'.
class CompareAndExchangePNode : public CompareAndExchangeNode {
public:
  CompareAndExchangePNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, const Type* t, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, t) { }
  virtual uint Opcode() const;
};
1009
//------------------------------CompareAndExchangeNNode---------------------------
// Compare-and-exchange of a narrow oop; the caller supplies the result type 't'.
class CompareAndExchangeNNode : public CompareAndExchangeNode {
public:
  CompareAndExchangeNNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, const Type* t, MemNode::MemOrd mem_ord) : CompareAndExchangeNode(c, mem, adr, val, ex, mem_ord, at, t) { }
  virtual uint Opcode() const;
};
1016
//------------------------------GetAndAddBNode---------------------------
// Atomic fetch-and-add on a byte location; result type is TypeInt::BYTE.
// required_size 4 = ctrl, mem, adr, val (no ExpectedIn edge).
class GetAndAddBNode : public LoadStoreNode {
public:
  GetAndAddBNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::BYTE, 4) { }
  virtual uint Opcode() const;
};
1023
//------------------------------GetAndAddSNode---------------------------
// Atomic fetch-and-add on a short location; result type is TypeInt::SHORT.
class GetAndAddSNode : public LoadStoreNode {
public:
  GetAndAddSNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::SHORT, 4) { }
  virtual uint Opcode() const;
};
1030
//------------------------------GetAndAddINode---------------------------
// Atomic fetch-and-add on an int location; result type is TypeInt::INT.
class GetAndAddINode : public LoadStoreNode {
public:
  GetAndAddINode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::INT, 4) { }
  virtual uint Opcode() const;
};
1037
//------------------------------GetAndAddLNode---------------------------
// Atomic fetch-and-add on a long location; result type is TypeLong::LONG.
class GetAndAddLNode : public LoadStoreNode {
public:
  GetAndAddLNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeLong::LONG, 4) { }
  virtual uint Opcode() const;
};
1044
//------------------------------GetAndSetBNode---------------------------
// Atomic exchange on a byte location; result type is TypeInt::BYTE.
class GetAndSetBNode : public LoadStoreNode {
public:
  GetAndSetBNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::BYTE, 4) { }
  virtual uint Opcode() const;
};
1051
//------------------------------GetAndSetSNode---------------------------
// Atomic exchange on a short location; result type is TypeInt::SHORT.
class GetAndSetSNode : public LoadStoreNode {
public:
  GetAndSetSNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::SHORT, 4) { }
  virtual uint Opcode() const;
};
1058
//------------------------------GetAndSetINode---------------------------
// Atomic exchange on an int location; result type is TypeInt::INT.
class GetAndSetINode : public LoadStoreNode {
public:
  GetAndSetINode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::INT, 4) { }
  virtual uint Opcode() const;
};
1065
//------------------------------GetAndSetLNode---------------------------
// Atomic exchange on a long location; result type is TypeLong::LONG.
class GetAndSetLNode : public LoadStoreNode {
public:
  GetAndSetLNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeLong::LONG, 4) { }
  virtual uint Opcode() const;
};
1072
//------------------------------GetAndSetPNode---------------------------
// Atomic exchange on a pointer location; caller supplies the result type 't'.
class GetAndSetPNode : public LoadStoreNode {
public:
  GetAndSetPNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
  virtual uint Opcode() const;
};
1079
//------------------------------GetAndSetNNode---------------------------
// Atomic exchange on a narrow-oop location; caller supplies the result type 't'.
class GetAndSetNNode : public LoadStoreNode {
public:
  GetAndSetNNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
  virtual uint Opcode() const;
};
1086
//------------------------------ClearArray-------------------------------------
class ClearArrayNode: public Node {
private:
  bool _is_large;  // chosen by the caller; see is_large() accessor below
public:
  ClearArrayNode( Node *ctrl, Node *arymem, Node *word_cnt, Node *base, bool is_large)
    : Node(ctrl,arymem,word_cnt,base), _is_large(is_large) {
    init_class_id(Class_ClearArray);
  }
  virtual uint Opcode() const;
  virtual const Type *bottom_type() const { return Type::MEMORY; }
  // ClearArray modifies array elements, and so affects only the
  // array memory addressed by the bottom_type of its base address.
  virtual const class TypePtr *adr_type() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual uint match_edge(uint idx) const;
  bool is_large() const { return _is_large; }

  // Clear the given area of an object or array.
  // The start offset must always be aligned mod BytesPerInt.
  // The end offset must always be aligned mod BytesPerLong.
  // Return the new memory.
  static Node* clear_memory(Node* control, Node* mem, Node* dest,
                            intptr_t start_offset,
                            intptr_t end_offset,
                            PhaseGVN* phase);
  static Node* clear_memory(Node* control, Node* mem, Node* dest,
                            intptr_t start_offset,
                            Node* end_offset,
// Model. Monitor-enter and volatile-load act as Acquires: no following ref
1130 // can be moved to before them. We insert a MemBar-Acquire after a FastLock or
1131 // volatile-load. Monitor-exit and volatile-store act as Release: no
1132 // preceding ref can be moved to after them. We insert a MemBar-Release
1133 // before a FastUnlock or volatile-store. All volatiles need to be
1134 // serialized, so we follow all volatile-stores with a MemBar-Volatile to
1135 // separate it from any following volatile-load.
// Abstract base for all memory-barrier nodes; concrete subclasses differ
// only in their Opcode. State is the serialized memory slice (_adr_type).
1136 class MemBarNode: public MultiNode {
// hash/cmp are overridden so a barrier is never value-numbered together
// with another node: cmp fails except on self (see comments below).
1137 virtual uint hash() const ; // { return NO_HASH; }
1138 virtual uint cmp( const Node &n ) const ; // Always fail, except on self
1139
1140 virtual uint size_of() const { return sizeof(*this); }
1141 // Memory type this node is serializing. Usually either rawptr or bottom.
1142 const TypePtr* _adr_type;
1143
1144 public:
1145 enum {
1146 Precedent = TypeFunc::Parms // optional edge to force precedence
1147 };
1148 MemBarNode(Compile* C, int alias_idx, Node* precedent);
// Pure virtual: each barrier flavor supplies its own opcode.
1149 virtual uint Opcode() const = 0;
1150 virtual const class TypePtr *adr_type() const { return _adr_type; }
1151 virtual const Type* Value(PhaseGVN* phase) const;
1152 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
// Returns 0 for every edge: no input participates in matching.
1153 virtual uint match_edge(uint idx) const { return 0; }
// Multi-node: produces the MEMBAR tuple type, consumed via projections.
1154 virtual const Type *bottom_type() const { return TypeTuple::MEMBAR; }
1155 virtual Node *match( const ProjNode *proj, const Matcher *m );
1156 // Factory method. Builds a wide or narrow membar.
1157 // Optional 'precedent' becomes an extra edge if not null.
1158 static MemBarNode* make(Compile* C, int opcode,
1159 int alias_idx = Compile::AliasIdxBot,
1160 Node* precedent = NULL);
1161 };
1162
1163 // "Acquire" - no following ref can move before (but earlier refs can
1164 // follow, like an early Load stalled in cache). Requires multi-cpu
1165 // visibility. Inserted after a volatile load.
1166 class MemBarAcquireNode: public MemBarNode {
1167 public:
// All behavior lives in MemBarNode; this subclass only contributes its
// distinguishing Opcode (defined in the .cpp via the opcode machinery).
1168 MemBarAcquireNode(Compile* C, int alias_idx, Node* precedent)
1169 : MemBarNode(C, alias_idx, precedent) {}
1170 virtual uint Opcode() const;
1171 };
1172
1173 // "Acquire" - no following ref can move before (but earlier refs can
1174 // follow, like an early Load stalled in cache). Requires multi-cpu
1175 // visibility. Inserted independent of any load, as required
1176 // for intrinsic Unsafe.loadFence().
1177 class LoadFenceNode: public MemBarNode {
1178 public:
// Thin subclass: constructor forwards straight to MemBarNode;
// the Opcode alone identifies this as the loadFence barrier.
1179 LoadFenceNode(Compile* C, int alias_idx, Node* precedent)
1180 : MemBarNode(C, alias_idx, precedent) {}
1181 virtual uint Opcode() const;
1182 };
1183
1184 // "Release" - no earlier ref can move after (but later refs can move
1185 // up, like a speculative pipelined cache-hitting Load). Requires
1186 // multi-cpu visibility. Inserted before a volatile store.
1187 class MemBarReleaseNode: public MemBarNode {
1188 public:
// Adds no state or behavior over MemBarNode — only the release Opcode.
1189 MemBarReleaseNode(Compile* C, int alias_idx, Node* precedent)
1190 : MemBarNode(C, alias_idx, precedent) {}
1191 virtual uint Opcode() const;
1192 };
1193
1194 // "Release" - no earlier ref can move after (but later refs can move
1195 // up, like a speculative pipelined cache-hitting Load). Requires
1196 // multi-cpu visibility. Inserted independent of any store, as required
1197 // for intrinsic Unsafe.storeFence().
1198 class StoreFenceNode: public MemBarNode {
1199 public:
// Thin subclass: all arguments forwarded to MemBarNode unchanged;
// distinguished from other barriers solely by its Opcode.
1200 StoreFenceNode(Compile* C, int alias_idx, Node* precedent)
1201 : MemBarNode(C, alias_idx, precedent) {}
1202 virtual uint Opcode() const;
1203 };
1204
1205 // "Acquire" - no following ref can move before (but earlier refs can
1206 // follow, like an early Load stalled in cache). Requires multi-cpu
1207 // visibility. Inserted after a FastLock.
1208 class MemBarAcquireLockNode: public MemBarNode {
1209 public:
// Lock-acquire variant; identical structure to the other barrier
// subclasses, differing only in Opcode.
1210 MemBarAcquireLockNode(Compile* C, int alias_idx, Node* precedent)
1211 : MemBarNode(C, alias_idx, precedent) {}
1212 virtual uint Opcode() const;
1213 };
1214
1215 // "Release" - no earlier ref can move after (but later refs can move
1216 // up, like a speculative pipelined cache-hitting Load). Requires
1217 // multi-cpu visibility. Inserted before a FastUnlock.
1218 class MemBarReleaseLockNode: public MemBarNode {
1219 public:
// Lock-release variant; constructor is a pure pass-through and the
// Opcode is the only distinguishing member.
1220 MemBarReleaseLockNode(Compile* C, int alias_idx, Node* precedent)
1221 : MemBarNode(C, alias_idx, precedent) {}
1222 virtual uint Opcode() const;
1223 };
1224
// StoreStore barrier. Unlike its sibling barrier subclasses, this one
// registers a class id so it can be recognized via is_MemBarStoreStore().
1225 class MemBarStoreStoreNode: public MemBarNode {
1226 public:
1227 MemBarStoreStoreNode(Compile* C, int alias_idx, Node* precedent)
1228 : MemBarNode(C, alias_idx, precedent) {
// Tag the node with its own class id for fast runtime type checks.
1229 init_class_id(Class_MemBarStoreStore);
1230 }
1231 virtual uint Opcode() const;
1232 };
1233
1234 // Ordering between a volatile store and a following volatile load.
1235 // Requires multi-CPU visibility?
1236 class MemBarVolatileNode: public MemBarNode {
1237 public:
// Full volatile barrier; no members beyond the inherited ones, the
// Opcode alone selects this barrier kind.
1238 MemBarVolatileNode(Compile* C, int alias_idx, Node* precedent)
1239 : MemBarNode(C, alias_idx, precedent) {}
1240 virtual uint Opcode() const;
1241 };
1242
1243 // Ordering within the same CPU. Used to order unsafe memory references
1244 // inside the compiler when we lack alias info. Not needed "outside" the
1245 // compiler because the CPU does all the ordering for us.
1246 class MemBarCPUOrderNode: public MemBarNode {
1247 public:
// Compiler-only ordering node (see comment above the class): it is
// never matched to a machine instruction, hence ideal_reg() == 0.
1248 MemBarCPUOrderNode(Compile* C, int alias_idx, Node* precedent)
1249 : MemBarNode(C, alias_idx, precedent) {}
1250 virtual uint Opcode() const;
1251 virtual uint ideal_reg() const { return 0; } // not matched in the AD file
1252 };
1253
// Spin-wait hint node (presumably backing Thread.onSpinWait — confirm in
// the intrinsics table); structurally a plain MemBarNode subclass.
1254 class OnSpinWaitNode: public MemBarNode {
1255 public:
1256 OnSpinWaitNode(Compile* C, int alias_idx, Node* precedent)
1257 : MemBarNode(C, alias_idx, precedent) {}
1258 virtual uint Opcode() const;
1259 };
1260
1261 // Isolation of object setup after an AllocateNode and before next safepoint.
1262 // (See comment in memnode.cpp near InitializeNode::InitializeNode for semantics.)
1263 class InitializeNode: public MemBarNode {
// AllocateNode sets up the allocation<->initialization back-links.
1264 friend class AllocateNode;
1265
// Completion states stored in _is_complete (see uses below/in memnode.cpp).
1266 enum {
1267 Incomplete = 0,
1268 Complete = 1,
1269 WithArraycopy = 2
1270 };
1271 int _is_complete;
1272
// True when the initialized object is known not to escape — TODO confirm
// against the setter's caller (escape analysis).
1273 bool _does_not_escape;
1274
1275 public:
// Edge layout: control/memory from TypeFunc, then the raw oop address
// and the chain of raw initializing stores.
1276 enum {
1277 Control = TypeFunc::Control,
1278 Memory = TypeFunc::Memory, // MergeMem for states affected by this op
1279 RawAddress = TypeFunc::Parms+0, // the newly-allocated raw address
1280 RawStores = TypeFunc::Parms+1 // zero or more stores (or TOP)
1281 };
1282
1283 InitializeNode(Compile* C, int adr_type, Node* rawoop);
1284 virtual uint Opcode() const;
1285 virtual uint size_of() const { return sizeof(*this); }
1286 virtual uint ideal_reg() const { return 0; } // not matched in the AD file
1287 virtual const RegMask &in_RegMask(uint) const; // mask for RawAddress
1288
1289 // Manage incoming memory edges via a MergeMem on in(Memory):
1290 Node* memory(uint alias_idx);
1291
1292 // The raw memory edge coming directly from the Allocation.
1293 // The contents of this memory are *always* all-zero-bits.
1294 Node* zero_memory() { return memory(Compile::AliasIdxRaw); }
1295
1296 // Return the corresponding allocation for this initialization (or null if none).
1297 // (Note: Both InitializeNode::allocation and AllocateNode::initialization
1298 // are defined in graphKit.cpp, which sets up the bidirectional relation.)
1299 AllocateNode* allocation();
1300
1301 // Anything other than zeroing in this init?
1302 bool is_non_zero();
1303
1304 // An InitializeNode must be completed before macro expansion is done.
// Merge adjacent sub-word raw stores into full-word stores where possible.
1353 void coalesce_subword_stores(intptr_t header_size, Node* size_in_bytes,
1354 PhaseGVN* phase);
1355
1356 intptr_t find_next_fullword_store(uint i, PhaseGVN* phase);
1357 };
1358
1359 //------------------------------MergeMem---------------------------------------
1360 // (See comment in memnode.cpp near MergeMemNode::MergeMemNode for semantics.)
1361 class MergeMemNode: public Node {
1362 virtual uint hash() const ; // { return NO_HASH; }
1363 virtual uint cmp( const Node &n ) const ; // Always fail, except on self
1364 friend class MergeMemStream;
1365 MergeMemNode(Node* def); // clients use MergeMemNode::make
1366
1367 public:
1368 // If the input is a whole memory state, clone it with all its slices intact.
1369 // Otherwise, make a new memory state with just that base memory input.
1370 // In either case, the result is a newly created MergeMem.
1371 static MergeMemNode* make(Node* base_memory);
1372
1373 virtual uint Opcode() const;
1374 virtual Node* Identity(PhaseGVN* phase);
1375 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1376 virtual uint ideal_reg() const { return NotAMachineReg; }
1377 virtual uint match_edge(uint idx) const { return 0; }
1378 virtual const RegMask &out_RegMask() const;
1379 virtual const Type *bottom_type() const { return Type::MEMORY; }
1380 virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
1381 // sparse accessors
1382 // Fetch the previously stored "set_memory_at", or else the base memory.
1383 // (Caller should clone it if it is a phi-nest.)
1384 Node* memory_at(uint alias_idx) const;
1385 // set the memory, regardless of its previous value
1386 void set_memory_at(uint alias_idx, Node* n);
1387 // the "base" is the memory that provides the non-finite support
1388 Node* base_memory() const { return in(Compile::AliasIdxBot); }
1389 // warning: setting the base can implicitly set any of the other slices too
1390 void set_base_memory(Node* def);
1391 // sentinel value which denotes a copy of the base memory:
1392 Node* empty_memory() const { return in(Compile::AliasIdxTop); }
1393 static Node* make_empty_memory(); // where the sentinel comes from
// Advance the stream until a slice that is non-empty is found, in either
// the primary memory or (when have_mm2) the secondary one. Returns false
// when the stream is exhausted. Note the asymmetry: when only the second
// memory is non-empty, is_empty() still reports true for the first.
1581 bool next_non_empty(bool have_mm2) {
1582 while (next(have_mm2)) {
1583 if (!is_empty()) {
1584 // make sure _mem2 is filled in sensibly
// A top _mem2 denotes "copy of base"; substitute the real base memory.
1585 if (have_mm2 && _mem2->is_top()) _mem2 = _mm2->base_memory();
1586 return true;
1587 } else if (have_mm2 && !is_empty2()) {
1588 return true; // is_empty() == true
1589 }
1590 }
1591 return false;
1592 }
1593 };
1594
1595 //------------------------------Prefetch---------------------------------------
1596
1597 // Allocation prefetch which may fault, TLAB size have to be adjusted.
1598 class PrefetchAllocationNode : public Node {
1599 public:
// No control input (first edge is 0); only memory and the prefetch address.
1600 PrefetchAllocationNode(Node *mem, Node *adr) : Node(0,mem,adr) {}
1601 virtual uint Opcode() const;
1602 virtual uint ideal_reg() const { return NotAMachineReg; }
// Only edge 2 (the address) participates in matching.
1603 virtual uint match_edge(uint idx) const { return idx==2; }
// Result type depends on the AllocatePrefetchStyle flag: style 3 models
// the prefetch as producing memory, otherwise abstract I/O.
1604 virtual const Type *bottom_type() const { return ( AllocatePrefetchStyle == 3 ) ? Type::MEMORY : Type::ABIO; }
1605 };
1606
1607 #endif // SHARE_VM_OPTO_MEMNODE_HPP
|