< prev index next >

src/share/vm/gc/shared/collectedHeap.hpp

Print this page
rev 10742 : Make fields used in lock-free algorithms volatile


 333   }
 334 
 335   // Return the address "addr" aligned by "alignment_in_bytes" if such
 336   // an address is below "end".  Return NULL otherwise.
 337   inline static HeapWord* align_allocation_or_fail(HeapWord* addr,
 338                                                    HeapWord* end,
 339                                                    unsigned short alignment_in_bytes);
 340 
 341   // Some heaps may offer a contiguous region for shared non-blocking
 342   // allocation, via inlined code (by exporting the address of the top and
 343   // end fields defining the extent of the contiguous allocation region.)
 344 
 345   // This function returns "true" iff the heap supports this kind of
 346   // allocation.  (Default is "no".)
 347   virtual bool supports_inline_contig_alloc() const { // default: heaps must opt in by overriding
 348     return false;
 349   }
 350   // These functions return the addresses of the fields that define the
 351   // boundaries of the contiguous allocation area.  (These fields should be
 352   // physically near to one another.)
 353   virtual HeapWord** top_addr() const { // only meaningful if supports_inline_contig_alloc() returns true
 354     guarantee(false, "inline contiguous allocation not supported"); // unreachable unless a subclass overrides both
 355     return NULL;
 356   }
 357   virtual HeapWord** end_addr() const { // only meaningful if supports_inline_contig_alloc() returns true
 358     guarantee(false, "inline contiguous allocation not supported"); // unreachable unless a subclass overrides both
 359     return NULL;
 360   }
 361 
 362   // Some heaps may be in an unparseable state at certain times between
 363   // collections. This may be necessary for efficient implementation of
 364   // certain allocation-related activities. Calling this function before
 365   // attempting to parse a heap ensures that the heap is in a parsable
 366   // state (provided other concurrent activity does not introduce
 367   // unparsability). It is normally expected, therefore, that this
 368   // method is invoked with the world stopped.
 369   // NOTE: if you override this method, make sure you call
 370   // super::ensure_parsability so that the non-generational
 371   // part of the work gets done. See implementation of
 372   // CollectedHeap::ensure_parsability and, for instance,
 373   // that of GenCollectedHeap::ensure_parsability().




 333   }
 334 
 335   // Return the address "addr" aligned by "alignment_in_bytes" if such
 336   // an address is below "end".  Return NULL otherwise.
 337   inline static HeapWord* align_allocation_or_fail(HeapWord* addr,
 338                                                    HeapWord* end,
 339                                                    unsigned short alignment_in_bytes);
 340 
 341   // Some heaps may offer a contiguous region for shared non-blocking
 342   // allocation, via inlined code (by exporting the address of the top and
 343   // end fields defining the extent of the contiguous allocation region.)
 344 
 345   // This function returns "true" iff the heap supports this kind of
 346   // allocation.  (Default is "no".)
 347   virtual bool supports_inline_contig_alloc() const { // default: heaps must opt in by overriding
 348     return false;
 349   }
 350   // These functions return the addresses of the fields that define the
 351   // boundaries of the contiguous allocation area.  (These fields should be
 352   // physically near to one another.)
 353   virtual HeapWord* volatile* top_addr() const { // volatile pointee: per this rev, top is used in lock-free algorithms
 354     guarantee(false, "inline contiguous allocation not supported"); // unreachable unless a subclass overrides both
 355     return NULL;
 356   }
 357   virtual HeapWord** end_addr() const { // NOTE(review): unlike top_addr(), pointee left non-volatile — presumably end is not updated lock-free; confirm
 358     guarantee(false, "inline contiguous allocation not supported"); // unreachable unless a subclass overrides both
 359     return NULL;
 360   }
 361 
 362   // Some heaps may be in an unparseable state at certain times between
 363   // collections. This may be necessary for efficient implementation of
 364   // certain allocation-related activities. Calling this function before
 365   // attempting to parse a heap ensures that the heap is in a parsable
 366   // state (provided other concurrent activity does not introduce
 367   // unparsability). It is normally expected, therefore, that this
 368   // method is invoked with the world stopped.
 369   // NOTE: if you override this method, make sure you call
 370   // super::ensure_parsability so that the non-generational
 371   // part of the work gets done. See implementation of
 372   // CollectedHeap::ensure_parsability and, for instance,
 373   // that of GenCollectedHeap::ensure_parsability().


< prev index next >