< prev index next >

src/share/vm/gc/shared/collectedHeap.hpp

Print this page
rev 11970 : imported patch base_volatiles


 336   }
 337 
 338   // Return the address "addr" aligned by "alignment_in_bytes" if such
 339   // an address is below "end".  Return NULL otherwise.
 340   inline static HeapWord* align_allocation_or_fail(HeapWord* addr,   // candidate allocation address to align upward
 341                                                    HeapWord* end,    // exclusive limit the aligned address must stay below
 342                                                    unsigned short alignment_in_bytes);  // requested byte alignment (presumably a power of two — TODO confirm at definition site)
 343 
 344   // Some heaps may offer a contiguous region for shared non-blocking
 345   // allocation, via inlined code (by exporting the address of the top and
 346   // end fields defining the extent of the contiguous allocation region.)
 347 
 348   // This function returns "true" iff the heap supports this kind of
 349   // allocation.  (Default is "no".)
 350   virtual bool supports_inline_contig_alloc() const {
 351     return false;  // conservative default; heaps that export top/end for inline allocation override this to return true
 352   }
 353   // These functions return the addresses of the fields that define the
 354   // boundaries of the contiguous allocation area.  (These fields should be
 355   // physically near to one another.)
 356   virtual HeapWord** top_addr() const {  // address of the "top" (allocation cursor) field
 357     guarantee(false, "inline contiguous allocation not supported");  // must only be called when supports_inline_contig_alloc() is true
 358     return NULL;  // not reached: the failed guarantee above is fatal (presumably aborts the VM — verify); return satisfies the signature
 359   }
 360   virtual HeapWord** end_addr() const {  // address of the "end" (limit) field of the contiguous allocation region
 361     guarantee(false, "inline contiguous allocation not supported");  // must only be called when supports_inline_contig_alloc() is true
 362     return NULL;  // not reached: the failed guarantee above is fatal; return satisfies the signature
 363   }
 364 
 365   // Some heaps may be in an unparsable state at certain times between
 366   // collections. This may be necessary for efficient implementation of
 367   // certain allocation-related activities. Calling this function before
 368   // attempting to parse a heap ensures that the heap is in a parsable
 369   // state (provided other concurrent activity does not introduce
 370   // unparsability). It is normally expected, therefore, that this
 371   // method is invoked with the world stopped.
 372   // NOTE: if you override this method, make sure you call
 373   // super::ensure_parsability so that the non-generational
 374   // part of the work gets done. See implementation of
 375   // CollectedHeap::ensure_parsability and, for instance,
 376   // that of GenCollectedHeap::ensure_parsability().




 336   }
 337 
 338   // Return the address "addr" aligned by "alignment_in_bytes" if such
 339   // an address is below "end".  Return NULL otherwise.
 340   inline static HeapWord* align_allocation_or_fail(HeapWord* addr,   // candidate allocation address to align upward
 341                                                    HeapWord* end,    // exclusive limit the aligned address must stay below
 342                                                    unsigned short alignment_in_bytes);  // requested byte alignment (presumably a power of two — TODO confirm at definition site)
 343 
 344   // Some heaps may offer a contiguous region for shared non-blocking
 345   // allocation, via inlined code (by exporting the address of the top and
 346   // end fields defining the extent of the contiguous allocation region.)
 347 
 348   // This function returns "true" iff the heap supports this kind of
 349   // allocation.  (Default is "no".)
 350   virtual bool supports_inline_contig_alloc() const {
 351     return false;  // conservative default; heaps that export top/end for inline allocation override this to return true
 352   }
 353   // These functions return the addresses of the fields that define the
 354   // boundaries of the contiguous allocation area.  (These fields should be
 355   // physically near to one another.)
 356   virtual HeapWord* volatile* top_addr() const {  // NOTE(review): this rev volatile-qualifies the pointee — presumably because "top" is updated concurrently by allocating threads; end_addr() below is left as plain HeapWord**, so confirm the asymmetry is intended
 357     guarantee(false, "inline contiguous allocation not supported");  // must only be called when supports_inline_contig_alloc() is true
 358     return NULL;  // not reached: the failed guarantee above is fatal (presumably aborts the VM — verify); return satisfies the signature
 359   }
 360   virtual HeapWord** end_addr() const {  // address of the "end" (limit) field of the contiguous allocation region
 361     guarantee(false, "inline contiguous allocation not supported");  // must only be called when supports_inline_contig_alloc() is true
 362     return NULL;  // not reached: the failed guarantee above is fatal; return satisfies the signature
 363   }
 364 
 365   // Some heaps may be in an unparsable state at certain times between
 366   // collections. This may be necessary for efficient implementation of
 367   // certain allocation-related activities. Calling this function before
 368   // attempting to parse a heap ensures that the heap is in a parsable
 369   // state (provided other concurrent activity does not introduce
 370   // unparsability). It is normally expected, therefore, that this
 371   // method is invoked with the world stopped.
 372   // NOTE: if you override this method, make sure you call
 373   // super::ensure_parsability so that the non-generational
 374   // part of the work gets done. See implementation of
 375   // CollectedHeap::ensure_parsability and, for instance,
 376   // that of GenCollectedHeap::ensure_parsability().


< prev index next >