src/share/vm/memory/heap.cpp

The change adds CodeHeap::find_blob_unsafe; the surrounding functions are otherwise unchanged.

void* CodeHeap::find_start(void* p) const {
  if (!contains(p)) {
    return NULL;
  }
  // Look up the segment that p falls into.
  size_t seg_idx = segment_for(p);
  address seg_map = (address)_segmap.low();
  if (is_segment_unused(seg_map[seg_idx])) {
    return NULL;
  }
  // A used segment stores a backward distance toward the start of its
  // block (chained so it fits in a byte); follow it to the first segment.
  while (seg_map[seg_idx] > 0) {
    seg_idx -= (int)seg_map[seg_idx];
  }

  HeapBlock* h = block_at(seg_idx);
  if (h->free()) {
    return NULL;
  }
  return h->allocated_space();
}

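To make the backward walk concrete, here is a minimal standalone model of the segment map (toy code with invented names, not HotSpot code; the chained marking follows the same byte-sized-distance scheme the heap uses when it marks segments as used):

#include <cstdio>
#include <cstddef>
#include <vector>

static const unsigned char UNUSED_SEG = 0xFF;  // stand-in for an unused-segment marker

// Mark segments [beg, end) as one block. Each entry holds a backward
// distance toward the block start; the count restarts before it would
// overflow a byte, so long blocks are covered by a chain of short hops.
static void toy_mark(std::vector<unsigned char>& segmap, size_t beg, size_t end) {
  int i = 0;
  for (size_t s = beg; s < end; s++) {
    segmap[s] = (unsigned char)i++;
    if (i == 0xFF) i = 1;  // restart the chain; 0 stays unique to the block start
  }
}

// The find_start() walk: hop backwards until the entry reads 0.
static size_t toy_find_start(const std::vector<unsigned char>& segmap, size_t seg_idx) {
  while (segmap[seg_idx] > 0) {
    seg_idx -= segmap[seg_idx];
  }
  return seg_idx;
}

int main() {
  std::vector<unsigned char> segmap(600, UNUSED_SEG);
  toy_mark(segmap, 10, 520);  // one block spanning 510 segments
  // Two hops: 500 -> 264 -> 10.
  printf("block starts at segment %zu\n", toy_find_start(segmap, 500));
  return 0;
}

As in the real code, the walk is only valid for segments already known to be in use; that is what the is_segment_unused() guard above ensures.
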
CodeBlob* CodeHeap::find_blob_unsafe(void* start) const {
  // find_start() yields the enclosing block's payload; only return it
  // if the resulting blob really contains 'start'.
  CodeBlob* result = (CodeBlob*)CodeHeap::find_start(start);
  if (result != NULL && result->blob_contains((address)start)) {
    return result;
  }
  return NULL;
}

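The _unsafe suffix presumably mirrors CodeCache::find_blob_unsafe: the lookup takes no lock and does no validity checking beyond the blob_contains() test, which is what debugging and error-reporting callers need when the code cache cannot be assumed stable.
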
size_t CodeHeap::alignment_unit() const {
  // This will be a power of two.
  return _segment_size;
}

size_t CodeHeap::alignment_offset() const {
  // The lowest address in any allocated block will be
  // equal to alignment_offset (mod alignment_unit).
  return sizeof(HeapBlock) & (_segment_size - 1);
}

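As a quick standalone check of the mask trick in alignment_offset(): for a power-of-two n, x & (n - 1) equals x % n. The concrete sizes below are assumptions chosen for the example, not HotSpot's actual values.

#include <cstdio>
#include <cstddef>

int main() {
  size_t segment_size = 64;  // assumed alignment unit; must be a power of two
  size_t header_size  = 8;   // assumed sizeof(HeapBlock)
  // The mask keeps only the low bits, i.e. header_size mod segment_size.
  size_t offset = header_size & (segment_size - 1);
  printf("alignment_offset = %zu (same as %zu %% %zu = %zu)\n",
         offset, header_size, segment_size, header_size % segment_size);
  return 0;
}

Every allocated_space() pointer sits sizeof(HeapBlock) past a segment boundary, which is why this offset, taken mod the segment size, describes the alignment of all allocated blocks.
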
// Returns the current block if available and used.
// If not, it returns the subsequent block (if available), NULL otherwise.
// Free blocks are merged, therefore there is at most one free block
// between two used ones. As a result, the subsequent block (if available) is
// guaranteed to be used.
void* CodeHeap::next_used(HeapBlock* b) const {
  if (b != NULL && b->free()) b = next_block(b);
  assert(b == NULL || !b->free(), "must be in use or at end of heap");
  return (b == NULL) ? NULL : b->allocated_space();
}
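
A small self-contained model of why next_used() never needs to skip more than one block (toy types with invented names, not HotSpot code): because adjacent free blocks are merged, a used block or the heap end is always at most one hop away.

#include <cstdio>

// Toy block: a 'free' flag plus a link to the next block (NULL at heap end).
struct ToyBlock { bool free; ToyBlock* next; };

// Mirrors next_used(): skip at most one free block.
static ToyBlock* toy_next_used(ToyBlock* b) {
  if (b != NULL && b->free) b = b->next;
  // Two free blocks in a row would violate the merge invariant.
  return (b == NULL || b->free) ? NULL : b;
}

int main() {
  // used -> free -> used (valid: no two adjacent free blocks)
  ToyBlock u2 = { false, NULL };
  ToyBlock f1 = { true,  &u2 };
  ToyBlock u1 = { false, &f1 };
  for (ToyBlock* b = toy_next_used(&u1); b != NULL; b = toy_next_used(b->next)) {
    printf("used block at %p\n", (void*)b);
  }
  return 0;
}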

