
src/share/vm/memory/allocation.cpp

 732 
 733 julong Arena::_bytes_allocated = 0;
 734 
 735 void Arena::inc_bytes_allocated(size_t x) { inc_stat_counter(&_bytes_allocated, x); }
 736 
 737 AllocStats::AllocStats() {
 738   start_mallocs      = os::num_mallocs;
 739   start_frees        = os::num_frees;
 740   start_malloc_bytes = os::alloc_bytes;
 741   start_mfree_bytes  = os::free_bytes;
 742   start_res_bytes    = Arena::_bytes_allocated;
 743 }
 744 
 745 julong  AllocStats::num_mallocs() { return os::num_mallocs - start_mallocs; }
 746 julong  AllocStats::alloc_bytes() { return os::alloc_bytes - start_malloc_bytes; }
 747 julong  AllocStats::num_frees()   { return os::num_frees - start_frees; }
 748 julong  AllocStats::free_bytes()  { return os::free_bytes - start_mfree_bytes; }
 749 julong  AllocStats::resource_bytes() { return Arena::_bytes_allocated - start_res_bytes; }
 750 void    AllocStats::print() {
 751   tty->print_cr(UINT64_FORMAT " mallocs (" UINT64_FORMAT "MB), "
 752                 UINT64_FORMAT" frees (" UINT64_FORMAT "MB), " UINT64_FORMAT "MB resrc",
 753                 num_mallocs(), alloc_bytes()/M, num_frees(), free_bytes()/M, resource_bytes()/M);
 754 }
 755 
 756 
 757 // debugging code
 758 inline void Arena::free_all(char** start, char** end) {
 759   for (char** p = start; p < end; p++) if (*p) os::free(*p);
 760 }
 761 
 762 void Arena::free_malloced_objects(Chunk* chunk, char* hwm, char* max, char* hwm2) {
 763   assert(UseMallocOnly, "should not call");
 764   // free all objects malloced since resource mark was created; resource area
 765   // contains their addresses
 766   if (chunk->next()) {
 767     // this chunk is full, and some others too
 768     for (Chunk* c = chunk->next(); c != NULL; c = c->next()) {
 769       char* top = c->top();
 770       if (c->next() == NULL) {
 771         top = hwm2;     // last chunk is only used up to hwm2
 772         assert(c->contains(hwm2), "bad hwm2");




 732 
 733 julong Arena::_bytes_allocated = 0;
 734 
 735 void Arena::inc_bytes_allocated(size_t x) { inc_stat_counter(&_bytes_allocated, x); }
 736 
 737 AllocStats::AllocStats() {
 738   start_mallocs      = os::num_mallocs;
 739   start_frees        = os::num_frees;
 740   start_malloc_bytes = os::alloc_bytes;
 741   start_mfree_bytes  = os::free_bytes;
 742   start_res_bytes    = Arena::_bytes_allocated;
 743 }
 744 
 745 julong  AllocStats::num_mallocs() { return os::num_mallocs - start_mallocs; }
 746 julong  AllocStats::alloc_bytes() { return os::alloc_bytes - start_malloc_bytes; }
 747 julong  AllocStats::num_frees()   { return os::num_frees - start_frees; }
 748 julong  AllocStats::free_bytes()  { return os::free_bytes - start_mfree_bytes; }
 749 julong  AllocStats::resource_bytes() { return Arena::_bytes_allocated - start_res_bytes; }
 750 void    AllocStats::print() {
 751   tty->print_cr(UINT64_FORMAT " mallocs (" UINT64_FORMAT "MB), "
 752                 UINT64_FORMAT " frees (" UINT64_FORMAT "MB), " UINT64_FORMAT "MB resrc",
 753                 num_mallocs(), alloc_bytes()/M, num_frees(), free_bytes()/M, resource_bytes()/M);
 754 }
 755 
 756 
 757 // debugging code
 758 inline void Arena::free_all(char** start, char** end) {
 759   for (char** p = start; p < end; p++) if (*p) os::free(*p);
 760 }
 761 
 762 void Arena::free_malloced_objects(Chunk* chunk, char* hwm, char* max, char* hwm2) {
 763   assert(UseMallocOnly, "should not call");
 764   // free all objects malloced since resource mark was created; resource area
 765   // contains their addresses
 766   if (chunk->next()) {
 767     // this chunk is full, and some others too
 768     for (Chunk* c = chunk->next(); c != NULL; c = c->next()) {
 769       char* top = c->top();
 770       if (c->next() == NULL) {
 771         top = hwm2;     // last chunk is only used up to hwm2
 772         assert(c->contains(hwm2), "bad hwm2");

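For context on the class whose print format this change touches: AllocStats snapshots os::num_mallocs, os::num_frees, os::alloc_bytes, os::free_bytes and Arena::_bytes_allocated at construction, and each accessor reports the delta since that snapshot. A minimal usage sketch of the measure-a-region pattern (do_some_work() is a placeholder, not a real HotSpot call):

  {
    AllocStats stats;   // snapshots the os malloc/free counters and Arena::_bytes_allocated
    do_some_work();     // placeholder for the code whose allocation behavior is of interest
    stats.print();      // prints mallocs/frees/resource bytes accumulated since the snapshot
  }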

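The truncated free_malloced_objects() above is the cleanup side of UseMallocOnly: in that mode each arena allocation is a separate os::malloc, and the arena chunks only record the returned addresses, so rolling back to a resource mark means walking the chunks and freeing every recorded pointer (free_all() does the individual frees; hwm and hwm2 bound the used portion of the first and last chunk). A standalone sketch of that bookkeeping idea, using hypothetical names (TrackingArena, rollback_to) that are not HotSpot API:

  #include <cstddef>
  #include <cstdlib>
  #include <vector>

  // Sketch only: each "arena" allocation is a real malloc, and the arena just
  // records the pointer so a later rollback can free everything past a mark.
  struct TrackingArena {
    std::vector<void*> ptrs;                          // stands in for the chunk contents

    void* allocate(std::size_t size) {
      void* p = std::malloc(size);                    // every allocation is an individual malloc
      ptrs.push_back(p);                              // remember the address for later cleanup
      return p;
    }

    std::size_t mark() const { return ptrs.size(); }  // analogous to taking a resource mark

    void rollback_to(std::size_t m) {                 // analogous to free_malloced_objects()
      for (std::size_t i = m; i < ptrs.size(); i++) {
        std::free(ptrs[i]);                           // free everything allocated since the mark
      }
      ptrs.resize(m);
    }
  };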