172 return (nmethod*)cb;
173 }
174
// High-water mark of code cache usage: the largest number of bytes ever in
// use (committed span minus unallocated capacity). Updated only in
// CodeCache::allocate below; file-local statistic, not exported.
175 static size_t maxCodeCacheUsed = 0;
176
177 CodeBlob* CodeCache::allocate(int size, bool is_critical) {
178 // Do not seize the CodeCache lock here--if the caller has not
179 // already done so, we are going to lose bigtime, since the code
180 // cache will contain a garbage CodeBlob until the caller can
181 // run the constructor for the CodeBlob subclass he is busy
182 // instantiating.
183 guarantee(size >= 0, "allocation request must be reasonable");
184 assert_locked_or_safepoint(CodeCache_lock);
185 CodeBlob* cb = NULL;
186 _number_of_blobs++;
187 while (true) {
188 cb = (CodeBlob*)_heap->allocate(size, is_critical);
189 if (cb != NULL) break;
190 if (!_heap->expand_by(CodeCacheExpansionSize)) {
191 // Expansion failed
192 return NULL;
193 }
194 if (PrintCodeCacheExtension) {
195 ResourceMark rm;
196 tty->print_cr("code cache extended to [" INTPTR_FORMAT ", " INTPTR_FORMAT "] (" SSIZE_FORMAT " bytes)",
197 (intptr_t)_heap->low_boundary(), (intptr_t)_heap->high(),
198 (address)_heap->high() - (address)_heap->low_boundary());
199 }
200 }
201 maxCodeCacheUsed = MAX2(maxCodeCacheUsed, ((address)_heap->high_boundary() -
202 (address)_heap->low_boundary()) - unallocated_capacity());
203 verify_if_often();
204 print_trace("allocation", cb, size);
205 return cb;
206 }
207
208 void CodeCache::free(CodeBlob* cb) {
209 assert_locked_or_safepoint(CodeCache_lock);
210 verify_if_often();
211
// Make every alive nmethod that has been marked for deoptimization
// non-entrant, so no new activations can enter its code. Caller must hold
// the CodeCache_lock or be at a safepoint.
741 void CodeCache::make_marked_nmethods_not_entrant() {
742 assert_locked_or_safepoint(CodeCache_lock);
// Iterate all alive nmethods; 'nm' is bound by the iteration macro.
743 FOR_ALL_ALIVE_NMETHODS(nm) {
744 if (nm->is_marked_for_deoptimization()) {
745 nm->make_not_entrant();
746 }
747 }
748 }
749
// Verify the integrity of the code heap itself and then of every alive
// CodeBlob in it. Diagnostic/debugging aid; delegates to each blob's
// own verify().
750 void CodeCache::verify() {
751 _heap->verify();
752 FOR_ALL_ALIVE_BLOBS(p) {
753 p->verify();
754 }
755 }
756
// Record one more occurrence of the code cache filling up and, if event
// recording is active, emit a CodeCacheFull diagnostic event describing
// the current cache geometry and occupancy.
757 void CodeCache::report_codemem_full() {
// Running count of full-cache reports since VM start; also put in the event.
758 _codemem_full_count++;
759 EventCodeCacheFull event;
760 if (event.should_commit()) {
761 event.set_startAddress((u8)low_bound());
// NOTE: "commited" (sic) spelling follows the event's field name.
762 event.set_commitedTopAddress((u8)high());
763 event.set_reservedTopAddress((u8)high_bound());
764 event.set_entryCount(nof_blobs());
765 event.set_methodCount(nof_nmethods());
766 event.set_adaptorCount(nof_adapters());
// Remaining free capacity, reported in kilobytes.
767 event.set_unallocatedCapacity(unallocated_capacity()/K);
768 event.set_fullCount(_codemem_full_count);
769 event.commit();
770 }
771 }
772
773 //------------------------------------------------------------------------------------------------
774 // Non-product version
775
776 #ifndef PRODUCT
777
778 void CodeCache::verify_if_often() {
779 if (VerifyCodeCacheOften) {
780 _heap->verify();
|
172 return (nmethod*)cb;
173 }
174
// High-water mark of code cache usage: the largest number of bytes ever in
// use (committed span minus unallocated capacity). Updated only in
// CodeCache::allocate below; file-local statistic, not exported.
175 static size_t maxCodeCacheUsed = 0;
176
// Allocate 'size' bytes of storage in the code cache for a new CodeBlob,
// expanding the code heap by CodeCacheExpansionSize increments as needed.
// 'is_critical' is passed through to the heap allocator. Returns NULL
// (after reporting the full-cache condition) if allocation still fails.
177 CodeBlob* CodeCache::allocate(int size, bool is_critical) {
178 // Do not seize the CodeCache lock here--if the caller has not
179 // already done so, we are going to lose bigtime, since the code
180 // cache will contain a garbage CodeBlob until the caller can
181 // run the constructor for the CodeBlob subclass he is busy
182 // instantiating.
183 guarantee(size >= 0, "allocation request must be reasonable");
184 assert_locked_or_safepoint(CodeCache_lock);
185 CodeBlob* cb = NULL;
// Count the blob up front; the caller finishes constructing it in place.
186 _number_of_blobs++;
187 while (true) {
188 cb = (CodeBlob*)_heap->allocate(size, is_critical);
189 if (cb != NULL) break;
190 if (!_heap->expand_by(CodeCacheExpansionSize)) {
191 // Expansion failed
// Report the full code cache. The CodeCache_lock is released around the
// call when this thread owns it — presumably so the reporting/event
// machinery never runs while holding the lock; confirm against
// report_codemem_full's requirements.
192 if (CodeCache_lock->owned_by_self()) {
193 MutexUnlockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
194 report_codemem_full();
195 } else {
196 report_codemem_full();
197 }
198 return NULL;
199 }
200 if (PrintCodeCacheExtension) {
201 ResourceMark rm;
202 tty->print_cr("code cache extended to [" INTPTR_FORMAT ", " INTPTR_FORMAT "] (" SSIZE_FORMAT " bytes)",
203 (intptr_t)_heap->low_boundary(), (intptr_t)_heap->high(),
204 (address)_heap->high() - (address)_heap->low_boundary());
205 }
206 }
// Track the high-water mark of bytes in use (committed span minus free).
207 maxCodeCacheUsed = MAX2(maxCodeCacheUsed, ((address)_heap->high_boundary() -
208 (address)_heap->low_boundary()) - unallocated_capacity());
209 verify_if_often();
210 print_trace("allocation", cb, size);
211 return cb;
212 }
213
214 void CodeCache::free(CodeBlob* cb) {
215 assert_locked_or_safepoint(CodeCache_lock);
216 verify_if_often();
217
// Make every alive nmethod that has been marked for deoptimization
// non-entrant, so no new activations can enter its code. Caller must hold
// the CodeCache_lock or be at a safepoint.
747 void CodeCache::make_marked_nmethods_not_entrant() {
748 assert_locked_or_safepoint(CodeCache_lock);
// Iterate all alive nmethods; 'nm' is bound by the iteration macro.
749 FOR_ALL_ALIVE_NMETHODS(nm) {
750 if (nm->is_marked_for_deoptimization()) {
751 nm->make_not_entrant();
752 }
753 }
754 }
755
// Verify the integrity of the code heap itself and then of every alive
// CodeBlob in it. Diagnostic/debugging aid; delegates to each blob's
// own verify().
756 void CodeCache::verify() {
757 _heap->verify();
758 FOR_ALL_ALIVE_BLOBS(p) {
759 p->verify();
760 }
761 }
762
// Record one more occurrence of the code cache filling up and, if event
// recording is active, emit a CodeCacheFull diagnostic event describing
// the current cache geometry and occupancy.
763 void CodeCache::report_codemem_full() {
// Running count of full-cache reports since VM start; also put in the event.
764 _codemem_full_count++;
765 EventCodeCacheFull event;
766 if (event.should_commit()) {
// Single-heap code cache: the event always reports CodeBlobType::All.
767 event.set_codeBlobType((u1)CodeBlobType::All);
768 event.set_startAddress((u8)low_bound());
// NOTE: "commited" (sic) spelling follows the event's field name.
769 event.set_commitedTopAddress((u8)high());
770 event.set_reservedTopAddress((u8)high_bound());
771 event.set_entryCount(nof_blobs());
772 event.set_methodCount(nof_nmethods());
773 event.set_adaptorCount(nof_adapters());
// Remaining free capacity, reported in kilobytes.
774 event.set_unallocatedCapacity(unallocated_capacity()/K);
775 event.set_fullCount(_codemem_full_count);
776 event.commit();
777 }
778 }
779
780 //------------------------------------------------------------------------------------------------
781 // Non-product version
782
783 #ifndef PRODUCT
784
785 void CodeCache::verify_if_often() {
786 if (VerifyCodeCacheOften) {
787 _heap->verify();
|