172 return (nmethod*)cb;
173 }
174
// File-local high-water mark: the largest number of bytes ever in use in
// the code cache (committed span minus unallocated capacity), updated on
// each successful allocation in CodeCache::allocate().
static size_t maxCodeCacheUsed = 0;
176
177 CodeBlob* CodeCache::allocate(int size, bool is_critical) {
178 // Do not seize the CodeCache lock here--if the caller has not
179 // already done so, we are going to lose bigtime, since the code
180 // cache will contain a garbage CodeBlob until the caller can
181 // run the constructor for the CodeBlob subclass he is busy
182 // instantiating.
183 guarantee(size >= 0, "allocation request must be reasonable");
184 assert_locked_or_safepoint(CodeCache_lock);
185 CodeBlob* cb = NULL;
186 _number_of_blobs++;
187 while (true) {
188 cb = (CodeBlob*)_heap->allocate(size, is_critical);
189 if (cb != NULL) break;
190 if (!_heap->expand_by(CodeCacheExpansionSize)) {
191 // Expansion failed
192 return NULL;
193 }
194 if (PrintCodeCacheExtension) {
195 ResourceMark rm;
196 tty->print_cr("code cache extended to [" INTPTR_FORMAT ", " INTPTR_FORMAT "] (" SSIZE_FORMAT " bytes)",
197 (intptr_t)_heap->low_boundary(), (intptr_t)_heap->high(),
198 (address)_heap->high() - (address)_heap->low_boundary());
199 }
200 }
201 maxCodeCacheUsed = MAX2(maxCodeCacheUsed, ((address)_heap->high_boundary() -
202 (address)_heap->low_boundary()) - unallocated_capacity());
203 verify_if_often();
204 print_trace("allocation", cb, size);
205 return cb;
206 }
207
208 void CodeCache::free(CodeBlob* cb) {
209 assert_locked_or_safepoint(CodeCache_lock);
210 verify_if_often();
211
// Walk all alive nmethods and make every one that was previously marked
// for deoptimization not-entrant, so no new activations can enter its
// compiled code. Caller must hold CodeCache_lock or be at a safepoint.
void CodeCache::make_marked_nmethods_not_entrant() {
  assert_locked_or_safepoint(CodeCache_lock);
  FOR_ALL_ALIVE_NMETHODS(nm) {
    if (nm->is_marked_for_deoptimization()) {
      nm->make_not_entrant();
    }
  }
}
771
// Verify the code heap's internal consistency, then ask every alive
// CodeBlob to verify itself.
void CodeCache::verify() {
  _heap->verify();
  FOR_ALL_ALIVE_BLOBS(p) {
    p->verify();
  }
}
778
// Record that the code cache has filled up and, when event tracing is
// enabled, emit a CodeCacheFull event carrying a snapshot of the cache's
// bounds, entry counts, and remaining capacity.
void CodeCache::report_codemem_full() {
  _codemem_full_count++;
  EventCodeCacheFull event;
  if (event.should_commit()) {
    event.set_startAddress((u8)low_bound());
    // NOTE: "commited" (sic) is the field name defined by the event API.
    event.set_commitedTopAddress((u8)high());
    event.set_reservedTopAddress((u8)high_bound());
    event.set_entryCount(nof_blobs());
    event.set_methodCount(nof_nmethods());
    event.set_adaptorCount(nof_adapters());
    event.set_unallocatedCapacity(unallocated_capacity()/K);  // reported in KB
    event.set_fullCount(_codemem_full_count);
    event.commit();
  }
}
794
795 //------------------------------------------------------------------------------------------------
796 // Non-product version
797
798 #ifndef PRODUCT
799
800 void CodeCache::verify_if_often() {
801 if (VerifyCodeCacheOften) {
802 _heap->verify();
|
172 return (nmethod*)cb;
173 }
174
// File-local high-water mark: the largest number of bytes ever in use in
// the code cache (committed span minus unallocated capacity), updated on
// each successful allocation in CodeCache::allocate().
static size_t maxCodeCacheUsed = 0;
176
177 CodeBlob* CodeCache::allocate(int size, bool is_critical) {
178 // Do not seize the CodeCache lock here--if the caller has not
179 // already done so, we are going to lose bigtime, since the code
180 // cache will contain a garbage CodeBlob until the caller can
181 // run the constructor for the CodeBlob subclass he is busy
182 // instantiating.
183 guarantee(size >= 0, "allocation request must be reasonable");
184 assert_locked_or_safepoint(CodeCache_lock);
185 CodeBlob* cb = NULL;
186 _number_of_blobs++;
187 while (true) {
188 cb = (CodeBlob*)_heap->allocate(size, is_critical);
189 if (cb != NULL) break;
190 if (!_heap->expand_by(CodeCacheExpansionSize)) {
191 // Expansion failed
192 if (CodeCache_lock->owned_by_self()) {
193 MutexUnlockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
194 report_codemem_full();
195 } else {
196 report_codemem_full();
197 }
198 return NULL;
199 }
200 if (PrintCodeCacheExtension) {
201 ResourceMark rm;
202 tty->print_cr("code cache extended to [" INTPTR_FORMAT ", " INTPTR_FORMAT "] (" SSIZE_FORMAT " bytes)",
203 (intptr_t)_heap->low_boundary(), (intptr_t)_heap->high(),
204 (address)_heap->high() - (address)_heap->low_boundary());
205 }
206 }
207 maxCodeCacheUsed = MAX2(maxCodeCacheUsed, ((address)_heap->high_boundary() -
208 (address)_heap->low_boundary()) - unallocated_capacity());
209 verify_if_often();
210 print_trace("allocation", cb, size);
211 return cb;
212 }
213
214 void CodeCache::free(CodeBlob* cb) {
215 assert_locked_or_safepoint(CodeCache_lock);
216 verify_if_often();
217
// Walk all alive nmethods and make every one that was previously marked
// for deoptimization not-entrant, so no new activations can enter its
// compiled code. Caller must hold CodeCache_lock or be at a safepoint.
void CodeCache::make_marked_nmethods_not_entrant() {
  assert_locked_or_safepoint(CodeCache_lock);
  FOR_ALL_ALIVE_NMETHODS(nm) {
    if (nm->is_marked_for_deoptimization()) {
      nm->make_not_entrant();
    }
  }
}
777
// Verify the code heap's internal consistency, then ask every alive
// CodeBlob to verify itself.
void CodeCache::verify() {
  _heap->verify();
  FOR_ALL_ALIVE_BLOBS(p) {
    p->verify();
  }
}
784
// Record that the code cache has filled up and, when event tracing is
// enabled, emit a CodeCacheFull event carrying a snapshot of the cache's
// bounds, entry counts, and remaining capacity.
void CodeCache::report_codemem_full() {
  _codemem_full_count++;
  EventCodeCacheFull event;
  if (event.should_commit()) {
    event.set_codeBlobType((u1)CodeBlobType::All);
    event.set_startAddress((u8)low_bound());
    // NOTE: "commited" (sic) is the field name defined by the event API.
    event.set_commitedTopAddress((u8)high());
    event.set_reservedTopAddress((u8)high_bound());
    event.set_entryCount(nof_blobs());
    event.set_methodCount(nof_nmethods());
    event.set_adaptorCount(nof_adapters());
    event.set_unallocatedCapacity(unallocated_capacity()/K);  // reported in KB
    event.set_fullCount(_codemem_full_count);
    event.commit();
  }
}
801
802 //------------------------------------------------------------------------------------------------
803 // Non-product version
804
805 #ifndef PRODUCT
806
807 void CodeCache::verify_if_often() {
808 if (VerifyCodeCacheOften) {
809 _heap->verify();
|