181 guarantee(size >= 0, "allocation request must be reasonable");
182 assert_locked_or_safepoint(CodeCache_lock);
183 CodeBlob* cb = NULL;
184 _number_of_blobs++;
185 while (true) {
186 cb = (CodeBlob*)_heap->allocate(size, is_critical);
187 if (cb != NULL) break;
188 if (!_heap->expand_by(CodeCacheExpansionSize)) {
189 // Expansion failed
190 return NULL;
191 }
192 if (PrintCodeCacheExtension) {
193 ResourceMark rm;
194 tty->print_cr("code cache extended to [" INTPTR_FORMAT ", " INTPTR_FORMAT "] (%d bytes)",
195 (intptr_t)_heap->low_boundary(), (intptr_t)_heap->high(),
196 (address)_heap->high() - (address)_heap->low_boundary());
197 }
198 }
199 maxCodeCacheUsed = MAX2(maxCodeCacheUsed, ((address)_heap->high_boundary() -
200 (address)_heap->low_boundary()) - unallocated_capacity());
201 verify_if_often();
202 print_trace("allocation", cb, size);
203 return cb;
204 }
205
206 void CodeCache::free(CodeBlob* cb) {
207 assert_locked_or_safepoint(CodeCache_lock);
208 verify_if_often();
209
210 print_trace("free", cb);
211 if (cb->is_nmethod()) {
212 _number_of_nmethods--;
213 if (((nmethod *)cb)->has_dependencies()) {
214 _number_of_nmethods_with_dependencies--;
215 }
216 }
217 if (cb->is_adapter_blob()) {
218 _number_of_adapters--;
219 }
220 _number_of_blobs--;
221
222 _heap->deallocate(cb);
223
224 verify_if_often();
225 assert(_number_of_blobs >= 0, "sanity check");
226 }
227
228
229 void CodeCache::commit(CodeBlob* cb) {
230 // this is called by nmethod::nmethod, which must already own CodeCache_lock
231 assert_locked_or_safepoint(CodeCache_lock);
232 if (cb->is_nmethod()) {
233 _number_of_nmethods++;
234 if (((nmethod *)cb)->has_dependencies()) {
235 _number_of_nmethods_with_dependencies++;
236 }
237 }
238 if (cb->is_adapter_blob()) {
239 _number_of_adapters++;
240 }
241
242 // flush the hardware I-cache
243 ICache::invalidate_range(cb->content_begin(), cb->content_size());
244 }
252
// Iteration over CodeBlobs: walk every blob, only live blobs, or only live
// nmethods. The alive()/alive_nmethod() filters skip dead entries in place.

#define FOR_ALL_BLOBS(var)          for (CodeBlob* var = first();                var != NULL; var = next(var))
#define FOR_ALL_ALIVE_BLOBS(var)    for (CodeBlob* var = alive(first());         var != NULL; var = alive(next(var)))
#define FOR_ALL_ALIVE_NMETHODS(var) for (nmethod*  var = alive_nmethod(first()); var != NULL; var = alive_nmethod(next(var)))
258
259
260 bool CodeCache::contains(void *p) {
261 // It should be ok to call contains without holding a lock
262 return _heap->contains(p);
263 }
264
265
266 // This method is safe to call without holding the CodeCache_lock, as long as a dead codeblob is not
267 // looked up (i.e., one that has been marked for deletion). It only dependes on the _segmap to contain
268 // valid indices, which it will always do, as long as the CodeBlob is not in the process of being recycled.
269 CodeBlob* CodeCache::find_blob(void* start) {
270 CodeBlob* result = find_blob_unsafe(start);
271 if (result == NULL) return NULL;
272 // We could potientially look up non_entrant methods
273 guarantee(!result->is_zombie() || result->is_locked_by_vm() || is_error_reported(), "unsafe access to zombie method");
274 return result;
275 }
276
277 nmethod* CodeCache::find_nmethod(void* start) {
278 CodeBlob *cb = find_blob(start);
279 assert(cb == NULL || cb->is_nmethod(), "did not find an nmethod");
280 return (nmethod*)cb;
281 }
282
283
284 void CodeCache::blobs_do(void f(CodeBlob* nm)) {
285 assert_locked_or_safepoint(CodeCache_lock);
286 FOR_ALL_BLOBS(p) {
287 f(p);
288 }
289 }
290
291
292 void CodeCache::nmethods_do(void f(nmethod* nm)) {
731 void CodeCache::report_codemem_full() {
732 _codemem_full_count++;
733 EventCodeCacheFull event;
734 if (event.should_commit()) {
735 event.set_startAddress((u8)low_bound());
736 event.set_commitedTopAddress((u8)high());
737 event.set_reservedTopAddress((u8)high_bound());
738 event.set_entryCount(nof_blobs());
739 event.set_methodCount(nof_nmethods());
740 event.set_adaptorCount(nof_adapters());
741 event.set_unallocatedCapacity(unallocated_capacity()/K);
742 event.set_fullCount(_codemem_full_count);
743 event.commit();
744 }
745 }
746
747 //------------------------------------------------------------------------------------------------
748 // Non-product version
749
750 #ifndef PRODUCT
751
752 void CodeCache::verify_if_often() {
753 if (VerifyCodeCacheOften) {
754 _heap->verify();
755 }
756 }
757
758 void CodeCache::print_trace(const char* event, CodeBlob* cb, int size) {
759 if (PrintCodeCache2) { // Need to add a new flag
760 ResourceMark rm;
761 if (size == 0) size = cb->size();
762 tty->print_cr("CodeCache %s: addr: " INTPTR_FORMAT ", size: 0x%x", event, cb, size);
763 }
764 }
765
766 void CodeCache::print_internals() {
767 int nmethodCount = 0;
768 int runtimeStubCount = 0;
769 int adapterCount = 0;
770 int deoptimizationStubCount = 0;
771 int uncommonTrapStubCount = 0;
772 int bufferBlobCount = 0;
773 int total = 0;
774 int nmethodAlive = 0;
775 int nmethodNotEntrant = 0;
776 int nmethodZombie = 0;
|
181 guarantee(size >= 0, "allocation request must be reasonable");
182 assert_locked_or_safepoint(CodeCache_lock);
183 CodeBlob* cb = NULL;
184 _number_of_blobs++;
185 while (true) {
186 cb = (CodeBlob*)_heap->allocate(size, is_critical);
187 if (cb != NULL) break;
188 if (!_heap->expand_by(CodeCacheExpansionSize)) {
189 // Expansion failed
190 return NULL;
191 }
192 if (PrintCodeCacheExtension) {
193 ResourceMark rm;
194 tty->print_cr("code cache extended to [" INTPTR_FORMAT ", " INTPTR_FORMAT "] (%d bytes)",
195 (intptr_t)_heap->low_boundary(), (intptr_t)_heap->high(),
196 (address)_heap->high() - (address)_heap->low_boundary());
197 }
198 }
199 maxCodeCacheUsed = MAX2(maxCodeCacheUsed, ((address)_heap->high_boundary() -
200 (address)_heap->low_boundary()) - unallocated_capacity());
201 print_trace("allocation", cb, size);
202 return cb;
203 }
204
205 void CodeCache::free(CodeBlob* cb) {
206 assert_locked_or_safepoint(CodeCache_lock);
207
208 print_trace("free", cb);
209 if (cb->is_nmethod()) {
210 _number_of_nmethods--;
211 if (((nmethod *)cb)->has_dependencies()) {
212 _number_of_nmethods_with_dependencies--;
213 }
214 }
215 if (cb->is_adapter_blob()) {
216 _number_of_adapters--;
217 }
218 _number_of_blobs--;
219
220 _heap->deallocate(cb);
221
222 assert(_number_of_blobs >= 0, "sanity check");
223 }
224
225
226 void CodeCache::commit(CodeBlob* cb) {
227 // this is called by nmethod::nmethod, which must already own CodeCache_lock
228 assert_locked_or_safepoint(CodeCache_lock);
229 if (cb->is_nmethod()) {
230 _number_of_nmethods++;
231 if (((nmethod *)cb)->has_dependencies()) {
232 _number_of_nmethods_with_dependencies++;
233 }
234 }
235 if (cb->is_adapter_blob()) {
236 _number_of_adapters++;
237 }
238
239 // flush the hardware I-cache
240 ICache::invalidate_range(cb->content_begin(), cb->content_size());
241 }
249
// Iteration over CodeBlobs: walk every blob, only live blobs, or only live
// nmethods. The alive()/alive_nmethod() filters skip dead entries in place.

#define FOR_ALL_BLOBS(var)          for (CodeBlob* var = first();                var != NULL; var = next(var))
#define FOR_ALL_ALIVE_BLOBS(var)    for (CodeBlob* var = alive(first());         var != NULL; var = alive(next(var)))
#define FOR_ALL_ALIVE_NMETHODS(var) for (nmethod*  var = alive_nmethod(first()); var != NULL; var = alive_nmethod(next(var)))
255
256
257 bool CodeCache::contains(void *p) {
258 // It should be ok to call contains without holding a lock
259 return _heap->contains(p);
260 }
261
262
263 // This method is safe to call without holding the CodeCache_lock, as long as a dead codeblob is not
264 // looked up (i.e., one that has been marked for deletion). It only dependes on the _segmap to contain
265 // valid indices, which it will always do, as long as the CodeBlob is not in the process of being recycled.
266 CodeBlob* CodeCache::find_blob(void* start) {
267 CodeBlob* result = find_blob_unsafe(start);
268 if (result == NULL) return NULL;
269 // We could potentially look up non_entrant methods
270 guarantee(!result->is_zombie() || result->is_locked_by_vm() || is_error_reported(), "unsafe access to zombie method");
271 return result;
272 }
273
274 nmethod* CodeCache::find_nmethod(void* start) {
275 CodeBlob *cb = find_blob(start);
276 assert(cb == NULL || cb->is_nmethod(), "did not find an nmethod");
277 return (nmethod*)cb;
278 }
279
280
281 void CodeCache::blobs_do(void f(CodeBlob* nm)) {
282 assert_locked_or_safepoint(CodeCache_lock);
283 FOR_ALL_BLOBS(p) {
284 f(p);
285 }
286 }
287
288
289 void CodeCache::nmethods_do(void f(nmethod* nm)) {
728 void CodeCache::report_codemem_full() {
729 _codemem_full_count++;
730 EventCodeCacheFull event;
731 if (event.should_commit()) {
732 event.set_startAddress((u8)low_bound());
733 event.set_commitedTopAddress((u8)high());
734 event.set_reservedTopAddress((u8)high_bound());
735 event.set_entryCount(nof_blobs());
736 event.set_methodCount(nof_nmethods());
737 event.set_adaptorCount(nof_adapters());
738 event.set_unallocatedCapacity(unallocated_capacity()/K);
739 event.set_fullCount(_codemem_full_count);
740 event.commit();
741 }
742 }
743
744 //------------------------------------------------------------------------------------------------
745 // Non-product version
746
747 #ifndef PRODUCT
748
749 void CodeCache::print_trace(const char* event, CodeBlob* cb, int size) {
750 if (PrintCodeCache2) { // Need to add a new flag
751 ResourceMark rm;
752 if (size == 0) size = cb->size();
753 tty->print_cr("CodeCache %s: addr: " INTPTR_FORMAT ", size: 0x%x", event, cb, size);
754 }
755 }
756
757 void CodeCache::print_internals() {
758 int nmethodCount = 0;
759 int runtimeStubCount = 0;
760 int adapterCount = 0;
761 int deoptimizationStubCount = 0;
762 int uncommonTrapStubCount = 0;
763 int bufferBlobCount = 0;
764 int total = 0;
765 int nmethodAlive = 0;
766 int nmethodNotEntrant = 0;
767 int nmethodZombie = 0;
|