1 /* 2 * Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "code/codeBlob.hpp" 27 #include "code/codeCache.hpp" 28 #include "code/scopeDesc.hpp" 29 #include "code/vtableStubs.hpp" 30 #include "memory/allocation.inline.hpp" 31 #include "memory/resourceArea.hpp" 32 #include "oops/oop.inline.hpp" 33 #include "prims/jvmtiCodeBlobEvents.hpp" 34 #include "prims/jvmtiExport.hpp" 35 #include "runtime/handles.hpp" 36 #include "runtime/handles.inline.hpp" 37 #include "runtime/vmThread.hpp" 38 39 // Support class to collect a list of the non-nmethod CodeBlobs in 40 // the CodeCache. 41 // 42 // This class actually creates a list of JvmtiCodeBlobDesc - each JvmtiCodeBlobDesc 43 // describes a single CodeBlob in the CodeCache. 
// Note that collection is done to a static list - this is because
// CodeCache::blobs_do is defined as void CodeCache::blobs_do(void f(CodeBlob* nm))
// and hence requires a C or static method.
//
// Usage :-
//
// CodeBlobCollector collector;
//
// collector.collect();
// JvmtiCodeBlobDesc* blob = collector.first();
// while (blob != NULL) {
//   :
//   blob = collector.next();
// }
//

class CodeBlobCollector : StackObj {
 private:
  GrowableArray<JvmtiCodeBlobDesc*>* _code_blobs;   // collected blobs (C-heap allocated descriptors)
  int _pos;                                         // iterator position, -1 until first() is called

  // Static state used during a collection; collect() moves the result into
  // _code_blobs and resets this to NULL so only one collection can be in
  // progress at a time (see the note above about CodeCache::blobs_do).
  static GrowableArray<JvmtiCodeBlobDesc*>* _global_code_blobs;
  static void do_blob(CodeBlob* cb);
  static void do_vtable_stub(VtableStub* vs);
 public:
  CodeBlobCollector() {
    _code_blobs = NULL;
    _pos = -1;
  }
  // Frees every collected JvmtiCodeBlobDesc (they are C-heap allocated)
  // and then the list itself.
  ~CodeBlobCollector() {
    if (_code_blobs != NULL) {
      for (int i=0; i<_code_blobs->length(); i++) {
        FreeHeap(_code_blobs->at(i));
      }
      delete _code_blobs;
    }
  }

  // collect list of code blobs in the cache
  void collect();

  // iteration support - return first code blob, or NULL if the
  // collected list is empty
  JvmtiCodeBlobDesc* first() {
    assert(_code_blobs != NULL, "not collected");
    if (_code_blobs->length() == 0) {
      return NULL;
    }
    _pos = 0;
    return _code_blobs->at(0);
  }

  // iteration support - return next code blob, or NULL when the
  // iteration is exhausted; first() must have been called already
  JvmtiCodeBlobDesc* next() {
    assert(_pos >= 0, "iteration not started");
    if (_pos+1 >= _code_blobs->length()) {
      return NULL;
    }
    return _code_blobs->at(++_pos);
  }

};

// used during collection
GrowableArray<JvmtiCodeBlobDesc*>* CodeBlobCollector::_global_code_blobs;


// called for each CodeBlob in the CodeCache
//
// This function filters out nmethods as it is only interested in
// other CodeBlobs. This function also filters out CodeBlobs that have
// a duplicate starting address as previous blobs.
// This is needed to handle the case where multiple stubs are generated
// into a single BufferBlob.

void CodeBlobCollector::do_blob(CodeBlob* cb) {

  // ignore nmethods
  if (cb->is_nmethod()) {
    return;
  }
  // exclude VtableStubs, which are processed separately
  // (matched by name because vtable stubs live in BufferBlobs
  // named "vtable chunks")
  if (cb->is_buffer_blob() && strcmp(cb->name(), "vtable chunks") == 0) {
    return;
  }

  // check if this starting address has been seen already - the
  // assumption is that stubs are inserted into the list before the
  // enclosing BufferBlobs (see collect()), so the enclosing blob is
  // the duplicate that gets dropped here.
  address addr = cb->code_begin();
  for (int i=0; i<_global_code_blobs->length(); i++) {
    JvmtiCodeBlobDesc* scb = _global_code_blobs->at(i);
    if (addr == scb->code_begin()) {
      return;
    }
  }

  // record the CodeBlob details as a JvmtiCodeBlobDesc
  JvmtiCodeBlobDesc* scb = new JvmtiCodeBlobDesc(cb->name(), cb->code_begin(), cb->code_end());
  _global_code_blobs->append(scb);
}

// called for each VtableStub in VtableStubs

void CodeBlobCollector::do_vtable_stub(VtableStub* vs) {
  JvmtiCodeBlobDesc* scb = new JvmtiCodeBlobDesc(vs->is_vtable_stub() ? "vtable stub" : "itable stub",
                                                 vs->code_begin(), vs->code_end());
  _global_code_blobs->append(scb);
}

// collects a list of CodeBlobs in the CodeCache.
//
// The created list is growable array of JvmtiCodeBlobDesc - each one describes
// a CodeBlob. Note that the list is static - this is because CodeBlob::blobs_do
// requires a C or static function so we can't use an instance function. This
// isn't a problem as the iteration is serial anyway as we need the CodeCache_lock
// to iterate over the code cache.
//
// Note that the CodeBlobs in the CodeCache will include BufferBlobs that may
// contain multiple stubs.
// As a profiler is interested in the stubs rather than
// the enclosing container we first iterate over the stub code descriptors so
// that the stubs go into the list first. do_blob will then filter out the
// enclosing blobs if the starting address of the enclosing blobs matches the
// starting address of first stub generated in the enclosing blob.

void CodeBlobCollector::collect() {
  assert_locked_or_safepoint(CodeCache_lock);
  assert(_global_code_blobs == NULL, "checking");

  // create the global list
  _global_code_blobs = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<JvmtiCodeBlobDesc*>(50,true);

  // iterate over the stub code descriptors and put them in the list first,
  // so that do_blob can later drop the enclosing BufferBlobs as duplicates.
  for (StubCodeDesc* desc = StubCodeDesc::first(); desc != NULL; desc = StubCodeDesc::next(desc)) {
    _global_code_blobs->append(new JvmtiCodeBlobDesc(desc->name(), desc->begin(), desc->end()));
  }

  // Vtable stubs are not described with StubCodeDesc,
  // process them separately
  VtableStubs::vtable_stub_do(do_vtable_stub);

  // next iterate over all the non-nmethod code blobs and add them to
  // the list - as noted above this will filter out duplicates and
  // enclosing blobs.
  CodeCache::blobs_do(do_blob);

  // make the global list the instance list so that it can be used
  // for other iterations. Resetting the static to NULL also re-arms
  // the assert above for the next collection.
  _code_blobs = _global_code_blobs;
  _global_code_blobs = NULL;
}


// Generate a DYNAMIC_CODE_GENERATED event for each non-nmethod code blob.

jvmtiError JvmtiCodeBlobEvents::generate_dynamic_code_events(JvmtiEnv* env) {
  CodeBlobCollector collector;

  // First collect all the code blobs. This has to be done in a
  // single pass over the code cache with CodeCache_lock held because
  // there isn't any safe way to iterate over regular CodeBlobs since
  // they can be freed at any point.
  {
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    collector.collect();
  }

  // iterate over the collected list and post an event for each blob;
  // the lock is no longer held here - we post from the private snapshot
  JvmtiCodeBlobDesc* blob = collector.first();
  while (blob != NULL) {
    JvmtiExport::post_dynamic_code_generated(env, blob->name(), blob->code_begin(), blob->code_end());
    blob = collector.next();
  }
  return JVMTI_ERROR_NONE;
}


// Generate a COMPILED_METHOD_LOAD event for each nmethod
jvmtiError JvmtiCodeBlobEvents::generate_compiled_method_load_events(JvmtiEnv* env) {
  HandleMark hm;

  // Walk the CodeCache notifying for live nmethods. The code cache
  // may be changing while this is happening which is ok since newly
  // created nmethod will notify normally and nmethods which are freed
  // can be safely skipped.
  MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  // Iterate over non-profiled and profiled nmethods
  NMethodIterator iter;
  while(iter.next_alive()) {
    nmethod* current = iter.method();
    // Lock the nmethod so it can't be freed while we post the event
    nmethodLocker nml(current);

    // Don't hold the lock over the notify or jmethodID creation;
    // MutexUnlockerEx releases CodeCache_lock for this scope and
    // re-acquires it on exit, before the iterator advances.
    MutexUnlockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    current->get_and_cache_jmethod_id();
    JvmtiExport::post_compiled_method_load(current);
  }
  return JVMTI_ERROR_NONE;
}


// create a C-heap allocated address location map for an nmethod
void JvmtiCodeBlobEvents::build_jvmti_addr_location_map(nmethod *nm,
                                                        jvmtiAddrLocationMap** map_ptr,
                                                        jint *map_length_ptr)
{
  ResourceMark rm;
  jvmtiAddrLocationMap* map = NULL;
  jint map_length = 0;


  // Generate line numbers using PcDesc and ScopeDesc info
  methodHandle mh(nm->method());

  if (!mh->is_native()) {
    PcDesc *pcd;
    int pcds_in_method;

    // one map entry at most per PcDesc; entries with bci < 0 are skipped
    // below, so map_length may end up smaller than pcds_in_method
    pcds_in_method = (nm->scopes_pcs_end() - nm->scopes_pcs_begin());
    map = NEW_C_HEAP_ARRAY(jvmtiAddrLocationMap, pcds_in_method, mtInternal);

    address scopes_data = nm->scopes_data_begin();
    for( pcd = nm->scopes_pcs_begin(); pcd < nm->scopes_pcs_end(); ++pcd ) {
      ScopeDesc sc0(nm, pcd->scope_decode_offset(), pcd->should_reexecute(), pcd->rethrow_exception(), pcd->return_oop());
      ScopeDesc *sd = &sc0;
      // walk to the outermost (top) scope to get the bci in the root method
      while( !sd->is_top() ) { sd = sd->sender(); }
      int bci = sd->bci();
      if (bci >= 0) {
        assert(map_length < pcds_in_method, "checking");
        map[map_length].start_address = (const void*)pcd->real_pc(nm);
        map[map_length].location = bci;
        ++map_length;
      }
    }
  }

  // map stays NULL (and length 0) for native methods
  *map_ptr = map;
  *map_length_ptr = map_length;
}