/*
 * Copyright (c) 2003, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "gc_implementation/shared/markSweep.inline.hpp" 27 #include "interpreter/interpreter.hpp" 28 #include "memory/gcLocker.hpp" 29 #include "memory/resourceArea.hpp" 30 #include "oops/constMethodKlass.hpp" 31 #include "oops/constMethodOop.hpp" 32 #include "oops/oop.inline.hpp" 33 #include "oops/oop.inline2.hpp" 34 #include "runtime/handles.inline.hpp" 35 36 37 klassOop constMethodKlass::create_klass(TRAPS) { 38 constMethodKlass o; 39 KlassHandle h_this_klass(THREAD, Universe::klassKlassObj()); 40 KlassHandle k = base_create_klass(h_this_klass, header_size(), 41 o.vtbl_value(), CHECK_NULL); 42 // Make sure size calculation is right 43 assert(k()->size() == align_object_size(header_size()), 44 "wrong size for object"); 45 //java_lang_Class::create_mirror(k, CHECK_NULL); // Allocate mirror 46 return k(); 47 } 48 49 50 int constMethodKlass::oop_size(oop obj) const { 51 assert(obj->is_constMethod(), "must be constMethod oop"); 52 return constMethodOop(obj)->object_size(); 53 } 54 55 bool constMethodKlass::oop_is_parsable(oop obj) const { 56 assert(obj->is_constMethod(), "must be constMethod oop"); 57 return constMethodOop(obj)->object_is_parsable(); 58 } 59 60 bool constMethodKlass::oop_is_conc_safe(oop obj) const { 61 assert(obj->is_constMethod(), "must be constMethod oop"); 62 return constMethodOop(obj)->is_conc_safe(); 63 } 64 65 constMethodOop constMethodKlass::allocate(int byte_code_size, 66 int compressed_line_number_size, 67 int localvariable_table_length, 68 int exception_table_length, 69 int checked_exceptions_length, 70 bool is_conc_safe, 71 TRAPS) { 72 73 int size = constMethodOopDesc::object_size(byte_code_size, 74 compressed_line_number_size, 75 localvariable_table_length, 76 exception_table_length, 77 checked_exceptions_length); 78 KlassHandle h_k(THREAD, as_klassOop()); 79 constMethodOop cm = (constMethodOop) 80 CollectedHeap::permanent_obj_allocate(h_k, size, CHECK_NULL); 81 assert(!cm->is_parsable(), 
"Not yet safely parsable"); 82 No_Safepoint_Verifier no_safepoint; 83 cm->set_interpreter_kind(Interpreter::invalid); 84 cm->init_fingerprint(); 85 cm->set_method(NULL); 86 cm->set_stackmap_data(NULL); 87 cm->set_code_size(byte_code_size); 88 cm->set_constMethod_size(size); 89 cm->set_result_type(T_VOID); 90 cm->set_inlined_tables_length(checked_exceptions_length, 91 compressed_line_number_size, 92 localvariable_table_length, 93 exception_table_length); 94 assert(cm->size() == size, "wrong size for object"); 95 cm->set_is_conc_safe(is_conc_safe); 96 cm->set_partially_loaded(); 97 assert(cm->is_parsable(), "Is safely parsable by gc"); 98 return cm; 99 } 100 101 void constMethodKlass::oop_follow_contents(oop obj) { 102 assert (obj->is_constMethod(), "object must be constMethod"); 103 constMethodOop cm = constMethodOop(obj); 104 MarkSweep::mark_and_push(cm->adr_method()); 105 MarkSweep::mark_and_push(cm->adr_stackmap_data()); 106 // Performance tweak: We skip iterating over the klass pointer since we 107 // know that Universe::constMethodKlassObj never moves. 108 } 109 110 #ifndef SERIALGC 111 void constMethodKlass::oop_follow_contents(ParCompactionManager* cm, 112 oop obj) { 113 assert (obj->is_constMethod(), "object must be constMethod"); 114 constMethodOop cm_oop = constMethodOop(obj); 115 PSParallelCompact::mark_and_push(cm, cm_oop->adr_method()); 116 PSParallelCompact::mark_and_push(cm, cm_oop->adr_stackmap_data()); 117 // Performance tweak: We skip iterating over the klass pointer since we 118 // know that Universe::constMethodKlassObj never moves. 119 } 120 #endif // SERIALGC 121 122 int constMethodKlass::oop_oop_iterate(oop obj, OopClosure* blk) { 123 assert (obj->is_constMethod(), "object must be constMethod"); 124 constMethodOop cm = constMethodOop(obj); 125 blk->do_oop(cm->adr_method()); 126 blk->do_oop(cm->adr_stackmap_data()); 127 // Get size before changing pointers. 128 // Don't call size() or oop_size() since that is a virtual call. 
129 int size = cm->object_size(); 130 return size; 131 } 132 133 134 int constMethodKlass::oop_oop_iterate_m(oop obj, OopClosure* blk, MemRegion mr) { 135 assert (obj->is_constMethod(), "object must be constMethod"); 136 constMethodOop cm = constMethodOop(obj); 137 oop* adr; 138 adr = cm->adr_method(); 139 if (mr.contains(adr)) blk->do_oop(adr); 140 adr = cm->adr_stackmap_data(); 141 if (mr.contains(adr)) blk->do_oop(adr); 142 // Get size before changing pointers. 143 // Don't call size() or oop_size() since that is a virtual call. 144 int size = cm->object_size(); 145 // Performance tweak: We skip iterating over the klass pointer since we 146 // know that Universe::constMethodKlassObj never moves. 147 return size; 148 } 149 150 151 int constMethodKlass::oop_adjust_pointers(oop obj) { 152 assert(obj->is_constMethod(), "should be constMethod"); 153 constMethodOop cm = constMethodOop(obj); 154 MarkSweep::adjust_pointer(cm->adr_method()); 155 MarkSweep::adjust_pointer(cm->adr_stackmap_data()); 156 // Get size before changing pointers. 157 // Don't call size() or oop_size() since that is a virtual call. 158 int size = cm->object_size(); 159 // Performance tweak: We skip iterating over the klass pointer since we 160 // know that Universe::constMethodKlassObj never moves. 
161 return size; 162 } 163 164 #ifndef SERIALGC 165 void constMethodKlass::oop_push_contents(PSPromotionManager* pm, oop obj) { 166 assert(obj->is_constMethod(), "should be constMethod"); 167 } 168 169 int constMethodKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) { 170 assert(obj->is_constMethod(), "should be constMethod"); 171 constMethodOop cm_oop = constMethodOop(obj); 172 oop* const beg_oop = cm_oop->oop_block_beg(); 173 oop* const end_oop = cm_oop->oop_block_end(); 174 for (oop* cur_oop = beg_oop; cur_oop < end_oop; ++cur_oop) { 175 PSParallelCompact::adjust_pointer(cur_oop); 176 } 177 return cm_oop->object_size(); 178 } 179 #endif // SERIALGC 180 181 // Printing 182 183 void constMethodKlass::oop_print_on(oop obj, outputStream* st) { 184 ResourceMark rm; 185 assert(obj->is_constMethod(), "must be constMethod"); 186 Klass::oop_print_on(obj, st); 187 constMethodOop m = constMethodOop(obj); 188 st->print(" - method: " INTPTR_FORMAT " ", (address)m->method()); 189 m->method()->print_value_on(st); st->cr(); 190 if (m->has_stackmap_table()) { 191 st->print(" - stackmap data: "); 192 m->stackmap_data()->print_value_on(st); 193 st->cr(); 194 } 195 } 196 197 // Short version of printing constMethodOop - just print the name of the 198 // method it belongs to. 
199 void constMethodKlass::oop_print_value_on(oop obj, outputStream* st) { 200 assert(obj->is_constMethod(), "must be constMethod"); 201 constMethodOop m = constMethodOop(obj); 202 st->print(" const part of method " ); 203 m->method()->print_value_on(st); 204 } 205 206 const char* constMethodKlass::internal_name() const { 207 return "{constMethod}"; 208 } 209 210 211 // Verification 212 213 void constMethodKlass::oop_verify_on(oop obj, outputStream* st) { 214 Klass::oop_verify_on(obj, st); 215 guarantee(obj->is_constMethod(), "object must be constMethod"); 216 constMethodOop m = constMethodOop(obj); 217 guarantee(m->is_perm(), "should be in permspace"); 218 219 // Verification can occur during oop construction before the method or 220 // other fields have been initialized. 221 if (!obj->partially_loaded()) { 222 guarantee(m->method()->is_perm(), "should be in permspace"); 223 guarantee(m->method()->is_method(), "should be method"); 224 typeArrayOop stackmap_data = m->stackmap_data(); 225 guarantee(stackmap_data == NULL || 226 stackmap_data->is_perm(), "should be in permspace"); 227 228 address m_end = (address)((oop*) m + m->size()); 229 address compressed_table_start = m->code_end(); 230 guarantee(compressed_table_start <= m_end, "invalid method layout"); 231 address compressed_table_end = compressed_table_start; 232 // Verify line number table 233 if (m->has_linenumber_table()) { 234 CompressedLineNumberReadStream stream(m->compressed_linenumber_table()); 235 while (stream.read_pair()) { 236 guarantee(stream.bci() >= 0 && stream.bci() <= m->code_size(), "invalid bci in line number table"); 237 } 238 compressed_table_end += stream.position(); 239 } 240 guarantee(compressed_table_end <= m_end, "invalid method layout"); 241 // Verify checked exceptions, exception table and local variable tables 242 if (m->has_checked_exceptions()) { 243 u2* addr = m->checked_exceptions_length_addr(); 244 guarantee(*addr > 0 && (address) addr >= compressed_table_end && (address) addr 
< m_end, "invalid method layout"); 245 } 246 if (m->has_exception_handler()) { 247 u2* addr = m->exception_table_length_addr(); 248 guarantee(*addr > 0 && (address) addr >= compressed_table_end && (address) addr < m_end, "invalid method layout"); 249 } 250 if (m->has_localvariable_table()) { 251 u2* addr = m->localvariable_table_length_addr(); 252 guarantee(*addr > 0 && (address) addr >= compressed_table_end && (address) addr < m_end, "invalid method layout"); 253 } 254 // Check compressed_table_end relative to uncompressed_table_start 255 u2* uncompressed_table_start; 256 if (m->has_localvariable_table()) { 257 uncompressed_table_start = (u2*) m->localvariable_table_start(); 258 } else if (m->has_exception_handler()) { 259 uncompressed_table_start = (u2*) m->exception_table_start(); 260 } else if (m->has_checked_exceptions()) { 261 uncompressed_table_start = (u2*) m->checked_exceptions_start(); 262 } else { 263 uncompressed_table_start = (u2*) m_end; 264 } 265 int gap = (intptr_t) uncompressed_table_start - (intptr_t) compressed_table_end; 266 int max_gap = align_object_size(1)*BytesPerWord; 267 guarantee(gap >= 0 && gap < max_gap, "invalid method layout"); 268 } 269 } 270 271 bool constMethodKlass::oop_partially_loaded(oop obj) const { 272 assert(obj->is_constMethod(), "object must be klass"); 273 constMethodOop m = constMethodOop(obj); 274 // check whether stackmap_data points to self (flag for partially loaded) 275 return m->stackmap_data() == (typeArrayOop)obj; 276 } 277 278 279 // The exception_table is the last field set when loading an object. 280 void constMethodKlass::oop_set_partially_loaded(oop obj) { 281 assert(obj->is_constMethod(), "object must be klass"); 282 constMethodOop m = constMethodOop(obj); 283 // Temporarily set stackmap_data to point to self 284 m->set_stackmap_data((typeArrayOop)obj); 285 }