299
300 jlong* state_adr = &_method_state[code_id];
301 address metadata_table = method_data->_metadata_table;
302 int metadata_size = method_data->_metadata_size;
303 assert(code_id < _method_count, "sanity");
304 _aot_id++;
305
306 #ifdef ASSERT
307 if (_aot_id > CIStop || _aot_id < CIStart) {
308 // Skip compilation
309 return;
310 }
311 #endif
312 // Check one more time.
313 if (_code_to_aot[code_id]._state == invalid) {
314 return;
315 }
316 AOTCompiledMethod *aot = new AOTCompiledMethod(code, mh(), meta, metadata_table, metadata_size, state_adr, this, name, code_id, _aot_id);
317 assert(_code_to_aot[code_id]._aot == NULL, "should be not initialized");
318 _code_to_aot[code_id]._aot = aot; // Should set this first
319 if (Atomic::cmpxchg(in_use, (jint*)&_code_to_aot[code_id]._state, not_set) != not_set) {
320 _code_to_aot[code_id]._aot = NULL; // Clean
321 } else { // success
322 // Publish method
323 #ifdef TIERED
324 mh->set_aot_code(aot);
325 #endif
326 Method::set_code(mh, aot);
327 if (PrintAOT || (PrintCompilation && PrintAOT)) {
328 aot->print_on(tty, NULL);
329 }
330 // Publish oop only after we are visible to CompiledMethodIterator
331 aot->set_oop(mh()->method_holder()->klass_holder());
332 }
333 }
334
335 void AOTCodeHeap::link_primitive_array_klasses() {
336 ResourceMark rm;
337 for (int i = T_BOOLEAN; i <= T_CONFLICT; i++) {
338 BasicType t = (BasicType)i;
339 if (is_java_primitive(t)) {
361 address entry = _code_space + stub_offsets[i]._code_offset;
362 aot_metadata* meta = (aot_metadata *) (_method_metadata + stub_offsets[i]._meta_offset);
363 address metadata_table = (address)_metadata_got + stub_offsets[i]._metadata_got_offset;
364 int metadata_size = stub_offsets[i]._metadata_got_size;
365 int code_id = stub_offsets[i]._code_id;
366 assert(code_id < _method_count, "sanity");
367 jlong* state_adr = &_method_state[code_id];
368 int len = build_u2_from((address)stub_name);
369 stub_name += 2;
370 char* full_name = NEW_C_HEAP_ARRAY(char, len+5, mtCode);
371 if (full_name == NULL) { // No memory?
372 break;
373 }
374 memcpy(full_name, "AOT ", 4);
375 memcpy(full_name+4, stub_name, len);
376 full_name[len+4] = 0;
377 guarantee(_code_to_aot[code_id]._state != invalid, "stub %s can't be invalidated", full_name);
378 AOTCompiledMethod* aot = new AOTCompiledMethod(entry, NULL, meta, metadata_table, metadata_size, state_adr, this, full_name, code_id, i);
379 assert(_code_to_aot[code_id]._aot == NULL, "should be not initialized");
380 _code_to_aot[code_id]._aot = aot;
381 if (Atomic::cmpxchg(in_use, (jint*)&_code_to_aot[code_id]._state, not_set) != not_set) {
382 fatal("stab '%s' code state is %d", full_name, _code_to_aot[code_id]._state);
383 }
384 // Adjust code buffer boundaries only for stubs because they are last in the buffer.
385 adjust_boundaries(aot);
386 if (PrintAOT && Verbose) {
387 aot->print_on(tty, NULL);
388 }
389 }
390 }
391
// Looks up the exported symbol AOTSYMNAME in the loaded AOT library
// (via os::dll_lookup on _lib's dl handle) and stores VMSYMVAL, cast to
// AOTSYMTYPE, through the resulting pointer. Fails hard (guarantee) if
// the symbol is not present in the library.
392 #define SET_AOT_GLOBAL_SYMBOL_VALUE(AOTSYMNAME, AOTSYMTYPE, VMSYMVAL) \
393 { \
394 AOTSYMTYPE * adr = (AOTSYMTYPE *) os::dll_lookup(_lib->dl_handle(), AOTSYMNAME); \
395 /* Check for a lookup error */ \
396 guarantee(adr != NULL, "AOT Symbol not found %s", AOTSYMNAME); \
397 *adr = (AOTSYMTYPE) VMSYMVAL; \
398 }
399
400 void AOTCodeHeap::link_graal_runtime_symbols() {
401 SET_AOT_GLOBAL_SYMBOL_VALUE("_aot_jvmci_runtime_monitorenter", address, JVMCIRuntime::monitorenter);
632 int code_id = indexes[i];
633 if (_code_to_aot[code_id]._aot == aot) {
634 return true; // found dependent method
635 }
636 }
637 }
638 return false;
639 }
640
// Invalidate the AOT code of every method recorded as dependent on the given
// klass, then deoptimize any that were actually in use. Safe to race with
// publication: the cmpxchg below claims the slot's state atomically.
641 void AOTCodeHeap::sweep_dependent_methods(AOTKlassData* klass_data) {
642 // Make dependent methods non_entrant forever.
643 int methods_offset = klass_data->_dependent_methods_offset; // negative offset means no dependent-method list was recorded
644 if (methods_offset >= 0) {
645 int marked = 0; // number of methods marked for deoptimization below
646 address methods_cnt_adr = _dependencies + methods_offset; // list layout: [int count][int code_id] * count
647 int methods_cnt = *(int*)methods_cnt_adr;
648 int* indexes = (int*)(methods_cnt_adr + 4); // skip the 4-byte count to reach the code_id array
649 for (int i = 0; i < methods_cnt; ++i) {
650 int code_id = indexes[i];
651 // Invalidate aot code.
652 if (Atomic::cmpxchg(invalid, (jint*)&_code_to_aot[code_id]._state, not_set) != not_set) {
653 if (_code_to_aot[code_id]._state == in_use) { // cmpxchg failed: code was already published; only in_use entries carry live code
654 AOTCompiledMethod* aot = _code_to_aot[code_id]._aot;
655 assert(aot != NULL, "aot should be set");
656 if (!aot->is_runtime_stub()) { // Something is wrong - should not invalidate stubs.
657 aot->mark_for_deoptimization(false);
658 marked++;
659 }
660 }
661 }
662 }
663 if (marked > 0) {
664 VM_Deoptimize op; // safepoint VM operation: deoptimizes all methods marked above
665 VMThread::execute(&op);
666 }
667 }
668 }
669
670 bool AOTCodeHeap::load_klass_data(InstanceKlass* ik, Thread* thread) {
671 ResourceMark rm;
672
|
299
300 jlong* state_adr = &_method_state[code_id];
301 address metadata_table = method_data->_metadata_table;
302 int metadata_size = method_data->_metadata_size;
303 assert(code_id < _method_count, "sanity");
304 _aot_id++;
305
306 #ifdef ASSERT
307 if (_aot_id > CIStop || _aot_id < CIStart) {
308 // Skip compilation
309 return;
310 }
311 #endif
312 // Check one more time.
313 if (_code_to_aot[code_id]._state == invalid) {
314 return;
315 }
316 AOTCompiledMethod *aot = new AOTCompiledMethod(code, mh(), meta, metadata_table, metadata_size, state_adr, this, name, code_id, _aot_id);
317 assert(_code_to_aot[code_id]._aot == NULL, "should be not initialized");
318 _code_to_aot[code_id]._aot = aot; // Should set this first
319 if (Atomic::cmpxchg(in_use, &_code_to_aot[code_id]._state, not_set) != not_set) {
320 _code_to_aot[code_id]._aot = NULL; // Clean
321 } else { // success
322 // Publish method
323 #ifdef TIERED
324 mh->set_aot_code(aot);
325 #endif
326 Method::set_code(mh, aot);
327 if (PrintAOT || (PrintCompilation && PrintAOT)) {
328 aot->print_on(tty, NULL);
329 }
330 // Publish oop only after we are visible to CompiledMethodIterator
331 aot->set_oop(mh()->method_holder()->klass_holder());
332 }
333 }
334
335 void AOTCodeHeap::link_primitive_array_klasses() {
336 ResourceMark rm;
337 for (int i = T_BOOLEAN; i <= T_CONFLICT; i++) {
338 BasicType t = (BasicType)i;
339 if (is_java_primitive(t)) {
361 address entry = _code_space + stub_offsets[i]._code_offset;
362 aot_metadata* meta = (aot_metadata *) (_method_metadata + stub_offsets[i]._meta_offset);
363 address metadata_table = (address)_metadata_got + stub_offsets[i]._metadata_got_offset;
364 int metadata_size = stub_offsets[i]._metadata_got_size;
365 int code_id = stub_offsets[i]._code_id;
366 assert(code_id < _method_count, "sanity");
367 jlong* state_adr = &_method_state[code_id];
368 int len = build_u2_from((address)stub_name);
369 stub_name += 2;
370 char* full_name = NEW_C_HEAP_ARRAY(char, len+5, mtCode);
371 if (full_name == NULL) { // No memory?
372 break;
373 }
374 memcpy(full_name, "AOT ", 4);
375 memcpy(full_name+4, stub_name, len);
376 full_name[len+4] = 0;
377 guarantee(_code_to_aot[code_id]._state != invalid, "stub %s can't be invalidated", full_name);
378 AOTCompiledMethod* aot = new AOTCompiledMethod(entry, NULL, meta, metadata_table, metadata_size, state_adr, this, full_name, code_id, i);
379 assert(_code_to_aot[code_id]._aot == NULL, "should be not initialized");
380 _code_to_aot[code_id]._aot = aot;
381 if (Atomic::cmpxchg(in_use, &_code_to_aot[code_id]._state, not_set) != not_set) {
382 fatal("stab '%s' code state is %d", full_name, _code_to_aot[code_id]._state);
383 }
384 // Adjust code buffer boundaries only for stubs because they are last in the buffer.
385 adjust_boundaries(aot);
386 if (PrintAOT && Verbose) {
387 aot->print_on(tty, NULL);
388 }
389 }
390 }
391
// Looks up the exported symbol AOTSYMNAME in the loaded AOT library
// (via os::dll_lookup on _lib's dl handle) and stores VMSYMVAL, cast to
// AOTSYMTYPE, through the resulting pointer. Fails hard (guarantee) if
// the symbol is not present in the library.
392 #define SET_AOT_GLOBAL_SYMBOL_VALUE(AOTSYMNAME, AOTSYMTYPE, VMSYMVAL) \
393 { \
394 AOTSYMTYPE * adr = (AOTSYMTYPE *) os::dll_lookup(_lib->dl_handle(), AOTSYMNAME); \
395 /* Check for a lookup error */ \
396 guarantee(adr != NULL, "AOT Symbol not found %s", AOTSYMNAME); \
397 *adr = (AOTSYMTYPE) VMSYMVAL; \
398 }
399
400 void AOTCodeHeap::link_graal_runtime_symbols() {
401 SET_AOT_GLOBAL_SYMBOL_VALUE("_aot_jvmci_runtime_monitorenter", address, JVMCIRuntime::monitorenter);
632 int code_id = indexes[i];
633 if (_code_to_aot[code_id]._aot == aot) {
634 return true; // found dependent method
635 }
636 }
637 }
638 return false;
639 }
640
// Invalidate the AOT code of every method recorded as dependent on the given
// klass, then deoptimize any that were actually in use. Safe to race with
// publication: the cmpxchg below claims the slot's state atomically.
641 void AOTCodeHeap::sweep_dependent_methods(AOTKlassData* klass_data) {
642 // Make dependent methods non_entrant forever.
643 int methods_offset = klass_data->_dependent_methods_offset; // negative offset means no dependent-method list was recorded
644 if (methods_offset >= 0) {
645 int marked = 0; // number of methods marked for deoptimization below
646 address methods_cnt_adr = _dependencies + methods_offset; // list layout: [int count][int code_id] * count
647 int methods_cnt = *(int*)methods_cnt_adr;
648 int* indexes = (int*)(methods_cnt_adr + 4); // skip the 4-byte count to reach the code_id array
649 for (int i = 0; i < methods_cnt; ++i) {
650 int code_id = indexes[i];
651 // Invalidate aot code.
652 if (Atomic::cmpxchg(invalid, &_code_to_aot[code_id]._state, not_set) != not_set) {
653 if (_code_to_aot[code_id]._state == in_use) { // cmpxchg failed: code was already published; only in_use entries carry live code
654 AOTCompiledMethod* aot = _code_to_aot[code_id]._aot;
655 assert(aot != NULL, "aot should be set");
656 if (!aot->is_runtime_stub()) { // Something is wrong - should not invalidate stubs.
657 aot->mark_for_deoptimization(false);
658 marked++;
659 }
660 }
661 }
662 }
663 if (marked > 0) {
664 VM_Deoptimize op; // safepoint VM operation: deoptimizes all methods marked above
665 VMThread::execute(&op);
666 }
667 }
668 }
669
670 bool AOTCodeHeap::load_klass_data(InstanceKlass* ik, Thread* thread) {
671 ResourceMark rm;
672
|