330
331 jlong* state_adr = &_method_state[code_id];
332 address metadata_table = method_data->_metadata_table;
333 int metadata_size = method_data->_metadata_size;
334 assert(code_id < _method_count, "sanity");
335 _aot_id++;
336
337 #ifdef ASSERT
338 if (_aot_id > CIStop || _aot_id < CIStart) {
339 // Skip compilation
340 return;
341 }
342 #endif
343 // Check one more time.
344 if (_code_to_aot[code_id]._state == invalid) {
345 return;
346 }
347 AOTCompiledMethod *aot = new AOTCompiledMethod(code, mh(), meta, metadata_table, metadata_size, state_adr, this, name, code_id, _aot_id);
348 assert(_code_to_aot[code_id]._aot == NULL, "should be not initialized");
349 _code_to_aot[code_id]._aot = aot; // Should set this first
350 if (Atomic::cmpxchg(in_use, &_code_to_aot[code_id]._state, not_set) != not_set) {
351 _code_to_aot[code_id]._aot = NULL; // Clean
352 } else { // success
353 // Publish method
354 #ifdef TIERED
355 mh->set_aot_code(aot);
356 #endif
357 {
358 MutexLocker pl(CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
359 Method::set_code(mh, aot);
360 }
361 if (PrintAOT || (PrintCompilation && PrintAOT)) {
362 PauseNoSafepointVerifier pnsv(&nsv); // aot code is registered already
363 aot->print_on(tty, NULL);
364 }
365 // Publish oop only after we are visible to CompiledMethodIterator
366 aot->set_oop(mh()->method_holder()->klass_holder());
367 }
368 }
369
370 void AOTCodeHeap::link_primitive_array_klasses() {
393 AOTMethodOffsets* stub_offsets = (AOTMethodOffsets*)_stubs_offsets;
394 for (int i = 0; i < stubs_count; ++i) {
395 const char* stub_name = _metaspace_names + stub_offsets[i]._name_offset;
396 address entry = _code_space + stub_offsets[i]._code_offset;
397 aot_metadata* meta = (aot_metadata *) (_method_metadata + stub_offsets[i]._meta_offset);
398 address metadata_table = (address)_metadata_got + stub_offsets[i]._metadata_got_offset;
399 int metadata_size = stub_offsets[i]._metadata_got_size;
400 int code_id = stub_offsets[i]._code_id;
401 assert(code_id < _method_count, "sanity");
402 jlong* state_adr = &_method_state[code_id];
403 int len = Bytes::get_Java_u2((address)stub_name);
404 stub_name += 2;
405 char* full_name = NEW_C_HEAP_ARRAY(char, len+5, mtCode);
406 memcpy(full_name, "AOT ", 4);
407 memcpy(full_name+4, stub_name, len);
408 full_name[len+4] = 0;
409 guarantee(_code_to_aot[code_id]._state != invalid, "stub %s can't be invalidated", full_name);
410 AOTCompiledMethod* aot = new AOTCompiledMethod(entry, NULL, meta, metadata_table, metadata_size, state_adr, this, full_name, code_id, i);
411 assert(_code_to_aot[code_id]._aot == NULL, "should be not initialized");
412 _code_to_aot[code_id]._aot = aot;
413 if (Atomic::cmpxchg(in_use, &_code_to_aot[code_id]._state, not_set) != not_set) {
414 fatal("stab '%s' code state is %d", full_name, _code_to_aot[code_id]._state);
415 }
416 // Adjust code buffer boundaries only for stubs because they are last in the buffer.
417 adjust_boundaries(aot);
418 if (PrintAOT && Verbose) {
419 aot->print_on(tty, NULL);
420 }
421 }
422 }
423
// Store a VM-side value into a global variable exported by the loaded AOT
// library: look the symbol up by name in the library's dynamic symbol table
// and write VMSYMVAL (cast to AOTSYMTYPE) through the returned address.
// A missing symbol is fatal (the AOT image and the VM are out of sync).
#define SET_AOT_GLOBAL_SYMBOL_VALUE(AOTSYMNAME, AOTSYMTYPE, VMSYMVAL)               \
  {                                                                                 \
    AOTSYMTYPE* slot = (AOTSYMTYPE*) os::dll_lookup(_lib->dl_handle(), AOTSYMNAME); \
    /* Check for a lookup error */                                                  \
    guarantee(slot != NULL, "AOT Symbol not found %s", AOTSYMNAME);                 \
    *slot = (AOTSYMTYPE) VMSYMVAL;                                                  \
  }
431
432 void AOTCodeHeap::link_graal_runtime_symbols() {
433 SET_AOT_GLOBAL_SYMBOL_VALUE("_aot_jvmci_runtime_monitorenter", address, JVMCIRuntime::monitorenter);
704 int methods_offset = klass_data->_dependent_methods_offset;
705 if (methods_offset >= 0) {
706 address methods_cnt_adr = _dependencies + methods_offset;
707 int methods_cnt = *(int*)methods_cnt_adr;
708 int* indexes = (int*)(methods_cnt_adr + 4);
709 for (int i = 0; i < methods_cnt; ++i) {
710 int code_id = indexes[i];
711 AOTCompiledMethod* aot = _code_to_aot[code_id]._aot;
712 if (aot != NULL) {
713 aot->mark_for_deoptimization(false);
714 }
715 }
716 }
717 }
718
719 void AOTCodeHeap::sweep_dependent_methods(int* indexes, int methods_cnt) {
720 int marked = 0;
721 for (int i = 0; i < methods_cnt; ++i) {
722 int code_id = indexes[i];
723 // Invalidate aot code.
724 if (Atomic::cmpxchg(invalid, &_code_to_aot[code_id]._state, not_set) != not_set) {
725 if (_code_to_aot[code_id]._state == in_use) {
726 AOTCompiledMethod* aot = _code_to_aot[code_id]._aot;
727 assert(aot != NULL, "aot should be set");
728 if (!aot->is_runtime_stub()) { // Something is wrong - should not invalidate stubs.
729 aot->mark_for_deoptimization(false);
730 marked++;
731 }
732 }
733 }
734 }
735 if (marked > 0) {
736 Deoptimization::deoptimize_all_marked();
737 }
738 }
739
740 void AOTCodeHeap::sweep_dependent_methods(AOTKlassData* klass_data) {
741 // Make dependent methods non_entrant forever.
742 int methods_offset = klass_data->_dependent_methods_offset;
743 if (methods_offset >= 0) {
744 address methods_cnt_adr = _dependencies + methods_offset;
|
330
331 jlong* state_adr = &_method_state[code_id];
332 address metadata_table = method_data->_metadata_table;
333 int metadata_size = method_data->_metadata_size;
334 assert(code_id < _method_count, "sanity");
335 _aot_id++;
336
337 #ifdef ASSERT
338 if (_aot_id > CIStop || _aot_id < CIStart) {
339 // Skip compilation
340 return;
341 }
342 #endif
343 // Check one more time.
344 if (_code_to_aot[code_id]._state == invalid) {
345 return;
346 }
347 AOTCompiledMethod *aot = new AOTCompiledMethod(code, mh(), meta, metadata_table, metadata_size, state_adr, this, name, code_id, _aot_id);
348 assert(_code_to_aot[code_id]._aot == NULL, "should be not initialized");
349 _code_to_aot[code_id]._aot = aot; // Should set this first
350 if (Atomic::cmpxchg(&_code_to_aot[code_id]._state, not_set, in_use) != not_set) {
351 _code_to_aot[code_id]._aot = NULL; // Clean
352 } else { // success
353 // Publish method
354 #ifdef TIERED
355 mh->set_aot_code(aot);
356 #endif
357 {
358 MutexLocker pl(CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
359 Method::set_code(mh, aot);
360 }
361 if (PrintAOT || (PrintCompilation && PrintAOT)) {
362 PauseNoSafepointVerifier pnsv(&nsv); // aot code is registered already
363 aot->print_on(tty, NULL);
364 }
365 // Publish oop only after we are visible to CompiledMethodIterator
366 aot->set_oop(mh()->method_holder()->klass_holder());
367 }
368 }
369
370 void AOTCodeHeap::link_primitive_array_klasses() {
393 AOTMethodOffsets* stub_offsets = (AOTMethodOffsets*)_stubs_offsets;
394 for (int i = 0; i < stubs_count; ++i) {
395 const char* stub_name = _metaspace_names + stub_offsets[i]._name_offset;
396 address entry = _code_space + stub_offsets[i]._code_offset;
397 aot_metadata* meta = (aot_metadata *) (_method_metadata + stub_offsets[i]._meta_offset);
398 address metadata_table = (address)_metadata_got + stub_offsets[i]._metadata_got_offset;
399 int metadata_size = stub_offsets[i]._metadata_got_size;
400 int code_id = stub_offsets[i]._code_id;
401 assert(code_id < _method_count, "sanity");
402 jlong* state_adr = &_method_state[code_id];
403 int len = Bytes::get_Java_u2((address)stub_name);
404 stub_name += 2;
405 char* full_name = NEW_C_HEAP_ARRAY(char, len+5, mtCode);
406 memcpy(full_name, "AOT ", 4);
407 memcpy(full_name+4, stub_name, len);
408 full_name[len+4] = 0;
409 guarantee(_code_to_aot[code_id]._state != invalid, "stub %s can't be invalidated", full_name);
410 AOTCompiledMethod* aot = new AOTCompiledMethod(entry, NULL, meta, metadata_table, metadata_size, state_adr, this, full_name, code_id, i);
411 assert(_code_to_aot[code_id]._aot == NULL, "should be not initialized");
412 _code_to_aot[code_id]._aot = aot;
413 if (Atomic::cmpxchg(&_code_to_aot[code_id]._state, not_set, in_use) != not_set) {
414 fatal("stab '%s' code state is %d", full_name, _code_to_aot[code_id]._state);
415 }
416 // Adjust code buffer boundaries only for stubs because they are last in the buffer.
417 adjust_boundaries(aot);
418 if (PrintAOT && Verbose) {
419 aot->print_on(tty, NULL);
420 }
421 }
422 }
423
// Store a VM-side value into a global variable exported by the loaded AOT
// library: look the symbol up by name in the library's dynamic symbol table
// and write VMSYMVAL (cast to AOTSYMTYPE) through the returned address.
// A missing symbol is fatal (the AOT image and the VM are out of sync).
#define SET_AOT_GLOBAL_SYMBOL_VALUE(AOTSYMNAME, AOTSYMTYPE, VMSYMVAL)               \
  {                                                                                 \
    AOTSYMTYPE* slot = (AOTSYMTYPE*) os::dll_lookup(_lib->dl_handle(), AOTSYMNAME); \
    /* Check for a lookup error */                                                  \
    guarantee(slot != NULL, "AOT Symbol not found %s", AOTSYMNAME);                 \
    *slot = (AOTSYMTYPE) VMSYMVAL;                                                  \
  }
431
432 void AOTCodeHeap::link_graal_runtime_symbols() {
433 SET_AOT_GLOBAL_SYMBOL_VALUE("_aot_jvmci_runtime_monitorenter", address, JVMCIRuntime::monitorenter);
704 int methods_offset = klass_data->_dependent_methods_offset;
705 if (methods_offset >= 0) {
706 address methods_cnt_adr = _dependencies + methods_offset;
707 int methods_cnt = *(int*)methods_cnt_adr;
708 int* indexes = (int*)(methods_cnt_adr + 4);
709 for (int i = 0; i < methods_cnt; ++i) {
710 int code_id = indexes[i];
711 AOTCompiledMethod* aot = _code_to_aot[code_id]._aot;
712 if (aot != NULL) {
713 aot->mark_for_deoptimization(false);
714 }
715 }
716 }
717 }
718
719 void AOTCodeHeap::sweep_dependent_methods(int* indexes, int methods_cnt) {
720 int marked = 0;
721 for (int i = 0; i < methods_cnt; ++i) {
722 int code_id = indexes[i];
723 // Invalidate aot code.
724 if (Atomic::cmpxchg(&_code_to_aot[code_id]._state, not_set, invalid) != not_set) {
725 if (_code_to_aot[code_id]._state == in_use) {
726 AOTCompiledMethod* aot = _code_to_aot[code_id]._aot;
727 assert(aot != NULL, "aot should be set");
728 if (!aot->is_runtime_stub()) { // Something is wrong - should not invalidate stubs.
729 aot->mark_for_deoptimization(false);
730 marked++;
731 }
732 }
733 }
734 }
735 if (marked > 0) {
736 Deoptimization::deoptimize_all_marked();
737 }
738 }
739
740 void AOTCodeHeap::sweep_dependent_methods(AOTKlassData* klass_data) {
741 // Make dependent methods non_entrant forever.
742 int methods_offset = klass_data->_dependent_methods_offset;
743 if (methods_offset >= 0) {
744 address methods_cnt_adr = _dependencies + methods_offset;
|