src/hotspot/share/runtime/deoptimization.cpp

  25 #include "precompiled.hpp"
  26 #include "jvm.h"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "code/codeCache.hpp"
  29 #include "code/debugInfoRec.hpp"
  30 #include "code/nmethod.hpp"
  31 #include "code/pcDesc.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "interpreter/bytecode.hpp"
  34 #include "interpreter/interpreter.hpp"
  35 #include "interpreter/oopMapCache.hpp"
  36 #include "memory/allocation.inline.hpp"
  37 #include "memory/oopFactory.hpp"
  38 #include "memory/resourceArea.hpp"
  39 #include "oops/constantPool.hpp"
  40 #include "oops/method.hpp"
  41 #include "oops/objArrayOop.inline.hpp"
  42 #include "oops/oop.inline.hpp"
  43 #include "oops/fieldStreams.hpp"
  44 #include "oops/typeArrayOop.inline.hpp"



  45 #include "oops/verifyOopClosure.hpp"
  46 #include "prims/jvmtiThreadState.hpp"
  47 #include "runtime/biasedLocking.hpp"
  48 #include "runtime/compilationPolicy.hpp"
  49 #include "runtime/deoptimization.hpp"
  50 #include "runtime/frame.inline.hpp"
  51 #include "runtime/handles.inline.hpp"
  52 #include "runtime/interfaceSupport.inline.hpp"
  53 #include "runtime/safepointVerifiers.hpp"
  54 #include "runtime/sharedRuntime.hpp"
  55 #include "runtime/signature.hpp"
  56 #include "runtime/stubRoutines.hpp"
  57 #include "runtime/thread.hpp"
  58 #include "runtime/threadSMR.hpp"
  59 #include "runtime/vframe.hpp"
  60 #include "runtime/vframeArray.hpp"
  61 #include "runtime/vframe_hp.hpp"
  62 #include "utilities/events.hpp"
  63 #include "utilities/preserveException.hpp"
  64 #include "utilities/xmlstream.hpp"


 201 
 202 #if COMPILER2_OR_JVMCI
 203   // Reallocate the non-escaping objects and restore their fields. Then
 204   // relock objects if synchronization on them was eliminated.
 205 #if !INCLUDE_JVMCI
 206   if (DoEscapeAnalysis || EliminateNestedLocks) {
 207     if (EliminateAllocations) {
 208 #endif // INCLUDE_JVMCI
 209       assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames");
 210       GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects();
 211 
 212       // The flag return_oop() indicates call sites which return oop
 213       // in compiled code. Such sites include java method calls,
 214       // runtime calls (for example, used to allocate new objects/arrays
 215       // on slow code path) and any other calls generated in compiled code.
 216       // It is not guaranteed that we can get such information here only
 217       // by analyzing bytecode in deoptimized frames. This is why this flag
 218       // is set during method compilation (see Compile::Process_OopMap_Node()).
 219       // If the previous frame was popped or if we are dispatching an exception,
 220       // we don't have an oop result.
 221       bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
 222       Handle return_value;
 223       if (save_oop_result) {
 224         // Reallocation may trigger GC. If deoptimization happened on return from
 225         // call which returns oop we need to save it since it is not in oopmap.
 226         oop result = deoptee.saved_oop_result(&map);
 227         assert(oopDesc::is_oop_or_null(result), "must be oop");
 228         return_value = Handle(thread, result);
 229         assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
 230         if (TraceDeoptimization) {
 231           ttyLocker ttyl;
 232           tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
 233         }
 234       }
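
The comment above is the key constraint in this block: reallocating scalar-replaced objects can trigger a GC, and the saved return oop is not described by any oopmap, so it is parked in a Handle first. A standalone toy model of that pattern is sketched below; none of these types are HotSpot code and the "GC" is simulated, but it shows why the raw pointer must not be used after a collection while the handle stays valid.

    #include <cassert>

    // Toy stand-ins: a raw "oop" is invalidated by a moving collection,
    // while a Handle is a root the collector updates in place.
    struct Obj { int field; };
    typedef Obj* oop;

    struct Handle {
      oop _obj;
      explicit Handle(oop o = nullptr) : _obj(o) {}
      oop operator()() const { return _obj; }
    };

    static Obj g_to_space[16];

    // Simulated moving GC: relocates the object and patches the handle,
    // leaving any raw copy of the old pointer dangling.
    void simulated_gc(Handle& h) {
      if (h._obj != nullptr) {
        g_to_space[0] = *h._obj;
        h._obj = &g_to_space[0];
      }
    }

    int main() {
      Obj from_space{42};
      oop raw = &from_space;        // like deoptee.saved_oop_result(&map)
      Handle return_value(raw);     // save it before anything that can GC
      simulated_gc(return_value);   // object reallocation may do this
      assert(return_value()->field == 42);  // the handle still sees the value
      // 'raw' still points at from-space here; using it would be the bug.
      return 0;
    }
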
 235       if (objects != NULL) {

 236         JRT_BLOCK
 237           realloc_failures = realloc_objects(thread, &deoptee, objects, THREAD);
 238         JRT_END
 239         bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
 240         reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal);
 241 #ifndef PRODUCT
 242         if (TraceDeoptimization) {
 243           ttyLocker ttyl;
 244           tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 245           print_objects(objects, realloc_failures);
 246         }
 247 #endif
 248       }
 249       if (save_oop_result) {
 250         // Restore result.
 251         deoptee.set_saved_oop_result(&map, return_value());

 252       }
 253 #if !INCLUDE_JVMCI
 254     }
 255     if (EliminateLocks) {
 256 #endif // INCLUDE_JVMCI
 257 #ifndef PRODUCT
 258       bool first = true;
 259 #endif
 260       for (int i = 0; i < chunk->length(); i++) {
 261         compiledVFrame* cvf = chunk->at(i);
 262         assert (cvf->scope() != NULL,"expect only compiled java frames");
 263         GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
 264         if (monitors->is_nonempty()) {
 265           relock_objects(monitors, thread, realloc_failures);
 266 #ifndef PRODUCT
 267           if (PrintDeoptimizationDetails) {
 268             ttyLocker ttyl;
 269             for (int j = 0; j < monitors->length(); j++) {
 270               MonitorInfo* mi = monitors->at(j);
 271               if (mi->eliminated()) {


 468   // its caller's stack by. If the caller is a compiled frame then
 469   // we pretend that the callee has no parameters so that the
 470   // extension counts for the full amount of locals and not just
 471   // locals-parms. This is because without a c2i adapter the parm
 472   // area as created by the compiled frame will not be usable by
 473   // the interpreter. (Depending on the calling convention there
 474   // may not even be enough space).
 475 
 476   // QQQ I'd rather see this pushed down into last_frame_adjust
 477   // and have it take the sender (aka caller).
 478 
 479   if (deopt_sender.is_compiled_frame() || caller_was_method_handle) {
 480     caller_adjustment = last_frame_adjust(0, callee_locals);
 481   } else if (callee_locals > callee_parameters) {
 482     // The caller frame may need extending to accommodate
 483     // non-parameter locals of the first unpacked interpreted frame.
 484     // Compute that adjustment.
 485     caller_adjustment = last_frame_adjust(callee_parameters, callee_locals);
 486   }
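
A self-contained sketch of the adjustment computed above is shown next. The real last_frame_adjust is platform-specific; the semantics assumed here (extension equals the callee locals not already covered by the caller's parameter area, or all locals when the caller is compiled and its parameter area is unusable) are only an approximation used for the worked numbers.

    #include <cassert>

    // Assumed semantics, not the real HotSpot last_frame_adjust.
    static int last_frame_adjust_sketch(int callee_parameters, int callee_locals) {
      return callee_locals > callee_parameters ? callee_locals - callee_parameters : 0;
    }

    int main() {
      // Interpreted caller: its outgoing argument area already supplies 3 slots,
      // so only the 2 extra locals of the callee require extension.
      assert(last_frame_adjust_sketch(3, 5) == 2);
      // Compiled caller (no usable parameter area without a c2i adapter): pretend
      // the callee has no parameters, so all 5 local slots are added.
      assert(last_frame_adjust_sketch(0, 5) == 5);
      return 0;
    }
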
 487 
 488   // If the sender is deoptimized the we must retrieve the address of the handler
 489   // since the frame will "magically" show the original pc before the deopt
 490   // and we'd undo the deopt.
 491 
 492   frame_pcs[0] = deopt_sender.raw_pc();
 493 
 494   assert(CodeCache::find_blob_unsafe(frame_pcs[0]) != NULL, "bad pc");
 495 
 496 #if INCLUDE_JVMCI
 497   if (exceptionObject() != NULL) {
 498     thread->set_exception_oop(exceptionObject());
 499     exec_mode = Unpack_exception;
 500   }
 501 #endif
 502 
 503   if (thread->frames_to_pop_failed_realloc() > 0 && exec_mode != Unpack_uncommon_trap) {
 504     assert(thread->has_pending_exception(), "should have thrown OOME");
 505     thread->set_exception_oop(thread->pending_exception());
 506     thread->clear_pending_exception();
 507     exec_mode = Unpack_exception;
 508   }


 790 
 791 #if COMPILER2_OR_JVMCI
 792 bool Deoptimization::realloc_objects(JavaThread* thread, frame* fr, GrowableArray<ScopeValue*>* objects, TRAPS) {
 793   Handle pending_exception(THREAD, thread->pending_exception());
 794   const char* exception_file = thread->exception_file();
 795   int exception_line = thread->exception_line();
 796   thread->clear_pending_exception();
 797 
 798   bool failures = false;
 799 
 800   for (int i = 0; i < objects->length(); i++) {
 801     assert(objects->at(i)->is_object(), "invalid debug information");
 802     ObjectValue* sv = (ObjectValue*) objects->at(i);
 803 
 804     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
 805     oop obj = NULL;
 806 
 807     if (k->is_instance_klass()) {
 808       InstanceKlass* ik = InstanceKlass::cast(k);
 809       obj = ik->allocate_instance(THREAD);




 810     } else if (k->is_typeArray_klass()) {
 811       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
 812       assert(sv->field_size() % type2size[ak->element_type()] == 0, "non-integral array length");
 813       int len = sv->field_size() / type2size[ak->element_type()];
 814       obj = ak->allocate(len, THREAD);
 815     } else if (k->is_objArray_klass()) {
 816       ObjArrayKlass* ak = ObjArrayKlass::cast(k);
 817       obj = ak->allocate(sv->field_size(), THREAD);
 818     }
 819 
 820     if (obj == NULL) {
 821       failures = true;
 822     }
 823 
 824     assert(sv->value().is_null(), "redundant reallocation");
 825     assert(obj != NULL || HAS_PENDING_EXCEPTION, "allocation should succeed or we should get an exception");
 826     CLEAR_PENDING_EXCEPTION;
 827     sv->set_value(obj);
 828   }
 829 
 830   if (failures) {
 831     THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), failures);
 832   } else if (pending_exception.not_null()) {
 833     thread->set_pending_exception(pending_exception(), exception_file, exception_line);
 834   }
 835 
 836   return failures;
 837 }
 838 
 839 // restore elements of an eliminated type array
 840 void Deoptimization::reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) {
 841   int index = 0;
 842   intptr_t val;
 843 
 844   for (int i = 0; i < sv->field_size(); i++) {
 845     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
 846     switch(type) {
 847     case T_LONG: case T_DOUBLE: {
 848       assert(value->type() == T_INT, "Agreement.");
 849       StackValue* low =
 850         StackValue::create_stack_value(fr, reg_map, sv->field_at(++i));
 851 #ifdef _LP64
 852       jlong res = (jlong)low->get_int();
 853 #else
 854 #ifdef SPARC
 855       // For SPARC we have to swap high and low words.
 856       jlong res = jlong_from((jint)low->get_int(), (jint)value->get_int());
 857 #else
 858       jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int());


 929         ShouldNotReachHere();
 930     }
 931     index++;
 932   }
 933 }
 934 
 935 
 936 // restore fields of an eliminated object array
 937 void Deoptimization::reassign_object_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, objArrayOop obj) {
 938   for (int i = 0; i < sv->field_size(); i++) {
 939     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
 940     assert(value->type() == T_OBJECT, "object element expected");
 941     obj->obj_at_put(i, value->get_obj()());
 942   }
 943 }
 944 
 945 class ReassignedField {
 946 public:
 947   int _offset;
 948   BasicType _type;

 949 public:
 950   ReassignedField() {
 951     _offset = 0;
 952     _type = T_ILLEGAL;

 953   }
 954 };
 955 
 956 int compare(ReassignedField* left, ReassignedField* right) {
 957   return left->_offset - right->_offset;
 958 }
 959 
 960 // Restore fields of an eliminated instance object using the same field order
 961 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
 962 static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal) {
 963   if (klass->superklass() != NULL) {
 964     svIndex = reassign_fields_by_klass(klass->superklass(), fr, reg_map, sv, svIndex, obj, skip_internal);
 965   }
 966 
 967   GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
 968   for (AllFieldStream fs(klass); !fs.done(); fs.next()) {
 969     if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
 970       ReassignedField field;
 971       field._offset = fs.offset();
 972       field._type = FieldType::basic_type(fs.signature());
 973       fields->append(field);
 974     }
 975   }
 976   fields->sort(compare);
 977   for (int i = 0; i < fields->length(); i++) {
 978     intptr_t val;
 979     ScopeValue* scope_field = sv->field_at(svIndex);
 980     StackValue* value = StackValue::create_stack_value(fr, reg_map, scope_field);
 981     int offset = fields->at(i)._offset;
 982     BasicType type = fields->at(i)._type;
 983     switch (type) {
 984       case T_OBJECT: case T_ARRAY:

 985         assert(value->type() == T_OBJECT, "Agreement.");
 986         obj->obj_field_put(offset, value->get_obj()());
 987         break;
 988 
 989       // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem.
 990       case T_INT: case T_FLOAT: { // 4 bytes.
 991         assert(value->type() == T_INT, "Agreement.");
 992         bool big_value = false;
 993         if (i+1 < fields->length() && fields->at(i+1)._type == T_INT) {
 994           if (scope_field->is_location()) {
 995             Location::Type type = ((LocationValue*) scope_field)->location().type();
 996             if (type == Location::dbl || type == Location::lng) {
 997               big_value = true;
 998             }
 999           }
1000           if (scope_field->is_constant_int()) {
1001             ScopeValue* next_scope_field = sv->field_at(svIndex + 1);
1002             if (next_scope_field->is_constant_long() || next_scope_field->is_constant_double()) {
1003               big_value = true;
1004             }
1005           }
1006         }
1007 
1008         if (big_value) {


1049       case T_BYTE:
1050         assert(value->type() == T_INT, "Agreement.");
1051         val = value->get_int();
1052         obj->byte_field_put(offset, (jbyte)*((jint*)&val));
1053         break;
1054 
1055       case T_BOOLEAN:
1056         assert(value->type() == T_INT, "Agreement.");
1057         val = value->get_int();
1058         obj->bool_field_put(offset, (jboolean)*((jint*)&val));
1059         break;
1060 
1061       default:
1062         ShouldNotReachHere();
1063     }
1064     svIndex++;
1065   }
1066   return svIndex;
1067 }
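
The big_value detection in the T_INT/T_FLOAT case above exists because the optimizer may record one 64-bit long or double across two adjacent 4-byte field slots. The standalone sketch below (assumed field layout, not HotSpot code) only demonstrates that single-store-spans-two-fields idea, which is why the following T_INT field is consumed together with the current one instead of being reassigned on its own.

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      unsigned char object[16] = {0};      // pretend object body with two 4-byte fields at offsets 4 and 8
      int64_t big = 0x1122334455667788LL;  // the 64-bit value recorded in the scope

      // Writing the one 64-bit value covers both 4-byte field slots at once.
      std::memcpy(object + 4, &big, sizeof(big));

      int64_t read_back;
      std::memcpy(&read_back, object + 4, sizeof(read_back));
      assert(read_back == big);            // one value, two adjacent int-sized fields
      return 0;
    }
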
1068 
1069 // restore fields of all eliminated objects and arrays
1070 void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal) {
1071   for (int i = 0; i < objects->length(); i++) {
1072     ObjectValue* sv = (ObjectValue*) objects->at(i);
1073     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
1074     Handle obj = sv->value();
1075     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1076     if (PrintDeoptimizationDetails) {
1077       tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string());
1078     }
1079     if (obj.is_null()) {
1080       continue;
1081     }
1082 
1083     if (k->is_instance_klass()) {
1084       InstanceKlass* ik = InstanceKlass::cast(k);
1085       reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal);



1086     } else if (k->is_typeArray_klass()) {
1087       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1088       reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
1089     } else if (k->is_objArray_klass()) {
1090       reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj());
1091     }
1092   }
1093 }
1094 
1095 
1096 // relock objects for which synchronization was eliminated
1097 void Deoptimization::relock_objects(GrowableArray<MonitorInfo*>* monitors, JavaThread* thread, bool realloc_failures) {
1098   for (int i = 0; i < monitors->length(); i++) {
1099     MonitorInfo* mon_info = monitors->at(i);
1100     if (mon_info->eliminated()) {
1101       assert(!mon_info->owner_is_scalar_replaced() || realloc_failures, "reallocation was missed");
1102       if (!mon_info->owner_is_scalar_replaced()) {
1103         Handle obj(thread, mon_info->owner());
1104         markOop mark = obj->mark();
1105         if (UseBiasedLocking && mark->has_bias_pattern()) {




  25 #include "precompiled.hpp"
  26 #include "jvm.h"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "code/codeCache.hpp"
  29 #include "code/debugInfoRec.hpp"
  30 #include "code/nmethod.hpp"
  31 #include "code/pcDesc.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "interpreter/bytecode.hpp"
  34 #include "interpreter/interpreter.hpp"
  35 #include "interpreter/oopMapCache.hpp"
  36 #include "memory/allocation.inline.hpp"
  37 #include "memory/oopFactory.hpp"
  38 #include "memory/resourceArea.hpp"
  39 #include "oops/constantPool.hpp"
  40 #include "oops/method.hpp"
  41 #include "oops/objArrayOop.inline.hpp"
  42 #include "oops/oop.inline.hpp"
  43 #include "oops/fieldStreams.hpp"
  44 #include "oops/typeArrayOop.inline.hpp"
  45 #include "oops/valueArrayKlass.hpp"
  46 #include "oops/valueArrayOop.hpp"
  47 #include "oops/valueKlass.hpp"
  48 #include "oops/verifyOopClosure.hpp"
  49 #include "prims/jvmtiThreadState.hpp"
  50 #include "runtime/biasedLocking.hpp"
  51 #include "runtime/compilationPolicy.hpp"
  52 #include "runtime/deoptimization.hpp"
  53 #include "runtime/frame.inline.hpp"
  54 #include "runtime/handles.inline.hpp"
  55 #include "runtime/interfaceSupport.inline.hpp"
  56 #include "runtime/safepointVerifiers.hpp"
  57 #include "runtime/sharedRuntime.hpp"
  58 #include "runtime/signature.hpp"
  59 #include "runtime/stubRoutines.hpp"
  60 #include "runtime/thread.hpp"
  61 #include "runtime/threadSMR.hpp"
  62 #include "runtime/vframe.hpp"
  63 #include "runtime/vframeArray.hpp"
  64 #include "runtime/vframe_hp.hpp"
  65 #include "utilities/events.hpp"
  66 #include "utilities/preserveException.hpp"
  67 #include "utilities/xmlstream.hpp"


 204 
 205 #if COMPILER2_OR_JVMCI
 206   // Reallocate the non-escaping objects and restore their fields. Then
 207   // relock objects if synchronization on them was eliminated.
 208 #if !INCLUDE_JVMCI
 209   if (DoEscapeAnalysis || EliminateNestedLocks) {
 210     if (EliminateAllocations) {
 211 #endif // INCLUDE_JVMCI
 212       assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames");
 213       GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects();
 214 
 215       // The flag return_oop() indicates call sites which return oop
 216       // in compiled code. Such sites include java method calls,
 217       // runtime calls (for example, used to allocate new objects/arrays
 218       // on slow code path) and any other calls generated in compiled code.
 219       // It is not guaranteed that we can get such information here only
 220       // by analyzing bytecode in deoptimized frames. This is why this flag
 221       // is set during method compilation (see Compile::Process_OopMap_Node()).
 222       // If the previous frame was popped or if we are dispatching an exception,
 223       // we don't have an oop result.
 224       ScopeDesc* scope = chunk->at(0)->scope();
 225       bool save_oop_result = scope->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
 226       // In case of the return of multiple values, we must take care
 227       // of all oop return values.
 228       GrowableArray<Handle> return_oops;
 229       ValueKlass* vk = NULL;
 230       if (save_oop_result && scope->return_vt()) {
 231         vk = ValueKlass::returned_value_klass(map);
 232         if (vk != NULL) {
 233           vk->save_oop_fields(map, return_oops);
 234           save_oop_result = false;
 235         }
 236       }
 237       if (save_oop_result) {
 238         // Reallocation may trigger GC. If deoptimization happened on return from
 239         // call which returns oop we need to save it since it is not in oopmap.
 240         oop result = deoptee.saved_oop_result(&map);
 241         assert(oopDesc::is_oop_or_null(result), "must be oop");
 242         return_oops.push(Handle(thread, result));
 243         assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
 244         if (TraceDeoptimization) {
 245           ttyLocker ttyl;
 246           tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
 247         }
 248       }
 249       if (objects != NULL || vk != NULL) {
 250         bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
 251         JRT_BLOCK
 252           if (vk != NULL) {
 253             realloc_failures = realloc_value_type_result(vk, map, return_oops, THREAD);
 254           }
 255           if (objects != NULL) {
 256             realloc_failures = realloc_failures || realloc_objects(thread, &deoptee, objects, THREAD);
 257             reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal, THREAD);
 258           }
 259         JRT_END


 260 #ifndef PRODUCT
 261         if (TraceDeoptimization) {
 262           ttyLocker ttyl;
 263           tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 264           print_objects(objects, realloc_failures);
 265         }
 266 #endif
 267       }
 268       if (save_oop_result || vk != NULL) {
 269         // Restore result.
 270         assert(return_oops.length() == 1, "no value type");
 271         deoptee.set_saved_oop_result(&map, return_oops.pop()());
 272       }
 273 #if !INCLUDE_JVMCI
 274     }
 275     if (EliminateLocks) {
 276 #endif // INCLUDE_JVMCI
 277 #ifndef PRODUCT
 278       bool first = true;
 279 #endif
 280       for (int i = 0; i < chunk->length(); i++) {
 281         compiledVFrame* cvf = chunk->at(i);
 282         assert (cvf->scope() != NULL,"expect only compiled java frames");
 283         GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
 284         if (monitors->is_nonempty()) {
 285           relock_objects(monitors, thread, realloc_failures);
 286 #ifndef PRODUCT
 287           if (PrintDeoptimizationDetails) {
 288             ttyLocker ttyl;
 289             for (int j = 0; j < monitors->length(); j++) {
 290               MonitorInfo* mi = monitors->at(j);
 291               if (mi->eliminated()) {


 488   // its caller's stack by. If the caller is a compiled frame then
 489   // we pretend that the callee has no parameters so that the
 490   // extension counts for the full amount of locals and not just
 491   // locals-parms. This is because without a c2i adapter the parm
 492   // area as created by the compiled frame will not be usable by
 493   // the interpreter. (Depending on the calling convention there
 494   // may not even be enough space).
 495 
 496   // QQQ I'd rather see this pushed down into last_frame_adjust
 497   // and have it take the sender (aka caller).
 498 
 499   if (deopt_sender.is_compiled_frame() || caller_was_method_handle) {
 500     caller_adjustment = last_frame_adjust(0, callee_locals);
 501   } else if (callee_locals > callee_parameters) {
 502     // The caller frame may need extending to accommodate
 503     // non-parameter locals of the first unpacked interpreted frame.
 504     // Compute that adjustment.
 505     caller_adjustment = last_frame_adjust(callee_parameters, callee_locals);
 506   }
 507 
 508   // If the sender is deoptimized we must retrieve the address of the handler
 509   // since the frame will "magically" show the original pc before the deopt
 510   // and we'd undo the deopt.
 511 
 512   frame_pcs[0] = deopt_sender.raw_pc();
 513 
 514   assert(CodeCache::find_blob_unsafe(frame_pcs[0]) != NULL, "bad pc");
 515 
 516 #if INCLUDE_JVMCI
 517   if (exceptionObject() != NULL) {
 518     thread->set_exception_oop(exceptionObject());
 519     exec_mode = Unpack_exception;
 520   }
 521 #endif
 522 
 523   if (thread->frames_to_pop_failed_realloc() > 0 && exec_mode != Unpack_uncommon_trap) {
 524     assert(thread->has_pending_exception(), "should have thrown OOME");
 525     thread->set_exception_oop(thread->pending_exception());
 526     thread->clear_pending_exception();
 527     exec_mode = Unpack_exception;
 528   }


 810 
 811 #if COMPILER2_OR_JVMCI
 812 bool Deoptimization::realloc_objects(JavaThread* thread, frame* fr, GrowableArray<ScopeValue*>* objects, TRAPS) {
 813   Handle pending_exception(THREAD, thread->pending_exception());
 814   const char* exception_file = thread->exception_file();
 815   int exception_line = thread->exception_line();
 816   thread->clear_pending_exception();
 817 
 818   bool failures = false;
 819 
 820   for (int i = 0; i < objects->length(); i++) {
 821     assert(objects->at(i)->is_object(), "invalid debug information");
 822     ObjectValue* sv = (ObjectValue*) objects->at(i);
 823 
 824     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
 825     oop obj = NULL;
 826 
 827     if (k->is_instance_klass()) {
 828       InstanceKlass* ik = InstanceKlass::cast(k);
 829       obj = ik->allocate_instance(THREAD);
 830     } else if (k->is_valueArray_klass()) {
 831       ValueArrayKlass* ak = ValueArrayKlass::cast(k);
 832       // Value type array must be zeroed because not all memory is reassigned
 833       obj = ak->allocate(sv->field_size(), THREAD);
 834     } else if (k->is_typeArray_klass()) {
 835       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
 836       assert(sv->field_size() % type2size[ak->element_type()] == 0, "non-integral array length");
 837       int len = sv->field_size() / type2size[ak->element_type()];
 838       obj = ak->allocate(len, THREAD);
 839     } else if (k->is_objArray_klass()) {
 840       ObjArrayKlass* ak = ObjArrayKlass::cast(k);
 841       obj = ak->allocate(sv->field_size(), THREAD);
 842     }
 843 
 844     if (obj == NULL) {
 845       failures = true;
 846     }
 847 
 848     assert(sv->value().is_null(), "redundant reallocation");
 849     assert(obj != NULL || HAS_PENDING_EXCEPTION, "allocation should succeed or we should get an exception");
 850     CLEAR_PENDING_EXCEPTION;
 851     sv->set_value(obj);
 852   }
 853 
 854   if (failures) {
 855     THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), failures);
 856   } else if (pending_exception.not_null()) {
 857     thread->set_pending_exception(pending_exception(), exception_file, exception_line);
 858   }
 859 
 860   return failures;
 861 }
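
The length computation in the type array branch above divides the recorded field_size (in stack slots) by the per-element slot count. A minimal sketch of that arithmetic follows; the type2size values are hard-coded assumptions (2 slots for long/double, 1 otherwise) rather than the real HotSpot table.

    #include <cassert>

    enum BasicTypeSketch { T_INT_S, T_LONG_S, T_DOUBLE_S };

    // Assumed slot counts, mirroring the usual convention for illustration only.
    static int type2size_sketch(BasicTypeSketch t) {
      return (t == T_LONG_S || t == T_DOUBLE_S) ? 2 : 1;
    }

    int main() {
      int field_size = 10;                                   // slots recorded in the debug info
      assert(field_size % type2size_sketch(T_LONG_S) == 0);  // the assertion in the code above
      assert(field_size / type2size_sketch(T_LONG_S) == 5);  // reconstructs a long[5]
      assert(field_size / type2size_sketch(T_INT_S) == 10);  // or an int[10]
      return 0;
    }
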
 862 
 863 // We're deoptimizing at the return of a call, value type fields are
 864 // in registers. When we go back to the interpreter, it will expect a
 865 // reference to a value type instance. Allocate and initialize it from
 866 // the register values here.
 867 bool Deoptimization::realloc_value_type_result(ValueKlass* vk, const RegisterMap& map, GrowableArray<Handle>& return_oops, TRAPS) {
 868   oop new_vt = vk->realloc_result(map, return_oops, THREAD);
 869   if (new_vt == NULL) {
 870     CLEAR_PENDING_EXCEPTION;
 871     THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), true);
 872   }
 873   return_oops.clear();
 874   return_oops.push(Handle(THREAD, new_vt));
 875   return false;
 876 }
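
The comment above describes the scalarized-return case handled by this helper. The toy model below is not the real ValueKlass API (the types and helper are invented for illustration); it only captures the idea that field values returned in registers are used to populate a freshly allocated heap instance so the interpreter receives an ordinary reference.

    #include <cassert>

    struct RegisterState { int x; int y; };   // pretend these values came back in registers
    struct PointInstance { int x; int y; };   // the buffered heap form the interpreter expects

    // In the VM the allocation can fail with an OOME, hence the
    // CLEAR_PENDING_EXCEPTION / THROW_OOP_ handling in the code above.
    static PointInstance* realloc_result_sketch(const RegisterState& regs) {
      return new PointInstance{regs.x, regs.y};
    }

    int main() {
      RegisterState regs{3, 4};
      PointInstance* buffered = realloc_result_sketch(regs);
      assert(buffered->x == 3 && buffered->y == 4);
      delete buffered;
      return 0;
    }
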
 877 
 878 // restore elements of an eliminated type array
 879 void Deoptimization::reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) {
 880   int index = 0;
 881   intptr_t val;
 882 
 883   for (int i = 0; i < sv->field_size(); i++) {
 884     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
 885     switch(type) {
 886     case T_LONG: case T_DOUBLE: {
 887       assert(value->type() == T_INT, "Agreement.");
 888       StackValue* low =
 889         StackValue::create_stack_value(fr, reg_map, sv->field_at(++i));
 890 #ifdef _LP64
 891       jlong res = (jlong)low->get_int();
 892 #else
 893 #ifdef SPARC
 894       // For SPARC we have to swap high and low words.
 895       jlong res = jlong_from((jint)low->get_int(), (jint)value->get_int());
 896 #else
 897       jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int());


 968         ShouldNotReachHere();
 969     }
 970     index++;
 971   }
 972 }
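
A standalone illustration of the 32-bit word recombination in the T_LONG/T_DOUBLE case above. The helper below assumes the usual jlong_from behaviour (high word shifted into the upper 32 bits, low word in the lower 32 bits); on LP64 only the low stack value is needed, and on 32-bit SPARC the two halves are passed in swapped order, as the #ifdefs show.

    #include <cassert>
    #include <cstdint>

    // Assumed equivalent of jlong_from(high, low).
    static int64_t jlong_from_sketch(int32_t high, int32_t low) {
      return ((int64_t)high << 32) | (uint32_t)low;
    }

    int main() {
      int32_t high = 0x12345678;
      int32_t low  = (int32_t)0x9ABCDEF0;
      assert(jlong_from_sketch(high, low) == (int64_t)0x123456789ABCDEF0LL);
      return 0;
    }
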
 973 
 974 
 975 // restore fields of an eliminated object array
 976 void Deoptimization::reassign_object_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, objArrayOop obj) {
 977   for (int i = 0; i < sv->field_size(); i++) {
 978     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
 979     assert(value->type() == T_OBJECT, "object element expected");
 980     obj->obj_at_put(i, value->get_obj()());
 981   }
 982 }
 983 
 984 class ReassignedField {
 985 public:
 986   int _offset;
 987   BasicType _type;
 988   InstanceKlass* _klass;
 989 public:
 990   ReassignedField() {
 991     _offset = 0;
 992     _type = T_ILLEGAL;
 993     _klass = NULL;
 994   }
 995 };
 996 
 997 int compare(ReassignedField* left, ReassignedField* right) {
 998   return left->_offset - right->_offset;
 999 }
1000 
1001 // Restore fields of an eliminated instance object using the same field order
1002 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
1003 static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal, int base_offset, TRAPS) {
1004   if (klass->superklass() != NULL) {
1005     svIndex = reassign_fields_by_klass(klass->superklass(), fr, reg_map, sv, svIndex, obj, skip_internal, 0, CHECK_0);
1006   }
1007 
1008   GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
1009   for (AllFieldStream fs(klass); !fs.done(); fs.next()) {
1010     if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
1011       ReassignedField field;
1012       field._offset = fs.offset();
1013       field._type = FieldType::basic_type(fs.signature());
1014       if (field._type == T_VALUETYPE) {
1015         field._type = T_OBJECT;
1016       }
1017       if (fs.is_flattened()) {
1018         // Resolve klass of flattened value type field
1019         Klass* vk = klass->get_value_field_klass(fs.index());
1020         field._klass = ValueKlass::cast(vk);
1021         field._type = T_VALUETYPE;
1022       }
1023       fields->append(field);
1024     }
1025   }
1026   fields->sort(compare);
1027   for (int i = 0; i < fields->length(); i++) {
1028     intptr_t val;
1029     ScopeValue* scope_field = sv->field_at(svIndex);
1030     StackValue* value = StackValue::create_stack_value(fr, reg_map, scope_field);
1031     int offset = base_offset + fields->at(i)._offset;
1032     BasicType type = fields->at(i)._type;
1033     switch (type) {
1034       case T_OBJECT:
1035       case T_ARRAY:
1036         assert(value->type() == T_OBJECT, "Agreement.");
1037         obj->obj_field_put(offset, value->get_obj()());
1038         break;
1039 
1040       case T_VALUETYPE: {
1041         // Recursively re-assign flattened value type fields
1042         InstanceKlass* vk = fields->at(i)._klass;
1043         assert(vk != NULL, "must be resolved");
1044         offset -= ValueKlass::cast(vk)->first_field_offset(); // Adjust offset to omit oop header
1045         svIndex = reassign_fields_by_klass(vk, fr, reg_map, sv, svIndex, obj, skip_internal, offset, CHECK_0);
1046         continue; // Continue because we don't need to increment svIndex
1047       }
1048 
1049       // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem.
1050       case T_INT: case T_FLOAT: { // 4 bytes.
1051         assert(value->type() == T_INT, "Agreement.");
1052         bool big_value = false;
1053         if (i+1 < fields->length() && fields->at(i+1)._type == T_INT) {
1054           if (scope_field->is_location()) {
1055             Location::Type type = ((LocationValue*) scope_field)->location().type();
1056             if (type == Location::dbl || type == Location::lng) {
1057               big_value = true;
1058             }
1059           }
1060           if (scope_field->is_constant_int()) {
1061             ScopeValue* next_scope_field = sv->field_at(svIndex + 1);
1062             if (next_scope_field->is_constant_long() || next_scope_field->is_constant_double()) {
1063               big_value = true;
1064             }
1065           }
1066         }
1067 
1068         if (big_value) {


1109       case T_BYTE:
1110         assert(value->type() == T_INT, "Agreement.");
1111         val = value->get_int();
1112         obj->byte_field_put(offset, (jbyte)*((jint*)&val));
1113         break;
1114 
1115       case T_BOOLEAN:
1116         assert(value->type() == T_INT, "Agreement.");
1117         val = value->get_int();
1118         obj->bool_field_put(offset, (jboolean)*((jint*)&val));
1119         break;
1120 
1121       default:
1122         ShouldNotReachHere();
1123     }
1124     svIndex++;
1125   }
1126   return svIndex;
1127 }
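
The flattened T_VALUETYPE case above subtracts first_field_offset before recursing so that each inner field's own offset then lands at the right place inside the holder. The standalone example below only checks that arithmetic; all layout numbers are assumptions chosen for illustration, not real object layouts.

    #include <cassert>

    int main() {
      // Hypothetical layout: a value class Point { int x; int y; } whose payload
      // starts at offset 12 in a standalone heap instance, flattened into a
      // holder object at offset 16.
      const int first_field_offset  = 12;  // offset of Point.x in a heap Point
      const int point_y_offset      = 16;  // offset of Point.y in a heap Point
      const int holder_field_offset = 16;  // where the flattened Point starts in the holder

      int base_offset = holder_field_offset - first_field_offset;  // as in the code above
      assert(base_offset + first_field_offset == 16);  // Point.x written at holder offset 16
      assert(base_offset + point_y_offset     == 20);  // Point.y written at holder offset 20
      return 0;
    }
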
1128 
1129 // restore fields of an eliminated value type array
1130 void Deoptimization::reassign_value_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, valueArrayOop obj, ValueArrayKlass* vak, TRAPS) {
1131   ValueKlass* vk = vak->element_klass();
1132   assert(vk->flatten_array(), "should only be used for flattened value type arrays");
1133   // Adjust offset to omit oop header
1134   int base_offset = arrayOopDesc::base_offset_in_bytes(T_VALUETYPE) - ValueKlass::cast(vk)->first_field_offset();
1135   // Initialize all elements of the flattened value type array
1136   for (int i = 0; i < sv->field_size(); i++) {
1137     ScopeValue* val = sv->field_at(i);
1138     int offset = base_offset + (i << Klass::layout_helper_log2_element_size(vak->layout_helper()));
1139     reassign_fields_by_klass(vk, fr, reg_map, val->as_ObjectValue(), 0, (oop)obj, false /* skip_internal */, offset, CHECK);
1140   }
1141 }
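
A quick standalone check of the per-element offset computation above, again with assumed numbers (array header size, payload start, 8-byte flattened elements); it verifies the arithmetic only, not any real array layout.

    #include <cassert>

    int main() {
      const int array_base_in_bytes = 16;  // assumed base_offset_in_bytes(T_VALUETYPE)
      const int first_field_offset  = 12;  // assumed payload start in a heap instance
      const int log2_element_size   = 3;   // assumed 8-byte flattened elements

      int base_offset = array_base_in_bytes - first_field_offset;  // subtracted once here...
      for (int i = 0; i < 3; i++) {
        int element_offset = base_offset + (i << log2_element_size);
        // ...and added back per field by the recursive reassignment, landing each
        // element's payload right after the header: 16, 24, 32, ...
        assert(element_offset + first_field_offset == array_base_in_bytes + i * 8);
      }
      return 0;
    }
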
1142 
1143 // restore fields of all eliminated objects and arrays
1144 void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal, TRAPS) {
1145   for (int i = 0; i < objects->length(); i++) {
1146     ObjectValue* sv = (ObjectValue*) objects->at(i);
1147     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
1148     Handle obj = sv->value();
1149     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1150     if (PrintDeoptimizationDetails) {
1151       tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string());
1152     }
1153     if (obj.is_null()) {
1154       continue;
1155     }
1156 
1157     if (k->is_instance_klass()) {
1158       InstanceKlass* ik = InstanceKlass::cast(k);
1159       reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal, 0, CHECK);
1160     } else if (k->is_valueArray_klass()) {
1161       ValueArrayKlass* vak = ValueArrayKlass::cast(k);
1162       reassign_value_array_elements(fr, reg_map, sv, (valueArrayOop) obj(), vak, CHECK);
1163     } else if (k->is_typeArray_klass()) {
1164       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1165       reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
1166     } else if (k->is_objArray_klass()) {
1167       reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj());
1168     }
1169   }
1170 }
1171 
1172 
1173 // relock objects for which synchronization was eliminated
1174 void Deoptimization::relock_objects(GrowableArray<MonitorInfo*>* monitors, JavaThread* thread, bool realloc_failures) {
1175   for (int i = 0; i < monitors->length(); i++) {
1176     MonitorInfo* mon_info = monitors->at(i);
1177     if (mon_info->eliminated()) {
1178       assert(!mon_info->owner_is_scalar_replaced() || realloc_failures, "reallocation was missed");
1179       if (!mon_info->owner_is_scalar_replaced()) {
1180         Handle obj(thread, mon_info->owner());
1181         markOop mark = obj->mark();
1182         if (UseBiasedLocking && mark->has_bias_pattern()) {

