src/hotspot/share/runtime/deoptimization.cpp

@@ -40,10 +40,13 @@
 #include "oops/method.hpp"
 #include "oops/objArrayOop.inline.hpp"
 #include "oops/oop.inline.hpp"
 #include "oops/fieldStreams.hpp"
 #include "oops/typeArrayOop.inline.hpp"
+#include "oops/valueArrayKlass.hpp"
+#include "oops/valueArrayOop.hpp"
+#include "oops/valueKlass.hpp"
 #include "oops/verifyOopClosure.hpp"
 #include "prims/jvmtiThreadState.hpp"
 #include "runtime/biasedLocking.hpp"
 #include "runtime/compilationPolicy.hpp"
 #include "runtime/deoptimization.hpp"

@@ -216,41 +219,58 @@
       // It is not guaranteed that we can get such information here only
       // by analyzing bytecode in deoptimized frames. This is why this flag
       // is set during method compilation (see Compile::Process_OopMap_Node()).
       // If the previous frame was popped or if we are dispatching an exception,
       // we don't have an oop result.
-      bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
-      Handle return_value;
+      ScopeDesc* scope = chunk->at(0)->scope();
+      bool save_oop_result = scope->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
+      // If multiple values are returned, we must take care of all
+      // oop return values.
+      GrowableArray<Handle> return_oops;
+      ValueKlass* vk = NULL;
+      if (save_oop_result && scope->return_vt()) {
+        vk = ValueKlass::returned_value_klass(map);
+        if (vk != NULL) {
+          vk->save_oop_fields(map, return_oops);
+          save_oop_result = false;
+        }
+      }
       if (save_oop_result) {
         // Reallocation may trigger GC. If deoptimization happened on return from
         // call which returns oop we need to save it since it is not in oopmap.
         oop result = deoptee.saved_oop_result(&map);
         assert(oopDesc::is_oop_or_null(result), "must be oop");
-        return_value = Handle(thread, result);
+        return_oops.push(Handle(thread, result));
         assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
         if (TraceDeoptimization) {
           ttyLocker ttyl;
           tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
         }
       }
-      if (objects != NULL) {
+      if (objects != NULL || vk != NULL) {
+        bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
         JRT_BLOCK
-          realloc_failures = realloc_objects(thread, &deoptee, objects, THREAD);
+          if (vk != NULL) {
+            realloc_failures = realloc_value_type_result(vk, map, return_oops, THREAD);
+          }
+          if (objects != NULL) {
+            realloc_failures = realloc_failures || realloc_objects(thread, &deoptee, objects, THREAD);
+            reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal, THREAD);
+          }
         JRT_END
-        bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
-        reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal);
 #ifndef PRODUCT
         if (TraceDeoptimization) {
           ttyLocker ttyl;
           tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
           print_objects(objects, realloc_failures);
         }
 #endif
       }
-      if (save_oop_result) {
+      if (save_oop_result || vk != NULL) {
         // Restore result.
-        deoptee.set_saved_oop_result(&map, return_value());
+        assert(return_oops.length() == 1, "no value type");
+        deoptee.set_saved_oop_result(&map, return_oops.pop()());
       }
 #if !INCLUDE_JVMCI
     }
     if (EliminateLocks) {
 #endif // INCLUDE_JVMCI

@@ -483,11 +503,11 @@
     // non-parameter locals of the first unpacked interpreted frame.
     // Compute that adjustment.
     caller_adjustment = last_frame_adjust(callee_parameters, callee_locals);
   }
 
-  // If the sender is deoptimized the we must retrieve the address of the handler
+  // If the sender is deoptimized we must retrieve the address of the handler
   // since the frame will "magically" show the original pc before the deopt
   // and we'd undo the deopt.
 
   frame_pcs[0] = deopt_sender.raw_pc();
 

@@ -805,10 +825,14 @@
     oop obj = NULL;
 
     if (k->is_instance_klass()) {
       InstanceKlass* ik = InstanceKlass::cast(k);
       obj = ik->allocate_instance(THREAD);
+    } else if (k->is_valueArray_klass()) {
+      ValueArrayKlass* ak = ValueArrayKlass::cast(k);
+      // Value type array must be zeroed because not all memory is reassigned
+      obj = ak->allocate(sv->field_size(), THREAD);
     } else if (k->is_typeArray_klass()) {
       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
       assert(sv->field_size() % type2size[ak->element_type()] == 0, "non-integral array length");
       int len = sv->field_size() / type2size[ak->element_type()];
       obj = ak->allocate(len, THREAD);

@@ -834,10 +858,25 @@
   }
 
   return failures;
 }
 
+// We're deoptimizing at the return of a call and the value type fields
+// are still in registers. When we go back to the interpreter, it will
+// expect a reference to a value type instance. Allocate and initialize
+// it from the register values here.
+bool Deoptimization::realloc_value_type_result(ValueKlass* vk, const RegisterMap& map, GrowableArray<Handle>& return_oops, TRAPS) {
+  oop new_vt = vk->realloc_result(map, return_oops, THREAD);
+  if (new_vt == NULL) {
+    CLEAR_PENDING_EXCEPTION;
+    THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), true);
+  }
+  return_oops.clear();
+  return_oops.push(Handle(THREAD, new_vt));
+  return false;
+}
+
 // restore elements of an eliminated type array
 void Deoptimization::reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) {
   int index = 0;
   intptr_t val;
 

@@ -944,50 +983,71 @@
 
 class ReassignedField {
 public:
   int _offset;
   BasicType _type;
+  InstanceKlass* _klass;
 public:
   ReassignedField() {
     _offset = 0;
     _type = T_ILLEGAL;
+    _klass = NULL;
   }
 };
 
 int compare(ReassignedField* left, ReassignedField* right) {
   return left->_offset - right->_offset;
 }
 
 // Restore fields of an eliminated instance object using the same field order
 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
-static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal) {
+static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal, int base_offset, TRAPS) {
   if (klass->superklass() != NULL) {
-    svIndex = reassign_fields_by_klass(klass->superklass(), fr, reg_map, sv, svIndex, obj, skip_internal);
+    svIndex = reassign_fields_by_klass(klass->superklass(), fr, reg_map, sv, svIndex, obj, skip_internal, 0, CHECK_0);
   }
 
   GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
   for (AllFieldStream fs(klass); !fs.done(); fs.next()) {
     if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
       ReassignedField field;
       field._offset = fs.offset();
       field._type = FieldType::basic_type(fs.signature());
+      if (field._type == T_VALUETYPE) {
+        field._type = T_OBJECT;
+      }
+      if (fs.is_flattened()) {
+        // Resolve klass of flattened value type field
+        Klass* vk = klass->get_value_field_klass(fs.index());
+        field._klass = ValueKlass::cast(vk);
+        field._type = T_VALUETYPE;
+      }
       fields->append(field);
     }
   }
   fields->sort(compare);
   for (int i = 0; i < fields->length(); i++) {
     intptr_t val;
     ScopeValue* scope_field = sv->field_at(svIndex);
     StackValue* value = StackValue::create_stack_value(fr, reg_map, scope_field);
-    int offset = fields->at(i)._offset;
+    int offset = base_offset + fields->at(i)._offset;
     BasicType type = fields->at(i)._type;
     switch (type) {
-      case T_OBJECT: case T_ARRAY:
+      case T_OBJECT:
+      case T_ARRAY:
         assert(value->type() == T_OBJECT, "Agreement.");
         obj->obj_field_put(offset, value->get_obj()());
         break;
 
+      case T_VALUETYPE: {
+        // Recursively re-assign flattened value type fields
+        InstanceKlass* vk = fields->at(i)._klass;
+        assert(vk != NULL, "must be resolved");
+        offset -= ValueKlass::cast(vk)->first_field_offset(); // Adjust offset to omit oop header
+        svIndex = reassign_fields_by_klass(vk, fr, reg_map, sv, svIndex, obj, skip_internal, offset, CHECK_0);
+        continue; // Continue because we don't need to increment svIndex
+      }
+
       // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem.
       case T_INT: case T_FLOAT: { // 4 bytes.
         assert(value->type() == T_INT, "Agreement.");
         bool big_value = false;
         if (i+1 < fields->length() && fields->at(i+1)._type == T_INT) {

@@ -1064,12 +1124,26 @@
     svIndex++;
   }
   return svIndex;
 }
 
+// restore fields of an eliminated value type array
+void Deoptimization::reassign_value_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, valueArrayOop obj, ValueArrayKlass* vak, TRAPS) {
+  ValueKlass* vk = vak->element_klass();
+  assert(vk->flatten_array(), "should only be used for flattened value type arrays");
+  // Adjust offset to omit oop header
+  int base_offset = arrayOopDesc::base_offset_in_bytes(T_VALUETYPE) - ValueKlass::cast(vk)->first_field_offset();
+  // Initialize all elements of the flattened value type array
+  for (int i = 0; i < sv->field_size(); i++) {
+    ScopeValue* val = sv->field_at(i);
+    int offset = base_offset + (i << Klass::layout_helper_log2_element_size(vak->layout_helper()));
+    reassign_fields_by_klass(vk, fr, reg_map, val->as_ObjectValue(), 0, (oop)obj, false /* skip_internal */, offset, CHECK);
+  }
+}
+
 // restore fields of all eliminated objects and arrays
-void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal) {
+void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal, TRAPS) {
   for (int i = 0; i < objects->length(); i++) {
     ObjectValue* sv = (ObjectValue*) objects->at(i);
     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
     Handle obj = sv->value();
     assert(obj.not_null() || realloc_failures, "reallocation was missed");

@@ -1080,11 +1154,14 @@
       continue;
     }
 
     if (k->is_instance_klass()) {
       InstanceKlass* ik = InstanceKlass::cast(k);
-      reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal);
+      reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal, 0, CHECK);
+    } else if (k->is_valueArray_klass()) {
+      ValueArrayKlass* vak = ValueArrayKlass::cast(k);
+      reassign_value_array_elements(fr, reg_map, sv, (valueArrayOop) obj(), vak, CHECK);
     } else if (k->is_typeArray_klass()) {
       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
       reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
     } else if (k->is_objArray_klass()) {
       reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj());