< prev index next >

src/hotspot/share/interpreter/interpreterRuntime.cpp

Print this page




 284 
 285   // Creating new value by copying the one passed in argument
 286   bool in_heap;
 287   instanceOop new_value = vklass->allocate_buffered_or_heap_instance(&in_heap,
 288       CHECK_((type2size[field_type]) * AbstractInterpreter::stackElementSize));
 289   Handle new_value_h = Handle(THREAD, new_value);
 290   int first_offset = vklass->first_field_offset();
 291   vklass->value_store(vklass->data_for_oop(old_value_h()),
 292       vklass->data_for_oop(new_value_h()), in_heap, false);
 293 
 294   // Updating the field specified in arguments
 295   if (field_type == T_OBJECT || field_type == T_ARRAY) {
 296     oop aoop = *(oop*)f.interpreter_frame_expression_stack_at(tos_idx);
 297     assert(aoop == NULL || (oopDesc::is_oop(aoop) && (!aoop->is_value())),"argument must be a reference type");
 298     if (in_heap) {
 299       new_value_h()->obj_field_put(field_offset, aoop);
 300     } else {
 301       new_value_h()->obj_field_put_raw(field_offset, aoop);
 302     }
 303   } else if (field_type == T_VALUETYPE) {
 304     if (cp_entry->is_flatten()) {
 305       Klass* field_k = vklass->get_value_field_klass(field_index);
 306       ValueKlass* field_vk = ValueKlass::cast(field_k);
 307       oop vt_oop = *(oop*)f.interpreter_frame_expression_stack_at(tos_idx);
 308       assert(vt_oop != NULL && oopDesc::is_oop(vt_oop) && vt_oop->is_value(),"argument must be a value type");
 309       assert(field_vk == vt_oop->klass(), "Must match");
 310       field_vk->value_store(field_vk->data_for_oop(vt_oop),
 311           ((char*)(oopDesc*)new_value_h()) + field_offset, in_heap, false);
 312     } else {
 313       oop voop = *(oop*)f.interpreter_frame_expression_stack_at(tos_idx);
 314       assert(voop != NULL || (oopDesc::is_oop(voop) && (voop->is_value())),"argument must be a value type");
 315       if (VTBuffer::is_in_vt_buffer(voop)) {
 316         // new value field is currently allocated in a TLVB, a heap allocated
 317         // copy must be created because a field must never point to a TLVB allocated value
 318         Handle voop_h = Handle(THREAD, voop);
 319         ValueKlass* field_vk = ValueKlass::cast(voop->klass());
 320         assert(field_vk == vklass->get_value_field_klass(field_index), "Sanity check");
 321         instanceOop field_copy = field_vk->allocate_instance(CHECK_((type2size[field_type]) * AbstractInterpreter::stackElementSize));
 322         Handle field_copy_h = Handle(THREAD, field_copy);
 323         field_vk->value_store(field_vk->data_for_oop(voop_h()), field_vk->data_for_oop(field_copy_h()), true, false);
 324         if (in_heap) {


 337   } else {
 338     intptr_t* addr = f.interpreter_frame_expression_stack_at(tos_idx);
 339     copy_primitive_argument(addr, new_value_h, field_offset, field_type);
 340   }
 341 
 342   // returning result
 343   thread->set_vm_result(new_value_h());
 344   return (type2size[field_type] + type2size[T_OBJECT]) * AbstractInterpreter::stackElementSize;
 345 IRT_END
 346 
 347 IRT_ENTRY(void, InterpreterRuntime::qgetfield(JavaThread* thread, oopDesc* obj, int index, Klass* field_holder))
       // Slow-path runtime entry for reading a value-type (Q-type) field of 'obj'.
       // 'index' is the field index within 'field_holder'. The resulting value oop
       // is handed back to the interpreter via thread->set_vm_result().
 348   Handle value_h(THREAD, obj);
 349   InstanceKlass* klass = InstanceKlass::cast(field_holder);
 350 
 351   Klass* field_k = klass->get_value_field_klass(index);
 352   ValueKlass* field_vklass = ValueKlass::cast(field_k);
       // Make sure the field's value klass is initialized before allocating an
       // instance of it. NOTE(review): called with THREAD but no CHECK macro —
       // a pending exception here would not be propagated immediately; confirm intended.
 353   field_vklass->initialize(THREAD);
 354 
 355   instanceOop res;
 356   bool in_heap;
 357   if (klass->is_field_flatten(index)) {
       // Flattened field: the value's payload is embedded directly in 'obj', so a
       // standalone instance (thread-local buffered or heap, per 'in_heap') must be
       // allocated and the payload copied out of the holder into it.
 358     // allocate instance
 359     res = field_vklass->allocate_buffered_or_heap_instance(&in_heap, CHECK);
 360     instanceHandle res_h(THREAD, res);
 361     // copy value
 362     field_vklass->value_store(((char*)(oopDesc*)value_h()) + klass->field_offset(index),
 363                               field_vklass->data_for_oop(res), in_heap, false);
 364     thread->set_vm_result(res_h());
 365   } else {
       // Non-flattened field: stored as a regular reference. A NULL reference means
       // the field was never written, in which case the value klass' default value
       // (all-fields-default instance) is substituted — value fields are never null.
 366     oop res = value_h()->obj_field_acquire(klass->field_offset(index));
 367     if (res == NULL) {
 368       res = field_vklass->default_value();
 369     } else {
 370       assert(res->klass() == field_k, "Sanity check");
       // A stored reference must never point into a thread-local value buffer (TLVB).
 371       assert(!VTBuffer::is_in_vt_buffer(res), "Sanity check");
 372     }
 373     thread->set_vm_result(res);
 374   }
 375   assert(thread->vm_result()->klass() == field_vklass, "sanity check");
 376 IRT_END
 377 
 378 IRT_ENTRY(void, InterpreterRuntime::uninitialized_static_value_field(JavaThread* thread, oopDesc* mirror, int index))
       // Produces the default value for an uninitialized static value-type field.
       // 'mirror' is the java.lang.Class mirror of the declaring class; 'index' is
       // the field index. Result is returned via thread->set_vm_result().
 379   instanceHandle mirror_h(THREAD, (instanceOop)mirror);
 380   InstanceKlass* klass = InstanceKlass::cast(java_lang_Class::as_Klass(mirror));
       // 'offset' is only consumed by the assert below (debug builds).
 381   int offset = klass->field_offset(index);
 382   assert(mirror->obj_field(offset) == NULL,"Field must not be initialized twice");




  383 




       // Lazily resolve and cache the field's value klass if it has not been
       // resolved yet; resolution may throw (CHECK), e.g. on class-loading failure.
 384   Klass* field_k = klass->get_value_field_klass_or_null(index);
 385   if (field_k == NULL) {
 386     field_k = SystemDictionary::resolve_or_fail(klass->field_signature(index),
 387                                                 Handle(THREAD, klass->class_loader()),
 388                                                 Handle(THREAD, klass->protection_domain()), true, CHECK);
 389     assert(field_k != NULL, "Sanity check");
 390     assert(field_k->access_flags().is_value_type(), "Value type expected");
 391     klass->set_value_field_klass(index, field_k);
 392   }
 393   ValueKlass* field_vklass = ValueKlass::cast(field_k);
       // Hand back the klass' canonical default value instance (never NULL).
 394   instanceOop res = (instanceOop)field_vklass->default_value();
 395   thread->set_vm_result(res);
 396 IRT_END
 397 
 398 IRT_ENTRY(void, InterpreterRuntime::qputfield(JavaThread* thread, oopDesc* obj, oopDesc* value, ConstantPoolCache* cp_cache))
 399   LastFrameAccessor last_frame(thread);
 400   Handle value_h(THREAD, value);
 401   Handle obj_h(THREAD, obj);
 402   assert(!obj_h()->klass()->is_value(), "obj must be an object");
 403   assert(value_h()->klass()->is_value(), "value must be an value type");
 404 
 405   int idx = ConstantPool::decode_cpcache_index(last_frame.get_index_u2_cpcache(Bytecodes::_putfield));
 406   ConstantPoolCacheEntry* cp_entry = cp_cache->entry_at(idx);
 407 
 408   int index = cp_entry->field_index();
 409   bool flatten = cp_entry->is_flatten();
 410 
 411   InstanceKlass* klass = InstanceKlass::cast(cp_entry->f1_as_klass());
 412   Klass* field_k = klass->get_value_field_klass(index);
 413   ValueKlass* field_vklass = ValueKlass::cast(value->klass());
 414   assert(value_h()->klass() == field_k, "Sanity check");
 415   assert(field_k == field_vklass, "Field descriptor and argument must match");
 416   if (flatten) {
 417     // copy value
 418     field_vklass->value_store(field_vklass->data_for_oop(value_h()),
 419                               ((char*)(oopDesc*)obj_h()) + klass->field_offset(index), true, false);
 420   } else {
 421     if (!VTBuffer::is_in_vt_buffer(value_h())) {
 422       obj_h()->obj_field_put(klass->field_offset(index), value_h());
 423     } else {
 424       // allocate heap instance
 425       instanceOop val = field_vklass->allocate_instance(CHECK);
 426       instanceHandle res_h(THREAD, val);
 427       // copy value
 428       field_vklass->value_store(field_vklass->data_for_oop(value_h()),
 429                                 field_vklass->data_for_oop(res_h()), true, false);


1034       get_code = Bytecodes::_getstatic;
1035     } else {
1036       get_code = Bytecodes::_getfield;
1037     }
1038     if (is_put && is_value) {
1039         put_code = ((is_static) ? Bytecodes::_putstatic : Bytecodes::_withfield);
1040     } else if ((is_put && !has_initialized_final_update) || !info.access_flags().is_final()) {
1041         put_code = ((is_static) ? Bytecodes::_putstatic : Bytecodes::_putfield);
1042     }
1043   }
1044 
1045   cp_cache_entry->set_field(
1046     get_code,
1047     put_code,
1048     info.field_holder(),
1049     info.index(),
1050     info.offset(),
1051     state,
1052     info.access_flags().is_final(),
1053     info.access_flags().is_volatile(),
1054     info.is_flatten(),
1055     info.is_flattenable(),
1056     pool->pool_holder()
1057   );
1058 }
1059 
1060 
1061 //------------------------------------------------------------------------------------------------------------------------
1062 // Synchronization
1063 //
1064 // The interpreter's synchronization code is factored out so that it can
1065 // be shared by method invocation and synchronized blocks.
1066 //%note synchronization_3
1067 
1068 //%note monitor_1
1069 IRT_ENTRY_NO_ASYNC(void, InterpreterRuntime::monitorenter(JavaThread* thread, BasicObjectLock* elem))
1070 #ifdef ASSERT
1071   thread->last_frame().interpreter_frame_verify_monitor(elem);
1072 #endif
1073   if (PrintBiasedLockingStatistics) {
1074     Atomic::inc(BiasedLocking::slow_path_entry_count_addr());




 284 
 285   // Creating new value by copying the one passed in argument
 286   bool in_heap;
 287   instanceOop new_value = vklass->allocate_buffered_or_heap_instance(&in_heap,
 288       CHECK_((type2size[field_type]) * AbstractInterpreter::stackElementSize));
 289   Handle new_value_h = Handle(THREAD, new_value);
 290   int first_offset = vklass->first_field_offset();
 291   vklass->value_store(vklass->data_for_oop(old_value_h()),
 292       vklass->data_for_oop(new_value_h()), in_heap, false);
 293 
 294   // Updating the field specified in arguments
 295   if (field_type == T_OBJECT || field_type == T_ARRAY) {
 296     oop aoop = *(oop*)f.interpreter_frame_expression_stack_at(tos_idx);
 297     assert(aoop == NULL || (oopDesc::is_oop(aoop) && (!aoop->is_value())),"argument must be a reference type");
 298     if (in_heap) {
 299       new_value_h()->obj_field_put(field_offset, aoop);
 300     } else {
 301       new_value_h()->obj_field_put_raw(field_offset, aoop);
 302     }
 303   } else if (field_type == T_VALUETYPE) {
 304     if (cp_entry->is_flattened()) {
 305       Klass* field_k = vklass->get_value_field_klass(field_index);
 306       ValueKlass* field_vk = ValueKlass::cast(field_k);
 307       oop vt_oop = *(oop*)f.interpreter_frame_expression_stack_at(tos_idx);
 308       assert(vt_oop != NULL && oopDesc::is_oop(vt_oop) && vt_oop->is_value(),"argument must be a value type");
 309       assert(field_vk == vt_oop->klass(), "Must match");
 310       field_vk->value_store(field_vk->data_for_oop(vt_oop),
 311           ((char*)(oopDesc*)new_value_h()) + field_offset, in_heap, false);
 312     } else {
 313       oop voop = *(oop*)f.interpreter_frame_expression_stack_at(tos_idx);
 314       assert(voop != NULL || (oopDesc::is_oop(voop) && (voop->is_value())),"argument must be a value type");
 315       if (VTBuffer::is_in_vt_buffer(voop)) {
 316         // new value field is currently allocated in a TLVB, a heap allocated
 317         // copy must be created because a field must never point to a TLVB allocated value
 318         Handle voop_h = Handle(THREAD, voop);
 319         ValueKlass* field_vk = ValueKlass::cast(voop->klass());
 320         assert(field_vk == vklass->get_value_field_klass(field_index), "Sanity check");
 321         instanceOop field_copy = field_vk->allocate_instance(CHECK_((type2size[field_type]) * AbstractInterpreter::stackElementSize));
 322         Handle field_copy_h = Handle(THREAD, field_copy);
 323         field_vk->value_store(field_vk->data_for_oop(voop_h()), field_vk->data_for_oop(field_copy_h()), true, false);
 324         if (in_heap) {


 337   } else {
 338     intptr_t* addr = f.interpreter_frame_expression_stack_at(tos_idx);
 339     copy_primitive_argument(addr, new_value_h, field_offset, field_type);
 340   }
 341 
 342   // returning result
 343   thread->set_vm_result(new_value_h());
 344   return (type2size[field_type] + type2size[T_OBJECT]) * AbstractInterpreter::stackElementSize;
 345 IRT_END
 346 
 347 IRT_ENTRY(void, InterpreterRuntime::qgetfield(JavaThread* thread, oopDesc* obj, int index, Klass* field_holder))
       // Slow-path runtime entry for reading a value-type (Q-type) field of 'obj'.
       // 'index' is the field index within 'field_holder'. The resulting value oop
       // is handed back to the interpreter via thread->set_vm_result().
 348   Handle value_h(THREAD, obj);
 349   InstanceKlass* klass = InstanceKlass::cast(field_holder);
 350 
 351   Klass* field_k = klass->get_value_field_klass(index);
 352   ValueKlass* field_vklass = ValueKlass::cast(field_k);
       // Make sure the field's value klass is initialized before allocating an
       // instance of it. NOTE(review): called with THREAD but no CHECK macro —
       // a pending exception here would not be propagated immediately; confirm intended.
 353   field_vklass->initialize(THREAD);
 354 
 355   instanceOop res;
 356   bool in_heap;
 357   if (klass->field_is_flattened(index)) {
       // Flattened field: the value's payload is embedded directly in 'obj', so a
       // standalone instance (thread-local buffered or heap, per 'in_heap') must be
       // allocated and the payload copied out of the holder into it.
 358     // allocate instance
 359     res = field_vklass->allocate_buffered_or_heap_instance(&in_heap, CHECK);
 360     instanceHandle res_h(THREAD, res);
 361     // copy value
 362     field_vklass->value_store(((char*)(oopDesc*)value_h()) + klass->field_offset(index),
 363                               field_vklass->data_for_oop(res), in_heap, false);
 364     thread->set_vm_result(res_h());
 365   } else {
       // Non-flattened field: stored as a regular reference. A NULL reference means
       // the field was never written, in which case the value klass' default value
       // (all-fields-default instance) is substituted — value fields are never null.
 366     oop res = value_h()->obj_field_acquire(klass->field_offset(index));
 367     if (res == NULL) {
 368       res = field_vklass->default_value();
 369     } else {
 370       assert(res->klass() == field_k, "Sanity check");
       // A stored reference must never point into a thread-local value buffer (TLVB).
 371       assert(!VTBuffer::is_in_vt_buffer(res), "Sanity check");
 372     }
 373     thread->set_vm_result(res);
 374   }
 375   assert(thread->vm_result()->klass() == field_vklass, "sanity check");
 376 IRT_END
 377 
 378 IRT_ENTRY(void, InterpreterRuntime::uninitialized_static_value_field(JavaThread* thread, oopDesc* mirror, int index))
       // Produces the default value for an uninitialized static value-type field.
       // 'mirror' is the java.lang.Class mirror of the declaring class; 'index' is
       // the field index. Result is returned via thread->set_vm_result().
       // NOTE(review): 'mirror_h' and 'offset' are no longer read after the
       // lazy-resolution path was removed — possibly dead locals; confirm.
 379   instanceHandle mirror_h(THREAD, (instanceOop)mirror);
 380   InstanceKlass* klass = InstanceKlass::cast(java_lang_Class::as_Klass(mirror));
 381   int offset = klass->field_offset(index);
       // The field's value klass is expected to already be resolved and cached
       // (earlier versions resolved it here on demand).
 382   Klass* field_k = klass->get_value_field_klass_or_null(index);
 383   assert(field_k != NULL, "Must have been initialized");
 384   ValueKlass* field_vklass = ValueKlass::cast(field_k);
       // Hand back the klass' canonical default value instance (never NULL).
 385   instanceOop res = (instanceOop)field_vklass->default_value();
 386   thread->set_vm_result(res);
 387 IRT_END
 388 
 389 IRT_ENTRY(void, InterpreterRuntime::uninitialized_instance_value_field(JavaThread* thread, oopDesc* obj, int index))
       // Produces the default value for an uninitialized instance value-type field
       // of 'obj' ('index' is the field index in obj's klass). Result is returned
       // via thread->set_vm_result().
       // NOTE(review): 'offset' is computed but never read — possibly a dead
       // local kept in parallel with the static variant; confirm.
 390   instanceHandle obj_h(THREAD, (instanceOop)obj);
 391   InstanceKlass* klass = InstanceKlass::cast(obj_h()->klass());
 392   int offset = klass->field_offset(index);
       // The field's value klass must already be resolved and cached at this point.
 393   Klass* field_k = klass->get_value_field_klass_or_null(index);
 394   assert(field_k != NULL, "Must have been initialized");







 395   ValueKlass* field_vklass = ValueKlass::cast(field_k);
       // Hand back the klass' canonical default value instance (never NULL).
 396   instanceOop res = (instanceOop)field_vklass->default_value();
 397   thread->set_vm_result(res);
 398 IRT_END
 399 
 400 IRT_ENTRY(void, InterpreterRuntime::qputfield(JavaThread* thread, oopDesc* obj, oopDesc* value, ConstantPoolCache* cp_cache))
 401   LastFrameAccessor last_frame(thread);
 402   Handle value_h(THREAD, value);
 403   Handle obj_h(THREAD, obj);
 404   assert(!obj_h()->klass()->is_value(), "obj must be an object");
 405   assert(value_h()->klass()->is_value(), "value must be an value type");
 406 
 407   int idx = ConstantPool::decode_cpcache_index(last_frame.get_index_u2_cpcache(Bytecodes::_putfield));
 408   ConstantPoolCacheEntry* cp_entry = cp_cache->entry_at(idx);
 409 
 410   int index = cp_entry->field_index();
 411   bool flatten = cp_entry->is_flattened();
 412 
 413   InstanceKlass* klass = InstanceKlass::cast(cp_entry->f1_as_klass());
 414   Klass* field_k = klass->get_value_field_klass(index);
 415   ValueKlass* field_vklass = ValueKlass::cast(value->klass());
 416   assert(value_h()->klass() == field_k, "Sanity check");
 417   assert(field_k == field_vklass, "Field descriptor and argument must match");
 418   if (flatten) {
 419     // copy value
 420     field_vklass->value_store(field_vklass->data_for_oop(value_h()),
 421                               ((char*)(oopDesc*)obj_h()) + klass->field_offset(index), true, false);
 422   } else {
 423     if (!VTBuffer::is_in_vt_buffer(value_h())) {
 424       obj_h()->obj_field_put(klass->field_offset(index), value_h());
 425     } else {
 426       // allocate heap instance
 427       instanceOop val = field_vklass->allocate_instance(CHECK);
 428       instanceHandle res_h(THREAD, val);
 429       // copy value
 430       field_vklass->value_store(field_vklass->data_for_oop(value_h()),
 431                                 field_vklass->data_for_oop(res_h()), true, false);


1036       get_code = Bytecodes::_getstatic;
1037     } else {
1038       get_code = Bytecodes::_getfield;
1039     }
1040     if (is_put && is_value) {
1041         put_code = ((is_static) ? Bytecodes::_putstatic : Bytecodes::_withfield);
1042     } else if ((is_put && !has_initialized_final_update) || !info.access_flags().is_final()) {
1043         put_code = ((is_static) ? Bytecodes::_putstatic : Bytecodes::_putfield);
1044     }
1045   }
1046 
1047   cp_cache_entry->set_field(
1048     get_code,
1049     put_code,
1050     info.field_holder(),
1051     info.index(),
1052     info.offset(),
1053     state,
1054     info.access_flags().is_final(),
1055     info.access_flags().is_volatile(),
1056     info.is_flattened(),
1057     info.is_flattenable(),
1058     pool->pool_holder()
1059   );
1060 }
1061 
1062 
1063 //------------------------------------------------------------------------------------------------------------------------
1064 // Synchronization
1065 //
1066 // The interpreter's synchronization code is factored out so that it can
1067 // be shared by method invocation and synchronized blocks.
1068 //%note synchronization_3
1069 
1070 //%note monitor_1
1071 IRT_ENTRY_NO_ASYNC(void, InterpreterRuntime::monitorenter(JavaThread* thread, BasicObjectLock* elem))
1072 #ifdef ASSERT
1073   thread->last_frame().interpreter_frame_verify_monitor(elem);
1074 #endif
1075   if (PrintBiasedLockingStatistics) {
1076     Atomic::inc(BiasedLocking::slow_path_entry_count_addr());


< prev index next >