
src/share/vm/opto/library_call.cpp


*** 242,263 ****
    // Unsafe.getObject should be recorded in an SATB log buffer.
    void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
    bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
    static bool klass_needs_init_guard(Node* kls);
    bool inline_unsafe_allocate();
    bool inline_unsafe_copyMemory();
    bool inline_native_currentThread();
  #ifdef TRACE_HAVE_INTRINSICS
    bool inline_native_classID();
    bool inline_native_threadID();
  #endif
    bool inline_native_time_funcs(address method, const char* funcName);
    bool inline_native_isInterrupted();
    bool inline_native_Class_query(vmIntrinsics::ID id);
    bool inline_native_subtype_check();
- 
-   bool inline_native_newArray();
    bool inline_native_getLength();
    bool inline_array_copyOf(bool is_copyOfRange);
    bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
    bool inline_objects_checkIndex();
    void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
--- 242,262 ----
    // Unsafe.getObject should be recorded in an SATB log buffer.
    void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
    bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
    static bool klass_needs_init_guard(Node* kls);
    bool inline_unsafe_allocate();
+   bool inline_unsafe_newArray(bool uninit);
    bool inline_unsafe_copyMemory();
    bool inline_native_currentThread();
  #ifdef TRACE_HAVE_INTRINSICS
    bool inline_native_classID();
    bool inline_native_threadID();
  #endif
    bool inline_native_time_funcs(address method, const char* funcName);
    bool inline_native_isInterrupted();
    bool inline_native_Class_query(vmIntrinsics::ID id);
    bool inline_native_subtype_check();
    bool inline_native_getLength();
    bool inline_array_copyOf(bool is_copyOfRange);
    bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
    bool inline_objects_checkIndex();
    void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
*** 648,658 ****
  #endif
    case vmIntrinsics::_currentTimeMillis:    return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
    case vmIntrinsics::_nanoTime:             return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
    case vmIntrinsics::_allocateInstance:     return inline_unsafe_allocate();
    case vmIntrinsics::_copyMemory:           return inline_unsafe_copyMemory();
!   case vmIntrinsics::_newArray:             return inline_native_newArray();
    case vmIntrinsics::_getLength:            return inline_native_getLength();
    case vmIntrinsics::_copyOf:               return inline_array_copyOf(false);
    case vmIntrinsics::_copyOfRange:          return inline_array_copyOf(true);
    case vmIntrinsics::_equalsB:              return inline_array_equals(StrIntrinsicNode::LL);
    case vmIntrinsics::_equalsC:              return inline_array_equals(StrIntrinsicNode::UU);
--- 647,658 ----
  #endif
    case vmIntrinsics::_currentTimeMillis:    return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
    case vmIntrinsics::_nanoTime:             return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
    case vmIntrinsics::_allocateInstance:     return inline_unsafe_allocate();
    case vmIntrinsics::_copyMemory:           return inline_unsafe_copyMemory();
!   case vmIntrinsics::_allocateArrayUninit:  return inline_unsafe_newArray(true);
!   case vmIntrinsics::_newArray:             return inline_unsafe_newArray(false);
    case vmIntrinsics::_getLength:            return inline_native_getLength();
    case vmIntrinsics::_copyOf:               return inline_array_copyOf(false);
    case vmIntrinsics::_copyOfRange:          return inline_array_copyOf(true);
    case vmIntrinsics::_equalsB:              return inline_array_equals(StrIntrinsicNode::LL);
    case vmIntrinsics::_equalsC:              return inline_array_equals(StrIntrinsicNode::UU);
*** 3577,3589 ****
  }
  
  //-----------------------inline_native_newArray--------------------------
  // private static native Object java.lang.reflect.newArray(Class<?> componentType, int length);
! bool LibraryCallKit::inline_native_newArray() {
!   Node* mirror    = argument(0);
!   Node* count_val = argument(1);
  
    mirror = null_check(mirror);
    // If mirror or obj is dead, only null-path is taken.
    if (stopped())  return true;
--- 3577,3597 ----
  }
  
  //-----------------------inline_native_newArray--------------------------
  // private static native Object java.lang.reflect.newArray(Class<?> componentType, int length);
! // private native Object Unsafe.allocateArrayUninit0(Class<?> cls, int size);
! bool LibraryCallKit::inline_unsafe_newArray(bool uninit) {
!   Node* mirror;
!   Node* count_val;
!   if (uninit) {
!     mirror    = argument(1);
!     count_val = argument(2);
!   } else {
!     mirror    = argument(0);
!     count_val = argument(1);
!   }
  
    mirror = null_check(mirror);
    // If mirror or obj is dead, only null-path is taken.
    if (stopped())  return true;
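Note on the argument indices: Unsafe.allocateArrayUninit0 is an instance method on Unsafe, so the intrinsic sees the Unsafe receiver at argument(0) and the class mirror and length shift to argument(1) and argument(2) on the uninit path, whereas the static reflective newArray keeps them at argument(0) and argument(1). For orientation only, here is a minimal Java-side sketch of that entry point; the 0-suffixed native name and its (Class<?>, int) shape come from the comment in the hunk above, while the public wrapper name and its explicit checks are assumptions for illustration. The fallback body still zeroes the array, unlike the intrinsic's uninit path.

    import java.lang.reflect.Array;

    // Sketch only: allocateArrayUninit0 and its (Class<?>, int) shape are taken
    // from the comment in the hunk above; the wrapper name and the explicit
    // checks are illustrative assumptions, not part of this change.
    public final class UnsafeNewArraySketch {
        // Stand-in for the native allocateArrayUninit0; this fallback uses the
        // reflective allocator, which does zero the elements.
        private static Object allocateArrayUninit0(Class<?> cls, int size) {
            return Array.newInstance(cls, size);
        }

        public static Object allocateArrayUninit(Class<?> cls, int size) {
            if (cls == null) {
                throw new NullPointerException("component type is null");
            }
            if (size < 0) {
                throw new NegativeArraySizeException(Integer.toString(size));
            }
            return allocateArrayUninit0(cls, size);
        }
    }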
*** 3624,3633 ****
--- 3632,3647 ----
      Node* obj = new_array(klass_node, count_val, 0);  // no arguments to push
      result_reg->init_req(_normal_path, control());
      result_val->init_req(_normal_path, obj);
      result_io ->init_req(_normal_path, i_o());
      result_mem->init_req(_normal_path, reset_memory());
+ 
+     if (uninit) {
+       // Mark the allocation so that zeroing is skipped
+       AllocateArrayNode* alloc = AllocateArrayNode::Ideal_array_allocation(obj, &_gvn);
+       alloc->maybe_set_complete(&_gvn);
+     }
    }
  
    // Return the combined state.
    set_i_o( _gvn.transform(result_io) );
    set_all_memory( _gvn.transform(result_mem));
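On the uninit path, maybe_set_complete() marks the new allocation's initialization as already complete, so C2 omits the element-clearing step (the "zeroing is skipped" in the comment above); correctness then rests on the caller storing every element before the array is read or published. A hedged illustration of that caller-side contract follows, using an ordinary (zeroed) Java array as a stand-in, since the uninitialized allocator is not a public API.

    // Illustration of the caller-side contract for a zero-skipping allocation:
    // every index must be written before any index is read. An ordinary
    // new byte[] (which the VM does zero) stands in for the uninitialized
    // allocation here.
    public final class UninitArrayContractDemo {
        static byte[] fillBlock(int len) {
            byte[] buf = new byte[len];        // on the uninit path, no zeroing would happen here
            for (int i = 0; i < len; i++) {
                buf[i] = (byte) (i & 0x7f);    // fully overwrite before the array escapes
            }
            return buf;
        }

        public static void main(String[] args) {
            System.out.println(fillBlock(16)[15]);   // prints 15
        }
    }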