src/share/vm/c1/c1_GraphBuilder.cpp

rev 2237 : [mq]: initial-intrinsification-changes
rev 2239 : [mq]: client_assertion_fauilure


2895       s.reset_to_bci(0);
2896       scope_data()->set_stream(&s);
2897       s.next();
2898 
2899       // set up the initial block state
2900       _block = start_block;
2901       _state = start_block->state()->copy_for_parsing();
2902       _last  = start_block;
2903       load_local(doubleType, 0);
2904 
2905       // Emit the intrinsic node.
2906       bool result = try_inline_intrinsics(scope->method());
2907       if (!result) BAILOUT("failed to inline intrinsic");
2908       method_return(dpop());
2909 
2910       // connect the begin and end blocks and we're all done.
2911       BlockEnd* end = last()->as_BlockEnd();
2912       block()->set_end(end);
2913       break;
2914     }
2915 
2916   case vmIntrinsics::_Reference_get:
2917     {
2918       if (UseG1GC) {
2919         // With java.lang.ref.Reference.get() we must go through the
2920         // intrinsic - when G1 is enabled - even when get() is the root
2921         // method of the compile so that, if necessary, the value in
2922         // the referent field of the reference object gets recorded by
2923         // the pre-barrier code.
2924         // Specifically, if G1 is enabled, the value in the referent
2925         // field is recorded by the G1 SATB pre-barrier. This will
2926         // result in the referent being marked live and the reference
2927         // object removed from the list of discovered references during
2928         // reference processing.
2929 
2930         // Set up a stream so that appending instructions works properly.
2931         ciBytecodeStream s(scope->method());
2932         s.reset_to_bci(0);
2933         scope_data()->set_stream(&s);
2934         s.next();
2935 
2936         // set up the initial block state
2937         _block = start_block;
2938         _state = start_block->state()->copy_for_parsing();
2939         _last  = start_block;
2940         load_local(objectType, 0);
2941 
2942         // Emit the intrinsic node.
2943         bool result = try_inline_intrinsics(scope->method());
2944         if (!result) BAILOUT("failed to inline intrinsic");
2945         method_return(apop());
2946 
2947         // connect the begin and end blocks and we're all done.
2948         BlockEnd* end = last()->as_BlockEnd();
2949         block()->set_end(end);
2950         break;
2951       }
2952       // Otherwise, fall through
2953     }
2954 
2955   default:
2956     scope_data()->add_to_work_list(start_block);
2957     iterate_all_blocks();
2958     break;
2959   }
2960   CHECK_BAILOUT();
2961 
2962   _start = setup_start_block(osr_bci, start_block, _osr_entry, _initial_state);
2963 
2964   eliminate_redundant_phis(_start);
2965 
2966   NOT_PRODUCT(if (PrintValueNumbering && Verbose) print_stats());
2967   // for osr compile, bailout if some requirements are not fulfilled
2968   if (osr_bci != -1) {
2969     BlockBegin* osr_block = blm.bci2block()->at(osr_bci);
2970     assert(osr_block->is_set(BlockBegin::was_visited_flag),"osr entry must have been visited for osr compile");
2971 
2972     // check if osr entry point has empty stack - we cannot handle non-empty stacks at osr entry points
2973     if (!osr_block->state()->stack_is_empty()) {
2974       BAILOUT("stack not empty at OSR entry point");
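
For readers without G1 background: the long comment in the _Reference_get case above refers to G1's snapshot-at-the-beginning (SATB) marking, whose pre-barrier records a reference value before it can vanish from the object graph, so the concurrent marker still treats the target as live. A normally compiled Reference.get() would load the referent field with no barrier at all, which is exactly what the intrinsic path avoids. Below is a minimal, hypothetical sketch of the recording idea only; oopDesc, satb_queue and satb_pre_barrier here are illustrative stand-ins, not HotSpot's actual types or API.

    #include <vector>

    struct oopDesc;                            // stand-in for a heap object
    static std::vector<oopDesc*> satb_queue;   // per-thread in a real VM

    // Record a reference the mutator has just loaded (or is about to
    // overwrite) so concurrent marking keeps the target alive even if
    // the application never stores the reference anywhere visible.
    static void satb_pre_barrier(oopDesc* loaded_value) {
      if (loaded_value != nullptr) {
        satb_queue.push_back(loaded_value);    // drained later by the marker
      }
    }

In the intrinsic version of Reference.get(), the referent load feeds such a barrier; as the comment above says, that is what marks the referent live and gets the Reference object removed from the list of discovered references during reference processing.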


3172     case vmIntrinsics::_prefetchWrite       : return append_unsafe_prefetch(callee, false, true);
3173     case vmIntrinsics::_prefetchReadStatic  : return append_unsafe_prefetch(callee, true,  false);
3174     case vmIntrinsics::_prefetchWriteStatic : return append_unsafe_prefetch(callee, true,  true);
3175 
3176     case vmIntrinsics::_checkIndex    :
3177       if (!InlineNIOCheckIndex) return false;
3178       preserves_state = true;
3179       break;
3180     case vmIntrinsics::_putOrderedObject : return append_unsafe_put_obj(callee, T_OBJECT,  true);
3181     case vmIntrinsics::_putOrderedInt    : return append_unsafe_put_obj(callee, T_INT,     true);
3182     case vmIntrinsics::_putOrderedLong   : return append_unsafe_put_obj(callee, T_LONG,    true);
3183 
3184     case vmIntrinsics::_compareAndSwapLong:
3185       if (!VM_Version::supports_cx8()) return false;
3186       // fall through
3187     case vmIntrinsics::_compareAndSwapInt:
3188     case vmIntrinsics::_compareAndSwapObject:
3189       append_unsafe_CAS(callee);
3190       return true;
3191 
3192     case vmIntrinsics::_Reference_get:
3193       // It is only when G1 is enabled that we absolutely
3194       // need to use the intrinsic version of Reference.get()
3195       // so that the value in the referent field, if necessary,
3196       // can be registered by the pre-barrier code.
3197       if (!UseG1GC) return false;
3198       preserves_state = true;
3199       break;
3200 
3201     default                       : return false; // do not inline
3202   }
3203   // create intrinsic node
3204   const bool has_receiver = !callee->is_static();
3205   ValueType* result_type = as_ValueType(callee->return_type());
3206   ValueStack* state_before = copy_state_for_exception();
3207 
3208   Values* args = state()->pop_arguments(callee->arg_size());
3209 
3210   if (is_profiling()) {
3211     // Don't profile in the special case where the root method
3212     // is the intrinsic
3213     if (callee != method()) {
3214       // Note that we'd collect profile data in this method if we wanted it.
3215       compilation()->set_would_profile(true);
3216       if (profile_calls()) {
3217         Value recv = NULL;
3218         if (has_receiver) {
3219           recv = args->at(0);
3220           null_check(recv);
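
The routine this switch belongs to, try_inline_intrinsics, follows one shape throughout: a per-intrinsic gate that either refuses to inline (return false) or marks the future node state-preserving, then generic tail code that pops the call's arguments and materializes a single Intrinsic node, with call profiling skipped when the intrinsic itself is the root method. A condensed, hypothetical sketch of the gating half, with made-up names standing in for C1's types and the -XX flags:

    #include <cstdio>

    enum IntrinsicId { kCheckIndex, kReferenceGet, kUnknown };

    struct Flags {                      // stand-ins for the -XX flags
      bool inline_nio_check_index;      // cf. InlineNIOCheckIndex
      bool use_g1_gc;                   // cf. UseG1GC
    };

    // Returns false when the intrinsic must not be inlined; on success,
    // *preserves_state reports that the node has no VM-visible side effects.
    static bool gate_intrinsic(IntrinsicId id, const Flags& f,
                               bool* preserves_state) {
      switch (id) {
        case kCheckIndex:
          if (!f.inline_nio_check_index) return false;  // flag-gated
          *preserves_state = true;
          return true;
        case kReferenceGet:
          if (!f.use_g1_gc) return false;  // intrinsic only needed under G1
          *preserves_state = true;
          return true;
        default:
          return false;                    // unrecognized: do not inline
      }
    }

    int main() {
      Flags f = { true, true };
      bool preserves = false;
      std::printf("inline Reference.get? %d\n",
                  (int) gate_intrinsic(kReferenceGet, f, &preserves));
      return 0;
    }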