src/share/vm/c1/c1_GraphBuilder.cpp

Print this page
rev 2161 : [mq]: initial-intrinsification-changes
rev 2163 : [mq]: client_assertion_failure


2899       s.reset_to_bci(0);
2900       scope_data()->set_stream(&s);
2901       s.next();
2902 
2903       // setup the initial block state
2904       _block = start_block;
2905       _state = start_block->state()->copy_for_parsing();
2906       _last  = start_block;
2907       load_local(doubleType, 0);
2908 
2909       // Emit the intrinsic node.
2910       bool result = try_inline_intrinsics(scope->method());
2911       if (!result) BAILOUT("failed to inline intrinsic");
2912       method_return(dpop());
2913 
2914       // connect the begin and end blocks and we're all done.
2915       BlockEnd* end = last()->as_BlockEnd();
2916       block()->set_end(end);
2917       break;
2918     }








































2919   default:
2920     scope_data()->add_to_work_list(start_block);
2921     iterate_all_blocks();
2922     break;
2923   }
2924   CHECK_BAILOUT();
2925 
2926   _start = setup_start_block(osr_bci, start_block, _osr_entry, _initial_state);
2927 
2928   eliminate_redundant_phis(_start);
2929 
2930   NOT_PRODUCT(if (PrintValueNumbering && Verbose) print_stats());
2931   // for osr compile, bailout if some requirements are not fulfilled
2932   if (osr_bci != -1) {
2933     BlockBegin* osr_block = blm.bci2block()->at(osr_bci);
2934     assert(osr_block->is_set(BlockBegin::was_visited_flag),"osr entry must have been visited for osr compile");
2935 
2936     // check if osr entry point has empty stack - we cannot handle non-empty stacks at osr entry points
2937     if (!osr_block->state()->stack_is_empty()) {
2938       BAILOUT("stack not empty at OSR entry point");


3136     case vmIntrinsics::_prefetchWrite       : return append_unsafe_prefetch(callee, false, true);
3137     case vmIntrinsics::_prefetchReadStatic  : return append_unsafe_prefetch(callee, true,  false);
3138     case vmIntrinsics::_prefetchWriteStatic : return append_unsafe_prefetch(callee, true,  true);
3139 
3140     case vmIntrinsics::_checkIndex    :
3141       if (!InlineNIOCheckIndex) return false;
3142       preserves_state = true;
3143       break;
3144     case vmIntrinsics::_putOrderedObject : return append_unsafe_put_obj(callee, T_OBJECT,  true);
3145     case vmIntrinsics::_putOrderedInt    : return append_unsafe_put_obj(callee, T_INT,     true);
3146     case vmIntrinsics::_putOrderedLong   : return append_unsafe_put_obj(callee, T_LONG,    true);
3147 
3148     case vmIntrinsics::_compareAndSwapLong:
3149       if (!VM_Version::supports_cx8()) return false;
3150       // fall through
3151     case vmIntrinsics::_compareAndSwapInt:
3152     case vmIntrinsics::_compareAndSwapObject:
3153       append_unsafe_CAS(callee);
3154       return true;
3155 









3156     default                       : return false; // do not inline
3157   }
3158   // create intrinsic node
3159   const bool has_receiver = !callee->is_static();
3160   ValueType* result_type = as_ValueType(callee->return_type());
3161   ValueStack* state_before = copy_state_for_exception();
3162 
3163   Values* args = state()->pop_arguments(callee->arg_size());
3164 
3165   if (is_profiling()) {
3166     // Don't profile in the special case where the root method
3167     // is the intrinsic
3168     if (callee != method()) {
3169       // Note that we'd collect profile data in this method if we wanted it.
3170       compilation()->set_would_profile(true);
3171       if (profile_calls()) {
3172         Value recv = NULL;
3173         if (has_receiver) {
3174           recv = args->at(0);
3175           null_check(recv);




2899       s.reset_to_bci(0);
2900       scope_data()->set_stream(&s);
2901       s.next();
2902 
2903       // setup the initial block state
2904       _block = start_block;
2905       _state = start_block->state()->copy_for_parsing();
2906       _last  = start_block;
2907       load_local(doubleType, 0);
2908 
2909       // Emit the intrinsic node.
2910       bool result = try_inline_intrinsics(scope->method());
2911       if (!result) BAILOUT("failed to inline intrinsic");
2912       method_return(dpop());
2913 
2914       // connect the begin and end blocks and we're all done.
2915       BlockEnd* end = last()->as_BlockEnd();
2916       block()->set_end(end);
2917       break;
2918     }
2919 
2920   case vmIntrinsics::_Reference_get:
2921     {
2922       if (UseG1GC) {
2923         // With java.lang.ref.Reference.get() we must go through the
2924         // intrinsic - when G1 is enabled - even when get() is the root
2925         // method of the compile so that, if necessary, the value in
2926         // the referent field of the reference object gets recorded by
2927         // the pre-barrier code.
2928         // Specifically, if G1 is enabled, the value in the referent
2929         // field is recorded by the G1 SATB pre barrier. This will
2930         // result in the referent being marked live and the reference
2931         // object removed from the list of discovered references during
2932         // reference processing.
2933 
2934         // Set up a stream so that appending instructions works properly.
2935         ciBytecodeStream s(scope->method());
2936         s.reset_to_bci(0);
2937         scope_data()->set_stream(&s);
2938         s.next();
2939 
2940         // setup the initial block state
2941         _block = start_block;
2942         _state = start_block->state()->copy_for_parsing();
2943         _last  = start_block;
2944         load_local(objectType, 0);
2945 
2946         // Emit the intrinsic node.
2947         bool result = try_inline_intrinsics(scope->method());
2948         if (!result) BAILOUT("failed to inline intrinsic");
2949         method_return(apop());
2950 
2951         // connect the begin and end blocks and we're all done.
2952         BlockEnd* end = last()->as_BlockEnd();
2953         block()->set_end(end);
2954         break;
2955       }
2956       // Otherwise, fall thru
2957     }
2958 
2959   default:
2960     scope_data()->add_to_work_list(start_block);
2961     iterate_all_blocks();
2962     break;
2963   }
2964   CHECK_BAILOUT();
2965 
2966   _start = setup_start_block(osr_bci, start_block, _osr_entry, _initial_state);
2967 
2968   eliminate_redundant_phis(_start);
2969 
2970   NOT_PRODUCT(if (PrintValueNumbering && Verbose) print_stats());
2971   // for osr compile, bailout if some requirements are not fulfilled
2972   if (osr_bci != -1) {
2973     BlockBegin* osr_block = blm.bci2block()->at(osr_bci);
2974     assert(osr_block->is_set(BlockBegin::was_visited_flag),"osr entry must have been visited for osr compile");
2975 
2976     // check if osr entry point has empty stack - we cannot handle non-empty stacks at osr entry points
2977     if (!osr_block->state()->stack_is_empty()) {
2978       BAILOUT("stack not empty at OSR entry point");


3176     case vmIntrinsics::_prefetchWrite       : return append_unsafe_prefetch(callee, false, true);
3177     case vmIntrinsics::_prefetchReadStatic  : return append_unsafe_prefetch(callee, true,  false);
3178     case vmIntrinsics::_prefetchWriteStatic : return append_unsafe_prefetch(callee, true,  true);
3179 
3180     case vmIntrinsics::_checkIndex    :
3181       if (!InlineNIOCheckIndex) return false;
3182       preserves_state = true;
3183       break;
3184     case vmIntrinsics::_putOrderedObject : return append_unsafe_put_obj(callee, T_OBJECT,  true);
3185     case vmIntrinsics::_putOrderedInt    : return append_unsafe_put_obj(callee, T_INT,     true);
3186     case vmIntrinsics::_putOrderedLong   : return append_unsafe_put_obj(callee, T_LONG,    true);
3187 
3188     case vmIntrinsics::_compareAndSwapLong:
3189       if (!VM_Version::supports_cx8()) return false;
3190       // fall through
3191     case vmIntrinsics::_compareAndSwapInt:
3192     case vmIntrinsics::_compareAndSwapObject:
3193       append_unsafe_CAS(callee);
3194       return true;
3195 
3196     case vmIntrinsics::_Reference_get:
3197       // It is only when G1 is enabled that we absolutely
3198       // need to use the intrinsic version of Reference.get()
3199       // so that the value in the referent field, if necessary,
3200       // can be registered by the pre-barrier code.
3201       if (!UseG1GC) return false;
3202       preserves_state = true;
3203       break;
3204 
3205     default                       : return false; // do not inline
3206   }
3207   // create intrinsic node
3208   const bool has_receiver = !callee->is_static();
3209   ValueType* result_type = as_ValueType(callee->return_type());
3210   ValueStack* state_before = copy_state_for_exception();
3211 
3212   Values* args = state()->pop_arguments(callee->arg_size());
3213 
3214   if (is_profiling()) {
3215     // Don't profile in the special case where the root method
3216     // is the intrinsic
3217     if (callee != method()) {
3218       // Note that we'd collect profile data in this method if we wanted it.
3219       compilation()->set_would_profile(true);
3220       if (profile_calls()) {
3221         Value recv = NULL;
3222         if (has_receiver) {
3223           recv = args->at(0);
3224           null_check(recv);