2930
2931 BlockList* bci2block = blm.bci2block();
2932 BlockBegin* start_block = bci2block->at(0);
2933
2934 push_root_scope(scope, bci2block, start_block);
2935
2936 // setup state for std entry
2937 _initial_state = state_at_entry();
2938 start_block->merge(_initial_state);
2939
2940 // complete graph
2941 _vmap = new ValueMap();
2942 switch (scope->method()->intrinsic_id()) {
2943 case vmIntrinsics::_dabs : // fall through
2944 case vmIntrinsics::_dsqrt : // fall through
2945 case vmIntrinsics::_dsin : // fall through
2946 case vmIntrinsics::_dcos : // fall through
2947 case vmIntrinsics::_dtan : // fall through
2948 case vmIntrinsics::_dlog : // fall through
2949 case vmIntrinsics::_dlog10 : // fall through
2950 {
2951 // Compiles where the root method is an intrinsic need a special
2952 // compilation environment because the bytecodes for the method
2953 // shouldn't be parsed during the compilation, only the special
2954 // Intrinsic node should be emitted. If this isn't done the
2955 // code for the inlined version will be different than the root
2956 // compiled version which could lead to monotonicity problems on
2957 // intel.
2958
2959 // Set up a stream so that appending instructions works properly.
2960 ciBytecodeStream s(scope->method());
2961 s.reset_to_bci(0);
2962 scope_data()->set_stream(&s);
2963 s.next();
2964
2965 // setup the initial block state
2966 _block = start_block;
2967 _state = start_block->state()->copy_for_parsing();
2968 _last = start_block;
2969 load_local(doubleType, 0);
2970
2971 // Emit the intrinsic node.
2972 bool result = try_inline_intrinsics(scope->method());
2973 if (!result) BAILOUT("failed to inline intrinsic");
2974 method_return(dpop());
2975
2976 // connect the begin and end blocks and we're all done.
2977 BlockEnd* end = last()->as_BlockEnd();
2978 block()->set_end(end);
2979 break;
2980 }
2981
2982 case vmIntrinsics::_Reference_get:
2983 {
2984 if (UseG1GC) {
2985 // With java.lang.ref.Reference.get() we must go through the
2986 // intrinsic - when G1 is enabled - even when get() is the root
2987 // method of the compile so that, if necessary, the value in
2988 // the referent field of the reference object gets recorded by
2989 // the pre-barrier code.
3150 break;
3151
3152 case vmIntrinsics::_getClass :
3153 if (!InlineClassNatives) return false;
3154 preserves_state = true;
3155 break;
3156
3157 case vmIntrinsics::_currentThread :
3158 if (!InlineThreadNatives) return false;
3159 preserves_state = true;
3160 cantrap = false;
3161 break;
3162
3163 case vmIntrinsics::_dabs : // fall through
3164 case vmIntrinsics::_dsqrt : // fall through
3165 case vmIntrinsics::_dsin : // fall through
3166 case vmIntrinsics::_dcos : // fall through
3167 case vmIntrinsics::_dtan : // fall through
3168 case vmIntrinsics::_dlog : // fall through
3169 case vmIntrinsics::_dlog10 : // fall through
3170 if (!InlineMathNatives) return false;
3171 cantrap = false;
3172 preserves_state = true;
3173 break;
3174
3175 // sun/misc/AtomicLong.attemptUpdate
3176 case vmIntrinsics::_attemptUpdate :
3177 if (!VM_Version::supports_cx8()) return false;
3178 if (!InlineAtomicLong) return false;
3179 preserves_state = true;
3180 break;
3181
3182 // Use special nodes for Unsafe instructions so we can more easily
3183 // perform an address-mode optimization on the raw variants
3184 case vmIntrinsics::_getObject : return append_unsafe_get_obj(callee, T_OBJECT, false);
3185 case vmIntrinsics::_getBoolean: return append_unsafe_get_obj(callee, T_BOOLEAN, false);
3186 case vmIntrinsics::_getByte : return append_unsafe_get_obj(callee, T_BYTE, false);
3187 case vmIntrinsics::_getShort : return append_unsafe_get_obj(callee, T_SHORT, false);
3188 case vmIntrinsics::_getChar : return append_unsafe_get_obj(callee, T_CHAR, false);
3189 case vmIntrinsics::_getInt : return append_unsafe_get_obj(callee, T_INT, false);
|
2930
2931 BlockList* bci2block = blm.bci2block();
2932 BlockBegin* start_block = bci2block->at(0);
2933
2934 push_root_scope(scope, bci2block, start_block);
2935
2936 // setup state for std entry
2937 _initial_state = state_at_entry();
2938 start_block->merge(_initial_state);
2939
2940 // complete graph
2941 _vmap = new ValueMap();
2942 switch (scope->method()->intrinsic_id()) {
2943 case vmIntrinsics::_dabs : // fall through
2944 case vmIntrinsics::_dsqrt : // fall through
2945 case vmIntrinsics::_dsin : // fall through
2946 case vmIntrinsics::_dcos : // fall through
2947 case vmIntrinsics::_dtan : // fall through
2948 case vmIntrinsics::_dlog : // fall through
2949 case vmIntrinsics::_dlog10 : // fall through
2950 case vmIntrinsics::_dexp : // fall through
2951 case vmIntrinsics::_dpow : // fall through
2952 {
2953 // Compiles where the root method is an intrinsic need a special
2954 // compilation environment because the bytecodes for the method
2955 // shouldn't be parsed during the compilation, only the special
2956 // Intrinsic node should be emitted. If this isn't done the
2957 // code for the inlined version will be different than the root
2958 // compiled version which could lead to monotonicity problems on
2959 // intel.
2960
2961 // Set up a stream so that appending instructions works properly.
2962 ciBytecodeStream s(scope->method());
2963 s.reset_to_bci(0);
2964 scope_data()->set_stream(&s);
2965 s.next();
2966
2967 // setup the initial block state
2968 _block = start_block;
2969 _state = start_block->state()->copy_for_parsing();
2970 _last = start_block;
2971 load_local(doubleType, 0);
2972 if (scope->method()->intrinsic_id() == vmIntrinsics::_dpow) {
2973 load_local(doubleType, 2);
2974 }
2975
2976 // Emit the intrinsic node.
2977 bool result = try_inline_intrinsics(scope->method());
2978 if (!result) BAILOUT("failed to inline intrinsic");
2979 method_return(dpop());
2980
2981 // connect the begin and end blocks and we're all done.
2982 BlockEnd* end = last()->as_BlockEnd();
2983 block()->set_end(end);
2984 break;
2985 }
2986
2987 case vmIntrinsics::_Reference_get:
2988 {
2989 if (UseG1GC) {
2990 // With java.lang.ref.Reference.get() we must go through the
2991 // intrinsic - when G1 is enabled - even when get() is the root
2992 // method of the compile so that, if necessary, the value in
2993 // the referent field of the reference object gets recorded by
2994 // the pre-barrier code.
3155 break;
3156
3157 case vmIntrinsics::_getClass :
3158 if (!InlineClassNatives) return false;
3159 preserves_state = true;
3160 break;
3161
3162 case vmIntrinsics::_currentThread :
3163 if (!InlineThreadNatives) return false;
3164 preserves_state = true;
3165 cantrap = false;
3166 break;
3167
3168 case vmIntrinsics::_dabs : // fall through
3169 case vmIntrinsics::_dsqrt : // fall through
3170 case vmIntrinsics::_dsin : // fall through
3171 case vmIntrinsics::_dcos : // fall through
3172 case vmIntrinsics::_dtan : // fall through
3173 case vmIntrinsics::_dlog : // fall through
3174 case vmIntrinsics::_dlog10 : // fall through
3175 case vmIntrinsics::_dexp : // fall through
3176 case vmIntrinsics::_dpow : // fall through
3177 if (!InlineMathNatives) return false;
3178 cantrap = false;
3179 preserves_state = true;
3180 break;
3181
3182 // sun/misc/AtomicLong.attemptUpdate
3183 case vmIntrinsics::_attemptUpdate :
3184 if (!VM_Version::supports_cx8()) return false;
3185 if (!InlineAtomicLong) return false;
3186 preserves_state = true;
3187 break;
3188
3189 // Use special nodes for Unsafe instructions so we can more easily
3190 // perform an address-mode optimization on the raw variants
3191 case vmIntrinsics::_getObject : return append_unsafe_get_obj(callee, T_OBJECT, false);
3192 case vmIntrinsics::_getBoolean: return append_unsafe_get_obj(callee, T_BOOLEAN, false);
3193 case vmIntrinsics::_getByte : return append_unsafe_get_obj(callee, T_BYTE, false);
3194 case vmIntrinsics::_getShort : return append_unsafe_get_obj(callee, T_SHORT, false);
3195 case vmIntrinsics::_getChar : return append_unsafe_get_obj(callee, T_CHAR, false);
3196 case vmIntrinsics::_getInt : return append_unsafe_get_obj(callee, T_INT, false);
|