2911 // connect the begin and end blocks and we're all done. |
2911 // connect the begin and end blocks and we're all done. |
2912 BlockEnd* end = last()->as_BlockEnd(); |
2912 BlockEnd* end = last()->as_BlockEnd(); |
2913 block()->set_end(end); |
2913 block()->set_end(end); |
2914 break; |
2914 break; |
2915 } |
2915 } |
|
2916 |
|
2917 case vmIntrinsics::_Reference_get: |
|
2918 { |
|
2919 if (UseG1GC) { |
|
2920 // With java.lang.ref.Reference.get() we must go through the |
|
2921 // intrinsic - when G1 is enabled - even when get() is the root |
|
2922 // method of the compile so that, if necessary, the value in |
|
2923 // the referent field of the reference object gets recorded by |
|
2924 // the pre-barrier code. |
|
2925 // Specifically, if G1 is enabled, the value in the referent |
|
2926 // field is recorded by the G1 SATB pre-barrier. This will |
|
2927 // result in the referent being marked live and the reference |
|
2928 // object removed from the list of discovered references during |
|
2929 // reference processing. |
|
2930 |
|
2931 // Set up a stream so that appending instructions works properly. |
|
2932 ciBytecodeStream s(scope->method()); |
|
2933 s.reset_to_bci(0); |
|
2934 scope_data()->set_stream(&s); |
|
2935 s.next(); |
|
2936 |
|
2937 // set up the initial block state |
|
2938 _block = start_block; |
|
2939 _state = start_block->state()->copy_for_parsing(); |
|
2940 _last = start_block; |
|
2941 load_local(objectType, 0); |
|
2942 |
|
2943 // Emit the intrinsic node. |
|
2944 bool result = try_inline_intrinsics(scope->method()); |
|
2945 if (!result) BAILOUT("failed to inline intrinsic"); |
|
2946 method_return(apop()); |
|
2947 |
|
2948 // connect the begin and end blocks and we're all done. |
|
2949 BlockEnd* end = last()->as_BlockEnd(); |
|
2950 block()->set_end(end); |
|
2951 break; |
|
2952 } |
|
2953 // Otherwise, fall through |
|
2954 } |
|
2955 |
2916 default: |
2956 default: |
2917 scope_data()->add_to_work_list(start_block); |
2957 scope_data()->add_to_work_list(start_block); |
2918 iterate_all_blocks(); |
2958 iterate_all_blocks(); |
2919 break; |
2959 break; |
2920 } |
2960 } |
3147 // fall through |
3187 // fall through |
3148 case vmIntrinsics::_compareAndSwapInt: |
3188 case vmIntrinsics::_compareAndSwapInt: |
3149 case vmIntrinsics::_compareAndSwapObject: |
3189 case vmIntrinsics::_compareAndSwapObject: |
3150 append_unsafe_CAS(callee); |
3190 append_unsafe_CAS(callee); |
3151 return true; |
3191 return true; |
|
3192 |
|
3193 case vmIntrinsics::_Reference_get: |
|
3194 // It is only when G1 is enabled that we absolutely |
|
3195 // need to use the intrinsic version of Reference.get() |
|
3196 // so that the value in the referent field, if necessary, |
|
3197 // can be registered by the pre-barrier code. |
|
3198 if (!UseG1GC) return false; |
|
3199 preserves_state = true; |
|
3200 break; |
3152 |
3201 |
3153 default : return false; // do not inline |
3202 default : return false; // do not inline |
3154 } |
3203 } |
3155 // create intrinsic node |
3204 // create intrinsic node |
3156 const bool has_receiver = !callee->is_static(); |
3205 const bool has_receiver = !callee->is_static(); |