# HG changeset patch # User kbarrett # Date 1529704018 14400 # Node ID 9375184cec98eddc9b62224f66b6d1e911dacd5d # Parent 081b132c4dc050cc97592e08d150cc27b642fbec 8205459: Rename Access API flag decorators Summary: Rename OOP_NOT_NULL, IN_HEAP_ARRAY, AS_DEST_NOT_INITIALIZED Reviewed-by: pliden, stefank diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/aarch64/gc/g1/g1BarrierSetAssembler_aarch64.cpp --- a/src/hotspot/cpu/aarch64/gc/g1/g1BarrierSetAssembler_aarch64.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/aarch64/gc/g1/g1BarrierSetAssembler_aarch64.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -43,7 +43,7 @@ void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators, Register addr, Register count, RegSet saved_regs) { - bool dest_uninitialized = (decorators & AS_DEST_NOT_INITIALIZED) != 0; + bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0; if (!dest_uninitialized) { __ push(saved_regs, sp); if (count == c_rarg0) { diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp --- a/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -37,14 +37,14 @@ bool in_heap = (decorators & IN_HEAP) != 0; bool in_native = (decorators & IN_NATIVE) != 0; - bool oop_not_null = (decorators & OOP_NOT_NULL) != 0; + bool is_not_null = (decorators & IS_NOT_NULL) != 0; switch (type) { case T_OBJECT: case T_ARRAY: { if (in_heap) { if (UseCompressedOops) { __ ldrw(dst, src); - if (oop_not_null) { + if (is_not_null) { __ decode_heap_oop_not_null(dst); } else { __ decode_heap_oop(dst); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/aarch64/gc/shared/cardTableBarrierSetAssembler_aarch64.cpp --- a/src/hotspot/cpu/aarch64/gc/shared/cardTableBarrierSetAssembler_aarch64.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/aarch64/gc/shared/cardTableBarrierSetAssembler_aarch64.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -91,9 +91,9 @@ void CardTableBarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Address dst, Register val, Register tmp1, Register tmp2) { bool in_heap = (decorators & IN_HEAP) != 0; - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; bool needs_post_barrier = val != noreg && in_heap; BarrierSetAssembler::store_at(masm, decorators, type, dst, val, noreg, noreg); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp --- a/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -3991,7 +3991,7 @@ void MacroAssembler::load_heap_oop_not_null(Register dst, Address src, Register tmp1, Register thread_tmp, DecoratorSet decorators) { - access_load_at(T_OBJECT, IN_HEAP | OOP_NOT_NULL | decorators, dst, src, tmp1, thread_tmp); + access_load_at(T_OBJECT, IN_HEAP | IS_NOT_NULL | decorators, dst, src, tmp1, thread_tmp); } void MacroAssembler::store_heap_oop(Address dst, Register src, Register tmp1, diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp --- a/src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp Fri Jun 22 16:06:43 2018 -0400 
+++ b/src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -1351,9 +1351,9 @@ BLOCK_COMMENT("Entry:"); } - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_DISJOINT; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_DISJOINT; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -1425,9 +1425,9 @@ __ cmp(rscratch1, count, Assembler::LSL, exact_log2(size)); __ br(Assembler::HS, nooverlap_target); - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY; + DecoratorSet decorators = IN_HEAP | IS_ARRAY; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -1789,10 +1789,10 @@ } #endif //ASSERT - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_CHECKCAST; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_CHECKCAST; bool is_oop = true; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } BarrierSetAssembler *bs = BarrierSet::barrier_set()->barrier_set_assembler(); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/aarch64/templateTable_aarch64.cpp --- a/src/hotspot/cpu/aarch64/templateTable_aarch64.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/aarch64/templateTable_aarch64.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -761,7 +761,7 @@ // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2); - __ access_load_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg); + __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg); } void TemplateTable::laload() @@ -773,7 +773,7 @@ // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_LONG) >> 3); - __ access_load_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg); + __ access_load_at(T_LONG, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg); } void TemplateTable::faload() @@ -785,7 +785,7 @@ // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2); - __ access_load_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg); + __ access_load_at(T_FLOAT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg); } void TemplateTable::daload() @@ -797,7 +797,7 @@ // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3); - __ access_load_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg); + __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg); } void TemplateTable::aaload() @@ -812,7 +812,7 @@ do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, - IN_HEAP_ARRAY); + IS_ARRAY); } void TemplateTable::baload() @@ -824,7 +824,7 @@ // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0); - __ access_load_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg); + __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, 
r1, Address::uxtw(0)), noreg, noreg); } void TemplateTable::caload() @@ -836,7 +836,7 @@ // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1); - __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg); + __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg); } // iload followed by caload frequent pair @@ -853,7 +853,7 @@ // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1); - __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg); + __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg); } void TemplateTable::saload() @@ -865,7 +865,7 @@ // r1: index index_check(r0, r1); // leaves index in r1, kills rscratch1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_SHORT) >> 1); - __ access_load_at(T_SHORT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg); + __ access_load_at(T_SHORT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg); } void TemplateTable::iload(int n) @@ -1059,7 +1059,7 @@ // r3: array index_check(r3, r1); // prefer index in r1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2); - __ access_store_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(2)), r0, noreg, noreg); + __ access_store_at(T_INT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), r0, noreg, noreg); } void TemplateTable::lastore() { @@ -1071,7 +1071,7 @@ // r3: array index_check(r3, r1); // prefer index in r1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_LONG) >> 3); - __ access_store_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(3)), r0, noreg, noreg); + __ access_store_at(T_LONG, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), r0, noreg, noreg); } void TemplateTable::fastore() { @@ -1083,7 +1083,7 @@ // r3: array index_check(r3, r1); // prefer index in r1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2); - __ access_store_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg); + __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg); } void TemplateTable::dastore() { @@ -1095,7 +1095,7 @@ // r3: array index_check(r3, r1); // prefer index in r1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3); - __ access_store_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg); + __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg); } void TemplateTable::aastore() { @@ -1136,7 +1136,7 @@ // Get the value we will store __ ldr(r0, at_tos()); // Now store using the appropriate barrier - do_oop_store(_masm, element_address, r0, IN_HEAP_ARRAY); + do_oop_store(_masm, element_address, r0, IS_ARRAY); __ b(done); // Have a NULL in r0, r3=array, r2=index. 
Store NULL at ary[idx] @@ -1144,7 +1144,7 @@ __ profile_null_seen(r2); // Store a NULL - do_oop_store(_masm, element_address, noreg, IN_HEAP_ARRAY); + do_oop_store(_masm, element_address, noreg, IS_ARRAY); // Pop stack arguments __ bind(done); @@ -1172,7 +1172,7 @@ __ bind(L_skip); __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0); - __ access_store_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(0)), r0, noreg, noreg); + __ access_store_at(T_BYTE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(0)), r0, noreg, noreg); } void TemplateTable::castore() @@ -1185,7 +1185,7 @@ // r3: array index_check(r3, r1); // prefer index in r1 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1); - __ access_store_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(1)), r0, noreg, noreg); + __ access_store_at(T_CHAR, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(1)), r0, noreg, noreg); } void TemplateTable::sastore() diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/arm/gc/g1/g1BarrierSetAssembler_arm.cpp --- a/src/hotspot/cpu/arm/gc/g1/g1BarrierSetAssembler_arm.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/arm/gc/g1/g1BarrierSetAssembler_arm.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -53,7 +53,7 @@ void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators, Register addr, Register count, int callee_saved_regs) { - bool dest_uninitialized = (decorators & AS_DEST_NOT_INITIALIZED) != 0; + bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0; if (!dest_uninitialized) { assert( addr->encoding() < callee_saved_regs, "addr must be saved"); assert(count->encoding() < callee_saved_regs, "count must be saved"); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/arm/gc/shared/cardTableBarrierSetAssembler_arm.cpp --- a/src/hotspot/cpu/arm/gc/shared/cardTableBarrierSetAssembler_arm.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/arm/gc/shared/cardTableBarrierSetAssembler_arm.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -75,9 +75,9 @@ void CardTableBarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Address obj, Register new_val, Register tmp1, Register tmp2, Register tmp3, bool is_null) { - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; if (is_null) { BarrierSetAssembler::store_at(masm, decorators, type, obj, new_val, tmp1, tmp2, tmp3, true); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/arm/stubGenerator_arm.cpp --- a/src/hotspot/cpu/arm/stubGenerator_arm.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/arm/stubGenerator_arm.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -2945,7 +2945,7 @@ __ push(LR); #endif // AARCH64 - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY; + DecoratorSet decorators = IN_HEAP | IS_ARRAY; if (disjoint) { decorators |= ARRAYCOPY_DISJOINT; } @@ -3217,7 +3217,7 @@ pushed+=1; #endif // AARCH64 - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_CHECKCAST; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_CHECKCAST; BarrierSetAssembler *bs = BarrierSet::barrier_set()->barrier_set_assembler(); bs->arraycopy_prologue(_masm, decorators, true, to, count, callee_saved_regs); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/arm/templateTable_arm.cpp --- 
a/src/hotspot/cpu/arm/templateTable_arm.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/arm/templateTable_arm.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -943,7 +943,7 @@ const Register Rindex = R0_tos; index_check(Rarray, Rindex); - do_oop_load(_masm, R0_tos, get_array_elem_addr(T_OBJECT, Rarray, Rindex, Rtemp), IN_HEAP_ARRAY); + do_oop_load(_masm, R0_tos, get_array_elem_addr(T_OBJECT, Rarray, Rindex, Rtemp), IS_ARRAY); } @@ -1328,7 +1328,7 @@ __ add(Raddr_1, Raddr_1, AsmOperand(Rindex_4, lsl, LogBytesPerHeapOop)); // Now store using the appropriate barrier - do_oop_store(_masm, Raddr_1, Rvalue_2, Rtemp, R0_tmp, R3_tmp, false, IN_HEAP_ARRAY); + do_oop_store(_masm, Raddr_1, Rvalue_2, Rtemp, R0_tmp, R3_tmp, false, IS_ARRAY); __ b(done); __ bind(throw_array_store); @@ -1344,7 +1344,7 @@ __ profile_null_seen(R0_tmp); // Store a NULL - do_oop_store(_masm, Address::indexed_oop(Raddr_1, Rindex_4), Rvalue_2, Rtemp, R0_tmp, R3_tmp, true, IN_HEAP_ARRAY); + do_oop_store(_masm, Address::indexed_oop(Raddr_1, Rindex_4), Rvalue_2, Rtemp, R0_tmp, R3_tmp, true, IS_ARRAY); // Pop stack arguments __ bind(done); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/ppc/gc/g1/g1BarrierSetAssembler_ppc.cpp --- a/src/hotspot/cpu/ppc/gc/g1/g1BarrierSetAssembler_ppc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/ppc/gc/g1/g1BarrierSetAssembler_ppc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -44,7 +44,7 @@ void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators, Register from, Register to, Register count, Register preserve1, Register preserve2) { - bool dest_uninitialized = (decorators & AS_DEST_NOT_INITIALIZED) != 0; + bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0; // With G1, don't generate the call if we statically know that the target in uninitialized if (!dest_uninitialized) { int spill_slots = 3; @@ -107,7 +107,7 @@ void G1BarrierSetAssembler::g1_write_barrier_pre(MacroAssembler* masm, DecoratorSet decorators, Register obj, RegisterOrConstant ind_or_offs, Register pre_val, Register tmp1, Register tmp2, bool needs_frame) { - bool not_null = (decorators & OOP_NOT_NULL) != 0, + bool not_null = (decorators & IS_NOT_NULL) != 0, preloaded = obj == noreg; Register nv_save = noreg; @@ -205,7 +205,7 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm, DecoratorSet decorators, Register store_addr, Register new_val, Register tmp1, Register tmp2, Register tmp3) { - bool not_null = (decorators & OOP_NOT_NULL) != 0; + bool not_null = (decorators & IS_NOT_NULL) != 0; Label runtime, filtered; assert_different_registers(store_addr, new_val, tmp1, tmp2); @@ -279,9 +279,9 @@ void G1BarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Register base, RegisterOrConstant ind_or_offs, Register val, Register tmp1, Register tmp2, Register tmp3, bool needs_frame) { - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; // Load and record the previous value. g1_write_barrier_pre(masm, decorators, base, ind_or_offs, tmp1, tmp2, tmp3, needs_frame); @@ -318,7 +318,7 @@ // these parameters the pre-barrier does not generate // the load of the previous value // We only reach here if value is not null. 
- g1_write_barrier_pre(masm, decorators | OOP_NOT_NULL, noreg /* obj */, (intptr_t)0, dst /* pre_val */, + g1_write_barrier_pre(masm, decorators | IS_NOT_NULL, noreg /* obj */, (intptr_t)0, dst /* pre_val */, tmp1, tmp2, needs_frame); } __ bind(done); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp --- a/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -35,7 +35,7 @@ Register tmp1, Register tmp2, Register tmp3, bool needs_frame) { bool in_heap = (decorators & IN_HEAP) != 0; bool in_native = (decorators & IN_NATIVE) != 0; - bool not_null = (decorators & OOP_NOT_NULL) != 0; + bool not_null = (decorators & IS_NOT_NULL) != 0; assert(in_heap || in_native, "where?"); assert_different_registers(base, val, tmp1, tmp2, R0); @@ -68,7 +68,7 @@ Register tmp1, Register tmp2, bool needs_frame, Label *L_handle_null) { bool in_heap = (decorators & IN_HEAP) != 0; bool in_native = (decorators & IN_NATIVE) != 0; - bool not_null = (decorators & OOP_NOT_NULL) != 0; + bool not_null = (decorators & IS_NOT_NULL) != 0; assert(in_heap || in_native, "where?"); assert_different_registers(ind_or_offs.register_or_noreg(), dst, R0); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/ppc/gc/shared/cardTableBarrierSetAssembler_ppc.cpp --- a/src/hotspot/cpu/ppc/gc/shared/cardTableBarrierSetAssembler_ppc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/ppc/gc/shared/cardTableBarrierSetAssembler_ppc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -93,9 +93,9 @@ void CardTableBarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Register base, RegisterOrConstant ind_or_offs, Register val, Register tmp1, Register tmp2, Register tmp3, bool needs_frame) { - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; BarrierSetAssembler::store_at(masm, decorators, type, base, ind_or_offs, val, tmp1, tmp2, tmp3, needs_frame); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/ppc/macroAssembler_ppc.cpp --- a/src/hotspot/cpu/ppc/macroAssembler_ppc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/ppc/macroAssembler_ppc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -2046,7 +2046,7 @@ assert_different_registers(mtype_reg, mh_reg, temp_reg); // Compare method type against that of the receiver. load_heap_oop(temp_reg, delayed_value(java_lang_invoke_MethodHandle::type_offset_in_bytes, temp_reg), mh_reg, - noreg, noreg, false, OOP_NOT_NULL); + noreg, noreg, false, IS_NOT_NULL); cmpd(CCR0, temp_reg, mtype_reg); bne(CCR0, wrong_method_type); } diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/ppc/macroAssembler_ppc.inline.hpp --- a/src/hotspot/cpu/ppc/macroAssembler_ppc.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/ppc/macroAssembler_ppc.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -1,5 +1,5 @@ /* - * Copyright (c) 2002, 2015, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 2002, 2018, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2012, 2015 SAP SE. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 
* @@ -329,7 +329,7 @@ inline void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators, Register base, RegisterOrConstant ind_or_offs, Register val, Register tmp1, Register tmp2, Register tmp3, bool needs_frame) { - assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL | + assert((decorators & ~(AS_RAW | IN_HEAP | IN_NATIVE | IS_ARRAY | IS_NOT_NULL | ON_UNKNOWN_OOP_REF)) == 0, "unsupported decorator"); BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler(); bool as_raw = (decorators & AS_RAW) != 0; @@ -348,7 +348,7 @@ inline void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators, Register base, RegisterOrConstant ind_or_offs, Register dst, Register tmp1, Register tmp2, bool needs_frame, Label *L_handle_null) { - assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL | + assert((decorators & ~(AS_RAW | IN_HEAP | IN_NATIVE | IS_ARRAY | IS_NOT_NULL | ON_PHANTOM_OOP_REF | ON_WEAK_OOP_REF)) == 0, "unsupported decorator"); BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler(); decorators = AccessInternal::decorator_fixup(decorators); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/ppc/methodHandles_ppc.cpp --- a/src/hotspot/cpu/ppc/methodHandles_ppc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/ppc/methodHandles_ppc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -174,13 +174,13 @@ // Load the invoker, as MH -> MH.form -> LF.vmentry __ verify_oop(recv); __ load_heap_oop(method_temp, NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes()), recv, - temp2, noreg, false, OOP_NOT_NULL); + temp2, noreg, false, IS_NOT_NULL); __ verify_oop(method_temp); __ load_heap_oop(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes()), method_temp, - temp2, noreg, false, OOP_NOT_NULL); + temp2, noreg, false, IS_NOT_NULL); __ verify_oop(method_temp); __ load_heap_oop(method_temp, NONZERO(java_lang_invoke_MemberName::method_offset_in_bytes()), method_temp, - temp2, noreg, false, OOP_NOT_NULL); + temp2, noreg, false, IS_NOT_NULL); __ verify_oop(method_temp); __ ld(method_temp, NONZERO(java_lang_invoke_ResolvedMethodName::vmtarget_offset_in_bytes()), method_temp); @@ -342,7 +342,7 @@ Label L_ok; Register temp2_defc = temp2; __ load_heap_oop(temp2_defc, NONZERO(java_lang_invoke_MemberName::clazz_offset_in_bytes()), member_reg, - temp3, noreg, false, OOP_NOT_NULL); + temp3, noreg, false, IS_NOT_NULL); load_klass_from_Class(_masm, temp2_defc, temp3, temp4); __ verify_klass_ptr(temp2_defc); __ check_klass_subtype(temp1_recv_klass, temp2_defc, temp3, temp4, L_ok); @@ -370,7 +370,7 @@ verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp2); } __ load_heap_oop(R19_method, NONZERO(java_lang_invoke_MemberName::method_offset_in_bytes()), member_reg, - temp3, noreg, false, OOP_NOT_NULL); + temp3, noreg, false, IS_NOT_NULL); __ ld(R19_method, NONZERO(java_lang_invoke_ResolvedMethodName::vmtarget_offset_in_bytes()), R19_method); break; @@ -379,7 +379,7 @@ verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp2); } __ load_heap_oop(R19_method, NONZERO(java_lang_invoke_MemberName::method_offset_in_bytes()), member_reg, - temp3, noreg, false, OOP_NOT_NULL); + temp3, noreg, false, IS_NOT_NULL); __ ld(R19_method, NONZERO(java_lang_invoke_ResolvedMethodName::vmtarget_offset_in_bytes()), R19_method); break; @@ -422,7 +422,7 @@ Register temp2_intf = temp2; __ load_heap_oop(temp2_intf, NONZERO(java_lang_invoke_MemberName::clazz_offset_in_bytes()), 
member_reg, - temp3, noreg, false, OOP_NOT_NULL); + temp3, noreg, false, IS_NOT_NULL); load_klass_from_Class(_masm, temp2_intf, temp3, temp4); __ verify_klass_ptr(temp2_intf); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/ppc/stubGenerator_ppc.cpp --- a/src/hotspot/cpu/ppc/stubGenerator_ppc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/ppc/stubGenerator_ppc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -2024,9 +2024,9 @@ STUB_ENTRY(arrayof_oop_disjoint_arraycopy) : STUB_ENTRY(oop_disjoint_arraycopy); - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY; + DecoratorSet decorators = IN_HEAP | IS_ARRAY; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -2063,9 +2063,9 @@ address start = __ function_entry(); assert_positive_int(R5_ARG3); - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_DISJOINT; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_DISJOINT; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -2159,9 +2159,9 @@ } #endif - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_CHECKCAST; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_CHECKCAST; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } BarrierSetAssembler *bs = BarrierSet::barrier_set()->barrier_set_assembler(); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/ppc/templateTable_ppc_64.cpp --- a/src/hotspot/cpu/ppc/templateTable_ppc_64.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/ppc/templateTable_ppc_64.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -688,7 +688,7 @@ Rtemp2 = R31; __ index_check(Rarray, R17_tos /* index */, UseCompressedOops ? 2 : LogBytesPerWord, Rtemp, Rload_addr); do_oop_load(_masm, Rload_addr, arrayOopDesc::base_offset_in_bytes(T_OBJECT), R17_tos, Rtemp, Rtemp2, - IN_HEAP_ARRAY); + IS_ARRAY); __ verify_oop(R17_tos); //__ dcbt(R17_tos); // prefetch } @@ -1015,14 +1015,14 @@ __ bind(Lis_null); do_oop_store(_masm, Rstore_addr, arrayOopDesc::base_offset_in_bytes(T_OBJECT), noreg /* 0 */, - Rscratch, Rscratch2, Rscratch3, IN_HEAP_ARRAY); + Rscratch, Rscratch2, Rscratch3, IS_ARRAY); __ profile_null_seen(Rscratch, Rscratch2); __ b(Ldone); // Store is OK. __ bind(Lstore_ok); do_oop_store(_masm, Rstore_addr, arrayOopDesc::base_offset_in_bytes(T_OBJECT), R17_tos /* value */, - Rscratch, Rscratch2, Rscratch3, IN_HEAP_ARRAY | OOP_NOT_NULL); + Rscratch, Rscratch2, Rscratch3, IS_ARRAY | IS_NOT_NULL); __ bind(Ldone); // Adjust sp (pops array, index and value). diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/s390/gc/g1/g1BarrierSetAssembler_s390.cpp --- a/src/hotspot/cpu/s390/gc/g1/g1BarrierSetAssembler_s390.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/s390/gc/g1/g1BarrierSetAssembler_s390.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -46,7 +46,7 @@ void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators, Register addr, Register count) { - bool dest_uninitialized = (decorators & AS_DEST_NOT_INITIALIZED) != 0; + bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0; // With G1, don't generate the call if we statically know that the target is uninitialized. 
if (!dest_uninitialized) { @@ -108,7 +108,7 @@ if (on_oop && on_reference) { // Generate the G1 pre-barrier code to log the value of // the referent field in an SATB buffer. - g1_write_barrier_pre(masm, decorators | OOP_NOT_NULL, + g1_write_barrier_pre(masm, decorators | IS_NOT_NULL, NULL /* obj */, dst /* pre_val */, noreg/* preserve */ , @@ -127,7 +127,7 @@ bool pre_val_needed // Save Rpre_val across runtime call, caller uses it. ) { - bool not_null = (decorators & OOP_NOT_NULL) != 0, + bool not_null = (decorators & IS_NOT_NULL) != 0, preloaded = obj == NULL; const Register Robj = obj ? obj->base() : noreg, @@ -260,7 +260,7 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm, DecoratorSet decorators, Register Rstore_addr, Register Rnew_val, Register Rtmp1, Register Rtmp2, Register Rtmp3) { - bool not_null = (decorators & OOP_NOT_NULL) != 0; + bool not_null = (decorators & IS_NOT_NULL) != 0; assert_different_registers(Rstore_addr, Rnew_val, Rtmp1, Rtmp2); // Most probably, Rnew_val == Rtmp3. @@ -372,9 +372,9 @@ void G1BarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, const Address& dst, Register val, Register tmp1, Register tmp2, Register tmp3) { - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; // Load and record the previous value. g1_write_barrier_pre(masm, decorators, &dst, tmp3, val, tmp1, tmp2, false); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp --- a/src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -39,7 +39,7 @@ const Address& addr, Register dst, Register tmp1, Register tmp2, Label *L_handle_null) { bool in_heap = (decorators & IN_HEAP) != 0; bool in_native = (decorators & IN_NATIVE) != 0; - bool not_null = (decorators & OOP_NOT_NULL) != 0; + bool not_null = (decorators & IS_NOT_NULL) != 0; assert(in_heap || in_native, "where?"); switch (type) { @@ -69,7 +69,7 @@ const Address& addr, Register val, Register tmp1, Register tmp2, Register tmp3) { bool in_heap = (decorators & IN_HEAP) != 0; bool in_native = (decorators & IN_NATIVE) != 0; - bool not_null = (decorators & OOP_NOT_NULL) != 0; + bool not_null = (decorators & IS_NOT_NULL) != 0; assert(in_heap || in_native, "where?"); assert_different_registers(val, tmp1, tmp2); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/s390/gc/shared/cardTableBarrierSetAssembler_s390.cpp --- a/src/hotspot/cpu/s390/gc/shared/cardTableBarrierSetAssembler_s390.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/s390/gc/shared/cardTableBarrierSetAssembler_s390.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -156,9 +156,9 @@ void CardTableBarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, const Address& dst, Register val, Register tmp1, Register tmp2, Register tmp3) { - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2, tmp3); diff -r 081b132c4dc0 -r 9375184cec98 
src/hotspot/cpu/s390/macroAssembler_s390.cpp --- a/src/hotspot/cpu/s390/macroAssembler_s390.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/s390/macroAssembler_s390.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -4051,7 +4051,7 @@ void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators, const Address& addr, Register val, Register tmp1, Register tmp2, Register tmp3) { - assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL | + assert((decorators & ~(AS_RAW | IN_HEAP | IN_NATIVE | IS_ARRAY | IS_NOT_NULL | ON_UNKNOWN_OOP_REF)) == 0, "unsupported decorator"); BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler(); decorators = AccessInternal::decorator_fixup(decorators); @@ -4070,7 +4070,7 @@ void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators, const Address& addr, Register dst, Register tmp1, Register tmp2, Label *is_null) { - assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL | + assert((decorators & ~(AS_RAW | IN_HEAP | IN_NATIVE | IS_ARRAY | IS_NOT_NULL | ON_PHANTOM_OOP_REF | ON_WEAK_OOP_REF)) == 0, "unsupported decorator"); BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler(); decorators = AccessInternal::decorator_fixup(decorators); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/s390/methodHandles_s390.cpp --- a/src/hotspot/cpu/s390/methodHandles_s390.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/s390/methodHandles_s390.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -198,17 +198,17 @@ __ load_heap_oop(method_temp, Address(recv, NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes())), - noreg, noreg, OOP_NOT_NULL); + noreg, noreg, IS_NOT_NULL); __ verify_oop(method_temp); __ load_heap_oop(method_temp, Address(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes())), - noreg, noreg, OOP_NOT_NULL); + noreg, noreg, IS_NOT_NULL); __ verify_oop(method_temp); __ load_heap_oop(method_temp, Address(method_temp, NONZERO(java_lang_invoke_MemberName::method_offset_in_bytes())), - noreg, noreg, OOP_NOT_NULL); + noreg, noreg, IS_NOT_NULL); __ verify_oop(method_temp); __ z_lg(method_temp, Address(method_temp, @@ -409,7 +409,7 @@ Register temp2_defc = temp2; __ load_heap_oop(temp2_defc, member_clazz, - noreg, noreg, OOP_NOT_NULL); + noreg, noreg, IS_NOT_NULL); load_klass_from_Class(_masm, temp2_defc, temp3, temp4); __ verify_klass_ptr(temp2_defc); __ check_klass_subtype(temp1_recv_klass, temp2_defc, temp3, temp4, L_ok); @@ -436,7 +436,7 @@ verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp3); } __ load_heap_oop(Z_method, member_vmtarget, - noreg, noreg, OOP_NOT_NULL); + noreg, noreg, IS_NOT_NULL); __ z_lg(Z_method, vmtarget_method); method_is_live = true; break; @@ -446,7 +446,7 @@ verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp3); } __ load_heap_oop(Z_method, member_vmtarget, - noreg, noreg, OOP_NOT_NULL); + noreg, noreg, IS_NOT_NULL); __ z_lg(Z_method, vmtarget_method); method_is_live = true; break; @@ -488,7 +488,7 @@ Register temp3_intf = temp3; __ load_heap_oop(temp3_intf, member_clazz, - noreg, noreg, OOP_NOT_NULL); + noreg, noreg, IS_NOT_NULL); load_klass_from_Class(_masm, temp3_intf, temp2, temp4); Register Z_index = Z_method; diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/s390/stubGenerator_s390.cpp --- a/src/hotspot/cpu/s390/stubGenerator_s390.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/s390/stubGenerator_s390.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -1300,9 +1300,9 
@@ unsigned int start_off = __ offset(); // Remember stub start address (is rtn value). unsigned int size = UseCompressedOops ? 4 : 8; - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_DISJOINT; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_DISJOINT; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -1392,9 +1392,9 @@ // Branch to disjoint_copy (if applicable) before pre_barrier to avoid double pre_barrier. array_overlap_test(nooverlap_target, shift); // Branch away to nooverlap_target if disjoint. - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY; + DecoratorSet decorators = IN_HEAP | IS_ARRAY; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/s390/templateTable_s390.cpp --- a/src/hotspot/cpu/s390/templateTable_s390.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/s390/templateTable_s390.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -853,7 +853,7 @@ index_check(Z_tmp_1, index, shift); // Now load array element. do_oop_load(_masm, Address(Z_tmp_1, index, arrayOopDesc::base_offset_in_bytes(T_OBJECT)), Z_tos, - Z_tmp_2, Z_tmp_3, IN_HEAP_ARRAY); + Z_tmp_2, Z_tmp_3, IS_ARRAY); __ verify_oop(Z_tos); } @@ -1197,7 +1197,7 @@ // Store a NULL. do_oop_store(_masm, Address(Rstore_addr, (intptr_t)0), noreg, - tmp3, tmp2, tmp1, IN_HEAP_ARRAY); + tmp3, tmp2, tmp1, IS_ARRAY); __ z_bru(done); // Come here on success. @@ -1205,7 +1205,7 @@ // Now store using the appropriate barrier. do_oop_store(_masm, Address(Rstore_addr, (intptr_t)0), Rvalue, - tmp3, tmp2, tmp1, IN_HEAP_ARRAY | OOP_NOT_NULL); + tmp3, tmp2, tmp1, IS_ARRAY | IS_NOT_NULL); // Pop stack arguments. __ bind(done); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/sparc/gc/g1/g1BarrierSetAssembler_sparc.cpp --- a/src/hotspot/cpu/sparc/gc/g1/g1BarrierSetAssembler_sparc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/sparc/gc/g1/g1BarrierSetAssembler_sparc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -43,7 +43,7 @@ void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators, Register addr, Register count) { - bool dest_uninitialized = (decorators & AS_DEST_NOT_INITIALIZED) != 0; + bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0; // With G1, don't generate the call if we statically know that the target in uninitialized if (!dest_uninitialized) { Register tmp = O5; @@ -406,9 +406,9 @@ // No need for post barrier if storing NULL bool needs_post_barrier = val != G0 && in_heap; - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; Register index = dst.has_index() ? dst.index() : noreg; int disp = dst.has_disp() ? 
dst.disp() : 0; diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp --- a/src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -34,7 +34,7 @@ Register val, Address dst, Register tmp) { bool in_heap = (decorators & IN_HEAP) != 0; bool in_native = (decorators & IN_NATIVE) != 0; - bool oop_not_null = (decorators & OOP_NOT_NULL) != 0; + bool is_not_null = (decorators & IS_NOT_NULL) != 0; switch (type) { case T_ARRAY: @@ -47,7 +47,7 @@ } if (UseCompressedOops) { assert(dst.base() != val, "not enough registers"); - if (oop_not_null) { + if (is_not_null) { __ encode_heap_oop_not_null(val); } else { __ encode_heap_oop(val); @@ -70,7 +70,7 @@ Address src, Register dst, Register tmp) { bool in_heap = (decorators & IN_HEAP) != 0; bool in_native = (decorators & IN_NATIVE) != 0; - bool oop_not_null = (decorators & OOP_NOT_NULL) != 0; + bool is_not_null = (decorators & IS_NOT_NULL) != 0; switch (type) { case T_ARRAY: @@ -83,7 +83,7 @@ } if (UseCompressedOops) { __ lduw(src, dst); - if (oop_not_null) { + if (is_not_null) { __ decode_heap_oop_not_null(dst); } else { __ decode_heap_oop(dst); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/sparc/gc/shared/cardTableBarrierSetAssembler_sparc.cpp --- a/src/hotspot/cpu/sparc/gc/shared/cardTableBarrierSetAssembler_sparc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/sparc/gc/shared/cardTableBarrierSetAssembler_sparc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -1,4 +1,3 @@ - /* * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. @@ -90,9 +89,9 @@ Register val, Address dst, Register tmp) { bool in_heap = (decorators & IN_HEAP) != 0; - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; // No need for post barrier if storing NULL bool needs_post_barrier = val != G0 && in_heap; diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/sparc/stubGenerator_sparc.cpp --- a/src/hotspot/cpu/sparc/stubGenerator_sparc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/sparc/stubGenerator_sparc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -2269,9 +2269,9 @@ BLOCK_COMMENT("Entry:"); } - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_DISJOINT; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_DISJOINT; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -2326,9 +2326,9 @@ array_overlap_test(nooverlap_target, LogBytesPerHeapOop); - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY; + DecoratorSet decorators = IN_HEAP | IS_ARRAY; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -2446,9 +2446,9 @@ BLOCK_COMMENT("Entry:"); } - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_CHECKCAST; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_CHECKCAST; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } BarrierSetAssembler *bs = BarrierSet::barrier_set()->barrier_set_assembler(); diff -r 081b132c4dc0 -r 
9375184cec98 src/hotspot/cpu/sparc/templateTable_sparc.cpp --- a/src/hotspot/cpu/sparc/templateTable_sparc.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/sparc/templateTable_sparc.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -697,7 +697,7 @@ arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i, G3_scratch, - IN_HEAP_ARRAY); + IS_ARRAY); __ verify_oop(Otos_i); } @@ -997,13 +997,13 @@ // Store is OK. __ bind(store_ok); - do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i, G3_scratch, IN_HEAP_ARRAY); + do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i, G3_scratch, IS_ARRAY); __ ba(done); __ delayed()->inc(Lesp, 3* Interpreter::stackElementSize); // adj sp (pops array, index and value) __ bind(is_null); - do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), G0, G4_scratch, IN_HEAP_ARRAY); + do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), G0, G4_scratch, IS_ARRAY); __ profile_null_seen(G3_scratch); __ inc(Lesp, 3* Interpreter::stackElementSize); // adj sp (pops array, index and value) diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/x86/gc/g1/g1BarrierSetAssembler_x86.cpp --- a/src/hotspot/cpu/x86/gc/g1/g1BarrierSetAssembler_x86.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/x86/gc/g1/g1BarrierSetAssembler_x86.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -43,7 +43,7 @@ void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators, Register addr, Register count) { - bool dest_uninitialized = (decorators & AS_DEST_NOT_INITIALIZED) != 0; + bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0; if (!dest_uninitialized) { Register thread = NOT_LP64(rax) LP64_ONLY(r15_thread); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp --- a/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -35,7 +35,7 @@ Register dst, Address src, Register tmp1, Register tmp_thread) { bool in_heap = (decorators & IN_HEAP) != 0; bool in_native = (decorators & IN_NATIVE) != 0; - bool oop_not_null = (decorators & OOP_NOT_NULL) != 0; + bool is_not_null = (decorators & IS_NOT_NULL) != 0; bool atomic = (decorators & MO_RELAXED) != 0; switch (type) { @@ -45,7 +45,7 @@ #ifdef _LP64 if (UseCompressedOops) { __ movl(dst, src); - if (oop_not_null) { + if (is_not_null) { __ decode_heap_oop_not_null(dst); } else { __ decode_heap_oop(dst); @@ -100,7 +100,7 @@ Address dst, Register val, Register tmp1, Register tmp2) { bool in_heap = (decorators & IN_HEAP) != 0; bool in_native = (decorators & IN_NATIVE) != 0; - bool oop_not_null = (decorators & OOP_NOT_NULL) != 0; + bool is_not_null = (decorators & IS_NOT_NULL) != 0; bool atomic = (decorators & MO_RELAXED) != 0; switch (type) { @@ -108,7 +108,7 @@ case T_ARRAY: { if (in_heap) { if (val == noreg) { - assert(!oop_not_null, "inconsistent access"); + assert(!is_not_null, "inconsistent access"); #ifdef _LP64 if (UseCompressedOops) { __ movl(dst, (int32_t)NULL_WORD); @@ -122,7 +122,7 @@ #ifdef _LP64 if (UseCompressedOops) { assert(!dst.uses(val), "not enough registers"); - if (oop_not_null) { + if (is_not_null) { __ encode_heap_oop_not_null(val); } else { __ encode_heap_oop(val); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/x86/gc/shared/cardTableBarrierSetAssembler_x86.cpp --- 
a/src/hotspot/cpu/x86/gc/shared/cardTableBarrierSetAssembler_x86.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/x86/gc/shared/cardTableBarrierSetAssembler_x86.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -135,9 +135,9 @@ Address dst, Register val, Register tmp1, Register tmp2) { bool in_heap = (decorators & IN_HEAP) != 0; - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; bool needs_post_barrier = val != noreg && in_heap; diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/x86/macroAssembler_x86.cpp --- a/src/hotspot/cpu/x86/macroAssembler_x86.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/x86/macroAssembler_x86.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -6287,7 +6287,7 @@ // Doesn't do verfication, generates fixed size code void MacroAssembler::load_heap_oop_not_null(Register dst, Address src, Register tmp1, Register thread_tmp, DecoratorSet decorators) { - access_load_at(T_OBJECT, IN_HEAP | OOP_NOT_NULL | decorators, dst, src, tmp1, thread_tmp); + access_load_at(T_OBJECT, IN_HEAP | IS_NOT_NULL | decorators, dst, src, tmp1, thread_tmp); } void MacroAssembler::store_heap_oop(Address dst, Register src, Register tmp1, diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/x86/stubGenerator_x86_32.cpp --- a/src/hotspot/cpu/x86/stubGenerator_x86_32.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/x86/stubGenerator_x86_32.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -837,9 +837,9 @@ __ jcc(Assembler::zero, L_0_count); } - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_DISJOINT; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_DISJOINT; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -1026,9 +1026,9 @@ __ jcc(Assembler::zero, L_0_count); } - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY; + DecoratorSet decorators = IN_HEAP | IS_ARRAY; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -1383,9 +1383,9 @@ Address to_element_addr(end_to, count, Address::times_ptr, 0); Address elem_klass_addr(elem, oopDesc::klass_offset_in_bytes()); - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_CHECKCAST; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_CHECKCAST; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } BasicType type = T_OBJECT; diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/x86/stubGenerator_x86_64.cpp --- a/src/hotspot/cpu/x86/stubGenerator_x86_64.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/x86/stubGenerator_x86_64.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -1832,9 +1832,9 @@ setup_arg_regs(); // from => rdi, to => rsi, count => rdx // r9 and r10 may be used to save non-volatile registers - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_DISJOINT; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_DISJOINT; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -1926,9 +1926,9 @@ setup_arg_regs(); // from => rdi, to => rsi, count => rdx // r9 and r10 may be used to save non-volatile registers - DecoratorSet decorators = IN_HEAP | 
IN_HEAP_ARRAY; + DecoratorSet decorators = IN_HEAP | IS_ARRAY; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -2030,9 +2030,9 @@ // r9 and r10 may be used to save non-volatile registers // 'from', 'to' and 'qword_count' are now valid - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_DISJOINT; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_DISJOINT; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -2123,9 +2123,9 @@ // r9 and r10 may be used to save non-volatile registers // 'from', 'to' and 'qword_count' are now valid - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_DISJOINT; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_DISJOINT; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } if (aligned) { decorators |= ARRAYCOPY_ALIGNED; @@ -2306,9 +2306,9 @@ Address from_element_addr(end_from, count, TIMES_OOP, 0); Address to_element_addr(end_to, count, TIMES_OOP, 0); - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY | ARRAYCOPY_CHECKCAST; + DecoratorSet decorators = IN_HEAP | IS_ARRAY | ARRAYCOPY_CHECKCAST; if (dest_uninitialized) { - decorators |= AS_DEST_NOT_INITIALIZED; + decorators |= IS_DEST_UNINITIALIZED; } BasicType type = T_OBJECT; diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/cpu/x86/templateTable_x86.cpp --- a/src/hotspot/cpu/x86/templateTable_x86.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/cpu/x86/templateTable_x86.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -770,7 +770,7 @@ // rax: index // rdx: array index_check(rdx, rax); // kills rbx - __ access_load_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, rax, + __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, rax, Address(rdx, rax, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_INT)), noreg, noreg); @@ -783,7 +783,7 @@ index_check(rdx, rax); // kills rbx NOT_LP64(__ mov(rbx, rax)); // rbx,: index - __ access_load_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, noreg /* ltos */, + __ access_load_at(T_LONG, IN_HEAP | IS_ARRAY, noreg /* ltos */, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG)), noreg, noreg); @@ -796,7 +796,7 @@ // rax: index // rdx: array index_check(rdx, rax); // kills rbx - __ access_load_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, noreg /* ftos */, + __ access_load_at(T_FLOAT, IN_HEAP | IS_ARRAY, noreg /* ftos */, Address(rdx, rax, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)), @@ -808,7 +808,7 @@ // rax: index // rdx: array index_check(rdx, rax); // kills rbx - __ access_load_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, noreg /* dtos */, + __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */, Address(rdx, rax, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)), @@ -825,7 +825,7 @@ UseCompressedOops ? 
Address::times_4 : Address::times_ptr, arrayOopDesc::base_offset_in_bytes(T_OBJECT)), rax, - IN_HEAP_ARRAY); + IS_ARRAY); } void TemplateTable::baload() { @@ -833,7 +833,7 @@ // rax: index // rdx: array index_check(rdx, rax); // kills rbx - __ access_load_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, rax, + __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)), noreg, noreg); } @@ -843,7 +843,7 @@ // rax: index // rdx: array index_check(rdx, rax); // kills rbx - __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, rax, + __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)), noreg, noreg); } @@ -858,7 +858,7 @@ // rax: index // rdx: array index_check(rdx, rax); // kills rbx - __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, rax, + __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)), noreg, noreg); } @@ -869,7 +869,7 @@ // rax: index // rdx: array index_check(rdx, rax); // kills rbx - __ access_load_at(T_SHORT, IN_HEAP | IN_HEAP_ARRAY, rax, + __ access_load_at(T_SHORT, IN_HEAP | IS_ARRAY, rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)), noreg, noreg); } @@ -1063,7 +1063,7 @@ // rbx: index // rdx: array index_check(rdx, rbx); // prefer index in rbx - __ access_store_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, + __ access_store_at(T_INT, IN_HEAP | IS_ARRAY, Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_INT)), rax, noreg, noreg); @@ -1077,7 +1077,7 @@ // rdx: high(value) index_check(rcx, rbx); // prefer index in rbx, // rbx,: index - __ access_store_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, + __ access_store_at(T_LONG, IN_HEAP | IS_ARRAY, Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG)), noreg /* ltos */, noreg, noreg); @@ -1091,7 +1091,7 @@ // rbx: index // rdx: array index_check(rdx, rbx); // prefer index in rbx - __ access_store_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, + __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)), noreg /* ftos */, noreg, noreg); @@ -1104,7 +1104,7 @@ // rbx: index // rdx: array index_check(rdx, rbx); // prefer index in rbx - __ access_store_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, + __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)), noreg /* dtos */, noreg, noreg); @@ -1148,7 +1148,7 @@ __ movptr(rax, at_tos()); __ movl(rcx, at_tos_p1()); // index // Now store using the appropriate barrier - do_oop_store(_masm, element_address, rax, IN_HEAP_ARRAY); + do_oop_store(_masm, element_address, rax, IS_ARRAY); __ jmp(done); // Have a NULL in rax, rdx=array, ecx=index. 
Store NULL at ary[idx] @@ -1156,7 +1156,7 @@ __ profile_null_seen(rbx); // Store a NULL - do_oop_store(_masm, element_address, noreg, IN_HEAP_ARRAY); + do_oop_store(_masm, element_address, noreg, IS_ARRAY); // Pop stack arguments __ bind(done); @@ -1180,7 +1180,7 @@ __ jccb(Assembler::zero, L_skip); __ andl(rax, 1); // if it is a T_BOOLEAN array, mask the stored value to 0/1 __ bind(L_skip); - __ access_store_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, + __ access_store_at(T_BYTE, IN_HEAP | IS_ARRAY, Address(rdx, rbx,Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)), rax, noreg, noreg); @@ -1193,7 +1193,7 @@ // rbx: index // rdx: array index_check(rdx, rbx); // prefer index in rbx - __ access_store_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, + __ access_store_at(T_CHAR, IN_HEAP | IS_ARRAY, Address(rdx, rbx, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)), rax, noreg, noreg); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/c1/c1_LIRGenerator.cpp --- a/src/hotspot/share/c1/c1_LIRGenerator.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/c1/c1_LIRGenerator.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -1602,7 +1602,7 @@ array_store_check(value.result(), array.result(), store_check_info, x->profiled_method(), x->profiled_bci()); } - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY; + DecoratorSet decorators = IN_HEAP | IS_ARRAY; if (x->check_boolean()) { decorators |= C1_MASK_BOOLEAN; } @@ -1847,7 +1847,7 @@ } } - DecoratorSet decorators = IN_HEAP | IN_HEAP_ARRAY; + DecoratorSet decorators = IN_HEAP | IS_ARRAY; LIR_Opr result = rlock_result(x, x->elt_type()); access_load_at(decorators, x->elt_type(), diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/cms/parNewGeneration.cpp --- a/src/hotspot/share/gc/cms/parNewGeneration.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/cms/parNewGeneration.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -684,7 +684,7 @@ void /*ParNewGeneration::*/ParKeepAliveClosure::do_oop_work(T* p) { #ifdef ASSERT { - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); // We never expect to see a null reference being processed // as a weak reference. assert(oopDesc::is_oop(obj), "expected an oop while scanning weak refs"); @@ -694,7 +694,7 @@ _par_cl->do_oop_nv(p); if (CMSHeap::heap()->is_in_reserved(p)) { - oop obj = RawAccess::oop_load(p);; + oop obj = RawAccess::oop_load(p);; _rs->write_ref_field_gc_par(p, obj); } } @@ -710,7 +710,7 @@ void /*ParNewGeneration::*/KeepAliveClosure::do_oop_work(T* p) { #ifdef ASSERT { - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); // We never expect to see a null reference being processed // as a weak reference. assert(oopDesc::is_oop(obj), "expected an oop while scanning weak refs"); @@ -720,7 +720,7 @@ _cl->do_oop_nv(p); if (CMSHeap::heap()->is_in_reserved(p)) { - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); _rs->write_ref_field_gc_par(p, obj); } } @@ -737,7 +737,7 @@ oop new_obj = obj->is_forwarded() ? obj->forwardee() : _g->DefNewGeneration::copy_to_survivor_space(obj); - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); } if (_gc_barrier) { // If p points to a younger generation, mark the card. 
diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/cms/parOopClosures.inline.hpp --- a/src/hotspot/share/gc/cms/parOopClosures.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/cms/parOopClosures.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -37,7 +37,7 @@ #include "oops/oop.inline.hpp" template inline void ParScanWeakRefClosure::do_oop_work(T* p) { - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); // weak references are sometimes scanned twice; must check // that to-space doesn't already contain this object if ((HeapWord*)obj < _boundary && !_g->to()->is_in_reserved(obj)) { @@ -53,7 +53,7 @@ new_obj = ((ParNewGeneration*)_g)->copy_to_survivor_space(_par_scan_state, obj, obj_sz, m); } - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); } } @@ -62,7 +62,7 @@ template inline void ParScanClosure::par_do_barrier(T* p) { assert(generation()->is_in_reserved(p), "expected ref in generation"); - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); // If p points to a younger generation, mark the card. if ((HeapWord*)obj < gen_boundary()) { rs()->write_ref_field_gc_par(p, obj); @@ -112,14 +112,14 @@ oop new_obj; if (m->is_marked()) { // Contains forwarding pointer. new_obj = ParNewGeneration::real_forwardee(obj); - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); log_develop_trace(gc, scavenge)("{%s %s ( " PTR_FORMAT " ) " PTR_FORMAT " -> " PTR_FORMAT " (%d)}", "forwarded ", new_obj->klass()->internal_name(), p2i(p), p2i((void *)obj), p2i((void *)new_obj), new_obj->size()); } else { size_t obj_sz = obj->size_given_klass(objK); new_obj = _g->copy_to_survivor_space(_par_scan_state, obj, obj_sz, m); - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); if (root_scan) { // This may have pushed an object. If we have a root // category with a lot of roots, can't let the queue get too diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/g1/g1BarrierSet.inline.hpp --- a/src/hotspot/share/gc/g1/g1BarrierSet.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/g1/g1BarrierSet.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -34,7 +34,7 @@ template inline void G1BarrierSet::write_ref_field_pre(T* field) { - if (HasDecorator::value || + if (HasDecorator::value || HasDecorator::value) { return; } diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/g1/g1FullGCOopClosures.inline.hpp --- a/src/hotspot/share/gc/g1/g1FullGCOopClosures.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/g1/g1FullGCOopClosures.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -78,7 +78,7 @@ // Forwarded, just update. assert(Universe::heap()->is_in_reserved(forwardee), "should be in object space"); - RawAccess::oop_store(p, forwardee); + RawAccess::oop_store(p, forwardee); } inline void G1AdjustClosure::do_oop(oop* p) { do_oop_nv(p); } diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/g1/g1OopClosures.inline.hpp --- a/src/hotspot/share/gc/g1/g1OopClosures.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/g1/g1OopClosures.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -256,7 +256,7 @@ forwardee = _par_scan_state->copy_to_survivor_space(state, obj, m); } assert(forwardee != NULL, "forwardee should not be NULL"); - RawAccess::oop_store(p, forwardee); + RawAccess::oop_store(p, forwardee); if (do_mark_object != G1MarkNone && forwardee != obj) { // If the object is self-forwarded we don't need to explicitly // mark it, the evacuation failure protocol will do so. 
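Note on the g1BarrierSet.inline.hpp hunk above: the SATB pre-barrier returns early based on compile-time decorator tests, and after this patch one of them is IS_DEST_UNINITIALIZED. A simplified sketch of that compile-time test follows; this HasDecorator is a stand-in written for this note rather than HotSpot's actual template, and the pre-barrier body is elided.

    #include <cstdint>

    typedef uint64_t DecoratorSet;
    const DecoratorSet IS_DEST_UNINITIALIZED = UINT64_C(1) << 13;  // value from accessDecorators.hpp below

    // Simplified stand-in for HotSpot's HasDecorator: a compile-time "is this flag set?" query.
    template <DecoratorSet decorators, DecoratorSet flag>
    struct HasDecorator {
      static const bool value = (decorators & flag) != 0;
    };

    // Shape of the pre-barrier above: when the destination holds no real previous value,
    // there is nothing to enqueue for SATB marking, so the whole call folds away.
    template <DecoratorSet decorators, typename T>
    inline void write_ref_field_pre_sketch(T* /*field*/) {
      if (HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value) {
        return;
      }
      // ... otherwise: load the old value and enqueue it for SATB marking ...
    }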
diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/g1/g1ParScanThreadState.inline.hpp --- a/src/hotspot/share/gc/g1/g1ParScanThreadState.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/g1/g1ParScanThreadState.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -32,7 +32,7 @@ template void G1ParScanThreadState::do_oop_evac(T* p) { // Reference should not be NULL here as such are never pushed to the task queue. - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); // Although we never intentionally push references outside of the collection // set, due to (benign) races in the claim mechanism during RSet scanning more @@ -47,7 +47,7 @@ } else { obj = copy_to_survivor_space(in_cset_state, obj, m); } - RawAccess::oop_store(p, obj); + RawAccess::oop_store(p, obj); } else if (in_cset_state.is_humongous()) { _g1h->set_humongous_is_live(obj); } else { diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/parallel/psCardTable.cpp --- a/src/hotspot/share/gc/parallel/psCardTable.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/parallel/psCardTable.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -103,7 +103,7 @@ protected: template void do_oop_work(T* p) { - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); if (_young_gen->is_in_reserved(obj)) { assert(_card_table->addr_is_marked_precise(p), "Found unmarked precise oop"); _card_table->set_card_newgen(p); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/parallel/psParallelCompact.inline.hpp --- a/src/hotspot/share/gc/parallel/psParallelCompact.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/parallel/psParallelCompact.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it @@ -119,7 +119,7 @@ if (new_obj != NULL) { assert(ParallelScavengeHeap::heap()->is_in_reserved(new_obj), "should be in object space"); - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); } } } diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/parallel/psPromotionManager.inline.hpp --- a/src/hotspot/share/gc/parallel/psPromotionManager.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/parallel/psPromotionManager.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -50,14 +50,14 @@ template inline void PSPromotionManager::claim_or_forward_internal_depth(T* p) { if (p != NULL) { // XXX: error if p != NULL here - oop o = RawAccess::oop_load(p); + oop o = RawAccess::oop_load(p); if (o->is_forwarded()) { o = o->forwardee(); // Card mark if (PSScavenge::is_obj_in_young(o)) { PSScavenge::card_table()->inline_write_ref_field_gc(p, o); } - RawAccess::oop_store(p, o); + RawAccess::oop_store(p, o); } else { push_depth(p); } @@ -281,7 +281,7 @@ inline void PSPromotionManager::copy_and_push_safe_barrier(T* p) { assert(should_scavenge(p, true), "revisiting object?"); - oop o = RawAccess::oop_load(p); + oop o = RawAccess::oop_load(p); oop new_obj = o->is_forwarded() ? 
o->forwardee() : copy_to_survivor_space(o); @@ -294,7 +294,7 @@ new_obj->klass()->internal_name(), p2i((void *)o), p2i((void *)new_obj), new_obj->size()); } - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); // We cannot mark without test, as some code passes us pointers // that are outside the heap. These pointers are either from roots diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/parallel/psScavenge.cpp --- a/src/hotspot/share/gc/parallel/psScavenge.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/parallel/psScavenge.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -97,7 +97,7 @@ } template void do_oop_work(T* p) { - assert (oopDesc::is_oop(RawAccess::oop_load(p)), + assert (oopDesc::is_oop(RawAccess::oop_load(p)), "expected an oop while scanning weak refs"); // Weak refs may be visited more than once. diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/parallel/psScavenge.inline.hpp --- a/src/hotspot/share/gc/parallel/psScavenge.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/parallel/psScavenge.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -48,7 +48,7 @@ template inline bool PSScavenge::should_scavenge(T* p, MutableSpace* to_space) { if (should_scavenge(p)) { - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); // Skip objects copied to to_space since the scavenge started. HeapWord* const addr = (HeapWord*)obj; return addr < to_space_top_before_gc() || addr >= to_space->end(); @@ -109,7 +109,7 @@ } else { new_obj = _pm->copy_to_survivor_space(o); } - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); if (PSScavenge::is_obj_in_young(new_obj)) { do_cld_barrier(); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/serial/defNewGeneration.inline.hpp --- a/src/hotspot/share/gc/serial/defNewGeneration.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/serial/defNewGeneration.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -1,5 +1,5 @@ /* - * Copyright (c) 2001, 2017, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 2001, 2018, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it @@ -40,7 +40,7 @@ { // We never expect to see a null reference being processed // as a weak reference. - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); assert (oopDesc::is_oop(obj), "expected an oop while scanning weak refs"); } #endif // ASSERT @@ -61,7 +61,7 @@ // dirty cards in the young gen are never scanned, so the // extra check probably isn't worthwhile. if (GenCollectedHeap::heap()->is_in_reserved(p)) { - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); _rs->inline_write_ref_field_gc(p, obj); } } @@ -72,7 +72,7 @@ { // We never expect to see a null reference being processed // as a weak reference. - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); assert (oopDesc::is_oop(obj), "expected an oop while scanning weak refs"); } #endif // ASSERT @@ -82,7 +82,7 @@ // Optimized for Defnew generation if it's the youngest generation: // we set a younger_gen card if we have an older->youngest // generation pointer. 
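Note on the scavenge closures in these hunks: they share one update step, namely forward the object if it has already been copied, otherwise copy it to a survivor space, then store the new location back through the access layer. A generic sketch of that step, with OopT and Gen as assumed shapes rather than HotSpot types:

    // Illustrative only: mirrors the "forwarded ? forwardee : copy" pattern in the closures above.
    // Any object type with is_forwarded()/forwardee() and any generation-like type with
    // copy_to_survivor_space() fits.
    template <typename OopT, typename Gen>
    static OopT forward_or_copy(OopT obj, Gen* g) {
      return obj->is_forwarded() ? obj->forwardee() : g->copy_to_survivor_space(obj);
    }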
- oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); if (((HeapWord*)obj < _boundary) && GenCollectedHeap::heap()->is_in_reserved(p)) { _rs->inline_write_ref_field_gc(p, obj); } diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/serial/markSweep.inline.hpp --- a/src/hotspot/share/gc/serial/markSweep.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/serial/markSweep.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -54,7 +54,7 @@ if (new_obj != NULL) { assert(Universe::heap()->is_in_reserved(new_obj), "should be in object space"); - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); } } } diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/shared/c1/barrierSetC1.cpp --- a/src/hotspot/share/gc/shared/c1/barrierSetC1.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/shared/c1/barrierSetC1.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -40,7 +40,7 @@ LIR_Opr BarrierSetC1::resolve_address(LIRAccess& access, bool resolve_in_register) { DecoratorSet decorators = access.decorators(); - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0; LIRItem& base = access.base().item(); @@ -48,7 +48,7 @@ LIRGenerator *gen = access.gen(); LIR_Opr addr_opr; - if (on_array) { + if (is_array) { addr_opr = LIR_OprFact::address(gen->emit_array_address(base.result(), offset, access.type())); } else if (needs_patching) { // we need to patch the offset in the instruction so don't allow diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/shared/c1/modRefBarrierSetC1.cpp --- a/src/hotspot/share/gc/shared/c1/modRefBarrierSetC1.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/shared/c1/modRefBarrierSetC1.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -34,7 +34,7 @@ void ModRefBarrierSetC1::store_at_resolved(LIRAccess& access, LIR_Opr value) { DecoratorSet decorators = access.decorators(); - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; if (access.is_oop()) { @@ -45,7 +45,7 @@ BarrierSetC1::store_at_resolved(access, value); if (access.is_oop()) { - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; LIR_Opr post_addr = precise ? 
access.resolved_addr() : access.base().opr(); post_barrier(access, post_addr, value); } @@ -87,9 +87,9 @@ DecoratorSet decorators = access.decorators(); bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0; bool is_write = (decorators & C1_WRITE_ACCESS) != 0; - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; - bool precise = on_array || on_anonymous; + bool precise = is_array || on_anonymous; resolve_in_register |= !needs_patching && is_write && access.is_oop() && precise; return BarrierSetC1::resolve_address(access, resolve_in_register); } diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/shared/c2/modRefBarrierSetC2.cpp --- a/src/hotspot/share/gc/shared/c2/modRefBarrierSetC2.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/shared/c2/modRefBarrierSetC2.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -37,10 +37,10 @@ const TypePtr* adr_type = access.addr().type(); Node* adr = access.addr().node(); - bool on_array = (decorators & IN_HEAP_ARRAY) != 0; + bool is_array = (decorators & IS_ARRAY) != 0; bool anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0; bool in_heap = (decorators & IN_HEAP) != 0; - bool use_precise = on_array || anonymous; + bool use_precise = is_array || anonymous; if (!access.is_oop() || (!in_heap && !anonymous)) { return BarrierSetC2::store_at_resolved(access, val); diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/shared/genOopClosures.inline.hpp --- a/src/hotspot/share/gc/shared/genOopClosures.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/shared/genOopClosures.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -93,7 +93,7 @@ assert(!_g->to()->is_in_reserved(obj), "Scanning field twice?"); oop new_obj = obj->is_forwarded() ? obj->forwardee() : _g->copy_to_survivor_space(obj); - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); } if (is_scanning_a_cld()) { @@ -119,7 +119,7 @@ assert(!_g->to()->is_in_reserved(obj), "Scanning field twice?"); oop new_obj = obj->is_forwarded() ? obj->forwardee() : _g->copy_to_survivor_space(obj); - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); if (is_scanning_a_cld()) { do_cld_barrier(); } else if (_gc_barrier) { @@ -153,13 +153,13 @@ // Note similarity to ScanClosure; the difference is that // the barrier set is taken care of outside this closure. template inline void ScanWeakRefClosure::do_oop_work(T* p) { - oop obj = RawAccess::oop_load(p); + oop obj = RawAccess::oop_load(p); // weak references are sometimes scanned twice; must check // that to-space doesn't already contain this object if ((HeapWord*)obj < _boundary && !_g->to()->is_in_reserved(obj)) { oop new_obj = obj->is_forwarded() ? 
obj->forwardee() : _g->copy_to_survivor_space(obj); - RawAccess::oop_store(p, new_obj); + RawAccess::oop_store(p, new_obj); } } diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/gc/shared/modRefBarrierSet.inline.hpp --- a/src/hotspot/share/gc/shared/modRefBarrierSet.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/gc/shared/modRefBarrierSet.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -102,7 +102,7 @@ if (!HasDecorator::value) { // Optimized covariant case bs->write_ref_array_pre(dst_raw, length, - HasDecorator::value); + HasDecorator::value); Raw::oop_arraycopy(NULL, 0, src_raw, NULL, 0, dst_raw, length); bs->write_ref_array((HeapWord*)dst_raw, length); } else { diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/oops/access.hpp --- a/src/hotspot/share/oops/access.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/oops/access.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -104,8 +104,8 @@ template static void verify_primitive_decorators() { - const DecoratorSet primitive_decorators = (AS_DECORATOR_MASK ^ AS_NO_KEEPALIVE ^ AS_DEST_NOT_INITIALIZED) | - IN_HEAP | IN_HEAP_ARRAY; + const DecoratorSet primitive_decorators = (AS_DECORATOR_MASK ^ AS_NO_KEEPALIVE) | + IN_HEAP | IS_ARRAY; verify_decorators(); } @@ -113,15 +113,14 @@ static void verify_oop_decorators() { const DecoratorSet oop_decorators = AS_DECORATOR_MASK | IN_DECORATOR_MASK | (ON_DECORATOR_MASK ^ ON_UNKNOWN_OOP_REF) | // no unknown oop refs outside of the heap - OOP_DECORATOR_MASK; + IS_ARRAY | IS_NOT_NULL | IS_DEST_UNINITIALIZED; verify_decorators(); } template static void verify_heap_oop_decorators() { const DecoratorSet heap_oop_decorators = AS_DECORATOR_MASK | ON_DECORATOR_MASK | - OOP_DECORATOR_MASK | (IN_DECORATOR_MASK ^ - (IN_NATIVE | IN_CONCURRENT_ROOT)); // no root accesses in the heap + IN_HEAP | IS_ARRAY | IS_NOT_NULL; verify_decorators(); } @@ -135,8 +134,8 @@ static inline bool oop_arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw, size_t length) { - verify_decorators(); + verify_decorators(); return AccessInternal::arraycopy(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length); @@ -146,8 +145,8 @@ static inline void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, const T* src_raw, arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw, size_t length) { - verify_decorators(); + verify_decorators(); AccessInternal::arraycopy(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length); @@ -300,8 +299,8 @@ // Helper for array access. 
 template <DecoratorSet decorators = INTERNAL_EMPTY>
-class ArrayAccess: public HeapAccess<IN_HEAP_ARRAY | decorators> {
-  typedef HeapAccess<IN_HEAP_ARRAY | decorators> AccessT;
+class ArrayAccess: public HeapAccess<IS_ARRAY | decorators> {
+  typedef HeapAccess<IS_ARRAY | decorators> AccessT;
 public:
   template <typename T>
   static inline void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes,
@@ -354,7 +353,6 @@
   const DecoratorSet barrier_strength_decorators = decorators & AS_DECORATOR_MASK;
   STATIC_ASSERT(barrier_strength_decorators == 0 || ( // make sure barrier strength decorators are disjoint if set
     (barrier_strength_decorators ^ AS_NO_KEEPALIVE) == 0 ||
-    (barrier_strength_decorators ^ AS_DEST_NOT_INITIALIZED) == 0 ||
     (barrier_strength_decorators ^ AS_RAW) == 0 ||
     (barrier_strength_decorators ^ AS_NORMAL) == 0
   ));
@@ -378,7 +376,6 @@
   STATIC_ASSERT(location_decorators == 0 || ( // make sure location decorators are disjoint if set
     (location_decorators ^ IN_NATIVE) == 0 ||
     (location_decorators ^ IN_HEAP) == 0 ||
-    (location_decorators ^ (IN_HEAP | IN_HEAP_ARRAY)) == 0 ||
     (location_decorators ^ (IN_NATIVE | IN_CONCURRENT_ROOT)) == 0
   ));
 }
diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/oops/accessBackend.hpp
--- a/src/hotspot/share/oops/accessBackend.hpp Fri Jun 22 16:06:43 2018 -0400
+++ b/src/hotspot/share/oops/accessBackend.hpp Fri Jun 22 17:46:58 2018 -0400
@@ -181,7 +181,7 @@
 // This mask specifies what decorators are relevant for raw accesses. When passing
 // accesses to the raw layer, irrelevant decorators are removed.
 const DecoratorSet RAW_DECORATOR_MASK = INTERNAL_DECORATOR_MASK | MO_DECORATOR_MASK |
-                                        ARRAYCOPY_DECORATOR_MASK | OOP_DECORATOR_MASK;
+                                        ARRAYCOPY_DECORATOR_MASK | IS_NOT_NULL;

 // The RawAccessBarrier performs raw accesses with additional knowledge of
 // memory ordering, so that OrderAccess/Atomic is called when necessary.
@@ -1290,7 +1290,7 @@
                   (IsSame<T, void>::value || IsIntegral<T>::value) ||
                    IsFloatingPoint<T>::value)); // arraycopy allows type erased void elements
     typedef typename Decay<T>::type DecayedT;
-    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | IN_HEAP_ARRAY | IN_HEAP>::value;
+    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | IS_ARRAY | IN_HEAP>::value;
     return arraycopy_reduce_types<expanded_decorators>(src_obj, src_offset_in_bytes, const_cast<DecayedT*>(src_raw),
                                                        dst_obj, dst_offset_in_bytes, const_cast<DecayedT*>(dst_raw),
                                                        length);
diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/oops/accessBackend.inline.hpp
--- a/src/hotspot/share/oops/accessBackend.inline.hpp Fri Jun 22 16:06:43 2018 -0400
+++ b/src/hotspot/share/oops/accessBackend.inline.hpp Fri Jun 22 17:46:58 2018 -0400
@@ -35,7 +35,7 @@
 inline typename EnableIf<
   AccessInternal::MustConvertCompressedOop<decorators, T>::value, T>::type
 RawAccessBarrier<decorators>::decode_internal(typename HeapOopType<decorators>::type value) {
-  if (HasDecorator<decorators, OOP_NOT_NULL>::value) {
+  if (HasDecorator<decorators, IS_NOT_NULL>::value) {
     return CompressedOops::decode_not_null(value);
   } else {
     return CompressedOops::decode(value);
@@ -48,7 +48,7 @@
   AccessInternal::MustConvertCompressedOop<decorators, T>::value,
   typename HeapOopType<decorators>::type>::type
 RawAccessBarrier<decorators>::encode_internal(T value) {
-  if (HasDecorator<decorators, OOP_NOT_NULL>::value) {
+  if (HasDecorator<decorators, IS_NOT_NULL>::value) {
     return CompressedOops::encode_not_null(value);
   } else {
     return CompressedOops::encode(value);
diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/oops/accessDecorators.hpp
--- a/src/hotspot/share/oops/accessDecorators.hpp Fri Jun 22 16:06:43 2018 -0400
+++ b/src/hotspot/share/oops/accessDecorators.hpp Fri Jun 22 17:46:58 2018 -0400
@@ -143,8 +143,6 @@
 // - Accesses on narrowOop* translate to encoded/decoded memory accesses without runtime checks
 // - Accesses on HeapWord* translate to a runtime check choosing one of the above
 // - Accesses on other types translate to raw memory accesses without runtime checks
-// * AS_DEST_NOT_INITIALIZED: This property can be important to e.g. SATB barriers by
-//   marking that the previous value is uninitialized nonsense rather than a real value.
 // * AS_NO_KEEPALIVE: The barrier is used only on oop references and will not keep any involved objects
 //   alive, regardless of the type of reference being accessed. It will however perform the memory access
 //   in a consistent way w.r.t. e.g. concurrent compaction, so that the right field is being accessed,
@@ -155,11 +153,9 @@
 // Note that primitive accesses will only be resolved on the barrier set if the appropriate build-time
 // decorator for enabling primitive barriers is enabled for the build.
 const DecoratorSet AS_RAW = UCONST64(1) << 12;
-const DecoratorSet AS_DEST_NOT_INITIALIZED = UCONST64(1) << 13;
 const DecoratorSet AS_NO_KEEPALIVE = UCONST64(1) << 14;
 const DecoratorSet AS_NORMAL = UCONST64(1) << 15;
-const DecoratorSet AS_DECORATOR_MASK = AS_RAW | AS_DEST_NOT_INITIALIZED |
-                                       AS_NO_KEEPALIVE | AS_NORMAL;
+const DecoratorSet AS_DECORATOR_MASK = AS_RAW | AS_NO_KEEPALIVE | AS_NORMAL;

 // === Reference Strength Decorators ===
 // These decorators only apply to accesses on oop-like types (oop/narrowOop).
@@ -182,23 +178,24 @@
 // The location is important to the GC as it may imply different actions. The following decorators are used:
 // * IN_HEAP: The access is performed in the heap. Many barriers such as card marking will
 //   be omitted if this decorator is not set.
-// * IN_HEAP_ARRAY: The access is performed on a heap allocated array. This is sometimes a special case
-//   for some GCs, and implies that it is an IN_HEAP.
 // * IN_NATIVE: The access is performed in an off-heap data structure pointing into the Java heap.
 // * IN_CONCURRENT_ROOT: The access is performed in an off-heap data structure pointing into the Java heap,
 //   but is notably not scanned during safepoints. This is sometimes a special case for some GCs and
 //   implies that it is also an IN_NATIVE.
 const DecoratorSet IN_HEAP = UCONST64(1) << 20;
-const DecoratorSet IN_HEAP_ARRAY = UCONST64(1) << 21;
 const DecoratorSet IN_NATIVE = UCONST64(1) << 22;
 const DecoratorSet IN_CONCURRENT_ROOT = UCONST64(1) << 23;
-const DecoratorSet IN_DECORATOR_MASK = IN_HEAP | IN_HEAP_ARRAY |
-                                       IN_NATIVE | IN_CONCURRENT_ROOT;
+const DecoratorSet IN_DECORATOR_MASK = IN_HEAP | IN_NATIVE | IN_CONCURRENT_ROOT;

-// == Value Decorators ==
-// * OOP_NOT_NULL: This property can make certain barriers faster such as compressing oops.
-const DecoratorSet OOP_NOT_NULL = UCONST64(1) << 25;
-const DecoratorSet OOP_DECORATOR_MASK = OOP_NOT_NULL;
+// == Boolean Flag Decorators ==
+// * IS_ARRAY: The access is performed on a heap allocated array. This is sometimes a special case
+//   for some GCs.
+// * IS_DEST_UNINITIALIZED: This property can be important to e.g. SATB barriers by
+//   marking that the previous value is uninitialized nonsense rather than a real value.
+// * IS_NOT_NULL: This property can make certain barriers faster such as compressing oops.
+const DecoratorSet IS_ARRAY = UCONST64(1) << 21;
+const DecoratorSet IS_DEST_UNINITIALIZED = UCONST64(1) << 13;
+const DecoratorSet IS_NOT_NULL = UCONST64(1) << 25;

 // == Arraycopy Decorators ==
 // * ARRAYCOPY_CHECKCAST: This property means that the class of the objects in source
@@ -238,11 +235,8 @@
   // If no barrier strength has been picked, normal will be used
   static const DecoratorSet barrier_strength_default = memory_ordering_default |
     ((AS_DECORATOR_MASK & memory_ordering_default) == 0 ? AS_NORMAL : INTERNAL_EMPTY);
-  // Heap array accesses imply it is a heap access
-  static const DecoratorSet heap_array_is_in_heap = barrier_strength_default |
-    ((IN_HEAP_ARRAY & barrier_strength_default) != 0 ? IN_HEAP : INTERNAL_EMPTY);
-  static const DecoratorSet conc_root_is_root = heap_array_is_in_heap |
-    ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_NATIVE : INTERNAL_EMPTY);
+  static const DecoratorSet conc_root_is_root = barrier_strength_default |
+    ((IN_CONCURRENT_ROOT & barrier_strength_default) != 0 ? IN_NATIVE : INTERNAL_EMPTY);
   static const DecoratorSet value = conc_root_is_root | BT_BUILDTIME_DECORATORS;
 };

@@ -259,11 +253,8 @@
   // If no barrier strength has been picked, normal will be used
   DecoratorSet barrier_strength_default = memory_ordering_default |
     ((AS_DECORATOR_MASK & memory_ordering_default) == 0 ? AS_NORMAL : INTERNAL_EMPTY);
-  // Heap array accesses imply it is a heap access
-  DecoratorSet heap_array_is_in_heap = barrier_strength_default |
-    ((IN_HEAP_ARRAY & barrier_strength_default) != 0 ? IN_HEAP : INTERNAL_EMPTY);
-  DecoratorSet conc_root_is_root = heap_array_is_in_heap |
-    ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_NATIVE : INTERNAL_EMPTY);
+  DecoratorSet conc_root_is_root = barrier_strength_default |
+    ((IN_CONCURRENT_ROOT & barrier_strength_default) != 0 ? IN_NATIVE : INTERNAL_EMPTY);
   DecoratorSet value = conc_root_is_root | BT_BUILDTIME_DECORATORS;
   return value;
 }
diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/oops/objArrayOop.cpp
--- a/src/hotspot/share/oops/objArrayOop.cpp Fri Jun 22 16:06:43 2018 -0400
+++ b/src/hotspot/share/oops/objArrayOop.cpp Fri Jun 22 17:46:58 2018 -0400
@@ -37,7 +37,7 @@
   } else {
     offs = objArrayOopDesc::obj_at_offset<oop>(index);
   }
-  return HeapAccess<IN_HEAP_ARRAY>::oop_atomic_cmpxchg_at(exchange_value, as_oop(), offs, compare_value);
+  return HeapAccess<IS_ARRAY>::oop_atomic_cmpxchg_at(exchange_value, as_oop(), offs, compare_value);
 }

 Klass* objArrayOopDesc::element_klass() {
diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/oops/objArrayOop.inline.hpp
--- a/src/hotspot/share/oops/objArrayOop.inline.hpp Fri Jun 22 16:06:43 2018 -0400
+++ b/src/hotspot/share/oops/objArrayOop.inline.hpp Fri Jun 22 17:46:58 2018 -0400
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2015, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2015, 2018, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -46,12 +46,12 @@
 inline oop objArrayOopDesc::obj_at(int index) const {
   ptrdiff_t offset = UseCompressedOops ? obj_at_offset<narrowOop>(index) : obj_at_offset<oop>(index);
-  return HeapAccess<IN_HEAP_ARRAY>::oop_load_at(as_oop(), offset);
+  return HeapAccess<IS_ARRAY>::oop_load_at(as_oop(), offset);
 }

 inline void objArrayOopDesc::obj_at_put(int index, oop value) {
   ptrdiff_t offset = UseCompressedOops ?
obj_at_offset(index) : obj_at_offset(index); - HeapAccess::oop_store_at(as_oop(), offset, value); + HeapAccess::oop_store_at(as_oop(), offset, value); } #endif // SHARE_VM_OOPS_OBJARRAYOOP_INLINE_HPP diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/oops/typeArrayOop.inline.hpp --- a/src/hotspot/share/oops/typeArrayOop.inline.hpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/oops/typeArrayOop.inline.hpp Fri Jun 22 17:46:58 2018 -0400 @@ -91,92 +91,92 @@ inline jbyte typeArrayOopDesc::byte_at(int which) const { ptrdiff_t offset = element_offset(which); - return HeapAccess::load_at(as_oop(), offset); + return HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::byte_at_put(int which, jbyte contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, contents); + HeapAccess::store_at(as_oop(), offset, contents); } inline jboolean typeArrayOopDesc::bool_at(int which) const { ptrdiff_t offset = element_offset(which); - return HeapAccess::load_at(as_oop(), offset); + return HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::bool_at_put(int which, jboolean contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, jboolean(contents & 1)); + HeapAccess::store_at(as_oop(), offset, jboolean(contents & 1)); } inline jchar typeArrayOopDesc::char_at(int which) const { ptrdiff_t offset = element_offset(which); - return HeapAccess::load_at(as_oop(), offset); + return HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::char_at_put(int which, jchar contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, contents); + HeapAccess::store_at(as_oop(), offset, contents); } inline jint typeArrayOopDesc::int_at(int which) const { ptrdiff_t offset = element_offset(which); - return HeapAccess::load_at(as_oop(), offset); + return HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::int_at_put(int which, jint contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, contents); + HeapAccess::store_at(as_oop(), offset, contents); } inline jshort typeArrayOopDesc::short_at(int which) const { ptrdiff_t offset = element_offset(which); - return HeapAccess::load_at(as_oop(), offset); + return HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::short_at_put(int which, jshort contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, contents); + HeapAccess::store_at(as_oop(), offset, contents); } inline jushort typeArrayOopDesc::ushort_at(int which) const { ptrdiff_t offset = element_offset(which); - return HeapAccess::load_at(as_oop(), offset); + return HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::ushort_at_put(int which, jushort contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, contents); + HeapAccess::store_at(as_oop(), offset, contents); } inline jlong typeArrayOopDesc::long_at(int which) const { ptrdiff_t offset = element_offset(which); - return HeapAccess::load_at(as_oop(), offset); + return HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::long_at_put(int which, jlong contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, contents); + HeapAccess::store_at(as_oop(), offset, contents); } inline jfloat typeArrayOopDesc::float_at(int which) const { ptrdiff_t offset = element_offset(which); - return 
HeapAccess::load_at(as_oop(), offset); + return HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::float_at_put(int which, jfloat contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, contents); + HeapAccess::store_at(as_oop(), offset, contents); } inline jdouble typeArrayOopDesc::double_at(int which) const { ptrdiff_t offset = element_offset(which); - return HeapAccess::load_at(as_oop(), offset); + return HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::double_at_put(int which, jdouble contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, contents); + HeapAccess::store_at(as_oop(), offset, contents); } inline jbyte typeArrayOopDesc::byte_at_acquire(int which) const { ptrdiff_t offset = element_offset(which); - return HeapAccess::load_at(as_oop(), offset); + return HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::release_byte_at_put(int which, jbyte contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, contents); + HeapAccess::store_at(as_oop(), offset, contents); } // Java thinks Symbol arrays are just arrays of either long or int, since @@ -185,20 +185,20 @@ #ifdef _LP64 inline Symbol* typeArrayOopDesc::symbol_at(int which) const { ptrdiff_t offset = element_offset(which); - return (Symbol*)(jlong) HeapAccess::load_at(as_oop(), offset); + return (Symbol*)(jlong) HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::symbol_at_put(int which, Symbol* contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, (jlong)contents); + HeapAccess::store_at(as_oop(), offset, (jlong)contents); } #else inline Symbol* typeArrayOopDesc::symbol_at(int which) const { ptrdiff_t offset = element_offset(which); - return (Symbol*)(jint) HeapAccess::load_at(as_oop(), offset); + return (Symbol*)(jint) HeapAccess::load_at(as_oop(), offset); } inline void typeArrayOopDesc::symbol_at_put(int which, Symbol* contents) { ptrdiff_t offset = element_offset(which); - HeapAccess::store_at(as_oop(), offset, (jint)contents); + HeapAccess::store_at(as_oop(), offset, (jint)contents); } #endif // _LP64 diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/opto/parse2.cpp --- a/src/hotspot/share/opto/parse2.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/opto/parse2.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -69,7 +69,7 @@ const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt); Node* ld = access_load_at(array, adr, adr_type, elemtype, bt, - IN_HEAP | IN_HEAP_ARRAY | C2_CONTROL_DEPENDENT_LOAD); + IN_HEAP | IS_ARRAY | C2_CONTROL_DEPENDENT_LOAD); if (big_val) { push_pair(ld); } else { @@ -104,7 +104,7 @@ const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt); - access_store_at(control(), array, adr, adr_type, val, elemtype, bt, MO_UNORDERED | IN_HEAP | IN_HEAP_ARRAY); + access_store_at(control(), array, adr, adr_type, val, elemtype, bt, MO_UNORDERED | IN_HEAP | IS_ARRAY); } diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/opto/parse3.cpp --- a/src/hotspot/share/opto/parse3.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/opto/parse3.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -351,7 +351,7 @@ Node* elem = expand_multianewarray(array_klass_1, &lengths[1], ndimensions-1, nargs); intptr_t offset = header + ((intptr_t)i << LogBytesPerHeapOop); Node* eaddr = basic_plus_adr(array, offset); - access_store_at(control(), array, eaddr, adr_type, elem, elemtype, 
T_OBJECT, IN_HEAP | IN_HEAP_ARRAY); + access_store_at(control(), array, eaddr, adr_type, elem, elemtype, T_OBJECT, IN_HEAP | IS_ARRAY); } } return array; diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/runtime/jniHandles.cpp --- a/src/hotspot/share/runtime/jniHandles.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/runtime/jniHandles.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -517,7 +517,7 @@ // Try last block if (_last->_top < block_size_in_oops) { oop* handle = &(_last->_handles)[_last->_top++]; - NativeAccess::oop_store(handle, obj); + NativeAccess::oop_store(handle, obj); return (jobject) handle; } @@ -525,7 +525,7 @@ if (_free_list != NULL) { oop* handle = _free_list; _free_list = (oop*) *_free_list; - NativeAccess::oop_store(handle, obj); + NativeAccess::oop_store(handle, obj); return (jobject) handle; } // Check if unused block follow last diff -r 081b132c4dc0 -r 9375184cec98 src/hotspot/share/runtime/stubRoutines.cpp --- a/src/hotspot/share/runtime/stubRoutines.cpp Fri Jun 22 16:06:43 2018 -0400 +++ b/src/hotspot/share/runtime/stubRoutines.cpp Fri Jun 22 17:46:58 2018 -0400 @@ -418,7 +418,7 @@ SharedRuntime::_oop_array_copy_ctr++; // Slow-path oop array copy #endif // !PRODUCT assert(count != 0, "count should be non-zero"); - ArrayAccess::oop_arraycopy_raw((HeapWord*)src, (HeapWord*)dest, count); + ArrayAccess::oop_arraycopy_raw((HeapWord*)src, (HeapWord*)dest, count); JRT_END JRT_LEAF(void, StubRoutines::arrayof_jbyte_copy(HeapWord* src, HeapWord* dest, size_t count)) @@ -462,7 +462,7 @@ SharedRuntime::_oop_array_copy_ctr++; // Slow-path oop array copy #endif // !PRODUCT assert(count != 0, "count should be non-zero"); - ArrayAccess::oop_arraycopy_raw(src, dest, count); + ArrayAccess::oop_arraycopy_raw(src, dest, count); JRT_END address StubRoutines::select_fill_function(BasicType t, bool aligned, const char* &name) {
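Closing note: taken as a whole the patch is a rename plus one simplification. The three flags keep their bit positions under the new IS_* names, and IS_ARRAY no longer implies IN_HEAP, so call sites spell out IN_HEAP | IS_ARRAY and DecoratorFixup keeps only the IN_CONCURRENT_ROOT to IN_NATIVE implication. A hedged recap in code form; the typedef, the UINT64_C spelling and the INTERNAL_EMPTY value are stand-ins, while the bit values are the ones defined in accessDecorators.hpp above.

    #include <cstdint>

    typedef uint64_t DecoratorSet;                                 // assumed stand-in
    const DecoratorSet INTERNAL_EMPTY        = UINT64_C(0);        // assumed: the empty decorator set
    const DecoratorSet AS_RAW                = UINT64_C(1) << 12;
    const DecoratorSet IS_DEST_UNINITIALIZED = UINT64_C(1) << 13;  // was AS_DEST_NOT_INITIALIZED
    const DecoratorSet AS_NO_KEEPALIVE       = UINT64_C(1) << 14;
    const DecoratorSet AS_NORMAL             = UINT64_C(1) << 15;
    const DecoratorSet AS_DECORATOR_MASK     = AS_RAW | AS_NO_KEEPALIVE | AS_NORMAL;
    const DecoratorSet IN_HEAP               = UINT64_C(1) << 20;
    const DecoratorSet IS_ARRAY              = UINT64_C(1) << 21;  // was IN_HEAP_ARRAY; no longer implies IN_HEAP
    const DecoratorSet IN_NATIVE             = UINT64_C(1) << 22;
    const DecoratorSet IN_CONCURRENT_ROOT    = UINT64_C(1) << 23;
    const DecoratorSet IS_NOT_NULL           = UINT64_C(1) << 25;  // was OOP_NOT_NULL

    // Simplified runtime version of the post-patch DecoratorFixup (the real one also applies
    // memory-ordering defaults): pick AS_NORMAL if no barrier strength was requested, and let
    // IN_CONCURRENT_ROOT imply IN_NATIVE. The old IN_HEAP_ARRAY -> IN_HEAP expansion is gone.
    static DecoratorSet fixup_decorators(DecoratorSet decorators) {
      DecoratorSet with_strength = decorators |
          ((AS_DECORATOR_MASK & decorators) == 0 ? AS_NORMAL : INTERNAL_EMPTY);
      return with_strength |
          ((IN_CONCURRENT_ROOT & with_strength) != 0 ? IN_NATIVE : INTERNAL_EMPTY);
    }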