src/hotspot/cpu/x86/templateTable_x86.cpp
branch:    datagramsocketimpl-branch
changeset: 58678:9cf78a70fa4f
parent:    54921:be1cbfd81ea1
child:     58679:9c3209ff7550
comparison: 58677:13588c901957 → 58678:9cf78a70fa4f
@@ -2717,37 +2717,53 @@
   // Helper function to insert a is-volatile test and memory barrier
   __ membar(order_constraint);
 }
 
 void TemplateTable::resolve_cache_and_index(int byte_no,
-                                            Register Rcache,
+                                            Register cache,
                                             Register index,
                                             size_t index_size) {
   const Register temp = rbx;
-  assert_different_registers(Rcache, index, temp);
+  assert_different_registers(cache, index, temp);
 
+  Label L_clinit_barrier_slow;
   Label resolved;
 
   Bytecodes::Code code = bytecode();
   switch (code) {
   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
   default: break;
   }
 
   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
-  __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
+  __ get_cache_and_index_and_bytecode_at_bcp(cache, index, temp, byte_no, 1, index_size);
   __ cmpl(temp, code);  // have we resolved this bytecode?
   __ jcc(Assembler::equal, resolved);
 
   // resolve first time through
+  // Class initialization barrier slow path lands here as well.
+  __ bind(L_clinit_barrier_slow);
   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
   __ movl(temp, code);
   __ call_VM(noreg, entry, temp);
   // Update registers with resolved info
-  __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
+  __ get_cache_and_index_at_bcp(cache, index, 1, index_size);
+
   __ bind(resolved);
+
+  // Class initialization barrier for static methods
+  if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
+    const Register method = temp;
+    const Register klass  = temp;
+    const Register thread = LP64_ONLY(r15_thread) NOT_LP64(noreg);
+    assert(thread != noreg, "x86_32 not supported");
+
+    __ load_resolved_method_at_index(byte_no, method, cache, index);
+    __ load_method_holder(klass, method);
+    __ clinit_barrier(klass, thread, NULL /*L_fast_path*/, &L_clinit_barrier_slow);
+  }
 }
 
 // The cache and index registers must be set before call
 void TemplateTable::load_field_cp_cache_entry(Register obj,
                                               Register cache,
@@ -2792,24 +2808,19 @@
   assert_different_registers(method, cache, index);
   assert_different_registers(itable_index, flags);
   assert_different_registers(itable_index, cache, index);
   // determine constant pool cache field offsets
   assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
-  const int method_offset = in_bytes(
-    ConstantPoolCache::base_offset() +
-      ((byte_no == f2_byte)
-       ? ConstantPoolCacheEntry::f2_offset()
-       : ConstantPoolCacheEntry::f1_offset()));
   const int flags_offset = in_bytes(ConstantPoolCache::base_offset() +
                                     ConstantPoolCacheEntry::flags_offset());
   // access constant pool cache fields
   const int index_offset = in_bytes(ConstantPoolCache::base_offset() +
                                     ConstantPoolCacheEntry::f2_offset());
 
   size_t index_size = (is_invokedynamic ? sizeof(u4) : sizeof(u2));
   resolve_cache_and_index(byte_no, cache, index, index_size);
-    __ movptr(method, Address(cache, index, Address::times_ptr, method_offset));
+  __ load_resolved_method_at_index(byte_no, method, cache, index);
 
   if (itable_index != noreg) {
     // pick up itable or appendix index from f2 also:
     __ movptr(itable_index, Address(cache, index, Address::times_ptr, index_offset));
   }
@@ -3860,13 +3871,11 @@
   // profile this call
   __ restore_bcp(); // rbcp was destroyed by receiver type check
   __ profile_virtual_call(rdx, rbcp, rlocals);
 
   // Get declaring interface class from method, and itable index
-  __ movptr(rax, Address(rbx, Method::const_offset()));
-  __ movptr(rax, Address(rax, ConstMethod::constants_offset()));
-  __ movptr(rax, Address(rax, ConstantPool::pool_holder_offset_in_bytes()));
+  __ load_method_holder(rax, rbx);
   __ movl(rbx, Address(rbx, Method::itable_index_offset()));
   __ subl(rbx, Method::itable_index_max);
   __ negl(rbx);
 
   // Preserve recvKlass for throw_AbstractMethodErrorVerbose.
@@ -4001,11 +4010,11 @@
   const int tags_offset = Array<u1>::base_offset_in_bytes();
   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
   __ jcc(Assembler::notEqual, slow_case_no_pop);
 
   // get InstanceKlass
-  __ load_resolved_klass_at_index(rcx, rdx, rcx);
+  __ load_resolved_klass_at_index(rcx, rcx, rdx);
   __ push(rcx);  // save the contexts of klass for initializing the header
 
   // make sure klass is initialized & doesn't have finalizer
   // make sure klass is fully initialized
   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
@@ -4097,11 +4106,11 @@
       __ pop(rcx);   // get saved klass back in the register.
       __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()), rbx);
     } else {
       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()),
-                (intptr_t)markOopDesc::prototype()); // header
+                (intptr_t)markWord::prototype().value()); // header
       __ pop(rcx);   // get saved klass back in the register.
     }
 #ifdef _LP64
     __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
     __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
@@ -4195,11 +4204,11 @@
   __ jmpb(resolved);
 
   // Get superklass in rax and subklass in rbx
   __ bind(quicked);
   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
-  __ load_resolved_klass_at_index(rcx, rbx, rax);
+  __ load_resolved_klass_at_index(rax, rcx, rbx);
 
   __ bind(resolved);
   __ load_klass(rbx, rdx);
 
   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
@@ -4261,11 +4270,11 @@
   __ jmpb(resolved);
 
   // Get superklass in rax and subklass in rdx
   __ bind(quicked);
   __ load_klass(rdx, rax);
-  __ load_resolved_klass_at_index(rcx, rbx, rax);
+  __ load_resolved_klass_at_index(rax, rcx, rbx);
 
   __ bind(resolved);
 
   // Generate subtype check.  Blows rcx, rdi
   // Superklass in rax.  Subklass in rdx.