hotspot/src/cpu/x86/vm/c1_Runtime1_x86.cpp
changeset 30624 2e1803c8a26d
parent 30582 b943b080c599
child 30768 66b53dcce510
comparing 30596:0322b394e7fd with 30624:2e1803c8a26d
@@ -321,11 +321,11 @@
 
   // In 64bit all the args are in regs so there are no additional stack slots
   LP64_ONLY(num_rt_args = 0);
   LP64_ONLY(assert((reg_save_frame_size * VMRegImpl::stack_slot_size) % 16 == 0, "must be 16 byte aligned");)
   int frame_size_in_slots = reg_save_frame_size + num_rt_args; // args + thread
-  sasm->set_frame_size(frame_size_in_slots / VMRegImpl::slots_per_word );
+  sasm->set_frame_size(frame_size_in_slots / VMRegImpl::slots_per_word);
 
   // record saved value locations in an OopMap
   // locations are offsets from sp after runtime call; num_rt_args is number of arguments in call, including thread
   OopMap* map = new OopMap(frame_size_in_slots, 0);
   map->set_callee_saved(VMRegImpl::stack2reg(rax_off + num_rt_args), rax->as_VMReg());
@@ -359,10 +359,17 @@
   map->set_callee_saved(VMRegImpl::stack2reg(r12H_off + num_rt_args), r12->as_VMReg()->next());
   map->set_callee_saved(VMRegImpl::stack2reg(r13H_off + num_rt_args), r13->as_VMReg()->next());
   map->set_callee_saved(VMRegImpl::stack2reg(r14H_off + num_rt_args), r14->as_VMReg()->next());
   map->set_callee_saved(VMRegImpl::stack2reg(r15H_off + num_rt_args), r15->as_VMReg()->next());
 #endif // _LP64
+
+  int xmm_bypass_limit = FrameMap::nof_xmm_regs;
+#ifdef _LP64
+  if (UseAVX < 3) {
+    xmm_bypass_limit = xmm_bypass_limit / 2;
+  }
+#endif
 
   if (save_fpu_registers) {
     if (UseSSE < 2) {
       int fpu_off = float_regs_as_doubles_off;
       for (int n = 0; n < FrameMap::nof_fpu_regs; n++) {
@@ -378,25 +385,29 @@
     }
 
     if (UseSSE >= 2) {
       int xmm_off = xmm_regs_as_doubles_off;
       for (int n = 0; n < FrameMap::nof_xmm_regs; n++) {
-        VMReg xmm_name_0 = as_XMMRegister(n)->as_VMReg();
-        map->set_callee_saved(VMRegImpl::stack2reg(xmm_off +     num_rt_args), xmm_name_0);
-        // %%% This is really a waste but we'll keep things as they were for now
-        if (true) {
-          map->set_callee_saved(VMRegImpl::stack2reg(xmm_off + 1 + num_rt_args), xmm_name_0->next());
+        if (n < xmm_bypass_limit) {
+          VMReg xmm_name_0 = as_XMMRegister(n)->as_VMReg();
+          map->set_callee_saved(VMRegImpl::stack2reg(xmm_off + num_rt_args), xmm_name_0);
+          // %%% This is really a waste but we'll keep things as they were for now
+          if (true) {
+            map->set_callee_saved(VMRegImpl::stack2reg(xmm_off + 1 + num_rt_args), xmm_name_0->next());
+          }
         }
         xmm_off += 2;
       }
       assert(xmm_off == float_regs_as_doubles_off, "incorrect number of xmm registers");
 
     } else if (UseSSE == 1) {
       int xmm_off = xmm_regs_as_doubles_off;
       for (int n = 0; n < FrameMap::nof_xmm_regs; n++) {
-        VMReg xmm_name_0 = as_XMMRegister(n)->as_VMReg();
-        map->set_callee_saved(VMRegImpl::stack2reg(xmm_off +     num_rt_args), xmm_name_0);
+        if (n < xmm_bypass_limit) {
+          VMReg xmm_name_0 = as_XMMRegister(n)->as_VMReg();
+          map->set_callee_saved(VMRegImpl::stack2reg(xmm_off + num_rt_args), xmm_name_0);
+        }
         xmm_off += 2;
       }
       assert(xmm_off == float_regs_as_doubles_off, "incorrect number of xmm registers");
     }
   }
@@ -472,10 +483,28 @@
       __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 88), xmm11);
       __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 96), xmm12);
       __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 104), xmm13);
       __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 112), xmm14);
       __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 120), xmm15);
+      if (UseAVX > 2) {
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 128), xmm16);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 136), xmm17);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 144), xmm18);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 152), xmm19);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 160), xmm20);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 168), xmm21);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 176), xmm22);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 184), xmm23);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 192), xmm24);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 200), xmm25);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 208), xmm26);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 216), xmm27);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 224), xmm28);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 232), xmm29);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 240), xmm30);
+        __ movdbl(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 248), xmm31);
+      }
 #endif // _LP64
     } else if (UseSSE == 1) {
       // save XMM registers as float because double not supported without SSE2
       __ movflt(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size +  0), xmm0);
       __ movflt(Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size +  8), xmm1);
@@ -514,10 +543,28 @@
       __ movdbl(xmm11, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 88));
       __ movdbl(xmm12, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 96));
       __ movdbl(xmm13, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 104));
       __ movdbl(xmm14, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 112));
       __ movdbl(xmm15, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 120));
+      if (UseAVX > 2) {
+        __ movdbl(xmm16, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 128));
+        __ movdbl(xmm17, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 136));
+        __ movdbl(xmm18, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 144));
+        __ movdbl(xmm19, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 152));
+        __ movdbl(xmm20, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 160));
+        __ movdbl(xmm21, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 168));
+        __ movdbl(xmm22, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 176));
+        __ movdbl(xmm23, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 184));
+        __ movdbl(xmm24, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 192));
+        __ movdbl(xmm25, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 200));
+        __ movdbl(xmm26, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 208));
+        __ movdbl(xmm27, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 216));
+        __ movdbl(xmm28, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 224));
+        __ movdbl(xmm29, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 232));
+        __ movdbl(xmm30, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 240));
+        __ movdbl(xmm31, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size + 248));
+      }
 #endif // _LP64
     } else if (UseSSE == 1) {
       // restore XMM registers
       __ movflt(xmm0, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size +  0));
       __ movflt(xmm1, Address(rsp, xmm_regs_as_doubles_off * VMRegImpl::stack_slot_size +  8));
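
The substance of this changeset is the new xmm_bypass_limit: with AVX-512 (UseAVX > 2) the x86_64 XMM file grows from 16 to 32 registers, so the C1 save/restore stubs must also spill xmm16-xmm31, while on pre-AVX-512 CPUs the OopMap loops skip the nonexistent upper bank without disturbing the frame layout. Below is a minimal standalone sketch of that bookkeeping; nof_xmm_regs and use_avx are hypothetical stand-ins for HotSpot's FrameMap::nof_xmm_regs and the UseAVX flag, and the sketch is an illustration of the guard logic, not the JDK code itself.

#include <cstdio>

// Hypothetical stand-ins for HotSpot state (not the real FrameMap/UseAVX):
const int nof_xmm_regs = 32; // frame slots are reserved for xmm0..xmm31
int use_avx = 2;             // e.g. an AVX2 CPU without AVX-512

int main() {
  // Mirrors the changeset: registers at or above the bypass limit are
  // skipped, but xmm_off still advances by two 32-bit slots per register,
  // so the frame layout (and the trailing assert) is unchanged.
  int xmm_bypass_limit = nof_xmm_regs;
  if (use_avx < 3) {
    xmm_bypass_limit = xmm_bypass_limit / 2; // only xmm0..xmm15 exist
  }
  int xmm_off = 0;
  for (int n = 0; n < nof_xmm_regs; n++) {
    if (n < xmm_bypass_limit) {
      std::printf("record xmm%d at slot offset %d\n", n, xmm_off);
    }
    xmm_off += 2; // a double occupies two stack slots
  }
  return 0;
}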