@@ -511,27 +511,65 @@
 }
 
 void TemplateTable::iload_internal(RewriteControl rc) {
   transition(vtos, itos);
   if (RewriteFrequentPairs && rc == may_rewrite) {
-    // TODO : check x86 code for what to do here
-    __ call_Unimplemented();
-  } else {
-    locals_index(r1);
-    __ ldr(r0, iaddress(r1));
-  }
+    Label rewrite, done;
+    Register bc = r4;
+
+    // get next bytecode
+    __ load_unsigned_byte(r1, at_bcp(Bytecodes::length_for(Bytecodes::_iload)));
+
+    // if _iload, wait to rewrite to iload2. We only want to rewrite the
+    // last two iloads in a pair. Comparing against fast_iload means that
+    // the next bytecode is neither an iload nor a caload, and therefore
+    // an iload pair.
+    __ cmpw(r1, Bytecodes::_iload);
+    __ br(Assembler::EQ, done);
+
+    // if _fast_iload rewrite to _fast_iload2
+    __ cmpw(r1, Bytecodes::_fast_iload);
+    __ movw(bc, Bytecodes::_fast_iload2);
+    __ br(Assembler::EQ, rewrite);
+
+    // if _caload rewrite to _fast_icaload
+    __ cmpw(r1, Bytecodes::_caload);
+    __ movw(bc, Bytecodes::_fast_icaload);
+    __ br(Assembler::EQ, rewrite);
+
+    // else rewrite to _fast_iload
+    __ movw(bc, Bytecodes::_fast_iload);
+
+    // rewrite
+    // bc: new bytecode
+    __ bind(rewrite);
+    patch_bytecode(Bytecodes::_iload, bc, r1, false);
+    __ bind(done);
+
+  }
+
+  // do iload, get the local value into tos
+  locals_index(r1);
+  __ ldr(r0, iaddress(r1));
+
 }
 
 void TemplateTable::fast_iload2()
 {
-  __ call_Unimplemented();
+  transition(vtos, itos);
+  locals_index(r1);
+  __ ldr(r0, iaddress(r1));
+  __ push(itos);
+  locals_index(r1, 3);
+  __ ldr(r0, iaddress(r1));
 }
 
 void TemplateTable::fast_iload()
 {
-  __ call_Unimplemented();
+  transition(vtos, itos);
+  locals_index(r1);
+  __ ldr(r0, iaddress(r1));
 }
 
 void TemplateTable::lload()
 {
   transition(vtos, ltos);
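For readers unfamiliar with the pair-rewriting scheme, the run-time decision encoded by the cmpw/br chain in iload_internal can be summarised in plain C++. This is a standalone, illustrative sketch, not HotSpot code; the enum below is a stand-in for the relevant Bytecodes::Code values and rewritten_iload is a hypothetical helper:

    #include <cstdint>

    // Stand-in for the relevant HotSpot Bytecodes::Code values (illustrative only).
    enum class Bc : uint8_t { _iload, _caload, _fast_iload, _fast_iload2, _fast_icaload };

    // Hypothetical helper mirroring the cmpw/br chain above: given the bytecode
    // that follows the _iload being executed, pick the bytecode to patch it to.
    Bc rewritten_iload(Bc next) {
      if (next == Bc::_iload)      return Bc::_iload;        // wait; only the last two iloads of a run are rewritten
      if (next == Bc::_fast_iload) return Bc::_fast_iload2;  // iload, iload pair
      if (next == Bc::_caload)     return Bc::_fast_icaload; // iload, caload pair
      return Bc::_fast_iload;                                // lone iload
    }

Note that fast_iload2() then performs both loads itself: it reads the first local index from the usual operand byte (locals_index(r1)) and the second from three bytes past the bytecode pointer (locals_index(r1, 3)), i.e. the operand byte of the second iload in the pair.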
@@ -719,11 +757,22 @@
 }
 
 // iload followed by caload frequent pair
 void TemplateTable::fast_icaload()
 {
-  __ call_Unimplemented();
+  transition(vtos, itos);
+  // load index out of locals
+  locals_index(r2);
+  __ ldr(r1, iaddress(r2));
+
+  __ pop_ptr(r0);
+
+  // r0: array
+  // r1: index
+  index_check(r0, r1); // leaves index in r1, kills rscratch1
+  __ lea(r1, Address(r0, r1, Address::uxtw(1)));
+  __ load_unsigned_short(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
 }
 
 void TemplateTable::saload()
 {
   transition(itos, itos);
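The lea/load_unsigned_short pair at the end of fast_icaload is the usual Java char[] element address computation. A minimal standalone sketch of the same arithmetic (base_offset stands in for arrayOopDesc::base_offset_in_bytes(T_CHAR); the function is illustrative, not HotSpot code):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Element i of a char[] lives at array_base + base_offset + i * 2;
    // Address::uxtw(1) supplies the "* 2" scaling for the 2-byte elements.
    uint16_t char_array_load(const uint8_t* array_base, uint32_t index, std::size_t base_offset) {
      const uint8_t* scaled = array_base + std::size_t(index) * 2; // lea(r1, Address(r0, r1, Address::uxtw(1)))
      uint16_t value;
      std::memcpy(&value, scaled + base_offset, sizeof(value));    // load_unsigned_short(r0, Address(r1, base_offset))
      return value;
    }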
@@ -795,11 +844,51 @@
   // aload_0, aload_1
   // aload_0, iload_1
   // These bytecodes with a small amount of code are most profitable
   // to rewrite
   if (RewriteFrequentPairs && rc == may_rewrite) {
-    __ call_Unimplemented();
+    Label rewrite, done;
+    const Register bc = r4;
+
+    // get next bytecode
+    __ load_unsigned_byte(r1, at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)));
+
+    // do actual aload_0
+    aload(0);
+
+    // if _getfield then wait to rewrite
+    __ cmpw(r1, Bytecodes::_getfield);
+    __ br(Assembler::EQ, done);
+
+    // if _igetfield then rewrite to _fast_iaccess_0
+    assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
+    __ cmpw(r1, Bytecodes::_fast_igetfield);
+    __ movw(bc, Bytecodes::_fast_iaccess_0);
+    __ br(Assembler::EQ, rewrite);
+
+    // if _agetfield then rewrite to _fast_aaccess_0
+    assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
+    __ cmpw(r1, Bytecodes::_fast_agetfield);
+    __ movw(bc, Bytecodes::_fast_aaccess_0);
+    __ br(Assembler::EQ, rewrite);
+
+    // if _fgetfield then rewrite to _fast_faccess_0
+    assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
+    __ cmpw(r1, Bytecodes::_fast_fgetfield);
+    __ movw(bc, Bytecodes::_fast_faccess_0);
+    __ br(Assembler::EQ, rewrite);
+
+    // else rewrite to _fast_aload_0
+    assert(Bytecodes::java_code(Bytecodes::_fast_aload_0) == Bytecodes::_aload_0, "fix bytecode definition");
+    __ movw(bc, Bytecodes::_fast_aload_0);
+
+    // rewrite
+    // bc: new bytecode
+    __ bind(rewrite);
+    patch_bytecode(Bytecodes::_aload_0, bc, r1, false);
+
+    __ bind(done);
   } else {
     aload(0);
   }
 }
 
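As with iload, the aload_0 rewrite decision can be summarised in plain C++. Again a standalone, illustrative sketch (the enums are stand-ins for Bytecodes::Code and rewritten_aload_0 is a hypothetical helper, not TemplateTable code):

    #include <cstdint>

    // Stand-ins for the relevant HotSpot bytecode values (illustrative only).
    enum class NextBc   : uint8_t { _getfield, _fast_igetfield, _fast_agetfield, _fast_fgetfield, other };
    enum class Aload0Bc : uint8_t { _aload_0, _fast_iaccess_0, _fast_aaccess_0, _fast_faccess_0, _fast_aload_0 };

    // Hypothetical helper mirroring the cmpw/br chain above: given the bytecode
    // that follows an _aload_0, pick the bytecode _aload_0 should be patched to.
    Aload0Bc rewritten_aload_0(NextBc next) {
      if (next == NextBc::_getfield)       return Aload0Bc::_aload_0;        // wait; the getfield is rewritten first
      if (next == NextBc::_fast_igetfield) return Aload0Bc::_fast_iaccess_0; // aload_0, getfield of an int field
      if (next == NextBc::_fast_agetfield) return Aload0Bc::_fast_aaccess_0; // aload_0, getfield of an object field
      if (next == NextBc::_fast_fgetfield) return Aload0Bc::_fast_faccess_0; // aload_0, getfield of a float field
      return Aload0Bc::_fast_aload_0;                                        // no profitable pair
    }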