src/hotspot/cpu/ppc/templateInterpreterGenerator_ppc.cpp
changeset 53281 672b629e1f72
parent 52460 f1bb77833b59
child 53408 e17d7ffad9e3
--- a/src/hotspot/cpu/ppc/templateInterpreterGenerator_ppc.cpp	Fri Jan 11 10:46:29 2019 +0100
+++ b/src/hotspot/cpu/ppc/templateInterpreterGenerator_ppc.cpp	Fri Jan 11 11:02:00 2019 +0100
@@ -532,14 +532,8 @@
   // these parameters the pre-barrier does not generate
   // the load of the previous value.
 
-  // Restore caller sp for c2i case.
-#ifdef ASSERT
-  __ ld(R9_ARG7, 0, R1_SP);
-  __ ld(R10_ARG8, 0, R21_sender_SP);
-  __ cmpd(CCR0, R9_ARG7, R10_ARG8);
-  __ asm_assert_eq("backlink", 0x544);
-#endif // ASSERT
-  __ mr(R1_SP, R21_sender_SP); // Cut the stack back to where the caller started.
+  // Restore caller sp for c2i case (from compiled) and for resized sender frame (from interpreted).
+  __ resize_frame_absolute(R21_sender_SP, R11_scratch1, R0);
 
   __ blr();
 
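// Background (illustrative annotation): in the PPC64 ELF ABI the word at
// offset 0 of every frame is the back chain, i.e. the caller's SP. The removed
// code therefore only handled callers whose back chain already equals
// R21_sender_SP, which is exactly what the asm_assert checked, and it broke
// once an interpreted sender frame had been resized. resize_frame_absolute
// (defined in macroAssembler_ppc) moves R1_SP to the given absolute address;
// judging from its use here it also keeps the back chain consistent. A
// host-side model of the old, narrower assumption (hypothetical names, for
// illustration only):
//
//   #include <cassert>
//   #include <cstdint>
//
//   void old_cut_back(uintptr_t*& sp, uintptr_t* sender_sp) {
//     assert((uintptr_t*)sp[0] == sender_sp);  // asm_assert_eq("backlink", 0x544)
//     sp = sender_sp;                          // mr R1_SP, R21_sender_SP
//   }
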
@@ -835,8 +829,13 @@
   assert(StubRoutines::throw_StackOverflowError_entry() != NULL, "generated in wrong order");
   __ load_const_optimized(Rscratch1, (StubRoutines::throw_StackOverflowError_entry()), R0);
   __ mtctr(Rscratch1);
-  // Restore caller_sp.
+  // Restore caller_sp (c2i adapter may exist, but no shrinking of interpreted caller frame).
 #ifdef ASSERT
+  Label frame_not_shrunk;
+  __ cmpld(CCR0, R1_SP, R21_sender_SP);
+  __ ble(CCR0, frame_not_shrunk);
+  __ stop("frame shrunk", 0x546);
+  __ bind(frame_not_shrunk);
   __ ld(Rscratch1, 0, R1_SP);
   __ ld(R0, 0, R21_sender_SP);
   __ cmpd(CCR0, R0, Rscratch1);
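// Why cmpld/ble (illustrative annotation): the stack grows towards lower
// addresses, so a frame that has not been shrunk satisfies
// R1_SP <= R21_sender_SP when the two SPs are compared as unsigned 64-bit
// pointers, hence the logical compare cmpld rather than the signed cmpd. A
// one-line host-side model (hypothetical helper name):
//
//   #include <cstdint>
//
//   static inline bool frame_not_shrunk(uintptr_t sp, uintptr_t sender_sp) {
//     return sp <= sender_sp;  // cmpld CCR0, R1_SP, R21_sender_SP; ble
//   }
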
@@ -1155,15 +1154,6 @@
     }
   }
 
-  // Pop c2i arguments (if any) off when we return.
-#ifdef ASSERT
-  __ ld(R9_ARG7, 0, R1_SP);
-  __ ld(R10_ARG8, 0, R21_sender_SP);
-  __ cmpd(CCR0, R9_ARG7, R10_ARG8);
-  __ asm_assert_eq("backlink", 0x545);
-#endif // ASSERT
-  __ mr(R1_SP, R21_sender_SP); // Cut the stack back to where the caller started.
-
   if (use_instruction) {
     switch (kind) {
       case Interpreter::java_lang_math_sqrt: __ fsqrt(F1_RET, F1);          break;
@@ -1188,6 +1178,8 @@
     __ restore_LR_CR(R0);
   }
 
+  // Restore caller sp for c2i case (from compiled) and for resized sender frame (from interpreted).
+  __ resize_frame_absolute(R21_sender_SP, R11_scratch1, R0);
   __ blr();
 
   __ flush();
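// Net effect of the two hunks above, pieced together from the diff itself
// (fsqrt standing in for the selected intrinsic): the stack is now cut back
// only after the result has been computed and LR/CR restored, immediately
// before the return.
//
//   __ fsqrt(F1_RET, F1);                                       // intrinsic result
//   __ restore_LR_CR(R0);                                       // conditional, see above
//   __ resize_frame_absolute(R21_sender_SP, R11_scratch1, R0);  // cut stack back
//   __ blr();
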
@@ -1843,8 +1835,8 @@
     StubRoutines::ppc64::generate_load_crc_table_addr(_masm, table);
     __ kernel_crc32_singleByte(crc, data, dataLen, table, tmp, true);
 
-    // Restore caller sp for c2i case and return.
-    __ mr(R1_SP, R21_sender_SP); // Cut the stack back to where the caller started.
+    // Restore caller sp for c2i case (from compiled) and for resized sender frame (from interpreted).
+    __ resize_frame_absolute(R21_sender_SP, R11_scratch1, R0);
     __ blr();
 
     // Generate a vanilla native entry as the slow path.
@@ -1931,8 +1923,8 @@
     // code compactness.
     __ kernel_crc32_1word(crc, data, dataLen, table, t0, t1, t2, t3, tc0, tc1, tc2, tc3, true);
 
-    // Restore caller sp for c2i case and return.
-    __ mr(R1_SP, R21_sender_SP); // Cut the stack back to where the caller started.
+    // Restore caller sp for c2i case (from compiled) and for resized sender frame (from interpreted).
+    __ resize_frame_absolute(R21_sender_SP, R11_scratch1, R0);
     __ blr();
 
     // Generate a vanilla native entry as the slow path.
@@ -2019,8 +2011,8 @@
     // code compactness.
     __ kernel_crc32_1word(crc, data, dataLen, table, t0, t1, t2, t3, tc0, tc1, tc2, tc3, false);
 
-    // Restore caller sp for c2i case and return.
-    __ mr(R1_SP, R21_sender_SP); // Cut the stack back to where the caller started.
+    // Restore caller sp for c2i case (from compiled) and for resized sender frame (from interpreted).
+    __ resize_frame_absolute(R21_sender_SP, R11_scratch1, R0);
     __ blr();
 
     BLOCK_COMMENT("} CRC32C_update{Bytes|DirectByteBuffer}");