changeset 9141:833ab38a3b4c

c1: misc bugfixes: assert on java -XX:+ProfileInterpreter; assert on java -XX:+PrintCompilation -XX:+WizardMode; print the two top stack slots with -XX:+TraceBytecodes -XX:+Verbose instead of TOS/TOS2, for the sake of more information for vtos-marked bytecodes which actually operate on stack values (e.g. invoke*) Contributed-by: andrey.petushkov@gmail.com
author snazarki
date Thu, 11 Jul 2019 17:31:50 +0300
parents 6f05add32df5
children 0cdba2db78c6
files src/cpu/aarch32/vm/c1_Runtime1_aarch32.cpp src/cpu/aarch32/vm/interp_masm_aarch32.cpp src/cpu/aarch32/vm/interp_masm_aarch32.hpp src/cpu/aarch32/vm/templateInterpreter_aarch32.cpp src/cpu/aarch32/vm/templateTable_aarch32.cpp
diffstat 5 files changed, 20 insertions(+), 13 deletions(-) [+]
line wrap: on
line diff
--- a/src/cpu/aarch32/vm/c1_Runtime1_aarch32.cpp	Thu Jul 11 14:07:03 2019 +0300
+++ b/src/cpu/aarch32/vm/c1_Runtime1_aarch32.cpp	Thu Jul 11 17:31:50 2019 +0300
@@ -783,9 +783,9 @@
         __ enter();
         OopMap* map = save_live_registers(sasm);
         // Retrieve bci
-        __ ldr(bci, Address(rfp, 2*BytesPerWord));
+        __ ldr(bci, Address(rfp, 1*BytesPerWord));
         // And a pointer to the Method*
-        __ ldr(method, Address(rfp, 3*BytesPerWord));
+        __ ldr(method, Address(rfp, 2*BytesPerWord));
         int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
         oop_maps = new OopMapSet();
         oop_maps->add_gc_map(call_offset, map);
--- a/src/cpu/aarch32/vm/interp_masm_aarch32.cpp	Thu Jul 11 14:07:03 2019 +0300
+++ b/src/cpu/aarch32/vm/interp_masm_aarch32.cpp	Thu Jul 11 17:31:50 2019 +0300
@@ -1552,14 +1552,19 @@
 // Jump if ((*counter_addr += increment) & mask) satisfies the condition.
 void InterpreterMacroAssembler::increment_mask_and_jump(Address counter_addr,
                                                         int increment, int mask,
-                                                        Register scratch, bool preloaded,
+                                                        Register scratch, Register scratch2, bool preloaded,
                                                         Condition cond, Label* where) {
   if (!preloaded) {
     ldr(scratch, counter_addr);
   }
   add(scratch, scratch, increment);
   str(scratch, counter_addr);
-  ands(scratch, scratch, mask);
+  if (Assembler::is_valid_for_imm12(mask))
+    ands(scratch, scratch, mask);
+  else {
+    mov(scratch2, mask);
+    ands(scratch, scratch, scratch2);
+  }
   b(*where, cond);
 }
 
--- a/src/cpu/aarch32/vm/interp_masm_aarch32.hpp	Thu Jul 11 14:07:03 2019 +0300
+++ b/src/cpu/aarch32/vm/interp_masm_aarch32.hpp	Thu Jul 11 17:31:50 2019 +0300
@@ -233,7 +233,7 @@
                              bool decrement = false);
   void increment_mask_and_jump(Address counter_addr,
                                int increment, int mask,
-                               Register scratch, bool preloaded,
+                               Register scratch, Register scratch2, bool preloaded,
                                Condition cond, Label* where);
   void set_mdp_flag_at(Register mdp_in, int flag_constant);
   void test_mdp_data_at(Register mdp_in, int offset, Register value,
--- a/src/cpu/aarch32/vm/templateInterpreter_aarch32.cpp	Thu Jul 11 14:07:03 2019 +0300
+++ b/src/cpu/aarch32/vm/templateInterpreter_aarch32.cpp	Thu Jul 11 17:31:50 2019 +0300
@@ -365,7 +365,7 @@
       // Increment counter in the MDO
       const Address mdo_invocation_counter(r0, in_bytes(MethodData::invocation_counter_offset()) +
                                            in_bytes(InvocationCounter::counter_offset()));
-      __ increment_mask_and_jump(mdo_invocation_counter, increment, mask, rscratch1, false, Assembler::EQ, overflow);
+      __ increment_mask_and_jump(mdo_invocation_counter, increment, mask, rscratch1, rscratch2, false, Assembler::EQ, overflow);
       __ b(done);
     }
     __ bind(no_mdo);
@@ -374,7 +374,7 @@
                   MethodCounters::invocation_counter_offset() +
                   InvocationCounter::counter_offset());
     __ get_method_counters(rmethod, rscratch2, done);
-    __ increment_mask_and_jump(invocation_counter, increment, mask, rscratch1, false, Assembler::EQ, overflow);
+    __ increment_mask_and_jump(invocation_counter, increment, mask, rscratch1, rscratch2, false, Assembler::EQ, overflow);
     __ bind(done);
   } else {
     const Address backedge_counter(rscratch2,
@@ -2019,9 +2019,11 @@
 
   __ push(state);
   // Save all registers on stack, so omit SP and PC
-  __ push(RegSet::range(r0, r12) + lr, sp);
-  __ mov(c_rarg2, r0);  // Pass itos
-  __ mov(c_rarg3, r1);  // Pass ltos/dtos high part
+  const RegSet push_set = RegSet::range(r0, r12) + lr;
+  const int push_set_cnt = __builtin_popcount(push_set.bits());
+  __ push(push_set, sp);
+  __ ldr(c_rarg2, Address(sp, push_set_cnt*wordSize));      // Pass top of stack
+  __ ldr(c_rarg3, Address(sp, (push_set_cnt+1)*wordSize));  // Pass top of stack high part/2nd stack word
   __ call_VM(noreg,
              CAST_FROM_FN_PTR(address, SharedRuntime::trace_bytecode),
              c_rarg1, c_rarg2, c_rarg3);
--- a/src/cpu/aarch32/vm/templateTable_aarch32.cpp	Thu Jul 11 14:07:03 2019 +0300
+++ b/src/cpu/aarch32/vm/templateTable_aarch32.cpp	Thu Jul 11 17:31:50 2019 +0300
@@ -1140,7 +1140,7 @@
 
   // Have a NULL in r0, r3=array, r2=index.  Store NULL at ary[idx]
   __ bind(is_null);
-  __ profile_null_seen(r2);
+  __ profile_null_seen(r1);
 
   __ lea(r1, Address(r3, r2, lsl(2)));
   // Store a NULL
@@ -2078,14 +2078,14 @@
         const Address mdo_backedge_counter(r1, in_bytes(MethodData::backedge_counter_offset()) +
                                            in_bytes(InvocationCounter::counter_offset()));
         __ increment_mask_and_jump(mdo_backedge_counter, increment, mask,
-                                   r0, false, Assembler::EQ, &backedge_counter_overflow);
+                                   r0, rscratch2, false, Assembler::EQ, &backedge_counter_overflow);
         __ b(dispatch);
       }
       __ bind(no_mdo);
       // Increment backedge counter in MethodCounters*
       __ ldr(rscratch1, Address(rmethod, Method::method_counters_offset()));
       __ increment_mask_and_jump(Address(rscratch1, be_offset), increment, mask,
-                                 r0, false, Assembler::EQ, &backedge_counter_overflow);
+                                 r0, rscratch2, false, Assembler::EQ, &backedge_counter_overflow);
     } else {
       // increment counter
       __ ldr(rscratch2, Address(rmethod, Method::method_counters_offset()));