changeset 52751:6551fd25cfa0

ZGC: 8212681: Refactor IC locking to use a fine grained CompiledICLocker
author eosterlund
date Fri, 24 Aug 2018 14:15:43 +0200
parents 8eb930547781
children 38e8c0eaa997
files src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp src/hotspot/cpu/aarch64/compiledIC_aot_aarch64.cpp src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp src/hotspot/cpu/arm/compiledIC_arm.cpp src/hotspot/cpu/ppc/compiledIC_ppc.cpp src/hotspot/cpu/ppc/nativeInst_ppc.cpp src/hotspot/cpu/s390/compiledIC_s390.cpp src/hotspot/cpu/sparc/compiledIC_sparc.cpp src/hotspot/cpu/sparc/nativeInst_sparc.cpp src/hotspot/cpu/x86/compiledIC_aot_x86_64.cpp src/hotspot/cpu/x86/compiledIC_x86.cpp src/hotspot/cpu/x86/nativeInst_x86.cpp src/hotspot/share/code/compiledIC.cpp src/hotspot/share/code/compiledIC.hpp src/hotspot/share/code/compiledMethod.cpp src/hotspot/share/code/icBuffer.cpp src/hotspot/share/code/nmethod.cpp src/hotspot/share/memory/universe.cpp src/hotspot/share/runtime/sharedRuntime.cpp src/hotspot/share/runtime/sweeper.cpp src/hotspot/share/runtime/vmBehaviours.cpp src/hotspot/share/runtime/vmBehaviours.hpp
diffstat 22 files changed, 207 insertions(+), 66 deletions(-) [+]
line wrap: on
line diff
--- a/src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -179,10 +179,10 @@
 }
 
 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   // Reset stub.
   address stub = static_stub->addr();
   assert(stub != NULL, "stub not found");
+  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
   // Creation also verifies the object.
   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
   method_holder->set_data(0);
--- a/src/hotspot/cpu/aarch64/compiledIC_aot_aarch64.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/aarch64/compiledIC_aot_aarch64.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -69,10 +69,10 @@
 
 #ifdef NEVER_CALLED
 void CompiledPltStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   // Reset stub.
   address stub = static_stub->addr();
   assert(stub != NULL, "stub not found");
+  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
   // Creation also verifies the object.
   NativeLoadGot* method_loader = nativeLoadGot_at(stub);
   NativeGotJump* jump          = nativeGotJump_at(method_loader->next_instruction_address());
--- a/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -178,7 +178,8 @@
 // during code generation, where no patching lock is needed.
 void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
   assert(!assert_lock ||
-         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()),
+         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()) ||
+         CompiledICLocker::is_safe(addr_at(0)),
          "concurrent code patching");
 
   ResourceMark rm;
--- a/src/hotspot/cpu/arm/compiledIC_arm.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/arm/compiledIC_arm.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -137,10 +137,10 @@
 }
 
 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   // Reset stub.
   address stub = static_stub->addr();
   assert(stub != NULL, "stub not found");
+  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
   // Creation also verifies the object.
   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
   NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
--- a/src/hotspot/cpu/ppc/compiledIC_ppc.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/ppc/compiledIC_ppc.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -197,10 +197,10 @@
 }
 
 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   // Reset stub.
   address stub = static_stub->addr();
   assert(stub != NULL, "stub not found");
+  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
   // Creation also verifies the object.
   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + IC_pos_in_java_to_interp_stub);
   NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
--- a/src/hotspot/cpu/ppc/nativeInst_ppc.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/ppc/nativeInst_ppc.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -25,6 +25,7 @@
 
 #include "precompiled.hpp"
 #include "asm/macroAssembler.inline.hpp"
+#include "code/compiledIC.hpp"
 #include "memory/resourceArea.hpp"
 #include "nativeInst_ppc.hpp"
 #include "oops/compressedOops.inline.hpp"
@@ -94,7 +95,8 @@
 // during code generation, where no patching lock is needed.
 void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
   assert(!assert_lock ||
-         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()),
+         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()) ||
+         CompiledICLocker::is_safe(addr_at(0)),
          "concurrent code patching");
 
   ResourceMark rm;
--- a/src/hotspot/cpu/s390/compiledIC_s390.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/s390/compiledIC_s390.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -127,10 +127,10 @@
 }
 
 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   // Reset stub.
   address stub = static_stub->addr();
   assert(stub != NULL, "stub not found");
+  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
   // Creation also verifies the object.
   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + NativeCall::get_IC_pos_in_java_to_interp_stub());
   NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
--- a/src/hotspot/cpu/sparc/compiledIC_sparc.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/sparc/compiledIC_sparc.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -124,10 +124,10 @@
 }
 
 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   // Reset stub.
   address stub = static_stub->addr();
   assert(stub != NULL, "stub not found");
+  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
   // Creation also verifies the object.
   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
   NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
--- a/src/hotspot/cpu/sparc/nativeInst_sparc.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/sparc/nativeInst_sparc.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -25,6 +25,7 @@
 #include "precompiled.hpp"
 #include "asm/macroAssembler.inline.hpp"
 #include "code/codeCache.hpp"
+#include "code/compiledIC.hpp"
 #include "memory/resourceArea.hpp"
 #include "nativeInst_sparc.hpp"
 #include "oops/oop.inline.hpp"
@@ -189,8 +190,9 @@
 //
 // Used in the runtime linkage of calls; see class CompiledIC.
 void NativeCall::set_destination_mt_safe(address dest) {
-  assert(Patching_lock->is_locked() ||
-         SafepointSynchronize::is_at_safepoint(), "concurrent code patching");
+  assert((Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()) ||
+         CompiledICLocker::is_safe(addr_at(0)),
+         "concurrent code patching");
   // set_destination uses set_long_at which does the ICache::invalidate
   set_destination(dest);
 }
--- a/src/hotspot/cpu/x86/compiledIC_aot_x86_64.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/x86/compiledIC_aot_x86_64.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -88,10 +88,10 @@
 
 #ifdef NEVER_CALLED
 void CompiledPltStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   // Reset stub.
   address stub = static_stub->addr();
   assert(stub != NULL, "stub not found");
+  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
   // Creation also verifies the object.
   NativeLoadGot* method_loader = nativeLoadGot_at(stub);
   NativeGotJump* jump          = nativeGotJump_at(method_loader->next_instruction_address());
--- a/src/hotspot/cpu/x86/compiledIC_x86.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/x86/compiledIC_x86.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -177,7 +177,7 @@
 }
 
 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
+  assert(CompiledICLocker::is_safe(static_stub->addr()), "mt unsafe call");
   // Reset stub.
   address stub = static_stub->addr();
   assert(stub != NULL, "stub not found");
--- a/src/hotspot/cpu/x86/nativeInst_x86.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/cpu/x86/nativeInst_x86.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -24,6 +24,7 @@
 
 #include "precompiled.hpp"
 #include "asm/macroAssembler.hpp"
+#include "code/compiledIC.hpp"
 #include "memory/resourceArea.hpp"
 #include "nativeInst_x86.hpp"
 #include "oops/oop.inline.hpp"
@@ -257,8 +258,8 @@
   debug_only(verify());
   // Make sure patching code is locked.  No two threads can patch at the same
   // time but one may be executing this code.
-  assert(Patching_lock->is_locked() ||
-         SafepointSynchronize::is_at_safepoint(), "concurrent code patching");
+  assert(Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint() ||
+         CompiledICLocker::is_safe(instruction_address()), "concurrent code patching");
   // Both C1 and C2 should now be generating code which aligns the patched address
   // to be within a single cache line.
   bool is_aligned = ((uintptr_t)displacement_address() + 0) / cache_line_size ==
--- a/src/hotspot/share/code/compiledIC.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/share/code/compiledIC.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -41,18 +41,42 @@
 #include "runtime/icache.hpp"
 #include "runtime/sharedRuntime.hpp"
 #include "runtime/stubRoutines.hpp"
+#include "runtime/vmBehaviours.hpp"
 #include "utilities/events.hpp"
 
 
 // Every time a compiled IC is changed or its type is being accessed,
 // either the CompiledIC_lock must be set or we must be at a safe point.
 
+CompiledICLocker::CompiledICLocker(CompiledMethod* method)
+  : _method(method),
+    _behaviour(CompiledICProtectionBehaviour::current()),
+    _locked(_behaviour->lock(_method)) {
+}
+
+CompiledICLocker::~CompiledICLocker() {
+  if (_locked) {
+    _behaviour->unlock(_method);
+  }
+}
+
+bool CompiledICLocker::is_safe(CompiledMethod* method) {
+  return CompiledICProtectionBehaviour::current()->is_safe(method);
+}
+
+bool CompiledICLocker::is_safe(address code) {
+  CodeBlob* cb = CodeCache::find_blob_unsafe(code);
+  assert(cb != NULL && cb->is_compiled(), "must be compiled");
+  CompiledMethod* cm = cb->as_compiled_method();
+  return CompiledICProtectionBehaviour::current()->is_safe(cm);
+}
+
 //-----------------------------------------------------------------------------
 // Low-level access to an inline cache. Private, since they might not be
 // MT-safe to use.
 
 void* CompiledIC::cached_value() const {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   assert (!is_optimized(), "an optimized virtual call does not have a cached metadata");
 
   if (!is_in_transition_state()) {
@@ -69,7 +93,7 @@
 
 void CompiledIC::internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder) {
   assert(entry_point != NULL, "must set legal entry point");
-  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   assert (!is_optimized() || cache == NULL, "an optimized virtual call does not have a cached metadata");
   assert (cache == NULL || cache != (Metadata*)badOopVal, "invalid metadata");
 
@@ -101,11 +125,9 @@
   }
 
   {
-    MutexLockerEx pl(SafepointSynchronize::is_at_safepoint() ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
-#ifdef ASSERT
     CodeBlob* cb = CodeCache::find_blob_unsafe(_call->instruction_address());
+    MutexLockerEx pl(CompiledICLocker::is_safe(cb->as_compiled_method()) ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
     assert(cb != NULL && cb->is_compiled(), "must be compiled");
-#endif
     _call->set_destination_mt_safe(entry_point);
   }
 
@@ -130,23 +152,23 @@
 
 
 address CompiledIC::ic_destination() const {
- assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
- if (!is_in_transition_state()) {
-   return _call->destination();
- } else {
-   return InlineCacheBuffer::ic_destination_for((CompiledIC *)this);
- }
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
+  if (!is_in_transition_state()) {
+    return _call->destination();
+  } else {
+    return InlineCacheBuffer::ic_destination_for((CompiledIC *)this);
+  }
 }
 
 
 bool CompiledIC::is_in_transition_state() const {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   return InlineCacheBuffer::contains(_call->destination());;
 }
 
 
 bool CompiledIC::is_icholder_call() const {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   return !_is_optimized && is_icholder_entry(ic_destination());
 }
 
@@ -216,7 +238,7 @@
 }
 
 bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
-  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
   assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
 
@@ -270,7 +292,7 @@
 
 // true if destination is megamorphic stub
 bool CompiledIC::is_megamorphic() const {
-  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   assert(!is_optimized(), "an optimized call cannot be megamorphic");
 
   // Cannot rely on cached_value. It is either an interface or a method.
@@ -278,7 +300,7 @@
 }
 
 bool CompiledIC::is_call_to_compiled() const {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
 
   // Use unsafe, since an inline cache might point to a zombie method. However, the zombie
   // method is guaranteed to still exist, since we only remove methods after all inline caches
@@ -304,7 +326,7 @@
 
 
 bool CompiledIC::is_call_to_interpreted() const {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   // Call to interpreter if destination is either calling to a stub (if it
   // is optimized), or calling to an I2C blob
   bool is_call_to_interpreted = false;
@@ -329,7 +351,7 @@
 }
 
 void CompiledIC::set_to_clean(bool in_use) {
-  assert(SafepointSynchronize::is_at_safepoint() || CompiledIC_lock->is_locked() , "MT-unsafe call");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   if (TraceInlineCacheClearing || TraceICs) {
     tty->print_cr("IC@" INTPTR_FORMAT ": set to clean", p2i(instruction_address()));
     print();
@@ -339,7 +361,7 @@
 
   // A zombie transition will always be safe, since the metadata has already been set to NULL, so
   // we only need to patch the destination
-  bool safe_transition = _call->is_safe_for_patching() || !in_use || is_optimized() || SafepointSynchronize::is_at_safepoint();
+  bool safe_transition = _call->is_safe_for_patching() || !in_use || is_optimized() || CompiledICLocker::is_safe(_method);
 
   if (safe_transition) {
     // Kill any leftover stub we might have too
@@ -363,7 +385,7 @@
 }
 
 bool CompiledIC::is_clean() const {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   bool is_clean = false;
   address dest = ic_destination();
   is_clean = dest == _call->get_resolve_call_stub(is_optimized());
@@ -372,7 +394,7 @@
 }
 
 void CompiledIC::set_to_monomorphic(CompiledICInfo& info) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
+  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
   // Updating a cache to the wrong entry can cause bugs that are very hard
   // to track down - if cache entry gets invalid - we just clean it. In
   // this way it is always the same code path that is responsible for
@@ -555,14 +577,9 @@
 
 void CompiledStaticCall::set_to_clean(bool in_use) {
   // in_use is unused but needed to match template function in CompiledMethod
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
+  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
   // Reset call site
   MutexLockerEx pl(SafepointSynchronize::is_at_safepoint() ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
-#ifdef ASSERT
-  CodeBlob* cb = CodeCache::find_blob_unsafe(instruction_address());
-  assert(cb != NULL && cb->is_compiled(), "must be compiled");
-#endif
-
   set_destination_mt_safe(resolve_call_stub());
 
   // Do not reset stub here:  It is too expensive to call find_stub.
@@ -606,7 +623,7 @@
 }
 
 void CompiledStaticCall::set(const StaticCallInfo& info) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
+  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
   MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
   // Updating a cache to the wrong entry can cause bugs that are very hard
   // to track down - if cache entry gets invalid - we just clean it. In
--- a/src/hotspot/share/code/compiledIC.hpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/share/code/compiledIC.hpp	Fri Aug 24 14:15:43 2018 +0200
@@ -59,8 +59,22 @@
 // transition is made to a stub.
 //
 class CompiledIC;
+class CompiledICProtectionBehaviour;
+class CompiledMethod;
 class ICStub;
 
+class CompiledICLocker: public StackObj {
+  CompiledMethod* _method;
+  CompiledICProtectionBehaviour* _behaviour;
+  bool _locked;
+
+public:
+  CompiledICLocker(CompiledMethod* method);
+  ~CompiledICLocker();
+  static bool is_safe(CompiledMethod* method);
+  static bool is_safe(address code);
+};
+
 class CompiledICInfo : public StackObj {
  private:
   address _entry;              // entry point for call
--- a/src/hotspot/share/code/compiledMethod.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/share/code/compiledMethod.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -352,7 +352,7 @@
 
 // Clear ICStubs of all compiled ICs
 void CompiledMethod::clear_ic_stubs() {
-  assert_locked_or_safepoint(CompiledIC_lock);
+  assert(CompiledICLocker::is_safe(this), "mt unsafe call");
   ResourceMark rm;
   RelocIterator iter(this);
   while(iter.next()) {
@@ -526,7 +526,7 @@
 // Called to clean up after class unloading for live nmethods and from the sweeper
 // for all methods.
 void CompiledMethod::cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all) {
-  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
+  assert(CompiledICLocker::is_safe(this), "mt unsafe call");
   ResourceMark rm;
 
   // Find all calls in an nmethod and clear the ones that point to non-entrant,
--- a/src/hotspot/share/code/icBuffer.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/share/code/icBuffer.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -170,8 +170,9 @@
 
 
 void InlineCacheBuffer::create_transition_stub(CompiledIC *ic, void* cached_value, address entry) {
+  MutexLockerEx ml(CompiledIC_lock->owned_by_self() ? NULL : CompiledIC_lock);
   assert(!SafepointSynchronize::is_at_safepoint(), "should not be called during a safepoint");
-  assert (CompiledIC_lock->is_locked(), "");
+  assert(CompiledICLocker::is_safe(ic->instruction_address()), "mt unsafe call");
   if (TraceICBuffer) {
     tty->print_cr("  create transition stub for " INTPTR_FORMAT " destination " INTPTR_FORMAT " cached value " INTPTR_FORMAT,
                   p2i(ic->instruction_address()), p2i(entry), p2i(cached_value));
@@ -224,7 +225,9 @@
 // not safe to free them until them since they might be visible to
 // another thread.
 void InlineCacheBuffer::queue_for_release(CompiledICHolder* icholder) {
-  MutexLockerEx mex(InlineCacheBuffer_lock);
+  MutexLockerEx mex1((CompiledIC_lock->owned_by_self() ||
+                      SafepointSynchronize::is_at_safepoint()) ? NULL : CompiledIC_lock);
+  MutexLockerEx mex2(InlineCacheBuffer_lock);
   icholder->set_next(_pending_released);
   _pending_released = icholder;
   _pending_count++;
--- a/src/hotspot/share/code/nmethod.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/share/code/nmethod.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -954,7 +954,7 @@
 
 
 void nmethod::verify_clean_inline_caches() {
-  assert_locked_or_safepoint(CompiledIC_lock);
+  assert(CompiledICLocker::is_safe(this), "mt unsafe call");
 
   ResourceMark rm;
   RelocIterator iter(this, oops_reloc_begin());
@@ -2052,14 +2052,11 @@
 void nmethod::verify_interrupt_point(address call_site) {
   // Verify IC only when nmethod installation is finished.
   if (!is_not_installed()) {
-    Thread *cur = Thread::current();
-    if (CompiledIC_lock->owner() == cur ||
-        ((cur->is_VM_thread() || cur->is_ConcurrentGC_thread()) &&
-         SafepointSynchronize::is_at_safepoint())) {
+    if (CompiledICLocker::is_safe(this)) {
       CompiledIC_at(this, call_site);
       CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
     } else {
-      MutexLocker ml_verify (CompiledIC_lock);
+      CompiledICLocker ml_verify(this);
       CompiledIC_at(this, call_site);
     }
   }
@@ -2756,7 +2753,7 @@
     switch (iter.type()) {
     case relocInfo::virtual_call_type:
     case relocInfo::opt_virtual_call_type: {
-      VerifyMutexLocker mc(CompiledIC_lock);
+      CompiledICLocker ml_verify(this);
       CompiledIC_at(&iter)->print();
       break;
     }
--- a/src/hotspot/share/memory/universe.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/share/memory/universe.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -60,9 +60,10 @@
 #include "prims/resolvedMethodTable.hpp"
 #include "runtime/arguments.hpp"
 #include "runtime/atomic.hpp"
+#include "runtime/deoptimization.hpp"
 #include "runtime/flags/flagSetting.hpp"
 #include "runtime/flags/jvmFlagConstraintList.hpp"
-#include "runtime/deoptimization.hpp"
+#include "runtime/vmBehaviours.hpp"
 #include "runtime/handles.inline.hpp"
 #include "runtime/init.hpp"
 #include "runtime/java.hpp"
@@ -640,6 +641,10 @@
   return (void*)_non_oop_bits;
 }
 
+static void initialize_global_behaviours() {
+  CompiledICProtectionBehaviour::set_current(new DefaultICProtectionBehaviour());
+}
+
 jint universe_init() {
   assert(!Universe::_fully_initialized, "called after initialize_vtables");
   guarantee(1 << LogHeapWordSize == sizeof(HeapWord),
@@ -652,6 +657,8 @@
 
   JavaClasses::compute_hard_coded_offsets();
 
+  initialize_global_behaviours();
+
   jint status = Universe::initialize_heap();
   if (status != JNI_OK) {
     return status;
--- a/src/hotspot/share/runtime/sharedRuntime.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/share/runtime/sharedRuntime.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -1351,7 +1351,7 @@
 
   // grab lock, check for deoptimization and potentially patch caller
   {
-    MutexLocker ml_patch(CompiledIC_lock);
+    CompiledICLocker ml(caller_nm);
 
     // Lock blocks for safepoint during which both nmethods can change state.
 
@@ -1382,7 +1382,7 @@
       }
     }
 
-  } // unlock CompiledIC_lock
+  } // unlock CompiledICLocker
 
   return callee_method;
 }
@@ -1585,11 +1585,13 @@
   JvmtiDynamicCodeEventCollector event_collector;
 
   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
-  { MutexLocker ml_patch (CompiledIC_lock);
+  {
     RegisterMap reg_map(thread, false);
     frame caller_frame = thread->last_frame().sender(&reg_map);
     CodeBlob* cb = caller_frame.cb();
     CompiledMethod* caller_nm = cb->as_compiled_method_or_null();
+    CompiledICLocker ml(caller_nm);
+
     if (cb->is_compiled()) {
       CompiledIC* inline_cache = CompiledIC_before(((CompiledMethod*)cb), caller_frame.pc());
       bool should_be_mono = false;
@@ -1647,7 +1649,7 @@
     } else {
       fatal("Unimplemented");
     }
-  } // Release CompiledIC_lock
+  } // Release CompiledICLocker
 
   return callee_method;
 }
@@ -1731,8 +1733,7 @@
       // to a wrong method). It should not be performance critical, since the
       // resolve is only done once.
 
-      bool is_nmethod = caller_nm->is_nmethod();
-      MutexLocker ml(CompiledIC_lock);
+      CompiledICLocker ml(caller_nm);
       if (is_static_call) {
         CompiledStaticCall* ssc = caller_nm->compiledStaticCall_at(call_addr);
         ssc->set_to_clean();
--- a/src/hotspot/share/runtime/sweeper.cpp	Thu Aug 09 14:10:00 2018 +0200
+++ b/src/hotspot/share/runtime/sweeper.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -673,8 +673,8 @@
   // Clean up any CompiledICHolders
   {
     ResourceMark rm;
-    MutexLocker ml_patch(CompiledIC_lock);
     RelocIterator iter(nm);
+    MutexLockerEx mex(CompiledIC_lock);
     while (iter.next()) {
       if (iter.type() == relocInfo::virtual_call_type) {
         CompiledIC::cleanup_call_site(iter.virtual_call_reloc(), nm);
@@ -701,7 +701,7 @@
     // But still remember to clean-up inline caches for alive nmethods
     if (cm->is_alive()) {
       // Clean inline caches that point to zombie/non-entrant/unloaded nmethods
-      MutexLocker cl(CompiledIC_lock);
+      CompiledICLocker ml(cm);
       cm->cleanup_inline_caches(false);
       SWEEP(cm);
     }
@@ -723,7 +723,7 @@
       // Clear ICStubs to prevent back patching stubs of zombie or flushed
       // nmethods during the next safepoint (see ICStub::finalize).
       {
-        MutexLocker cl(CompiledIC_lock);
+        CompiledICLocker ml(cm);
         cm->clear_ic_stubs();
       }
       // Code cache state change is tracked in make_zombie()
@@ -747,7 +747,7 @@
       }
     } else {
       // Still alive, clean up its inline caches
-      MutexLocker cl(CompiledIC_lock);
+      CompiledICLocker ml(cm);
       cm->cleanup_inline_caches(false);
       SWEEP(cm);
     }
@@ -757,7 +757,7 @@
     {
       // Clean ICs of unloaded nmethods as well because they may reference other
       // unloaded nmethods that may be flushed earlier in the sweeper cycle.
-      MutexLocker cl(CompiledIC_lock);
+      CompiledICLocker ml(cm);
       cm->cleanup_inline_caches(false);
     }
     if (cm->is_osr_method()) {
@@ -778,7 +778,7 @@
       possibly_flush((nmethod*)cm);
     }
     // Clean inline caches that point to zombie/non-entrant/unloaded nmethods
-    MutexLocker cl(CompiledIC_lock);
+    CompiledICLocker ml(cm);
     cm->cleanup_inline_caches(false);
     SWEEP(cm);
   }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/hotspot/share/runtime/vmBehaviours.cpp	Fri Aug 24 14:15:43 2018 +0200
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#include "precompiled.hpp"
+#include "runtime/vmBehaviours.hpp"
+#include "runtime/mutexLocker.hpp"
+#include "runtime/safepoint.hpp"
+
+CompiledICProtectionBehaviour* CompiledICProtectionBehaviour::_current = NULL;
+
+bool DefaultICProtectionBehaviour::lock(CompiledMethod* method) {
+  if (CompiledIC_lock->owned_by_self()) {
+    return false;
+  }
+  CompiledIC_lock->lock();
+  return true;
+}
+
+void DefaultICProtectionBehaviour::unlock(CompiledMethod* method) {
+  CompiledIC_lock->unlock();
+}
+
+bool DefaultICProtectionBehaviour::is_safe(CompiledMethod* method) {
+  return SafepointSynchronize::is_at_safepoint() || CompiledIC_lock->owned_by_self();
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/hotspot/share/runtime/vmBehaviours.hpp	Fri Aug 24 14:15:43 2018 +0200
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_RUNTIME_VMBEHAVIOURS_HPP
+#define SHARE_RUNTIME_VMBEHAVIOURS_HPP
+
+#include "memory/allocation.hpp"
+
+class CompiledMethod;
+
+class CompiledICProtectionBehaviour {
+  static CompiledICProtectionBehaviour* _current;
+
+public:
+  virtual bool lock(CompiledMethod* method) = 0;
+  virtual void unlock(CompiledMethod* method) = 0;
+  virtual bool is_safe(CompiledMethod* method) = 0;
+
+  static CompiledICProtectionBehaviour* current() { return _current; }
+  static void set_current(CompiledICProtectionBehaviour* current) { _current = current; }
+};
+
+class DefaultICProtectionBehaviour: public CompiledICProtectionBehaviour, public CHeapObj<mtInternal> {
+  virtual bool lock(CompiledMethod* method);
+  virtual void unlock(CompiledMethod* method);
+  virtual bool is_safe(CompiledMethod* method);
+};
+
+#endif // SHARE_RUNTIME_VMBEHAVIOURS_HPP