changeset 51801:09e8e51c948a

8210713: Let CollectedHeap::ensure_parsability() take care of TLAB statistics gathering
Reviewed-by: eosterlund, sjohanss
author pliden
date Wed, 19 Sep 2018 14:09:11 +0200
parents bccd9966f1ed
children 15094d12a632
files src/hotspot/share/gc/g1/g1CollectedHeap.cpp src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp src/hotspot/share/gc/parallel/psMarkSweep.cpp src/hotspot/share/gc/parallel/psParallelCompact.cpp src/hotspot/share/gc/parallel/psScavenge.cpp src/hotspot/share/gc/shared/collectedHeap.cpp src/hotspot/share/gc/shared/collectedHeap.hpp src/hotspot/share/gc/shared/genCollectedHeap.cpp src/hotspot/share/gc/z/zCollectedHeap.hpp src/hotspot/share/gc/z/zObjectAllocator.cpp
diffstat 11 files changed, 11 insertions(+), 30 deletions(-)
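
The patch folds TLAB statistics gathering into CollectedHeap::ensure_parsability(), so each GC prologue drops its separate accumulate_statistics_all_tlabs() call. As a rough illustration of that call-site simplification, here is a minimal, self-contained C++ sketch; the Mock* types and main() are purely hypothetical stand-ins for the HotSpot classes and are not part of the changeset:

// Simplified, self-contained model of the refactoring in this changeset; it is
// NOT the actual HotSpot code. The real types (CollectedHeap,
// ThreadLocalAllocBuffer, JavaThread, BarrierSet) live under src/hotspot/share
// and are only mimicked here to show the call-site change.
#include <cstdio>

static bool UseTLAB = true;   // stands in for the -XX:+UseTLAB flag

struct MockTlab {
  void make_parsable(bool retire) {
    std::printf("TLAB made parsable (retire=%d)\n", retire);
  }
};

struct MockThread {
  MockTlab tlab;
};

struct MockHeap {
  MockThread threads[2];

  // Before JDK-8210713: every GC prologue had to pair this call with
  // ensure_parsability(true).
  static void accumulate_statistics_all_tlabs() {
    std::printf("accumulating TLAB statistics before GC\n");
  }

  // After JDK-8210713: ensure_parsability() gathers the statistics itself when
  // it is about to retire TLABs, so the separate call disappears from callers.
  void ensure_parsability(bool retire_tlabs) {
    if (UseTLAB && retire_tlabs) {
      accumulate_statistics_all_tlabs();   // folded in here by the patch
    }
    for (MockThread& t : threads) {
      if (UseTLAB) {
        t.tlab.make_parsable(retire_tlabs);
      }
    }
  }
};

int main() {
  MockHeap heap;
  // Old call-site pattern (psMarkSweep.cpp, psScavenge.cpp, genCollectedHeap.cpp, ...):
  //   heap.accumulate_statistics_all_tlabs();
  //   heap.ensure_parsability(true);
  // New call-site pattern after this changeset:
  heap.ensure_parsability(true /* retire_tlabs */);
  return 0;
}
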
--- a/src/hotspot/share/gc/g1/g1CollectedHeap.cpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/g1/g1CollectedHeap.cpp	Wed Sep 19 14:09:11 2018 +0200
@@ -2484,7 +2484,6 @@
 
   // Fill TLAB's and such
   double start = os::elapsedTime();
-  accumulate_statistics_all_tlabs();
   ensure_parsability(true);
   g1_policy()->phase_times()->record_prepare_tlab_time_ms((os::elapsedTime() - start) * 1000.0);
 }
--- a/src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp	Wed Sep 19 14:09:11 2018 +0200
@@ -488,10 +488,6 @@
   return result;
 }
 
-void ParallelScavengeHeap::accumulate_statistics_all_tlabs() {
-  CollectedHeap::accumulate_statistics_all_tlabs();
-}
-
 void ParallelScavengeHeap::resize_all_tlabs() {
   CollectedHeap::resize_all_tlabs();
 }
--- a/src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp	Wed Sep 19 14:09:11 2018 +0200
@@ -206,7 +206,6 @@
   HeapWord** end_addr() const { return !UseNUMA ? young_gen()->end_addr() : (HeapWord**)-1; }
 
   void ensure_parsability(bool retire_tlabs);
-  void accumulate_statistics_all_tlabs();
   void resize_all_tlabs();
 
   bool supports_tlab_allocation() const { return true; }
--- a/src/hotspot/share/gc/parallel/psMarkSweep.cpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/parallel/psMarkSweep.cpp	Wed Sep 19 14:09:11 2018 +0200
@@ -150,7 +150,6 @@
   heap->trace_heap_before_gc(_gc_tracer);
 
   // Fill in TLABs
-  heap->accumulate_statistics_all_tlabs();
   heap->ensure_parsability(true);  // retire TLABs
 
   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
--- a/src/hotspot/share/gc/parallel/psParallelCompact.cpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/parallel/psParallelCompact.cpp	Wed Sep 19 14:09:11 2018 +0200
@@ -972,7 +972,6 @@
   heap->trace_heap_before_gc(&_gc_tracer);
 
   // Fill in TLABs
-  heap->accumulate_statistics_all_tlabs();
   heap->ensure_parsability(true);  // retire TLABs
 
   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
--- a/src/hotspot/share/gc/parallel/psScavenge.cpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/parallel/psScavenge.cpp	Wed Sep 19 14:09:11 2018 +0200
@@ -279,7 +279,6 @@
   assert(!AlwaysTenure || _tenuring_threshold == 0, "Sanity");
 
   // Fill in TLABs
-  heap->accumulate_statistics_all_tlabs();
   heap->ensure_parsability(true);  // retire TLABs
 
   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
--- a/src/hotspot/share/gc/shared/collectedHeap.cpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/shared/collectedHeap.cpp	Wed Sep 19 14:09:11 2018 +0200
@@ -476,35 +476,31 @@
   // started allocating (nothing much to verify) or we have
   // started allocating but are now a full-fledged JavaThread
   // (and have thus made our TLAB's) available for filling.
-  assert(SafepointSynchronize::is_at_safepoint() ||
-         !is_init_completed(),
+  assert(SafepointSynchronize::is_at_safepoint() || !is_init_completed(),
          "Should only be called at a safepoint or at start-up"
          " otherwise concurrent mutator activity may make heap "
          " unparsable again");
-  const bool use_tlab = UseTLAB;
+
+  if (UseTLAB && retire_tlabs) {
+    // Accumulate statistics before retiring
+    ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
+  }
+
   // The main thread starts allocating via a TLAB even before it
   // has added itself to the threads list at vm boot-up.
   JavaThreadIteratorWithHandle jtiwh;
-  assert(!use_tlab || jtiwh.length() > 0,
+  assert(jtiwh.length() > 0,
          "Attempt to fill tlabs before main thread has been added"
          " to threads list is doomed to failure!");
   BarrierSet *bs = BarrierSet::barrier_set();
   for (; JavaThread *thread = jtiwh.next(); ) {
-     if (use_tlab) thread->tlab().make_parsable(retire_tlabs);
+     if (UseTLAB) {
+       thread->tlab().make_parsable(retire_tlabs);
+     }
      bs->make_parsable(thread);
   }
 }
 
-void CollectedHeap::accumulate_statistics_all_tlabs() {
-  if (UseTLAB) {
-    assert(SafepointSynchronize::is_at_safepoint() ||
-         !is_init_completed(),
-         "should only accumulate statistics on tlabs at safepoint");
-
-    ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
-  }
-}
-
 void CollectedHeap::resize_all_tlabs() {
   assert(SafepointSynchronize::is_at_safepoint() || !is_init_completed(),
          "Should only resize tlabs at safepoint");
--- a/src/hotspot/share/gc/shared/collectedHeap.hpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/shared/collectedHeap.hpp	Wed Sep 19 14:09:11 2018 +0200
@@ -137,9 +137,6 @@
                                       size_t requested_size,
                                       size_t* actual_size);
 
-  // Accumulate statistics on all tlabs.
-  virtual void accumulate_statistics_all_tlabs();
-
   // Reinitialize tlabs before resuming mutators.
   virtual void resize_all_tlabs();
 
--- a/src/hotspot/share/gc/shared/genCollectedHeap.cpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/shared/genCollectedHeap.cpp	Wed Sep 19 14:09:11 2018 +0200
@@ -1293,7 +1293,6 @@
   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
 
   // Fill TLAB's and such
-  CollectedHeap::accumulate_statistics_all_tlabs();
   ensure_parsability(true);   // retire TLABs
 
   // Walk generations
--- a/src/hotspot/share/gc/z/zCollectedHeap.hpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/z/zCollectedHeap.hpp	Wed Sep 19 14:09:11 2018 +0200
@@ -57,7 +57,6 @@
   static ZCollectedHeap* heap();
 
   using CollectedHeap::ensure_parsability;
-  using CollectedHeap::accumulate_statistics_all_tlabs;
   using CollectedHeap::resize_all_tlabs;
 
   ZCollectedHeap(ZCollectorPolicy* policy);
--- a/src/hotspot/share/gc/z/zObjectAllocator.cpp	Wed Sep 12 13:09:51 2018 +0200
+++ b/src/hotspot/share/gc/z/zObjectAllocator.cpp	Wed Sep 19 14:09:11 2018 +0200
@@ -300,7 +300,6 @@
   // Retire TLABs
   if (UseTLAB) {
     ZCollectedHeap* heap = ZCollectedHeap::heap();
-    heap->accumulate_statistics_all_tlabs();
     heap->ensure_parsability(true /* retire_tlabs */);
     heap->resize_all_tlabs();
   }