annotate src/share/vm/prims/jvmtiTagMap.cpp @ 23286:dd9cc155639c

Merge with jdk8u66-b17
author Gilles Duboscq <gilles.m.duboscq@oracle.com>
date Thu, 07 Jan 2016 17:28:46 +0100
parents 7848fc12602b 9686a796c829
children
rev   line source
duke@0 1 /*
minqi@10265 2 * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
duke@0 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@0 4 *
duke@0 5 * This code is free software; you can redistribute it and/or modify it
duke@0 6 * under the terms of the GNU General Public License version 2 only, as
duke@0 7 * published by the Free Software Foundation.
duke@0 8 *
duke@0 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@0 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@0 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@0 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@0 13 * accompanied this code).
duke@0 14 *
duke@0 15 * You should have received a copy of the GNU General Public License version
duke@0 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@0 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@0 18 *
trims@1552 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1552 20 * or visit www.oracle.com if you need additional information or have any
trims@1552 21 * questions.
duke@0 22 *
duke@0 23 */
duke@0 24
stefank@1972 25 #include "precompiled.hpp"
stefank@1972 26 #include "classfile/symbolTable.hpp"
stefank@1972 27 #include "classfile/systemDictionary.hpp"
stefank@1972 28 #include "classfile/vmSymbols.hpp"
stefank@1972 29 #include "jvmtifiles/jvmtiEnv.hpp"
never@2376 30 #include "oops/instanceMirrorKlass.hpp"
stefank@1972 31 #include "oops/objArrayKlass.hpp"
stefank@1972 32 #include "oops/oop.inline2.hpp"
stefank@1972 33 #include "prims/jvmtiEventController.hpp"
stefank@1972 34 #include "prims/jvmtiEventController.inline.hpp"
stefank@1972 35 #include "prims/jvmtiExport.hpp"
stefank@1972 36 #include "prims/jvmtiImpl.hpp"
stefank@1972 37 #include "prims/jvmtiTagMap.hpp"
stefank@1972 38 #include "runtime/biasedLocking.hpp"
stefank@1972 39 #include "runtime/javaCalls.hpp"
stefank@1972 40 #include "runtime/jniHandles.hpp"
stefank@1972 41 #include "runtime/mutex.hpp"
stefank@1972 42 #include "runtime/mutexLocker.hpp"
stefank@1972 43 #include "runtime/reflectionUtils.hpp"
stefank@1972 44 #include "runtime/vframe.hpp"
stefank@1972 45 #include "runtime/vmThread.hpp"
stefank@1972 46 #include "runtime/vm_operations.hpp"
stefank@1972 47 #include "services/serviceUtil.hpp"
jprovino@8001 48 #include "utilities/macros.hpp"
jprovino@8001 49 #if INCLUDE_ALL_GCS
stefank@1972 50 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
jprovino@8001 51 #endif // INCLUDE_ALL_GCS
duke@0 52
duke@0 53 // JvmtiTagHashmapEntry
duke@0 54 //
kamg@2125 55 // Each entry encapsulates a reference to the tagged object
duke@0 56 // and the tag value. In addition an entry includes a next pointer which
duke@0 57 // is used to chain entries together.
duke@0 58
zgu@6197 59 class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
duke@0 60 private:
duke@0 61 friend class JvmtiTagMap;
duke@0 62
kamg@2125 63 oop _object; // tagged object
duke@0 64 jlong _tag; // the tag
duke@0 65 JvmtiTagHashmapEntry* _next; // next on the list
duke@0 66
kamg@2125 67 inline void init(oop object, jlong tag) {
duke@0 68 _object = object;
duke@0 69 _tag = tag;
duke@0 70 _next = NULL;
duke@0 71 }
duke@0 72
duke@0 73 // constructor
kamg@2125 74 JvmtiTagHashmapEntry(oop object, jlong tag) { init(object, tag); }
duke@0 75
duke@0 76 public:
duke@0 77
duke@0 78 // accessor methods
kamg@2125 79 inline oop object() const { return _object; }
kamg@2125 80 inline oop* object_addr() { return &_object; }
kamg@2125 81 inline jlong tag() const { return _tag; }
duke@0 82
duke@0 83 inline void set_tag(jlong tag) {
duke@0 84 assert(tag != 0, "can't be zero");
duke@0 85 _tag = tag;
duke@0 86 }
duke@0 87
duke@0 88 inline JvmtiTagHashmapEntry* next() const { return _next; }
duke@0 89 inline void set_next(JvmtiTagHashmapEntry* next) { _next = next; }
duke@0 90 };
duke@0 91
duke@0 92
duke@0 93 // JvmtiTagHashmap
duke@0 94 //
duke@0 95 // A hashmap is essentially a table of pointers to entries. Entries
duke@0 96 // are hashed to a location, or position in the table, and then
duke@0 97 // chained from that location. The "key" for hashing is the address of
kamg@2125 98 // the object, or oop. The "value" is the tag value.
duke@0 99 //
duke@0 100 // A hashmap maintains a count of the number of entries in the hashmap
duke@0 101 // and resizes if the number of entries exceeds a given threshold.
duke@0 102 // The threshold is specified as a percentage of the size - for
duke@0 103 // example a threshold of 0.75 will trigger the hashmap to resize
duke@0 104 // if the number of entries is >75% of table size.
duke@0 105 //
duke@0 106 // A hashmap provides functions for adding, removing, and finding
duke@0 107 // entries. It also provides a function to iterate over all entries
duke@0 108 // in the hashmap.
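// Worked example (illustrative, not part of the original sources): with the
// defaults used by init() below - the smallest table size of 4801 buckets and
// a load factor of 4.0 - the resize threshold is (int)(4.0 * 4801) = 19204,
// so the table grows to the next size (76831 buckets) once more than 19204
// objects are tagged.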
duke@0 109
zgu@6197 110 class JvmtiTagHashmap : public CHeapObj<mtInternal> {
duke@0 111 private:
duke@0 112 friend class JvmtiTagMap;
duke@0 113
duke@0 114 enum {
duke@0 115 small_trace_threshold = 10000, // threshold for tracing
duke@0 116 medium_trace_threshold = 100000,
duke@0 117 large_trace_threshold = 1000000,
duke@0 118 initial_trace_threshold = small_trace_threshold
duke@0 119 };
duke@0 120
duke@0 121 static int _sizes[]; // array of possible hashmap sizes
duke@0 122 int _size; // actual size of the table
duke@0 123 int _size_index; // index into size table
duke@0 124
duke@0 125 int _entry_count; // number of entries in the hashmap
duke@0 126
duke@0 127 float _load_factor; // load factor as a % of the size
duke@0 128 int _resize_threshold; // computed threshold to trigger resizing.
duke@0 129 bool _resizing_enabled; // indicates if hashmap can resize
duke@0 130
duke@0 131 int _trace_threshold; // threshold for trace messages
duke@0 132
duke@0 133 JvmtiTagHashmapEntry** _table; // the table of entries.
duke@0 134
duke@0 135 // private accessors
duke@0 136 int resize_threshold() const { return _resize_threshold; }
duke@0 137 int trace_threshold() const { return _trace_threshold; }
duke@0 138
duke@0 139 // initialize the hashmap
duke@0 140 void init(int size_index=0, float load_factor=4.0f) {
duke@0 141 int initial_size = _sizes[size_index];
duke@0 142 _size_index = size_index;
duke@0 143 _size = initial_size;
duke@0 144 _entry_count = 0;
duke@0 145 if (TraceJVMTIObjectTagging) {
duke@0 146 _trace_threshold = initial_trace_threshold;
duke@0 147 } else {
duke@0 148 _trace_threshold = -1;
duke@0 149 }
duke@0 150 _load_factor = load_factor;
duke@0 151 _resize_threshold = (int)(_load_factor * _size);
duke@0 152 _resizing_enabled = true;
duke@0 153 size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
zgu@6197 154 _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
duke@0 155 if (_table == NULL) {
ccheung@10161 156 vm_exit_out_of_memory(s, OOM_MALLOC_ERROR,
ccheung@10161 157 "unable to allocate initial hashtable for jvmti object tags");
duke@0 158 }
duke@0 159 for (int i=0; i<initial_size; i++) {
duke@0 160 _table[i] = NULL;
duke@0 161 }
duke@0 162 }
duke@0 163
duke@0 164 // hash a given key (oop) with the specified size
duke@0 165 static unsigned int hash(oop key, int size) {
duke@0 166 // shift right to get better distribution (as these bits will be zero
duke@0 167 // with aligned addresses)
hseigel@12316 168 unsigned int addr = (unsigned int)(cast_from_oop<intptr_t>(key));
duke@0 169 #ifdef _LP64
duke@0 170 return (addr >> 3) % size;
duke@0 171 #else
duke@0 172 return (addr >> 2) % size;
duke@0 173 #endif
duke@0 174 }
duke@0 175
duke@0 176 // hash a given key (oop)
duke@0 177 unsigned int hash(oop key) {
duke@0 178 return hash(key, _size);
duke@0 179 }
duke@0 180
duke@0 181 // resize the hashmap - allocates a large table and re-hashes
duke@0 182 // all entries into the new table.
duke@0 183 void resize() {
duke@0 184 int new_size_index = _size_index+1;
duke@0 185 int new_size = _sizes[new_size_index];
duke@0 186 if (new_size < 0) {
duke@0 187 // hashmap already at maximum capacity
duke@0 188 return;
duke@0 189 }
duke@0 190
duke@0 191 // allocate new table
duke@0 192 size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
zgu@6197 193 JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
duke@0 194 if (new_table == NULL) {
duke@0 195 warning("unable to allocate larger hashtable for jvmti object tags");
duke@0 196 set_resizing_enabled(false);
duke@0 197 return;
duke@0 198 }
duke@0 199
duke@0 200 // initialize new table
duke@0 201 int i;
duke@0 202 for (i=0; i<new_size; i++) {
duke@0 203 new_table[i] = NULL;
duke@0 204 }
duke@0 205
duke@0 206 // rehash all entries into the new table
duke@0 207 for (i=0; i<_size; i++) {
duke@0 208 JvmtiTagHashmapEntry* entry = _table[i];
duke@0 209 while (entry != NULL) {
duke@0 210 JvmtiTagHashmapEntry* next = entry->next();
kamg@2125 211 oop key = entry->object();
duke@0 212 assert(key != NULL, "jni weak reference cleared!!");
duke@0 213 unsigned int h = hash(key, new_size);
duke@0 214 JvmtiTagHashmapEntry* anchor = new_table[h];
duke@0 215 if (anchor == NULL) {
duke@0 216 new_table[h] = entry;
duke@0 217 entry->set_next(NULL);
duke@0 218 } else {
duke@0 219 entry->set_next(anchor);
duke@0 220 new_table[h] = entry;
duke@0 221 }
duke@0 222 entry = next;
duke@0 223 }
duke@0 224 }
duke@0 225
duke@0 226 // free old table and update settings.
duke@0 227 os::free((void*)_table);
duke@0 228 _table = new_table;
duke@0 229 _size_index = new_size_index;
duke@0 230 _size = new_size;
duke@0 231
duke@0 232 // compute new resize threshold
duke@0 233 _resize_threshold = (int)(_load_factor * _size);
duke@0 234 }
duke@0 235
duke@0 236
duke@0 237 // internal remove function - remove an entry at a given position in the
duke@0 238 // table.
duke@0 239 inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
duke@0 240 assert(pos >= 0 && pos < _size, "out of range");
duke@0 241 if (prev == NULL) {
duke@0 242 _table[pos] = entry->next();
duke@0 243 } else {
duke@0 244 prev->set_next(entry->next());
duke@0 245 }
duke@0 246 assert(_entry_count > 0, "checking");
duke@0 247 _entry_count--;
duke@0 248 }
duke@0 249
duke@0 250 // resizing switch
duke@0 251 bool is_resizing_enabled() const { return _resizing_enabled; }
duke@0 252 void set_resizing_enabled(bool enable) { _resizing_enabled = enable; }
duke@0 253
duke@0 254 // debugging
duke@0 255 void print_memory_usage();
duke@0 256 void compute_next_trace_threshold();
duke@0 257
duke@0 258 public:
duke@0 259
duke@0 260 // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
duke@0 261 // The preferred size is rounded up to the nearest actual size.
duke@0 262 JvmtiTagHashmap(int size, float load_factor=0.0f) {
duke@0 263 int i=0;
duke@0 264 while (_sizes[i] < size) {
duke@0 265 if (_sizes[i] < 0) {
duke@0 266 assert(i > 0, "sanity check");
duke@0 267 i--;
duke@0 268 break;
duke@0 269 }
duke@0 270 i++;
duke@0 271 }
duke@0 272
duke@0 273 // if a load factor is specified then use it, otherwise use default
duke@0 274 if (load_factor > 0.01f) {
duke@0 275 init(i, load_factor);
duke@0 276 } else {
duke@0 277 init(i);
duke@0 278 }
duke@0 279 }
duke@0 280
duke@0 281 // create a JvmtiTagHashmap with default settings
duke@0 282 JvmtiTagHashmap() {
duke@0 283 init();
duke@0 284 }
duke@0 285
duke@0 286 // release table when JvmtiTagHashmap is destroyed
duke@0 287 ~JvmtiTagHashmap() {
duke@0 288 if (_table != NULL) {
duke@0 289 os::free((void*)_table);
duke@0 290 _table = NULL;
duke@0 291 }
duke@0 292 }
duke@0 293
duke@0 294 // accessors
duke@0 295 int size() const { return _size; }
duke@0 296 JvmtiTagHashmapEntry** table() const { return _table; }
duke@0 297 int entry_count() const { return _entry_count; }
duke@0 298
duke@0 299 // find an entry in the hashmap, returns NULL if not found.
duke@0 300 inline JvmtiTagHashmapEntry* find(oop key) {
duke@0 301 unsigned int h = hash(key);
duke@0 302 JvmtiTagHashmapEntry* entry = _table[h];
duke@0 303 while (entry != NULL) {
kamg@2125 304 if (entry->object() == key) {
kamg@2125 305 return entry;
duke@0 306 }
duke@0 307 entry = entry->next();
duke@0 308 }
kamg@2125 309 return NULL;
duke@0 310 }
duke@0 311
duke@0 312
duke@0 313 // add a new entry to hashmap
duke@0 314 inline void add(oop key, JvmtiTagHashmapEntry* entry) {
duke@0 315 assert(key != NULL, "checking");
duke@0 316 assert(find(key) == NULL, "duplicate detected");
duke@0 317 unsigned int h = hash(key);
duke@0 318 JvmtiTagHashmapEntry* anchor = _table[h];
duke@0 319 if (anchor == NULL) {
duke@0 320 _table[h] = entry;
duke@0 321 entry->set_next(NULL);
duke@0 322 } else {
duke@0 323 entry->set_next(anchor);
duke@0 324 _table[h] = entry;
duke@0 325 }
duke@0 326
duke@0 327 _entry_count++;
duke@0 328 if (trace_threshold() > 0 && entry_count() >= trace_threshold()) {
duke@0 329 assert(TraceJVMTIObjectTagging, "should only get here when tracing");
duke@0 330 print_memory_usage();
duke@0 331 compute_next_trace_threshold();
duke@0 332 }
duke@0 333
duke@0 334 // if the number of entries exceed the threshold then resize
duke@0 335 if (entry_count() > resize_threshold() && is_resizing_enabled()) {
duke@0 336 resize();
duke@0 337 }
duke@0 338 }
duke@0 339
duke@0 340 // remove an entry with the given key.
duke@0 341 inline JvmtiTagHashmapEntry* remove(oop key) {
duke@0 342 unsigned int h = hash(key);
duke@0 343 JvmtiTagHashmapEntry* entry = _table[h];
duke@0 344 JvmtiTagHashmapEntry* prev = NULL;
duke@0 345 while (entry != NULL) {
kamg@2125 346 if (key == entry->object()) {
duke@0 347 break;
duke@0 348 }
duke@0 349 prev = entry;
duke@0 350 entry = entry->next();
duke@0 351 }
duke@0 352 if (entry != NULL) {
duke@0 353 remove(prev, h, entry);
duke@0 354 }
duke@0 355 return entry;
duke@0 356 }
duke@0 357
duke@0 358 // iterate over all entries in the hashmap
duke@0 359 void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
duke@0 360 };
duke@0 361
duke@0 362 // possible hashmap sizes - odd primes that roughly double in size.
duke@0 363 // To avoid excessive resizing the odd primes from 4801-76831 and
duke@0 364 // 76831-307261 have been removed. The list must be terminated by -1.
duke@0 365 int JvmtiTagHashmap::_sizes[] = { 4801, 76831, 307261, 614563, 1228891,
duke@0 366 2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };
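// Illustrative examples (not part of the original sources) of how the
// JvmtiTagHashmap(int size, float load_factor) constructor maps a preferred
// size onto this table: a request for 5000 selects the next prime up, 76831,
// while a request larger than 78643219 falls back to that largest entry.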
duke@0 367
duke@0 368
duke@0 369 // A supporting class for iterating over all entries in Hashmap
duke@0 370 class JvmtiTagHashmapEntryClosure {
duke@0 371 public:
duke@0 372 virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
duke@0 373 };
duke@0 374
duke@0 375
duke@0 376 // iterate over all entries in the hashmap
duke@0 377 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
duke@0 378 for (int i=0; i<_size; i++) {
duke@0 379 JvmtiTagHashmapEntry* entry = _table[i];
duke@0 380 JvmtiTagHashmapEntry* prev = NULL;
duke@0 381 while (entry != NULL) {
duke@0 382 // obtain the next entry before invoking do_entry - this is
duke@0 383 // necessary because do_entry may remove the entry from the
duke@0 384 // hashmap.
duke@0 385 JvmtiTagHashmapEntry* next = entry->next();
duke@0 386 closure->do_entry(entry);
duke@0 387 entry = next;
duke@0 388 }
duke@0 389 }
duke@0 390 }
duke@0 391
duke@0 392 // debugging
duke@0 393 void JvmtiTagHashmap::print_memory_usage() {
duke@0 394 intptr_t p = (intptr_t)this;
duke@0 395 tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);
duke@0 396
duke@0 397 // table + entries in KB
duke@0 398 int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
duke@0 399 entry_count()*sizeof(JvmtiTagHashmapEntry))/K;
duke@0 400
duke@0 401 int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
duke@0 402 tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
duke@0 403 entry_count(), hashmap_usage, weak_globals_usage);
duke@0 404 }
duke@0 405
duke@0 406 // compute threshold for the next trace message
duke@0 407 void JvmtiTagHashmap::compute_next_trace_threshold() {
duke@0 408 if (trace_threshold() < medium_trace_threshold) {
duke@0 409 _trace_threshold += small_trace_threshold;
duke@0 410 } else {
duke@0 411 if (trace_threshold() < large_trace_threshold) {
duke@0 412 _trace_threshold += medium_trace_threshold;
duke@0 413 } else {
duke@0 414 _trace_threshold += large_trace_threshold;
duke@0 415 }
duke@0 416 }
duke@0 417 }
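// Illustrative trace-threshold sequence (not part of the original sources):
// starting from the initial threshold of 10000 entries, the memory-usage
// message fires as the entry count crosses 10000, 20000, ... in 10000-entry
// steps up to 100000, then in 100000-entry steps up to 1000000, and in
// 1000000-entry steps after that.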
duke@0 418
duke@0 419 // create a JvmtiTagMap
duke@0 420 JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
duke@0 421 _env(env),
duke@0 422 _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
duke@0 423 _free_entries(NULL),
duke@0 424 _free_entries_count(0)
duke@0 425 {
duke@0 426 assert(JvmtiThreadState_lock->is_locked(), "sanity check");
duke@0 427 assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");
duke@0 428
kamg@2125 429 _hashmap = new JvmtiTagHashmap();
duke@0 430
duke@0 431 // finally add us to the environment
duke@0 432 ((JvmtiEnvBase *)env)->set_tag_map(this);
duke@0 433 }
duke@0 434
duke@0 435
duke@0 436 // destroy a JvmtiTagMap
duke@0 437 JvmtiTagMap::~JvmtiTagMap() {
duke@0 438
duke@0 439 // no lock acquired as we assume the enclosing environment is
duke@0 440 // also being destroyed.
duke@0 441 ((JvmtiEnvBase *)_env)->set_tag_map(NULL);
duke@0 442
kamg@2125 443 JvmtiTagHashmapEntry** table = _hashmap->table();
kamg@2125 444 for (int j = 0; j < _hashmap->size(); j++) {
kamg@2125 445 JvmtiTagHashmapEntry* entry = table[j];
kamg@2125 446 while (entry != NULL) {
kamg@2125 447 JvmtiTagHashmapEntry* next = entry->next();
kamg@2125 448 delete entry;
kamg@2125 449 entry = next;
duke@0 450 }
duke@0 451 }
duke@0 452
kamg@2125 453 // finally destroy the hashmap
kamg@2125 454 delete _hashmap;
kamg@2125 455 _hashmap = NULL;
kamg@2125 456
duke@0 457 // remove any entries on the free list
duke@0 458 JvmtiTagHashmapEntry* entry = _free_entries;
duke@0 459 while (entry != NULL) {
duke@0 460 JvmtiTagHashmapEntry* next = entry->next();
duke@0 461 delete entry;
duke@0 462 entry = next;
duke@0 463 }
kamg@2125 464 _free_entries = NULL;
duke@0 465 }
duke@0 466
duke@0 467 // create a hashmap entry
duke@0 468 // - if there's an entry on the (per-environment) free list then this
duke@0 469 // is returned. Otherwise a new entry is allocated.
kamg@2125 470 JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) {
duke@0 471 assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
duke@0 472 JvmtiTagHashmapEntry* entry;
duke@0 473 if (_free_entries == NULL) {
duke@0 474 entry = new JvmtiTagHashmapEntry(ref, tag);
duke@0 475 } else {
duke@0 476 assert(_free_entries_count > 0, "mismatched _free_entries_count");
duke@0 477 _free_entries_count--;
duke@0 478 entry = _free_entries;
duke@0 479 _free_entries = entry->next();
duke@0 480 entry->init(ref, tag);
duke@0 481 }
duke@0 482 return entry;
duke@0 483 }
duke@0 484
duke@0 485 // destroy an entry by returning it to the free list
duke@0 486 void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
duke@0 487 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
duke@0 488 // limit the size of the free list
duke@0 489 if (_free_entries_count >= max_free_entries) {
duke@0 490 delete entry;
duke@0 491 } else {
duke@0 492 entry->set_next(_free_entries);
duke@0 493 _free_entries = entry;
duke@0 494 _free_entries_count++;
duke@0 495 }
duke@0 496 }
duke@0 497
duke@0 498 // returns the tag map for the given environment. If the tag map
duke@0 499 // doesn't exist then it is created.
duke@0 500 JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
kamg@2125 501 JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map();
duke@0 502 if (tag_map == NULL) {
duke@0 503 MutexLocker mu(JvmtiThreadState_lock);
kamg@2125 504 tag_map = ((JvmtiEnvBase*)env)->tag_map();
duke@0 505 if (tag_map == NULL) {
duke@0 506 tag_map = new JvmtiTagMap(env);
duke@0 507 }
duke@0 508 } else {
duke@0 509 CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
duke@0 510 }
duke@0 511 return tag_map;
duke@0 512 }
duke@0 513
duke@0 514 // iterate over all entries in the tag map.
duke@0 515 void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
kamg@2125 516 hashmap()->entry_iterate(closure);
duke@0 517 }
duke@0 518
duke@0 519 // returns true if the hashmap is empty
duke@0 520 bool JvmtiTagMap::is_empty() {
duke@0 521 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
kamg@2125 522 return hashmap()->entry_count() == 0;
duke@0 523 }
duke@0 524
duke@0 525
duke@0 526 // Return the tag value for an object, or 0 if the object is
duke@0 527 // not tagged
duke@0 528 //
duke@0 529 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
kamg@2125 530 JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o);
duke@0 531 if (entry == NULL) {
duke@0 532 return 0;
duke@0 533 } else {
duke@0 534 return entry->tag();
duke@0 535 }
duke@0 536 }
duke@0 537
duke@0 538
duke@0 539 // A CallbackWrapper is a support class for querying and tagging an object
duke@0 540 // around a callback to a profiler. The constructor does pre-callback
duke@0 541 // work to get the tag value, klass tag value, ... and the destructor
duke@0 542 // does the post-callback work of tagging or untagging the object.
duke@0 543 //
duke@0 544 // {
duke@0 545 // CallbackWrapper wrapper(tag_map, o);
duke@0 546 //
duke@0 547 // (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
duke@0 548 //
duke@0 549 // } // wrapper goes out of scope here which results in the destructor
duke@0 550 // checking to see if the object has been tagged, untagged, or the
duke@0 551 // tag value has changed.
duke@0 552 //
duke@0 553 class CallbackWrapper : public StackObj {
duke@0 554 private:
duke@0 555 JvmtiTagMap* _tag_map;
duke@0 556 JvmtiTagHashmap* _hashmap;
duke@0 557 JvmtiTagHashmapEntry* _entry;
duke@0 558 oop _o;
duke@0 559 jlong _obj_size;
duke@0 560 jlong _obj_tag;
duke@0 561 jlong _klass_tag;
duke@0 562
duke@0 563 protected:
duke@0 564 JvmtiTagMap* tag_map() const { return _tag_map; }
duke@0 565
duke@0 566 // invoked post-callback to tag, untag, or update the tag of an object
duke@0 567 void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
duke@0 568 JvmtiTagHashmapEntry* entry, jlong obj_tag);
duke@0 569 public:
duke@0 570 CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
duke@0 571 assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
duke@0 572 "MT unsafe or must be VM thread");
duke@0 573
coleenp@6725 574 // object to tag
coleenp@6725 575 _o = o;
duke@0 576
duke@0 577 // object size
sla@6171 578 _obj_size = (jlong)_o->size() * wordSize;
duke@0 579
duke@0 580 // record the context
duke@0 581 _tag_map = tag_map;
kamg@2125 582 _hashmap = tag_map->hashmap();
duke@0 583 _entry = _hashmap->find(_o);
duke@0 584
duke@0 585 // get object tag
duke@0 586 _obj_tag = (_entry == NULL) ? 0 : _entry->tag();
duke@0 587
duke@0 588 // get the class and the class's tag value
coleenp@6725 589 assert(SystemDictionary::Class_klass()->oop_is_instanceMirror(), "Is not?");
coleenp@6725 590
coleenp@6725 591 _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
duke@0 592 }
duke@0 593
duke@0 594 ~CallbackWrapper() {
duke@0 595 post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
duke@0 596 }
duke@0 597
duke@0 598 inline jlong* obj_tag_p() { return &_obj_tag; }
duke@0 599 inline jlong obj_size() const { return _obj_size; }
duke@0 600 inline jlong obj_tag() const { return _obj_tag; }
duke@0 601 inline jlong klass_tag() const { return _klass_tag; }
duke@0 602 };
duke@0 603
duke@0 604
duke@0 605
duke@0 606 // invoked post-callback to tag, untag, or update the tag of an object
duke@0 607 void inline CallbackWrapper::post_callback_tag_update(oop o,
duke@0 608 JvmtiTagHashmap* hashmap,
duke@0 609 JvmtiTagHashmapEntry* entry,
duke@0 610 jlong obj_tag) {
duke@0 611 if (entry == NULL) {
duke@0 612 if (obj_tag != 0) {
duke@0 613 // callback has tagged the object
duke@0 614 assert(Thread::current()->is_VM_thread(), "must be VMThread");
kamg@2125 615 entry = tag_map()->create_entry(o, obj_tag);
duke@0 616 hashmap->add(o, entry);
duke@0 617 }
duke@0 618 } else {
duke@0 619 // object was previously tagged - the callback may have untagged
duke@0 620 // the object or changed the tag value
duke@0 621 if (obj_tag == 0) {
duke@0 622
duke@0 623 JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
duke@0 624 assert(entry_removed == entry, "checking");
duke@0 625 tag_map()->destroy_entry(entry);
duke@0 626
duke@0 627 } else {
duke@0 628 if (obj_tag != entry->tag()) {
duke@0 629 entry->set_tag(obj_tag);
duke@0 630 }
duke@0 631 }
duke@0 632 }
duke@0 633 }
duke@0 634
duke@0 635 // An extended CallbackWrapper used when reporting an object reference
duke@0 636 // to the agent.
duke@0 637 //
duke@0 638 // {
duke@0 639 // TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
duke@0 640 //
duke@0 641 // (*callback)(wrapper.klass_tag(),
duke@0 642 // wrapper.obj_size(),
duke@0 643 // wrapper.obj_tag_p()
duke@0 644 // wrapper.referrer_tag_p(), ...)
duke@0 645 //
duke@0 646 // } // wrapper goes out of scope here which results in the destructor
duke@0 647 // checking to see if the referrer object has been tagged, untagged,
duke@0 648 // or the tag value has changed.
duke@0 649 //
duke@0 650 class TwoOopCallbackWrapper : public CallbackWrapper {
duke@0 651 private:
duke@0 652 bool _is_reference_to_self;
duke@0 653 JvmtiTagHashmap* _referrer_hashmap;
duke@0 654 JvmtiTagHashmapEntry* _referrer_entry;
duke@0 655 oop _referrer;
duke@0 656 jlong _referrer_obj_tag;
duke@0 657 jlong _referrer_klass_tag;
duke@0 658 jlong* _referrer_tag_p;
duke@0 659
duke@0 660 bool is_reference_to_self() const { return _is_reference_to_self; }
duke@0 661
duke@0 662 public:
duke@0 663 TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
duke@0 664 CallbackWrapper(tag_map, o)
duke@0 665 {
duke@0 666 // self reference needs to be handled in a special way
duke@0 667 _is_reference_to_self = (referrer == o);
duke@0 668
duke@0 669 if (_is_reference_to_self) {
duke@0 670 _referrer_klass_tag = klass_tag();
duke@0 671 _referrer_tag_p = obj_tag_p();
duke@0 672 } else {
coleenp@6725 673 _referrer = referrer;
duke@0 674 // record the context
kamg@2125 675 _referrer_hashmap = tag_map->hashmap();
duke@0 676 _referrer_entry = _referrer_hashmap->find(_referrer);
duke@0 677
duke@0 678 // get object tag
duke@0 679 _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
duke@0 680 _referrer_tag_p = &_referrer_obj_tag;
duke@0 681
duke@0 682 // get referrer class tag.
coleenp@6725 683 _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
duke@0 684 }
duke@0 685 }
duke@0 686
duke@0 687 ~TwoOopCallbackWrapper() {
duke@0 688 if (!is_reference_to_self()){
duke@0 689 post_callback_tag_update(_referrer,
duke@0 690 _referrer_hashmap,
duke@0 691 _referrer_entry,
duke@0 692 _referrer_obj_tag);
duke@0 693 }
duke@0 694 }
duke@0 695
duke@0 696 // address of referrer tag
duke@0 697 // (for a self reference this will return the same thing as obj_tag_p())
duke@0 698 inline jlong* referrer_tag_p() { return _referrer_tag_p; }
duke@0 699
duke@0 700 // referrer's class tag
duke@0 701 inline jlong referrer_klass_tag() { return _referrer_klass_tag; }
duke@0 702 };
duke@0 703
duke@0 704 // tag an object
duke@0 705 //
duke@0 706 // This function is performance critical. If many threads attempt to tag objects
duke@0 707 // around the same time then it's possible that the Mutex associated with the
kamg@2125 708 // tag map will be a hot lock.
duke@0 709 void JvmtiTagMap::set_tag(jobject object, jlong tag) {
duke@0 710 MutexLocker ml(lock());
duke@0 711
duke@0 712 // resolve the object
duke@0 713 oop o = JNIHandles::resolve_non_null(object);
duke@0 714
duke@0 715 // see if the object is already tagged
kamg@2125 716 JvmtiTagHashmap* hashmap = _hashmap;
duke@0 717 JvmtiTagHashmapEntry* entry = hashmap->find(o);
duke@0 718
duke@0 719 // if the object is not already tagged then we tag it
duke@0 720 if (entry == NULL) {
duke@0 721 if (tag != 0) {
kamg@2125 722 entry = create_entry(o, tag);
kamg@2125 723 hashmap->add(o, entry);
duke@0 724 } else {
duke@0 725 // no-op
duke@0 726 }
duke@0 727 } else {
duke@0 728 // if the object is already tagged then we either update
duke@0 729 // the tag (if a new tag value has been provided)
duke@0 730 // or remove the object if the new tag value is 0.
duke@0 731 if (tag == 0) {
duke@0 732 hashmap->remove(o);
duke@0 733 destroy_entry(entry);
duke@0 734 } else {
duke@0 735 entry->set_tag(tag);
duke@0 736 }
duke@0 737 }
duke@0 738 }
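// Agent-side sketch (illustrative, not part of the original sources; assumes a
// valid jvmtiEnv* "jvmti" and a live jobject "obj") of the JVMTI calls that
// reach set_tag() above:
//
//   jvmti->SetTag(obj, 42);   // creates or updates the hashmap entry for obj
//   jvmti->SetTag(obj, 0);    // a zero tag removes the entry again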
duke@0 739
duke@0 740 // get the tag for an object
duke@0 741 jlong JvmtiTagMap::get_tag(jobject object) {
duke@0 742 MutexLocker ml(lock());
duke@0 743
duke@0 744 // resolve the object
duke@0 745 oop o = JNIHandles::resolve_non_null(object);
duke@0 746
coleenp@6725 747 return tag_for(this, o);
duke@0 748 }
duke@0 749
duke@0 750
duke@0 751 // Helper class used to describe the static or instance fields of a class.
duke@0 752 // For each field it holds the field index (as defined by the JVMTI specification),
duke@0 753 // the field type, and the offset.
duke@0 754
zgu@6197 755 class ClassFieldDescriptor: public CHeapObj<mtInternal> {
duke@0 756 private:
duke@0 757 int _field_index;
duke@0 758 int _field_offset;
duke@0 759 char _field_type;
duke@0 760 public:
duke@0 761 ClassFieldDescriptor(int index, char type, int offset) :
duke@0 762 _field_index(index), _field_type(type), _field_offset(offset) {
duke@0 763 }
duke@0 764 int field_index() const { return _field_index; }
duke@0 765 char field_type() const { return _field_type; }
duke@0 766 int field_offset() const { return _field_offset; }
duke@0 767 };
duke@0 768
zgu@6197 769 class ClassFieldMap: public CHeapObj<mtInternal> {
duke@0 770 private:
duke@0 771 enum {
duke@0 772 initial_field_count = 5
duke@0 773 };
duke@0 774
duke@0 775 // list of field descriptors
duke@0 776 GrowableArray<ClassFieldDescriptor*>* _fields;
duke@0 777
duke@0 778 // constructor
duke@0 779 ClassFieldMap();
duke@0 780
duke@0 781 // add a field
duke@0 782 void add(int index, char type, int offset);
duke@0 783
duke@0 784 // returns the field count for the given class
duke@0 785 static int compute_field_count(instanceKlassHandle ikh);
duke@0 786
duke@0 787 public:
duke@0 788 ~ClassFieldMap();
duke@0 789
duke@0 790 // access
duke@0 791 int field_count() { return _fields->length(); }
duke@0 792 ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }
duke@0 793
duke@0 794 // functions to create maps of static or instance fields
coleenp@6725 795 static ClassFieldMap* create_map_of_static_fields(Klass* k);
duke@0 796 static ClassFieldMap* create_map_of_instance_fields(oop obj);
duke@0 797 };
duke@0 798
duke@0 799 ClassFieldMap::ClassFieldMap() {
zgu@6197 800 _fields = new (ResourceObj::C_HEAP, mtInternal)
zgu@6197 801 GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
duke@0 802 }
duke@0 803
duke@0 804 ClassFieldMap::~ClassFieldMap() {
duke@0 805 for (int i=0; i<_fields->length(); i++) {
duke@0 806 delete _fields->at(i);
duke@0 807 }
duke@0 808 delete _fields;
duke@0 809 }
duke@0 810
duke@0 811 void ClassFieldMap::add(int index, char type, int offset) {
duke@0 812 ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
duke@0 813 _fields->append(field);
duke@0 814 }
duke@0 815
duke@0 816 // Returns a heap allocated ClassFieldMap to describe the static fields
duke@0 817 // of the given class.
duke@0 818 //
coleenp@6725 819 ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
duke@0 820 HandleMark hm;
duke@0 821 instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), k);
duke@0 822
duke@0 823 // create the field map
duke@0 824 ClassFieldMap* field_map = new ClassFieldMap();
duke@0 825
duke@0 826 FilteredFieldStream f(ikh, false, false);
duke@0 827 int max_field_index = f.field_count()-1;
duke@0 828
duke@0 829 int index = 0;
duke@0 830 for (FilteredFieldStream fld(ikh, true, true); !fld.eos(); fld.next(), index++) {
duke@0 831 // ignore instance fields
duke@0 832 if (!fld.access_flags().is_static()) {
duke@0 833 continue;
duke@0 834 }
duke@0 835 field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
duke@0 836 }
duke@0 837 return field_map;
duke@0 838 }
duke@0 839
duke@0 840 // Returns a heap allocated ClassFieldMap to describe the instance fields
duke@0 841 // of the given class. All instance fields are included (this means public
duke@0 842 // and private fields declared in superclasses and superinterfaces too).
duke@0 843 //
duke@0 844 ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
duke@0 845 HandleMark hm;
duke@0 846 instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), obj->klass());
duke@0 847
duke@0 848 // create the field map
duke@0 849 ClassFieldMap* field_map = new ClassFieldMap();
duke@0 850
duke@0 851 FilteredFieldStream f(ikh, false, false);
duke@0 852
duke@0 853 int max_field_index = f.field_count()-1;
duke@0 854
duke@0 855 int index = 0;
duke@0 856 for (FilteredFieldStream fld(ikh, false, false); !fld.eos(); fld.next(), index++) {
duke@0 857 // ignore static fields
duke@0 858 if (fld.access_flags().is_static()) {
duke@0 859 continue;
duke@0 860 }
duke@0 861 field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
duke@0 862 }
duke@0 863
duke@0 864 return field_map;
duke@0 865 }
duke@0 866
duke@0 867 // Helper class used to cache a ClassFieldMap for the instance fields of
coleenp@6725 868 // a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
duke@0 869 // heap iteration and avoid creating a field map for each object in the heap
duke@0 870 // (only need to create the map when the first instance of a class is encountered).
duke@0 871 //
zgu@6197 872 class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
duke@0 873 private:
duke@0 874 enum {
duke@0 875 initial_class_count = 200
duke@0 876 };
duke@0 877 ClassFieldMap* _field_map;
duke@0 878
duke@0 879 ClassFieldMap* field_map() const { return _field_map; }
duke@0 880
duke@0 881 JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
duke@0 882 ~JvmtiCachedClassFieldMap();
duke@0 883
coleenp@6725 884 static GrowableArray<InstanceKlass*>* _class_list;
coleenp@6725 885 static void add_to_class_list(InstanceKlass* ik);
duke@0 886
duke@0 887 public:
duke@0 888 // returns the field map for a given object (returning map cached
coleenp@6725 889 // by InstanceKlass if possible)
duke@0 890 static ClassFieldMap* get_map_of_instance_fields(oop obj);
duke@0 891
duke@0 892 // removes the field map from all instanceKlasses - should be
duke@0 893 // called before VM operation completes
duke@0 894 static void clear_cache();
duke@0 895
duke@0 896 // returns the number of ClassFieldMaps cached by instanceKlasses
duke@0 897 static int cached_field_map_count();
duke@0 898 };
duke@0 899
coleenp@6725 900 GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
duke@0 901
duke@0 902 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
duke@0 903 _field_map = field_map;
duke@0 904 }
duke@0 905
duke@0 906 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
duke@0 907 if (_field_map != NULL) {
duke@0 908 delete _field_map;
duke@0 909 }
duke@0 910 }
duke@0 911
duke@0 912 // Marker class to ensure that the class field map cache is only used in a defined
duke@0 913 // scope.
duke@0 914 class ClassFieldMapCacheMark : public StackObj {
duke@0 915 private:
duke@0 916 static bool _is_active;
duke@0 917 public:
duke@0 918 ClassFieldMapCacheMark() {
duke@0 919 assert(Thread::current()->is_VM_thread(), "must be VMThread");
duke@0 920 assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
duke@0 921 assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
duke@0 922 _is_active = true;
duke@0 923 }
duke@0 924 ~ClassFieldMapCacheMark() {
duke@0 925 JvmtiCachedClassFieldMap::clear_cache();
duke@0 926 _is_active = false;
duke@0 927 }
duke@0 928 static bool is_active() { return _is_active; }
duke@0 929 };
duke@0 930
duke@0 931 bool ClassFieldMapCacheMark::_is_active;
duke@0 932
duke@0 933
coleenp@6725 934 // record that the given InstanceKlass is caching a field map
coleenp@6725 935 void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
duke@0 936 if (_class_list == NULL) {
zgu@6197 937 _class_list = new (ResourceObj::C_HEAP, mtInternal)
coleenp@6725 938 GrowableArray<InstanceKlass*>(initial_class_count, true);
duke@0 939 }
duke@0 940 _class_list->push(ik);
duke@0 941 }
duke@0 942
duke@0 943 // returns the instance field map for the given object
coleenp@6725 944 // (returns field map cached by the InstanceKlass if possible)
duke@0 945 ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
duke@0 946 assert(Thread::current()->is_VM_thread(), "must be VMThread");
duke@0 947 assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
duke@0 948
coleenp@6725 949 Klass* k = obj->klass();
coleenp@6725 950 InstanceKlass* ik = InstanceKlass::cast(k);
duke@0 951
duke@0 952 // return cached map if possible
duke@0 953 JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
duke@0 954 if (cached_map != NULL) {
duke@0 955 assert(cached_map->field_map() != NULL, "missing field list");
duke@0 956 return cached_map->field_map();
duke@0 957 } else {
duke@0 958 ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
duke@0 959 cached_map = new JvmtiCachedClassFieldMap(field_map);
duke@0 960 ik->set_jvmti_cached_class_field_map(cached_map);
duke@0 961 add_to_class_list(ik);
duke@0 962 return field_map;
duke@0 963 }
duke@0 964 }
duke@0 965
duke@0 966 // remove the fields maps cached from all instanceKlasses
duke@0 967 void JvmtiCachedClassFieldMap::clear_cache() {
duke@0 968 assert(Thread::current()->is_VM_thread(), "must be VMThread");
duke@0 969 if (_class_list != NULL) {
duke@0 970 for (int i = 0; i < _class_list->length(); i++) {
coleenp@6725 971 InstanceKlass* ik = _class_list->at(i);
duke@0 972 JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
duke@0 973 assert(cached_map != NULL, "should not be NULL");
duke@0 974 ik->set_jvmti_cached_class_field_map(NULL);
duke@0 975 delete cached_map; // deletes the encapsulated field map
duke@0 976 }
duke@0 977 delete _class_list;
duke@0 978 _class_list = NULL;
duke@0 979 }
duke@0 980 }
duke@0 981
duke@0 982 // returns the number of ClassFieldMaps cached by instanceKlasses
duke@0 983 int JvmtiCachedClassFieldMap::cached_field_map_count() {
duke@0 984 return (_class_list == NULL) ? 0 : _class_list->length();
duke@0 985 }
duke@0 986
duke@0 987 // helper function to indicate if an object is filtered by its tag or class tag
duke@0 988 static inline bool is_filtered_by_heap_filter(jlong obj_tag,
duke@0 989 jlong klass_tag,
duke@0 990 int heap_filter) {
duke@0 991 // apply the heap filter
duke@0 992 if (obj_tag != 0) {
duke@0 993 // filter out tagged objects
duke@0 994 if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
duke@0 995 } else {
duke@0 996 // filter out untagged objects
duke@0 997 if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
duke@0 998 }
duke@0 999 if (klass_tag != 0) {
duke@0 1000 // filter out objects with tagged classes
duke@0 1001 if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
duke@0 1002 } else {
duke@0 1003 // filter out objects with untagged classes.
duke@0 1004 if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
duke@0 1005 }
duke@0 1006 return false;
duke@0 1007 }
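// Illustrative example (not part of the original sources): a heap_filter of
// (JVMTI_HEAP_FILTER_TAGGED | JVMTI_HEAP_FILTER_CLASS_UNTAGGED) makes the
// function above skip any object that already carries a tag as well as any
// object whose class is untagged, so only untagged instances of tagged
// classes are reported.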
duke@0 1008
duke@0 1009 // helper function to indicate if an object is filtered by a klass filter
duke@0 1010 static inline bool is_filtered_by_klass_filter(oop obj, KlassHandle klass_filter) {
duke@0 1011 if (!klass_filter.is_null()) {
duke@0 1012 if (obj->klass() != klass_filter()) {
duke@0 1013 return true;
duke@0 1014 }
duke@0 1015 }
duke@0 1016 return false;
duke@0 1017 }
duke@0 1018
duke@0 1019 // helper function to tell if a field is a primitive field or not
duke@0 1020 static inline bool is_primitive_field_type(char type) {
duke@0 1021 return (type != 'L' && type != '[');
duke@0 1022 }
duke@0 1023
duke@0 1024 // helper function to copy the value from location addr to jvalue.
duke@0 1025 static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
duke@0 1026 switch (value_type) {
duke@0 1027 case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
duke@0 1028 case JVMTI_PRIMITIVE_TYPE_BYTE : { v->b = *(jbyte*)addr; break; }
duke@0 1029 case JVMTI_PRIMITIVE_TYPE_CHAR : { v->c = *(jchar*)addr; break; }
duke@0 1030 case JVMTI_PRIMITIVE_TYPE_SHORT : { v->s = *(jshort*)addr; break; }
duke@0 1031 case JVMTI_PRIMITIVE_TYPE_INT : { v->i = *(jint*)addr; break; }
duke@0 1032 case JVMTI_PRIMITIVE_TYPE_LONG : { v->j = *(jlong*)addr; break; }
duke@0 1033 case JVMTI_PRIMITIVE_TYPE_FLOAT : { v->f = *(jfloat*)addr; break; }
duke@0 1034 case JVMTI_PRIMITIVE_TYPE_DOUBLE : { v->d = *(jdouble*)addr; break; }
duke@0 1035 default: ShouldNotReachHere();
duke@0 1036 }
duke@0 1037 }
duke@0 1038
duke@0 1039 // helper function to invoke string primitive value callback
duke@0 1040 // returns visit control flags
duke@0 1041 static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
duke@0 1042 CallbackWrapper* wrapper,
duke@0 1043 oop str,
duke@0 1044 void* user_data)
duke@0 1045 {
never@1142 1046 assert(str->klass() == SystemDictionary::String_klass(), "not a string");
duke@0 1047
aph@22828 1048 typeArrayOop s_value = java_lang_String::value(str);
aph@22828 1049
aph@22828 1050 // JDK-6584008: the value field may be null if a String instance is
aph@22828 1051 // partially constructed.
aph@22828 1052 if (s_value == NULL) {
aph@22828 1053 return 0;
aph@22828 1054 }
duke@0 1055 // get the string value and length
duke@0 1056 // (string value may be offset from the base)
duke@0 1057 int s_len = java_lang_String::length(str);
duke@0 1058 int s_offset = java_lang_String::offset(str);
duke@0 1059 jchar* value;
duke@0 1060 if (s_len > 0) {
duke@0 1061 value = s_value->char_at_addr(s_offset);
duke@0 1062 } else {
duke@0 1063 value = (jchar*) s_value->base(T_CHAR);
duke@0 1064 }
duke@0 1065
duke@0 1066 // invoke the callback
duke@0 1067 return (*cb)(wrapper->klass_tag(),
duke@0 1068 wrapper->obj_size(),
duke@0 1069 wrapper->obj_tag_p(),
duke@0 1070 value,
duke@0 1071 (jint)s_len,
duke@0 1072 user_data);
duke@0 1073 }
duke@0 1074
duke@0 1075 // helper function to invoke array primitive value callback
duke@0 1076 // returns visit control flags
duke@0 1077 static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
duke@0 1078 CallbackWrapper* wrapper,
duke@0 1079 oop obj,
duke@0 1080 void* user_data)
duke@0 1081 {
duke@0 1082 assert(obj->is_typeArray(), "not a primitive array");
duke@0 1083
duke@0 1084 // get base address of first element
duke@0 1085 typeArrayOop array = typeArrayOop(obj);
coleenp@6831 1086 BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
duke@0 1087 void* elements = array->base(type);
duke@0 1088
duke@0 1089 // jvmtiPrimitiveType is defined so this mapping is always correct
duke@0 1090 jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);
duke@0 1091
duke@0 1092 return (*cb)(wrapper->klass_tag(),
duke@0 1093 wrapper->obj_size(),
duke@0 1094 wrapper->obj_tag_p(),
duke@0 1095 (jint)array->length(),
duke@0 1096 elem_type,
duke@0 1097 elements,
duke@0 1098 user_data);
duke@0 1099 }
duke@0 1100
duke@0 1101 // helper function to invoke the primitive field callback for all static fields
duke@0 1102 // of a given class
duke@0 1103 static jint invoke_primitive_field_callback_for_static_fields
duke@0 1104 (CallbackWrapper* wrapper,
duke@0 1105 oop obj,
duke@0 1106 jvmtiPrimitiveFieldCallback cb,
duke@0 1107 void* user_data)
duke@0 1108 {
duke@0 1109 // for static fields only the index will be set
duke@0 1110 static jvmtiHeapReferenceInfo reference_info = { 0 };
duke@0 1111
never@1142 1112 assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
duke@0 1113 if (java_lang_Class::is_primitive(obj)) {
duke@0 1114 return 0;
duke@0 1115 }
coleenp@6725 1116 Klass* klass = java_lang_Class::as_Klass(obj);
duke@0 1117
duke@0 1118 // ignore classes for object and type arrays
duke@0 1119 if (!klass->oop_is_instance()) {
duke@0 1120 return 0;
duke@0 1121 }
duke@0 1122
duke@0 1123 // ignore classes which aren't linked yet
coleenp@6725 1124 InstanceKlass* ik = InstanceKlass::cast(klass);
duke@0 1125 if (!ik->is_linked()) {
duke@0 1126 return 0;
duke@0 1127 }
duke@0 1128
duke@0 1129 // get the field map
coleenp@6725 1130 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
duke@0 1131
duke@0 1132 // invoke the callback for each static primitive field
duke@0 1133 for (int i=0; i<field_map->field_count(); i++) {
duke@0 1134 ClassFieldDescriptor* field = field_map->field_at(i);
duke@0 1135
duke@0 1136 // ignore non-primitive fields
duke@0 1137 char type = field->field_type();
duke@0 1138 if (!is_primitive_field_type(type)) {
duke@0 1139 continue;
duke@0 1140 }
duke@0 1141 // one-to-one mapping
duke@0 1142 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
duke@0 1143
duke@0 1144 // get offset and field value
duke@0 1145 int offset = field->field_offset();
rbackman@6871 1146 address addr = (address)klass->java_mirror() + offset;
duke@0 1147 jvalue value;
duke@0 1148 copy_to_jvalue(&value, addr, value_type);
duke@0 1149
duke@0 1150 // field index
duke@0 1151 reference_info.field.index = field->field_index();
duke@0 1152
duke@0 1153 // invoke the callback
duke@0 1154 jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
duke@0 1155 &reference_info,
duke@0 1156 wrapper->klass_tag(),
duke@0 1157 wrapper->obj_tag_p(),
duke@0 1158 value,
duke@0 1159 value_type,
duke@0 1160 user_data);
duke@0 1161 if (res & JVMTI_VISIT_ABORT) {
duke@0 1162 delete field_map;
duke@0 1163 return res;
duke@0 1164 }
duke@0 1165 }
duke@0 1166
duke@0 1167 delete field_map;
duke@0 1168 return 0;
duke@0 1169 }
duke@0 1170
duke@0 1171 // helper function to invoke the primitive field callback for all instance fields
duke@0 1172 // of a given object
duke@0 1173 static jint invoke_primitive_field_callback_for_instance_fields(
duke@0 1174 CallbackWrapper* wrapper,
duke@0 1175 oop obj,
duke@0 1176 jvmtiPrimitiveFieldCallback cb,
duke@0 1177 void* user_data)
duke@0 1178 {
duke@0 1179 // for instance fields only the index will be set
duke@0 1180 static jvmtiHeapReferenceInfo reference_info = { 0 };
duke@0 1181
duke@0 1182 // get the map of the instance fields
duke@0 1183 ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
duke@0 1184
duke@0 1185 // invoke the callback for each instance primitive field
duke@0 1186 for (int i=0; i<fields->field_count(); i++) {
duke@0 1187 ClassFieldDescriptor* field = fields->field_at(i);
duke@0 1188
duke@0 1189 // ignore non-primitive fields
duke@0 1190 char type = field->field_type();
duke@0 1191 if (!is_primitive_field_type(type)) {
duke@0 1192 continue;
duke@0 1193 }
duke@0 1194 // one-to-one mapping
duke@0 1195 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
duke@0 1196
duke@0 1197 // get offset and field value
duke@0 1198 int offset = field->field_offset();
duke@0 1199 address addr = (address)obj + offset;
duke@0 1200 jvalue value;
duke@0 1201 copy_to_jvalue(&value, addr, value_type);
duke@0 1202
duke@0 1203 // field index
duke@0 1204 reference_info.field.index = field->field_index();
duke@0 1205
duke@0 1206 // invoke the callback
duke@0 1207 jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
duke@0 1208 &reference_info,
duke@0 1209 wrapper->klass_tag(),
duke@0 1210 wrapper->obj_tag_p(),
duke@0 1211 value,
duke@0 1212 value_type,
duke@0 1213 user_data);
duke@0 1214 if (res & JVMTI_VISIT_ABORT) {
duke@0 1215 return res;
duke@0 1216 }
duke@0 1217 }
duke@0 1218 return 0;
duke@0 1219 }
duke@0 1220
duke@0 1221
duke@0 1222 // VM operation to iterate over all objects in the heap (both reachable
duke@0 1223 // and unreachable)
duke@0 1224 class VM_HeapIterateOperation: public VM_Operation {
duke@0 1225 private:
duke@0 1226 ObjectClosure* _blk;
duke@0 1227 public:
duke@0 1228 VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }
duke@0 1229
duke@0 1230 VMOp_Type type() const { return VMOp_HeapIterateOperation; }
duke@0 1231 void doit() {
duke@0 1232 // allows class field maps to be cached during iteration
duke@0 1233 ClassFieldMapCacheMark cm;
duke@0 1234
duke@0 1235 // make sure that heap is parsable (fills TLABs with filler objects)
duke@0 1236 Universe::heap()->ensure_parsability(false); // no need to retire TLABs
duke@0 1237
duke@0 1238 // Verify heap before iteration - if the heap gets corrupted then
duke@0 1239 // JVMTI's IterateOverHeap will crash.
duke@0 1240 if (VerifyBeforeIteration) {
duke@0 1241 Universe::verify();
duke@0 1242 }
duke@0 1243
duke@0 1244 // do the iteration
jmasa@517 1245 // If this operation encounters a bad object when using CMS,
jmasa@517 1246 // consider using safe_object_iterate() which avoids perm gen
jmasa@517 1247 // objects that may contain bad references.
duke@0 1248 Universe::heap()->object_iterate(_blk);
duke@0 1249 }
duke@0 1250
duke@0 1251 };
duke@0 1252
duke@0 1253
duke@0 1254 // An ObjectClosure used to support the deprecated IterateOverHeap and
duke@0 1255 // IterateOverInstancesOfClass functions
duke@0 1256 class IterateOverHeapObjectClosure: public ObjectClosure {
duke@0 1257 private:
duke@0 1258 JvmtiTagMap* _tag_map;
duke@0 1259 KlassHandle _klass;
duke@0 1260 jvmtiHeapObjectFilter _object_filter;
duke@0 1261 jvmtiHeapObjectCallback _heap_object_callback;
duke@0 1262 const void* _user_data;
duke@0 1263
duke@0 1264 // accessors
duke@0 1265 JvmtiTagMap* tag_map() const { return _tag_map; }
duke@0 1266 jvmtiHeapObjectFilter object_filter() const { return _object_filter; }
duke@0 1267 jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
duke@0 1268 KlassHandle klass() const { return _klass; }
duke@0 1269 const void* user_data() const { return _user_data; }
duke@0 1270
duke@0 1271 // indicates if iteration has been aborted
duke@0 1272 bool _iteration_aborted;
duke@0 1273 bool is_iteration_aborted() const { return _iteration_aborted; }
duke@0 1274 void set_iteration_aborted(bool aborted) { _iteration_aborted = aborted; }
duke@0 1275
duke@0 1276 public:
duke@0 1277 IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
duke@0 1278 KlassHandle klass,
duke@0 1279 jvmtiHeapObjectFilter object_filter,
duke@0 1280 jvmtiHeapObjectCallback heap_object_callback,
duke@0 1281 const void* user_data) :
duke@0 1282 _tag_map(tag_map),
duke@0 1283 _klass(klass),
duke@0 1284 _object_filter(object_filter),
duke@0 1285 _heap_object_callback(heap_object_callback),
duke@0 1286 _user_data(user_data),
duke@0 1287 _iteration_aborted(false)
duke@0 1288 {
duke@0 1289 }
duke@0 1290
duke@0 1291 void do_object(oop o);
duke@0 1292 };
duke@0 1293
duke@0 1294 // invoked for each object in the heap
duke@0 1295 void IterateOverHeapObjectClosure::do_object(oop o) {
duke@0 1296 // check if iteration has been halted
duke@0 1297 if (is_iteration_aborted()) return;
duke@0 1298
duke@0 1299 // ignore any objects that aren't visible to profiler
duke@0 1300 if (!ServiceUtil::visible_oop(o)) return;
duke@0 1301
duke@0 1302 // instanceof check when filtering by klass
duke@0 1303 if (!klass().is_null() && !o->is_a(klass()())) {
duke@0 1304 return;
duke@0 1305 }
duke@0 1306 // prepare for the callback
duke@0 1307 CallbackWrapper wrapper(tag_map(), o);
duke@0 1308
duke@0 1309 // if the object is tagged and we're only interested in untagged objects
duke@0 1310 // then don't invoke the callback. Similarly, if the object is untagged
duke@0 1311 // and we're only interested in tagged objects we skip the callback.
duke@0 1312 if (wrapper.obj_tag() != 0) {
duke@0 1313 if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
duke@0 1314 } else {
duke@0 1315 if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
duke@0 1316 }
duke@0 1317
duke@0 1318 // invoke the agent's callback
duke@0 1319 jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
duke@0 1320 wrapper.obj_size(),
duke@0 1321 wrapper.obj_tag_p(),
duke@0 1322 (void*)user_data());
duke@0 1323 if (control == JVMTI_ITERATION_ABORT) {
duke@0 1324 set_iteration_aborted(true);
duke@0 1325 }
duke@0 1326 }
duke@0 1327
duke@0 1328 // An ObjectClosure used to support the IterateThroughHeap function
duke@0 1329 class IterateThroughHeapObjectClosure: public ObjectClosure {
duke@0 1330 private:
duke@0 1331 JvmtiTagMap* _tag_map;
duke@0 1332 KlassHandle _klass;
duke@0 1333 int _heap_filter;
duke@0 1334 const jvmtiHeapCallbacks* _callbacks;
duke@0 1335 const void* _user_data;
duke@0 1336
duke@0 1337 // accessor functions
duke@0 1338 JvmtiTagMap* tag_map() const { return _tag_map; }
duke@0 1339 int heap_filter() const { return _heap_filter; }
duke@0 1340 const jvmtiHeapCallbacks* callbacks() const { return _callbacks; }
duke@0 1341 KlassHandle klass() const { return _klass; }
duke@0 1342 const void* user_data() const { return _user_data; }
duke@0 1343
duke@0 1344 // indicates if the iteration has been aborted
duke@0 1345 bool _iteration_aborted;
duke@0 1346 bool is_iteration_aborted() const { return _iteration_aborted; }
duke@0 1347
duke@0 1348 // used to check the visit control flags. If the abort flag is set
duke@0 1349 // then we set the iteration aborted flag so that the iteration completes
duke@0 1350 // without processing any further objects
duke@0 1351 bool check_flags_for_abort(jint flags) {
duke@0 1352 bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
duke@0 1353 if (is_abort) {
duke@0 1354 _iteration_aborted = true;
duke@0 1355 }
duke@0 1356 return is_abort;
duke@0 1357 }
duke@0 1358
duke@0 1359 public:
duke@0 1360 IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
duke@0 1361 KlassHandle klass,
duke@0 1362 int heap_filter,
duke@0 1363 const jvmtiHeapCallbacks* heap_callbacks,
duke@0 1364 const void* user_data) :
duke@0 1365 _tag_map(tag_map),
duke@0 1366 _klass(klass),
duke@0 1367 _heap_filter(heap_filter),
duke@0 1368 _callbacks(heap_callbacks),
duke@0 1369 _user_data(user_data),
duke@0 1370 _iteration_aborted(false)
duke@0 1371 {
duke@0 1372 }
duke@0 1373
duke@0 1374 void do_object(oop o);
duke@0 1375 };
duke@0 1376
duke@0 1377 // invoked for each object in the heap
duke@0 1378 void IterateThroughHeapObjectClosure::do_object(oop obj) {
duke@0 1379 // check if iteration has been halted
duke@0 1380 if (is_iteration_aborted()) return;
duke@0 1381
duke@0 1382 // ignore any objects that aren't visible to profiler
duke@0 1383 if (!ServiceUtil::visible_oop(obj)) return;
duke@0 1384
duke@0 1385 // apply class filter
duke@0 1386 if (is_filtered_by_klass_filter(obj, klass())) return;
duke@0 1387
duke@0 1388 // prepare for callback
duke@0 1389 CallbackWrapper wrapper(tag_map(), obj);
duke@0 1390
duke@0 1391 // check if filtered by the heap filter
duke@0 1392 if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
duke@0 1393 return;
duke@0 1394 }
duke@0 1395
duke@0 1396 // for arrays we need the length, otherwise -1
duke@0 1397 bool is_array = obj->is_array();
duke@0 1398 int len = is_array ? arrayOop(obj)->length() : -1;
duke@0 1399
duke@0 1400 // invoke the object callback (if callback is provided)
duke@0 1401 if (callbacks()->heap_iteration_callback != NULL) {
duke@0 1402 jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
duke@0 1403 jint res = (*cb)(wrapper.klass_tag(),
duke@0 1404 wrapper.obj_size(),
duke@0 1405 wrapper.obj_tag_p(),
duke@0 1406 (jint)len,
duke@0 1407 (void*)user_data());
duke@0 1408 if (check_flags_for_abort(res)) return;
duke@0 1409 }
duke@0 1410
duke@0 1411 // for objects and classes we report primitive fields if callback provided
duke@0 1412 if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
duke@0 1413 jint res;
duke@0 1414 jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
never@1142 1415 if (obj->klass() == SystemDictionary::Class_klass()) {
duke@0 1416 res = invoke_primitive_field_callback_for_static_fields(&wrapper,
duke@0 1417 obj,
duke@0 1418 cb,
duke@0 1419 (void*)user_data());
duke@0 1420 } else {
duke@0 1421 res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
duke@0 1422 obj,
duke@0 1423 cb,
duke@0 1424 (void*)user_data());
duke@0 1425 }
duke@0 1426 if (check_flags_for_abort(res)) return;
duke@0 1427 }
duke@0 1428
duke@0 1429 // string callback
duke@0 1430 if (!is_array &&
duke@0 1431 callbacks()->string_primitive_value_callback != NULL &&
never@1142 1432 obj->klass() == SystemDictionary::String_klass()) {
duke@0 1433 jint res = invoke_string_value_callback(
duke@0 1434 callbacks()->string_primitive_value_callback,
duke@0 1435 &wrapper,
duke@0 1436 obj,
duke@0 1437 (void*)user_data() );
duke@0 1438 if (check_flags_for_abort(res)) return;
duke@0 1439 }
duke@0 1440
duke@0 1441 // array callback
duke@0 1442 if (is_array &&
duke@0 1443 callbacks()->array_primitive_value_callback != NULL &&
duke@0 1444 obj->is_typeArray()) {
duke@0 1445 jint res = invoke_array_primitive_value_callback(
duke@0 1446 callbacks()->array_primitive_value_callback,
duke@0 1447 &wrapper,
duke@0 1448 obj,
duke@0 1449 (void*)user_data() );
duke@0 1450 if (check_flags_for_abort(res)) return;
duke@0 1451 }
duke@0 1452 }
duke@0 1453
duke@0 1454
duke@0 1455 // Deprecated function to iterate over all objects in the heap
duke@0 1456 void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
duke@0 1457 KlassHandle klass,
duke@0 1458 jvmtiHeapObjectCallback heap_object_callback,
duke@0 1459 const void* user_data)
duke@0 1460 {
duke@0 1461 MutexLocker ml(Heap_lock);
duke@0 1462 IterateOverHeapObjectClosure blk(this,
duke@0 1463 klass,
duke@0 1464 object_filter,
duke@0 1465 heap_object_callback,
duke@0 1466 user_data);
duke@0 1467 VM_HeapIterateOperation op(&blk);
duke@0 1468 VMThread::execute(&op);
duke@0 1469 }
duke@0 1470
duke@0 1471
duke@0 1472 // Iterates over all objects in the heap
duke@0 1473 void JvmtiTagMap::iterate_through_heap(jint heap_filter,
duke@0 1474 KlassHandle klass,
duke@0 1475 const jvmtiHeapCallbacks* callbacks,
duke@0 1476 const void* user_data)
duke@0 1477 {
duke@0 1478 MutexLocker ml(Heap_lock);
duke@0 1479 IterateThroughHeapObjectClosure blk(this,
duke@0 1480 klass,
duke@0 1481 heap_filter,
duke@0 1482 callbacks,
duke@0 1483 user_data);
duke@0 1484 VM_HeapIterateOperation op(&blk);
duke@0 1485 VMThread::execute(&op);
duke@0 1486 }
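// Illustrative agent-side sketch (hypothetical code, not part of this file): an agent
// typically drives the iteration above through the IterateThroughHeap function with a
// heap_iteration_callback, for example:
//
//   static jint JNICALL count_cb(jlong class_tag, jlong size, jlong* tag_ptr,
//                                jint length, void* user_data) {
//     *(jlong*)user_data += 1;   // count every object reported
//     return 0;                  // continue; JVMTI_VISIT_ABORT would stop the iteration
//   }
//
//   jlong count = 0;
//   jvmtiHeapCallbacks cb;
//   memset(&cb, 0, sizeof(cb));
//   cb.heap_iteration_callback = &count_cb;
//   jvmti->IterateThroughHeap(0 /* no filter */, NULL /* all classes */, &cb, &count);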
duke@0 1487
duke@0 1488 // support class for get_objects_with_tags
duke@0 1489
duke@0 1490 class TagObjectCollector : public JvmtiTagHashmapEntryClosure {
duke@0 1491 private:
duke@0 1492 JvmtiEnv* _env;
duke@0 1493 jlong* _tags;
duke@0 1494 jint _tag_count;
duke@0 1495
duke@0 1496 GrowableArray<jobject>* _object_results; // collected objects (JNI weak refs)
duke@0 1497 GrowableArray<uint64_t>* _tag_results; // collected tags
duke@0 1498
duke@0 1499 public:
duke@0 1500 TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) {
duke@0 1501 _env = env;
duke@0 1502 _tags = (jlong*)tags;
duke@0 1503 _tag_count = tag_count;
zgu@6197 1504 _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true);
zgu@6197 1505 _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true);
duke@0 1506 }
duke@0 1507
duke@0 1508 ~TagObjectCollector() {
duke@0 1509 delete _object_results;
duke@0 1510 delete _tag_results;
duke@0 1511 }
duke@0 1512
duke@0 1513 // for each tagged object check if the tag value matches
duke@0 1514 // - if it matches then we create a JNI local reference to the object
duke@0 1515 // and record the reference and tag value.
duke@0 1516 //
duke@0 1517 void do_entry(JvmtiTagHashmapEntry* entry) {
duke@0 1518 for (int i=0; i<_tag_count; i++) {
duke@0 1519 if (_tags[i] == entry->tag()) {
kamg@2125 1520 oop o = entry->object();
coleenp@6725 1521 assert(o != NULL && Universe::heap()->is_in_reserved(o), "sanity check");
duke@0 1522 jobject ref = JNIHandles::make_local(JavaThread::current(), o);
duke@0 1523 _object_results->append(ref);
duke@0 1524 _tag_results->append((uint64_t)entry->tag());
duke@0 1525 }
duke@0 1526 }
duke@0 1527 }
duke@0 1528
duke@0 1529 // return the results from the collection
duke@0 1530 //
duke@0 1531 jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
duke@0 1532 jvmtiError error;
duke@0 1533 int count = _object_results->length();
duke@0 1534 assert(count >= 0, "sanity check");
duke@0 1535
duke@0 1536 // if object_result_ptr is not NULL then allocate the result and copy
duke@0 1537 // in the object references.
duke@0 1538 if (object_result_ptr != NULL) {
duke@0 1539 error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
duke@0 1540 if (error != JVMTI_ERROR_NONE) {
duke@0 1541 return error;
duke@0 1542 }
duke@0 1543 for (int i=0; i<count; i++) {
duke@0 1544 (*object_result_ptr)[i] = _object_results->at(i);
duke@0 1545 }
duke@0 1546 }
duke@0 1547
duke@0 1548 // if tag_result_ptr is not NULL then allocate the result and copy
duke@0 1549 // in the tag values.
duke@0 1550 if (tag_result_ptr != NULL) {
duke@0 1551 error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
duke@0 1552 if (error != JVMTI_ERROR_NONE) {
duke@0 1553 if (object_result_ptr != NULL) {
duke@0 1554 _env->Deallocate((unsigned char*)*object_result_ptr);
duke@0 1555 }
duke@0 1556 return error;
duke@0 1557 }
duke@0 1558 for (int i=0; i<count; i++) {
duke@0 1559 (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
duke@0 1560 }
duke@0 1561 }
duke@0 1562
duke@0 1563 *count_ptr = count;
duke@0 1564 return JVMTI_ERROR_NONE;
duke@0 1565 }
duke@0 1566 };
duke@0 1567
duke@0 1568 // return the list of objects with the specified tags
duke@0 1569 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
duke@0 1570 jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
duke@0 1571
duke@0 1572 TagObjectCollector collector(env(), tags, count);
duke@0 1573 {
duke@0 1574 // iterate over all tagged objects
duke@0 1575 MutexLocker ml(lock());
duke@0 1576 entry_iterate(&collector);
duke@0 1577 }
duke@0 1578 return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
duke@0 1579 }
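// Illustrative agent-side sketch (hypothetical code, not part of this file): tags are
// usually set with SetTag and later queried with GetObjectsWithTags, which is backed by
// the collector above. The result arrays are allocated with jvmtiEnv::Allocate, so the
// agent must release them with Deallocate:
//
//   jvmti->SetTag(obj, 42);
//   ...
//   jlong tag = 42;
//   jint count = 0;
//   jobject* objects = NULL;
//   jlong* tags = NULL;
//   jvmti->GetObjectsWithTags(1, &tag, &count, &objects, &tags);
//   ... use objects[0 .. count-1] and tags[0 .. count-1] ...
//   jvmti->Deallocate((unsigned char*)objects);
//   jvmti->Deallocate((unsigned char*)tags);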
duke@0 1580
duke@0 1581
duke@0 1582 // ObjectMarker is used to support the marking objects when walking the
duke@0 1583 // heap.
duke@0 1584 //
duke@0 1585 // This implementation uses the existing mark bits in an object for
duke@0 1586 // marking. Objects that are marked must later have their headers restored.
duke@0 1587 // As most objects are unlocked and don't have their identity hash computed
duke@0 1588 // we don't have to save their headers. Instead we save the headers that
duke@0 1589 // are "interesting". Later when the headers are restored this implementation
duke@0 1590 // restores all headers to their initial value and then restores the few
duke@0 1591 // objects that had interesting headers.
duke@0 1592 //
duke@0 1593 // Future work: This implementation currently uses growable arrays to save
duke@0 1594 // the oop and header of interesting objects. As an optimization we could
duke@0 1595 // use the same technique as the GC and make use of the unused area
duke@0 1596 // between top() and end().
duke@0 1597 //
duke@0 1598
duke@0 1599 // An ObjectClosure used to restore the mark bits of an object
duke@0 1600 class RestoreMarksClosure : public ObjectClosure {
duke@0 1601 public:
duke@0 1602 void do_object(oop o) {
duke@0 1603 if (o != NULL) {
duke@0 1604 markOop mark = o->mark();
duke@0 1605 if (mark->is_marked()) {
duke@0 1606 o->init_mark();
duke@0 1607 }
duke@0 1608 }
duke@0 1609 }
duke@0 1610 };
duke@0 1611
duke@0 1612 // ObjectMarker provides the mark and visited functions
duke@0 1613 class ObjectMarker : AllStatic {
duke@0 1614 private:
duke@0 1615 // saved headers
duke@0 1616 static GrowableArray<oop>* _saved_oop_stack;
duke@0 1617 static GrowableArray<markOop>* _saved_mark_stack;
dcubed@3815 1618 static bool _needs_reset; // do we need to reset mark bits?
duke@0 1619
duke@0 1620 public:
duke@0 1621 static void init(); // initialize
duke@0 1622 static void done(); // clean-up
duke@0 1623
duke@0 1624 static inline void mark(oop o); // mark an object
duke@0 1625 static inline bool visited(oop o); // check if object has been visited
dcubed@3815 1626
dcubed@3815 1627 static inline bool needs_reset() { return _needs_reset; }
dcubed@3815 1628 static inline void set_needs_reset(bool v) { _needs_reset = v; }
duke@0 1629 };
duke@0 1630
duke@0 1631 GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
duke@0 1632 GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
dcubed@3815 1633 bool ObjectMarker::_needs_reset = true; // need to reset mark bits by default
duke@0 1634
duke@0 1635 // initialize ObjectMarker - prepares for object marking
duke@0 1636 void ObjectMarker::init() {
duke@0 1637 assert(Thread::current()->is_VM_thread(), "must be VMThread");
duke@0 1638
duke@0 1639 // prepare heap for iteration
duke@0 1640 Universe::heap()->ensure_parsability(false); // no need to retire TLABs
duke@0 1641
duke@0 1642 // create stacks for interesting headers
zgu@6197 1643 _saved_mark_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<markOop>(4000, true);
zgu@6197 1644 _saved_oop_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(4000, true);
duke@0 1645
duke@0 1646 if (UseBiasedLocking) {
duke@0 1647 BiasedLocking::preserve_marks();
duke@0 1648 }
duke@0 1649 }
duke@0 1650
duke@0 1651 // Object marking is done so restore object headers
duke@0 1652 void ObjectMarker::done() {
duke@0 1653 // iterate over all objects and restore the mark bits to
duke@0 1654 // their initial value
duke@0 1655 RestoreMarksClosure blk;
dcubed@3815 1656 if (needs_reset()) {
dcubed@3815 1657 Universe::heap()->object_iterate(&blk);
dcubed@3815 1658 } else {
dcubed@3815 1659 // We don't need to reset mark bits on this call, but reset the
dcubed@3815 1660 // flag to the default for the next call.
dcubed@3815 1661 set_needs_reset(true);
dcubed@3815 1662 }
duke@0 1663
duke@0 1664 // now restore the interesting headers
duke@0 1665 for (int i = 0; i < _saved_oop_stack->length(); i++) {
duke@0 1666 oop o = _saved_oop_stack->at(i);
duke@0 1667 markOop mark = _saved_mark_stack->at(i);
duke@0 1668 o->set_mark(mark);
duke@0 1669 }
duke@0 1670
duke@0 1671 if (UseBiasedLocking) {
duke@0 1672 BiasedLocking::restore_marks();
duke@0 1673 }
duke@0 1674
duke@0 1675 // free the stacks
duke@0 1676 delete _saved_oop_stack;
duke@0 1677 delete _saved_mark_stack;
duke@0 1678 }
duke@0 1679
duke@0 1680 // mark an object
duke@0 1681 inline void ObjectMarker::mark(oop o) {
duke@0 1682 assert(Universe::heap()->is_in(o), "sanity check");
duke@0 1683 assert(!o->mark()->is_marked(), "should only mark an object once");
duke@0 1684
duke@0 1685 // object's mark word
duke@0 1686 markOop mark = o->mark();
duke@0 1687
duke@0 1688 if (mark->must_be_preserved(o)) {
duke@0 1689 _saved_mark_stack->push(mark);
duke@0 1690 _saved_oop_stack->push(o);
duke@0 1691 }
duke@0 1692
duke@0 1693 // mark the object
duke@0 1694 o->set_mark(markOopDesc::prototype()->set_marked());
duke@0 1695 }
duke@0 1696
duke@0 1697 // return true if object is marked
duke@0 1698 inline bool ObjectMarker::visited(oop o) {
duke@0 1699 return o->mark()->is_marked();
duke@0 1700 }
duke@0 1701
duke@0 1702 // Stack allocated class to help ensure that ObjectMarker is used
duke@0 1703 // correctly. Constructor initializes ObjectMarker, destructor calls
duke@0 1704 // ObjectMarker's done() function to restore object headers.
duke@0 1705 class ObjectMarkerController : public StackObj {
duke@0 1706 public:
duke@0 1707 ObjectMarkerController() {
duke@0 1708 ObjectMarker::init();
duke@0 1709 }
duke@0 1710 ~ObjectMarkerController() {
duke@0 1711 ObjectMarker::done();
duke@0 1712 }
duke@0 1713 };
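// The controller is used as a stack-allocated guard around a heap walk, e.g.
// (a minimal sketch of the intended usage pattern):
//
//   {
//     ObjectMarkerController marker;   // ObjectMarker::init() - mark words may be overwritten
//     ... walk the heap, calling ObjectMarker::mark()/visited() ...
//   }                                  // ObjectMarker::done() - headers restored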
duke@0 1714
duke@0 1715
duke@0 1716 // helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
duke@0 1717 // (not performance critical as only used for roots)
duke@0 1718 static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
duke@0 1719 switch (kind) {
duke@0 1720 case JVMTI_HEAP_REFERENCE_JNI_GLOBAL: return JVMTI_HEAP_ROOT_JNI_GLOBAL;
duke@0 1721 case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
duke@0 1722 case JVMTI_HEAP_REFERENCE_MONITOR: return JVMTI_HEAP_ROOT_MONITOR;
duke@0 1723 case JVMTI_HEAP_REFERENCE_STACK_LOCAL: return JVMTI_HEAP_ROOT_STACK_LOCAL;
duke@0 1724 case JVMTI_HEAP_REFERENCE_JNI_LOCAL: return JVMTI_HEAP_ROOT_JNI_LOCAL;
duke@0 1725 case JVMTI_HEAP_REFERENCE_THREAD: return JVMTI_HEAP_ROOT_THREAD;
duke@0 1726 case JVMTI_HEAP_REFERENCE_OTHER: return JVMTI_HEAP_ROOT_OTHER;
duke@0 1727 default: ShouldNotReachHere(); return JVMTI_HEAP_ROOT_OTHER;
duke@0 1728 }
duke@0 1729 }
duke@0 1730
duke@0 1731 // Base class for all heap walk contexts. The base class maintains a flag
duke@0 1732 // to indicate if the context is valid or not.
duke@0 1733 class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
duke@0 1734 private:
duke@0 1735 bool _valid;
duke@0 1736 public:
duke@0 1737 HeapWalkContext(bool valid) { _valid = valid; }
duke@0 1738 void invalidate() { _valid = false; }
duke@0 1739 bool is_valid() const { return _valid; }
duke@0 1740 };
duke@0 1741
duke@0 1742 // A basic heap walk context for the deprecated heap walking functions.
duke@0 1743 // The context for a basic heap walk consists of the callbacks and the
duke@0 1744 // fields used by the referrer caching scheme.
duke@0 1745 class BasicHeapWalkContext: public HeapWalkContext {
duke@0 1746 private:
duke@0 1747 jvmtiHeapRootCallback _heap_root_callback;
duke@0 1748 jvmtiStackReferenceCallback _stack_ref_callback;
duke@0 1749 jvmtiObjectReferenceCallback _object_ref_callback;
duke@0 1750
duke@0 1751 // used for caching
duke@0 1752 oop _last_referrer;
duke@0 1753 jlong _last_referrer_tag;
duke@0 1754
duke@0 1755 public:
duke@0 1756 BasicHeapWalkContext() : HeapWalkContext(false) { }
duke@0 1757
duke@0 1758 BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
duke@0 1759 jvmtiStackReferenceCallback stack_ref_callback,
duke@0 1760 jvmtiObjectReferenceCallback object_ref_callback) :
duke@0 1761 HeapWalkContext(true),
duke@0 1762 _heap_root_callback(heap_root_callback),
duke@0 1763 _stack_ref_callback(stack_ref_callback),
duke@0 1764 _object_ref_callback(object_ref_callback),
duke@0 1765 _last_referrer(NULL),
duke@0 1766 _last_referrer_tag(0) {
duke@0 1767 }
duke@0 1768
duke@0 1769 // accessors
duke@0 1770 jvmtiHeapRootCallback heap_root_callback() const { return _heap_root_callback; }
duke@0 1771 jvmtiStackReferenceCallback stack_ref_callback() const { return _stack_ref_callback; }
duke@0 1772 jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; }
duke@0 1773
duke@0 1774 oop last_referrer() const { return _last_referrer; }
duke@0 1775 void set_last_referrer(oop referrer) { _last_referrer = referrer; }
duke@0 1776 jlong last_referrer_tag() const { return _last_referrer_tag; }
duke@0 1777 void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
duke@0 1778 };
duke@0 1779
duke@0 1780 // The advanced heap walk context for the FollowReferences function.
duke@0 1781 // The context consists of the callbacks and the fields used for filtering.
duke@0 1782 class AdvancedHeapWalkContext: public HeapWalkContext {
duke@0 1783 private:
duke@0 1784 jint _heap_filter;
duke@0 1785 KlassHandle _klass_filter;
duke@0 1786 const jvmtiHeapCallbacks* _heap_callbacks;
duke@0 1787
duke@0 1788 public:
duke@0 1789 AdvancedHeapWalkContext() : HeapWalkContext(false) { }
duke@0 1790
duke@0 1791 AdvancedHeapWalkContext(jint heap_filter,
duke@0 1792 KlassHandle klass_filter,
duke@0 1793 const jvmtiHeapCallbacks* heap_callbacks) :
duke@0 1794 HeapWalkContext(true),
duke@0 1795 _heap_filter(heap_filter),
duke@0 1796 _klass_filter(klass_filter),
duke@0 1797 _heap_callbacks(heap_callbacks) {
duke@0 1798 }
duke@0 1799
duke@0 1800 // accessors
duke@0 1801 jint heap_filter() const { return _heap_filter; }
duke@0 1802 KlassHandle klass_filter() const { return _klass_filter; }
duke@0 1803
duke@0 1804 const jvmtiHeapReferenceCallback heap_reference_callback() const {
duke@0 1805 return _heap_callbacks->heap_reference_callback;
duke@0 1806 };
duke@0 1807 const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
duke@0 1808 return _heap_callbacks->primitive_field_callback;
duke@0 1809 }
duke@0 1810 const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
duke@0 1811 return _heap_callbacks->array_primitive_value_callback;
duke@0 1812 }
duke@0 1813 const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
duke@0 1814 return _heap_callbacks->string_primitive_value_callback;
duke@0 1815 }
duke@0 1816 };
duke@0 1817
duke@0 1818 // The CallbackInvoker is a class with static functions that the heap walk can call
duke@0 1819 // into to invoke callbacks. It works in one of two modes. The "basic" mode is
duke@0 1820 // used for the deprecated IterateOverReachableObjects functions. The "advanced"
duke@0 1821 // mode is for the newer FollowReferences function which supports a lot of
duke@0 1822 // additional callbacks.
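// For reference, a basic-mode agent callback (deprecated API) has this shape - a
// hypothetical example, shown only to contrast with the advanced jvmtiHeapCallbacks:
//
//   static jvmtiIterationControl JNICALL ref_cb(jvmtiObjectReferenceKind kind,
//                                               jlong class_tag, jlong size, jlong* tag_ptr,
//                                               jlong referrer_tag, jint referrer_index,
//                                               void* user_data) {
//     return JVMTI_ITERATION_CONTINUE;   // or JVMTI_ITERATION_IGNORE / JVMTI_ITERATION_ABORT
//   }
//   jvmti->IterateOverReachableObjects(NULL, NULL, &ref_cb, NULL);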
duke@0 1823 class CallbackInvoker : AllStatic {
duke@0 1824 private:
duke@0 1825 // heap walk styles
duke@0 1826 enum { basic, advanced };
duke@0 1827 static int _heap_walk_type;
duke@0 1828 static bool is_basic_heap_walk() { return _heap_walk_type == basic; }
duke@0 1829 static bool is_advanced_heap_walk() { return _heap_walk_type == advanced; }
duke@0 1830
duke@0 1831 // context for basic style heap walk
duke@0 1832 static BasicHeapWalkContext _basic_context;
duke@0 1833 static BasicHeapWalkContext* basic_context() {
duke@0 1834 assert(_basic_context.is_valid(), "invalid");
duke@0 1835 return &_basic_context;
duke@0 1836 }
duke@0 1837
duke@0 1838 // context for advanced style heap walk
duke@0 1839 static AdvancedHeapWalkContext _advanced_context;
duke@0 1840 static AdvancedHeapWalkContext* advanced_context() {
duke@0 1841 assert(_advanced_context.is_valid(), "invalid");
duke@0 1842 return &_advanced_context;
duke@0 1843 }
duke@0 1844
duke@0 1845 // context needed for all heap walks
duke@0 1846 static JvmtiTagMap* _tag_map;
duke@0 1847 static const void* _user_data;
duke@0 1848 static GrowableArray<oop>* _visit_stack;
duke@0 1849
duke@0 1850 // accessors
duke@0 1851 static JvmtiTagMap* tag_map() { return _tag_map; }
duke@0 1852 static const void* user_data() { return _user_data; }
duke@0 1853 static GrowableArray<oop>* visit_stack() { return _visit_stack; }
duke@0 1854
duke@0 1855 // if the object hasn't been visited then push it onto the visit stack
duke@0 1856 // so that it will be visited later
duke@0 1857 static inline bool check_for_visit(oop obj) {
duke@0 1858 if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
duke@0 1859 return true;
duke@0 1860 }
duke@0 1861
duke@0 1862 // invoke basic style callbacks
duke@0 1863 static inline bool invoke_basic_heap_root_callback
duke@0 1864 (jvmtiHeapRootKind root_kind, oop obj);
duke@0 1865 static inline bool invoke_basic_stack_ref_callback
duke@0 1866 (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
duke@0 1867 int slot, oop obj);
duke@0 1868 static inline bool invoke_basic_object_reference_callback
duke@0 1869 (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);
duke@0 1870
duke@0 1871 // invoke advanced style callbacks
duke@0 1872 static inline bool invoke_advanced_heap_root_callback
duke@0 1873 (jvmtiHeapReferenceKind ref_kind, oop obj);
duke@0 1874 static inline bool invoke_advanced_stack_ref_callback
duke@0 1875 (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
duke@0 1876 jmethodID method, jlocation bci, jint slot, oop obj);
duke@0 1877 static inline bool invoke_advanced_object_reference_callback
duke@0 1878 (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);
duke@0 1879
duke@0 1880 // used to report the value of primitive fields
duke@0 1881 static inline bool report_primitive_field
duke@0 1882 (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);
duke@0 1883
duke@0 1884 public:
duke@0 1885 // initialize for basic mode
duke@0 1886 static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
duke@0 1887 GrowableArray<oop>* visit_stack,
duke@0 1888 const void* user_data,
duke@0 1889 BasicHeapWalkContext context);
duke@0 1890
duke@0 1891 // initialize for advanced mode
duke@0 1892 static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
duke@0 1893 GrowableArray<oop>* visit_stack,
duke@0 1894 const void* user_data,
duke@0 1895 AdvancedHeapWalkContext context);
duke@0 1896
duke@0 1897 // functions to report roots
duke@0 1898 static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
duke@0 1899 static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
duke@0 1900 jmethodID m, oop o);
duke@0 1901 static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
duke@0 1902 jmethodID method, jlocation bci, jint slot, oop o);
duke@0 1903
duke@0 1904 // functions to report references
duke@0 1905 static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
duke@0 1906 static inline bool report_class_reference(oop referrer, oop referree);
duke@0 1907 static inline bool report_class_loader_reference(oop referrer, oop referree);
duke@0 1908 static inline bool report_signers_reference(oop referrer, oop referree);
duke@0 1909 static inline bool report_protection_domain_reference(oop referrer, oop referree);
duke@0 1910 static inline bool report_superclass_reference(oop referrer, oop referree);
duke@0 1911 static inline bool report_interface_reference(oop referrer, oop referree);
duke@0 1912 static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
duke@0 1913 static inline bool report_field_reference(oop referrer, oop referree, jint slot);
duke@0 1914 static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
duke@0 1915 static inline bool report_primitive_array_values(oop array);
duke@0 1916 static inline bool report_string_value(oop str);
duke@0 1917 static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
duke@0 1918 static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
duke@0 1919 };
duke@0 1920
duke@0 1921 // statics
duke@0 1922 int CallbackInvoker::_heap_walk_type;
duke@0 1923 BasicHeapWalkContext CallbackInvoker::_basic_context;
duke@0 1924 AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
duke@0 1925 JvmtiTagMap* CallbackInvoker::_tag_map;
duke@0 1926 const void* CallbackInvoker::_user_data;
duke@0 1927 GrowableArray<oop>* CallbackInvoker::_visit_stack;
duke@0 1928
duke@0 1929 // initialize for basic heap walk (IterateOverReachableObjects et al)
duke@0 1930 void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
duke@0 1931 GrowableArray<oop>* visit_stack,
duke@0 1932 const void* user_data,
duke@0 1933 BasicHeapWalkContext context) {
duke@0 1934 _tag_map = tag_map;
duke@0 1935 _visit_stack = visit_stack;
duke@0 1936 _user_data = user_data;
duke@0 1937 _basic_context = context;
duke@0 1938 _advanced_context.invalidate(); // will trigger assertion if used
duke@0 1939 _heap_walk_type = basic;
duke@0 1940 }
duke@0 1941
duke@0 1942 // initialize for advanced heap walk (FollowReferences)
duke@0 1943 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
duke@0 1944 GrowableArray<oop>* visit_stack,
duke@0 1945 const void* user_data,
duke@0 1946 AdvancedHeapWalkContext context) {
duke@0 1947 _tag_map = tag_map;
duke@0 1948 _visit_stack = visit_stack;
duke@0 1949 _user_data = user_data;
duke@0 1950 _advanced_context = context;
duke@0 1951 _basic_context.invalidate(); // will trigger assertion if used
duke@0 1952 _heap_walk_type = advanced;
duke@0 1953 }
duke@0 1954
duke@0 1955
duke@0 1956 // invoke basic style heap root callback
duke@0 1957 inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
duke@0 1958 assert(ServiceUtil::visible_oop(obj), "checking");
duke@0 1959
duke@0 1960 // check if heap roots should be reported
duke@0 1961 jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
duke@0 1962 if (cb == NULL) {
duke@0 1963 return check_for_visit(obj);
duke@0 1964 }
duke@0 1965
duke@0 1966 CallbackWrapper wrapper(tag_map(), obj);
duke@0 1967 jvmtiIterationControl control = (*cb)(root_kind,
duke@0 1968 wrapper.klass_tag(),
duke@0 1969 wrapper.obj_size(),
duke@0 1970 wrapper.obj_tag_p(),
duke@0 1971 (void*)user_data());
duke@0 1972 // push root to visit stack when following references
duke@0 1973 if (control == JVMTI_ITERATION_CONTINUE &&
duke@0 1974 basic_context()->object_ref_callback() != NULL) {
duke@0 1975 visit_stack()->push(obj);
duke@0 1976 }
duke@0 1977 return control != JVMTI_ITERATION_ABORT;
duke@0 1978 }
duke@0 1979
duke@0 1980 // invoke basic style stack ref callback
duke@0 1981 inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
duke@0 1982 jlong thread_tag,
duke@0 1983 jint depth,
duke@0 1984 jmethodID method,
duke@0 1985 jint slot,
duke@0 1986 oop obj) {
duke@0 1987 assert(ServiceUtil::visible_oop(obj), "checking");
duke@0 1988
duke@0 1989 // check if stack refs should be reported
duke@0 1990 jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
duke@0 1991 if (cb == NULL) {
duke@0 1992 return check_for_visit(obj);
duke@0 1993 }
duke@0 1994
duke@0 1995 CallbackWrapper wrapper(tag_map(), obj);
duke@0 1996 jvmtiIterationControl control = (*cb)(root_kind,
duke@0 1997 wrapper.klass_tag(),
duke@0 1998 wrapper.obj_size(),
duke@0 1999 wrapper.obj_tag_p(),
duke@0 2000 thread_tag,
duke@0 2001 depth,
duke@0 2002 method,
duke@0 2003 slot,
duke@0 2004 (void*)user_data());
duke@0 2005 // push root to visit stack when following references
duke@0 2006 if (control == JVMTI_ITERATION_CONTINUE &&
duke@0 2007 basic_context()->object_ref_callback() != NULL) {
duke@0 2008 visit_stack()->push(obj);
duke@0 2009 }
duke@0 2010 return control != JVMTI_ITERATION_ABORT;
duke@0 2011 }
duke@0 2012
duke@0 2013 // invoke basic style object reference callback
duke@0 2014 inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
duke@0 2015 oop referrer,
duke@0 2016 oop referree,
duke@0 2017 jint index) {
duke@0 2018
duke@0 2019 assert(ServiceUtil::visible_oop(referrer), "checking");
duke@0 2020 assert(ServiceUtil::visible_oop(referree), "checking");
duke@0 2021
duke@0 2022 BasicHeapWalkContext* context = basic_context();
duke@0 2023
duke@0 2024 // callback requires the referrer's tag. If it's the same referrer
duke@0 2025 // as the last call then we use the cached value.
duke@0 2026 jlong referrer_tag;
duke@0 2027 if (referrer == context->last_referrer()) {
duke@0 2028 referrer_tag = context->last_referrer_tag();
duke@0 2029 } else {
coleenp@6725 2030 referrer_tag = tag_for(tag_map(), referrer);
duke@0 2031 }
duke@0 2032
duke@0 2033 // do the callback
duke@0 2034 CallbackWrapper wrapper(tag_map(), referree);
duke@0 2035 jvmtiObjectReferenceCallback cb = context->object_ref_callback();
duke@0 2036 jvmtiIterationControl control = (*cb)(ref_kind,
duke@0 2037 wrapper.klass_tag(),
duke@0 2038 wrapper.obj_size(),
duke@0 2039 wrapper.obj_tag_p(),
duke@0 2040 referrer_tag,
duke@0 2041 index,
duke@0 2042 (void*)user_data());
duke@0 2043
duke@0 2044 // record referrer and referrer tag. For self-references record the
duke@0 2045 // tag value from the callback as this might differ from referrer_tag.
duke@0 2046 context->set_last_referrer(referrer);
duke@0 2047 if (referrer == referree) {
duke@0 2048 context->set_last_referrer_tag(*wrapper.obj_tag_p());
duke@0 2049 } else {
duke@0 2050 context->set_last_referrer_tag(referrer_tag);
duke@0 2051 }
duke@0 2052
duke@0 2053 if (control == JVMTI_ITERATION_CONTINUE) {
duke@0 2054 return check_for_visit(referree);
duke@0 2055 } else {
duke@0 2056 return control != JVMTI_ITERATION_ABORT;
duke@0 2057 }
duke@0 2058 }
duke@0 2059
duke@0 2060 // invoke advanced style heap root callback
duke@0 2061 inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
duke@0 2062 oop obj) {
duke@0 2063 assert(ServiceUtil::visible_oop(obj), "checking");
duke@0 2064
duke@0 2065 AdvancedHeapWalkContext* context = advanced_context();
duke@0 2066
duke@0 2067 // check that callback is provided
duke@0 2068 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
duke@0 2069 if (cb == NULL) {
duke@0 2070 return check_for_visit(obj);
duke@0 2071 }
duke@0 2072
duke@0 2073 // apply class filter
duke@0 2074 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
duke@0 2075 return check_for_visit(obj);
duke@0 2076 }
duke@0 2077
duke@0 2078 // setup the callback wrapper
duke@0 2079 CallbackWrapper wrapper(tag_map(), obj);
duke@0 2080
duke@0 2081 // apply tag filter
duke@0 2082 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@0 2083 wrapper.klass_tag(),
duke@0 2084 context->heap_filter())) {
duke@0 2085 return check_for_visit(obj);
duke@0 2086 }
duke@0 2087
duke@0 2088 // for arrays we need the length, otherwise -1
duke@0 2089 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
duke@0 2090
duke@0 2091 // invoke the callback
duke@0 2092 jint res = (*cb)(ref_kind,
duke@0 2093 NULL, // referrer info
duke@0 2094 wrapper.klass_tag(),
duke@0 2095 0, // referrer_class_tag is 0 for heap root
duke@0 2096 wrapper.obj_size(),
duke@0 2097 wrapper.obj_tag_p(),
duke@0 2098 NULL, // referrer_tag_p
duke@0 2099 len,
duke@0 2100 (void*)user_data());
duke@0 2101 if (res & JVMTI_VISIT_ABORT) {
duke@0 2102 return false;
duke@0 2103 }
duke@0 2104 if (res & JVMTI_VISIT_OBJECTS) {
duke@0 2105 check_for_visit(obj);
duke@0 2106 }
duke@0 2107 return true;
duke@0 2108 }
duke@0 2109
duke@0 2110 // report a reference from a thread stack to an object
duke@0 2111 inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
duke@0 2112 jlong thread_tag,
duke@0 2113 jlong tid,
duke@0 2114 int depth,
duke@0 2115 jmethodID method,
duke@0 2116 jlocation bci,
duke@0 2117 jint slot,
duke@0 2118 oop obj) {
duke@0 2119 assert(ServiceUtil::visible_oop(obj), "checking");
duke@0 2120
duke@0 2121 AdvancedHeapWalkContext* context = advanced_context();
duke@0 2122
duke@0 2123 // check that callback is provided
duke@0 2124 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
duke@0 2125 if (cb == NULL) {
duke@0 2126 return check_for_visit(obj);
duke@0 2127 }
duke@0 2128
duke@0 2129 // apply class filter
duke@0 2130 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
duke@0 2131 return check_for_visit(obj);
duke@0 2132 }
duke@0 2133
duke@0 2134 // setup the callback wrapper
duke@0 2135 CallbackWrapper wrapper(tag_map(), obj);
duke@0 2136
duke@0 2137 // apply tag filter
duke@0 2138 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@0 2139 wrapper.klass_tag(),
duke@0 2140 context->heap_filter())) {
duke@0 2141 return check_for_visit(obj);
duke@0 2142 }
duke@0 2143
duke@0 2144 // setup the referrer info
duke@0 2145 jvmtiHeapReferenceInfo reference_info;
duke@0 2146 reference_info.stack_local.thread_tag = thread_tag;
duke@0 2147 reference_info.stack_local.thread_id = tid;
duke@0 2148 reference_info.stack_local.depth = depth;
duke@0 2149 reference_info.stack_local.method = method;
duke@0 2150 reference_info.stack_local.location = bci;
duke@0 2151 reference_info.stack_local.slot = slot;
duke@0 2152
duke@0 2153 // for arrays we need the length, otherwise -1
duke@0 2154 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
duke@0 2155
duke@0 2156 // call into the agent
duke@0 2157 int res = (*cb)(ref_kind,
duke@0 2158 &reference_info,
duke@0 2159 wrapper.klass_tag(),
duke@0 2160 0, // referrer_class_tag is 0 for heap root (stack)
duke@0 2161 wrapper.obj_size(),
duke@0 2162 wrapper.obj_tag_p(),
duke@0 2163 NULL, // referrer_tag_p is NULL for root
duke@0 2164 len,
duke@0 2165 (void*)user_data());
duke@0 2166
duke@0 2167 if (res & JVMTI_VISIT_ABORT) {
duke@0 2168 return false;
duke@0 2169 }
duke@0 2170 if (res & JVMTI_VISIT_OBJECTS) {
duke@0 2171 check_for_visit(obj);
duke@0 2172 }
duke@0 2173 return true;
duke@0 2174 }
duke@0 2175
duke@0 2176 // This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
duke@0 2177 // only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
duke@0 2178 #define REF_INFO_MASK ((1 << JVMTI_HEAP_REFERENCE_FIELD) \
duke@0 2179 | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD) \
duke@0 2180 | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
duke@0 2181 | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
duke@0 2182 | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL) \
duke@0 2183 | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
duke@0 2184
duke@0 2185 // invoke the object reference callback to report a reference
duke@0 2186 inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
duke@0 2187 oop referrer,
duke@0 2188 oop obj,
duke@0 2189 jint index)
duke@0 2190 {
duke@0 2191 // the field index is the only valid field in reference_info
duke@0 2192 static jvmtiHeapReferenceInfo reference_info = { 0 };
duke@0 2193
duke@0 2194 assert(ServiceUtil::visible_oop(referrer), "checking");
duke@0 2195 assert(ServiceUtil::visible_oop(obj), "checking");
duke@0 2196
duke@0 2197 AdvancedHeapWalkContext* context = advanced_context();
duke@0 2198
duke@0 2199 // check that callback is provided
duke@0 2200 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
duke@0 2201 if (cb == NULL) {
duke@0 2202 return check_for_visit(obj);
duke@0 2203 }
duke@0 2204
duke@0 2205 // apply class filter
duke@0 2206 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
duke@0 2207 return check_for_visit(obj);
duke@0 2208 }
duke@0 2209
duke@0 2210 // setup the callback wrapper
duke@0 2211 TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
duke@0 2212
duke@0 2213 // apply tag filter
duke@0 2214 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@0 2215 wrapper.klass_tag(),
duke@0 2216 context->heap_filter())) {
duke@0 2217 return check_for_visit(obj);
duke@0 2218 }
duke@0 2219
duke@0 2220 // the field index is the only valid field in reference_info
duke@0 2221 reference_info.field.index = index;
duke@0 2222
duke@0 2223 // for arrays we need the length, otherwise -1
duke@0 2224 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
duke@0 2225
duke@0 2226 // invoke the callback
duke@0 2227 int res = (*cb)(ref_kind,
duke@0 2228 (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
duke@0 2229 wrapper.klass_tag(),
duke@0 2230 wrapper.referrer_klass_tag(),
duke@0 2231 wrapper.obj_size(),
duke@0 2232 wrapper.obj_tag_p(),
duke@0 2233 wrapper.referrer_tag_p(),
duke@0 2234 len,
duke@0 2235 (void*)user_data());
duke@0 2236
duke@0 2237 if (res & JVMTI_VISIT_ABORT) {
duke@0 2238 return false;
duke@0 2239 }
duke@0 2240 if (res & JVMTI_VISIT_OBJECTS) {
duke@0 2241 check_for_visit(obj);
duke@0 2242 }
duke@0 2243 return true;
duke@0 2244 }
duke@0 2245
duke@0 2246 // report a "simple root"
duke@0 2247 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
duke@0 2248 assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
duke@0 2249 kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
duke@0 2250 assert(ServiceUtil::visible_oop(obj), "checking");
duke@0 2251
duke@0 2252 if (is_basic_heap_walk()) {
duke@0 2253 // map to old style root kind
duke@0 2254 jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
duke@0 2255 return invoke_basic_heap_root_callback(root_kind, obj);
duke@0 2256 } else {
duke@0 2257 assert(is_advanced_heap_walk(), "wrong heap walk type");
duke@0 2258 return invoke_advanced_heap_root_callback(kind, obj);
duke@0 2259 }
duke@0 2260 }
duke@0 2261
duke@0 2262
duke@0 2263 // invoke the array primitive value callback
duke@0 2264 inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
duke@0 2265 assert(obj->is_typeArray(), "not a primitive array");
duke@0 2266
duke@0 2267 AdvancedHeapWalkContext* context = advanced_context();
duke@0 2268 assert(context->array_primitive_value_callback() != NULL, "no callback");
duke@0 2269
duke@0 2270 // apply class filter
duke@0 2271 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
duke@0 2272 return true;
duke@0 2273 }
duke@0 2274
duke@0 2275 CallbackWrapper wrapper(tag_map(), obj);
duke@0 2276
duke@0 2277 // apply tag filter
duke@0 2278 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@0 2279 wrapper.klass_tag(),
duke@0 2280 context->heap_filter())) {
duke@0 2281 return true;
duke@0 2282 }
duke@0 2283
duke@0 2284 // invoke the callback
duke@0 2285 int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
duke@0 2286 &wrapper,
duke@0 2287 obj,
duke@0 2288 (void*)user_data());
duke@0 2289 return (!(res & JVMTI_VISIT_ABORT));
duke@0 2290 }
duke@0 2291
duke@0 2292 // invoke the string value callback
duke@0 2293 inline bool CallbackInvoker::report_string_value(oop str) {
never@1142 2294 assert(str->klass() == SystemDictionary::String_klass(), "not a string");
duke@0 2295
duke@0 2296 AdvancedHeapWalkContext* context = advanced_context();
duke@0 2297 assert(context->string_primitive_value_callback() != NULL, "no callback");
duke@0 2298
duke@0 2299 // apply class filter
duke@0 2300 if (is_filtered_by_klass_filter(str, context->klass_filter())) {
duke@0 2301 return true;
duke@0 2302 }
duke@0 2303
duke@0 2304 CallbackWrapper wrapper(tag_map(), str);
duke@0 2305
duke@0 2306 // apply tag filter
duke@0 2307 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@0 2308 wrapper.klass_tag(),
duke@0 2309 context->heap_filter())) {
duke@0 2310 return true;
duke@0 2311 }
duke@0 2312
duke@0 2313 // invoke the callback
duke@0 2314 int res = invoke_string_value_callback(context->string_primitive_value_callback(),
duke@0 2315 &wrapper,
duke@0 2316 str,
duke@0 2317 (void*)user_data());
duke@0 2318 return (!(res & JVMTI_VISIT_ABORT));
duke@0 2319 }
duke@0 2320
duke@0 2321 // invoke the primitive field callback
duke@0 2322 inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
duke@0 2323 oop obj,
duke@0 2324 jint index,
duke@0 2325 address addr,
duke@0 2326 char type)
duke@0 2327 {
duke@0 2328 // for primitive fields only the index will be set
duke@0 2329 static jvmtiHeapReferenceInfo reference_info = { 0 };
duke@0 2330
duke@0 2331 AdvancedHeapWalkContext* context = advanced_context();
duke@0 2332 assert(context->primitive_field_callback() != NULL, "no callback");
duke@0 2333
duke@0 2334 // apply class filter
duke@0 2335 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
duke@0 2336 return true;
duke@0 2337 }
duke@0 2338
duke@0 2339 CallbackWrapper wrapper(tag_map(), obj);
duke@0 2340
duke@0 2341 // apply tag filter
duke@0 2342 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@0 2343 wrapper.klass_tag(),
duke@0 2344 context->heap_filter())) {
duke@0 2345 return true;
duke@0 2346 }
duke@0 2347
duke@0 2348 // the field index in the referrer
duke@0 2349 reference_info.field.index = index;
duke@0 2350
duke@0 2351 // map the type
duke@0 2352 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
duke@0 2353
duke@0 2354 // setup the jvalue
duke@0 2355 jvalue value;
duke@0 2356 copy_to_jvalue(&value, addr, value_type);
duke@0 2357
duke@0 2358 jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
duke@0 2359 int res = (*cb)(ref_kind,
duke@0 2360 &reference_info,
duke@0 2361 wrapper.klass_tag(),
duke@0 2362 wrapper.obj_tag_p(),
duke@0 2363 value,
duke@0 2364 value_type,
duke@0 2365 (void*)user_data());
duke@0 2366 return (!(res & JVMTI_VISIT_ABORT));
duke@0 2367 }
duke@0 2368
duke@0 2369
duke@0 2370 // instance field
duke@0 2371 inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
duke@0 2372 jint index,
duke@0 2373 address value,
duke@0 2374 char type) {
duke@0 2375 return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
duke@0 2376 obj,
duke@0 2377 index,
duke@0 2378 value,
duke@0 2379 type);
duke@0 2380 }
duke@0 2381
duke@0 2382 // static field
duke@0 2383 inline bool CallbackInvoker::report_primitive_static_field(oop obj,
duke@0 2384 jint index,
duke@0 2385 address value,
duke@0 2386 char type) {
duke@0 2387 return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
duke@0 2388 obj,
duke@0 2389 index,
duke@0 2390 value,
duke@0 2391 type);
duke@0 2392 }
duke@0 2393
duke@0 2394 // report a JNI local (root object) to the profiler
duke@0 2395 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
duke@0 2396 if (is_basic_heap_walk()) {
duke@0 2397 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
duke@0 2398 thread_tag,
duke@0 2399 depth,
duke@0 2400 m,
duke@0 2401 -1,
duke@0 2402 obj);
duke@0 2403 } else {
duke@0 2404 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
duke@0 2405 thread_tag, tid,
duke@0 2406 depth,
duke@0 2407 m,
duke@0 2408 (jlocation)-1,
duke@0 2409 -1,
duke@0 2410 obj);
duke@0 2411 }
duke@0 2412 }
duke@0 2413
duke@0 2414
duke@0 2415 // report a local (stack reference, root object)
duke@0 2416 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
duke@0 2417 jlong tid,
duke@0 2418 jint depth,
duke@0 2419 jmethodID method,
duke@0 2420 jlocation bci,
duke@0 2421 jint slot,
duke@0 2422 oop obj) {
duke@0 2423 if (is_basic_heap_walk()) {
duke@0 2424 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
duke@0 2425 thread_tag,
duke@0 2426 depth,
duke@0 2427 method,
duke@0 2428 slot,
duke@0 2429 obj);
duke@0 2430 } else {
duke@0 2431 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
duke@0 2432 thread_tag,
duke@0 2433 tid,
duke@0 2434 depth,
duke@0 2435 method,
duke@0 2436 bci,
duke@0 2437 slot,
duke@0 2438 obj);
duke@0 2439 }
duke@0 2440 }
duke@0 2441
duke@0 2442 // report an object referencing a class.
duke@0 2443 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
duke@0 2444 if (is_basic_heap_walk()) {
duke@0 2445 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
duke@0 2446 } else {
duke@0 2447 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
duke@0 2448 }
duke@0 2449 }
duke@0 2450
duke@0 2451 // report a class referencing its class loader.
duke@0 2452 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
duke@0 2453 if (is_basic_heap_walk()) {
duke@0 2454 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
duke@0 2455 } else {
duke@0 2456 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
duke@0 2457 }
duke@0 2458 }
duke@0 2459
duke@0 2460 // report a class referencing its signers.
duke@0 2461 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
duke@0 2462 if (is_basic_heap_walk()) {
duke@0 2463 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
duke@0 2464 } else {
duke@0 2465 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
duke@0 2466 }
duke@0 2467 }
duke@0 2468
duke@0 2469 // report a class referencing its protection domain.
duke@0 2470 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
duke@0 2471 if (is_basic_heap_walk()) {
duke@0 2472 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
duke@0 2473 } else {
duke@0 2474 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
duke@0 2475 }
duke@0 2476 }
duke@0 2477
duke@0 2478 // report a class referencing its superclass.
duke@0 2479 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
duke@0 2480 if (is_basic_heap_walk()) {
duke@0 2481 // Send this to be consistent with past implementation
duke@0 2482 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
duke@0 2483 } else {
duke@0 2484 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
duke@0 2485 }
duke@0 2486 }
duke@0 2487
duke@0 2488 // report a class referencing one of its interfaces.
duke@0 2489 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
duke@0 2490 if (is_basic_heap_walk()) {
duke@0 2491 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
duke@0 2492 } else {
duke@0 2493 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
duke@0 2494 }
duke@0 2495 }
duke@0 2496
duke@0 2497 // report a class referencing one of its static fields.
duke@0 2498 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
duke@0 2499 if (is_basic_heap_walk()) {
duke@0 2500 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
duke@0 2501 } else {
duke@0 2502 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
duke@0 2503 }
duke@0 2504 }
duke@0 2505
duke@0 2506 // report an array referencing an element object
duke@0 2507 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
duke@0 2508 if (is_basic_heap_walk()) {
duke@0 2509 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
duke@0 2510 } else {
duke@0 2511 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
duke@0 2512 }
duke@0 2513 }
duke@0 2514
duke@0 2515 // report an object referencing an instance field object
duke@0 2516 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
duke@0 2517 if (is_basic_heap_walk()) {
duke@0 2518 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
duke@0 2519 } else {
duke@0 2520 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
duke@0 2521 }
duke@0 2522 }
duke@0 2523
duke@0 2524 // report a class referencing one of its constant pool entries
duke@0 2525 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
duke@0 2526 if (is_basic_heap_walk()) {
duke@0 2527 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
duke@0 2528 } else {
duke@0 2529 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
duke@0 2530 }
duke@0 2531 }
duke@0 2532
duke@0 2533 // A supporting closure used to process simple roots
duke@0 2534 class SimpleRootsClosure : public OopClosure {
duke@0 2535 private:
duke@0 2536 jvmtiHeapReferenceKind _kind;
duke@0 2537 bool _continue;
duke@0 2538
duke@0 2539 jvmtiHeapReferenceKind root_kind() { return _kind; }
duke@0 2540
duke@0 2541 public:
duke@0 2542 void set_kind(jvmtiHeapReferenceKind kind) {
duke@0 2543 _kind = kind;
duke@0 2544 _continue = true;
duke@0 2545 }
duke@0 2546
duke@0 2547 inline bool stopped() {
duke@0 2548 return !_continue;
duke@0 2549 }
duke@0 2550
duke@0 2551 void do_oop(oop* obj_p) {
duke@0 2552 // iteration has terminated
duke@0 2553 if (stopped()) {
duke@0 2554 return;
duke@0 2555 }
duke@0 2556
duke@0 2557 // ignore null or deleted handles
duke@0 2558 oop o = *obj_p;
duke@0 2559 if (o == NULL || o == JNIHandles::deleted_handle()) {
duke@0 2560 return;
duke@0 2561 }
duke@0 2562
stefank@6738 2563 assert(Universe::heap()->is_in_reserved(o), "should be impossible");
stefank@6738 2564
duke@0 2565 jvmtiHeapReferenceKind kind = root_kind();
stefank@6738 2566 if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
duke@0 2567 // SystemDictionary::always_strong_oops_do reports the application
duke@0 2568 // class loader as a root. We want this root to be reported as
duke@0 2569 // a root kind of "OTHER" rather than "SYSTEM_CLASS".
stefank@6738 2570 if (!o->is_instanceMirror()) {
duke@0 2571 kind = JVMTI_HEAP_REFERENCE_OTHER;
duke@0 2572 }
stefank@6738 2573 }
duke@0 2574
duke@0 2575 // some objects are ignored - in the case of simple
coleenp@2177 2576 // roots it's mostly Symbol*s that we are skipping
duke@0 2577 // here.
duke@0 2578 if (!ServiceUtil::visible_oop(o)) {
duke@0 2579 return;
duke@0 2580 }
duke@0 2581
duke@0 2582 // invoke the callback
duke@0 2583 _continue = CallbackInvoker::report_simple_root(kind, o);
duke@0 2584
duke@0 2585 }
coleenp@113 2586 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
duke@0 2587 };
duke@0 2588
duke@0 2589 // A supporting closure used to process JNI locals
duke@0 2590 class JNILocalRootsClosure : public OopClosure {
duke@0 2591 private:
duke@0 2592 jlong _thread_tag;
duke@0 2593 jlong _tid;
duke@0 2594 jint _depth;
duke@0 2595 jmethodID _method;
duke@0 2596 bool _continue;
duke@0 2597 public:
duke@0 2598 void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
duke@0 2599 _thread_tag = thread_tag;
duke@0 2600 _tid = tid;
duke@0 2601 _depth = depth;
duke@0 2602 _method = method;
duke@0 2603 _continue = true;
duke@0 2604 }
duke@0 2605
duke@0 2606 inline bool stopped() {
duke@0 2607 return !_continue;
duke@0 2608 }
duke@0 2609
duke@0 2610 void do_oop(oop* obj_p) {
duke@0 2611 // iteration has terminated
duke@0 2612 if (stopped()) {
duke@0 2613 return;
duke@0 2614 }
duke@0 2615
duke@0 2616 // ignore null or deleted handles
duke@0 2617 oop o = *obj_p;
duke@0 2618 if (o == NULL || o == JNIHandles::deleted_handle()) {
duke@0 2619 return;
duke@0 2620 }
duke@0 2621
duke@0 2622 if (!ServiceUtil::visible_oop(o)) {
duke@0 2623 return;
duke@0 2624 }
duke@0 2625
duke@0 2626 // invoke the callback
duke@0 2627 _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
duke@0 2628 }
coleenp@113 2629 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
duke@0 2630 };
duke@0 2631
duke@0 2632
duke@0 2633 // A VM operation to iterate over objects that are reachable from
duke@0 2634 // a set of roots or an initial object.
duke@0 2635 //
duke@0 2636 // For VM_HeapWalkOperation the set of roots used is :-
duke@0 2637 //
duke@0 2638 // - All JNI global references
duke@0 2639 // - All inflated monitors
duke@0 2640 // - All classes loaded by the boot class loader (or all classes
duke@0 2641 // in the event that class unloading is disabled)
duke@0 2642 // - All java threads
duke@0 2643 // - For each java thread, all locals and JNI local references
duke@0 2644 // on the thread's execution stack
duke@0 2645 // - All visible/explainable objects from Universe::oops_do
duke@0 2646 //
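// Illustrative agent-side sketch (hypothetical code, not part of this file): this
// operation backs both the deprecated IterateOver* functions and FollowReferences.
// In advanced mode an agent drives it with a heap_reference_callback such as:
//
//   static jint JNICALL follow_cb(jvmtiHeapReferenceKind kind,
//                                 const jvmtiHeapReferenceInfo* info,
//                                 jlong class_tag, jlong referrer_class_tag,
//                                 jlong size, jlong* tag_ptr, jlong* referrer_tag_ptr,
//                                 jint length, void* user_data) {
//     return JVMTI_VISIT_OBJECTS;   // continue and follow references from this object
//   }
//
//   jvmtiHeapCallbacks cb;
//   memset(&cb, 0, sizeof(cb));
//   cb.heap_reference_callback = &follow_cb;
//   jvmti->FollowReferences(0, NULL, NULL /* start from all roots */, &cb, NULL);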
duke@0 2647 class VM_HeapWalkOperation: public VM_Operation {
duke@0 2648 private:
duke@0 2649 enum {
duke@0 2650 initial_visit_stack_size = 4000
duke@0 2651 };
duke@0 2652
duke@0 2653 bool _is_advanced_heap_walk; // indicates FollowReferences
duke@0 2654 JvmtiTagMap* _tag_map;
duke@0 2655 Handle _initial_object;
duke@0 2656 GrowableArray<oop>* _visit_stack; // the visit stack
duke@0 2657
duke@0 2658 bool _collecting_heap_roots; // are we collecting roots
duke@0 2659 bool _following_object_refs; // are we following object references
duke@0 2660
duke@0 2661 bool _reporting_primitive_fields; // optional reporting
duke@0 2662 bool _reporting_primitive_array_values;
duke@0 2663 bool _reporting_string_values;
duke@0 2664
duke@0 2665 GrowableArray<oop>* create_visit_stack() {
zgu@6197 2666 return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true);
duke@0 2667 }
duke@0 2668
duke@0 2669 // accessors
duke@0 2670 bool is_advanced_heap_walk() const { return _is_advanced_heap_walk; }
duke@0 2671 JvmtiTagMap* tag_map() const { return _tag_map; }
duke@0 2672 Handle initial_object() const { return _initial_object; }
duke@0 2673
duke@0 2674 bool is_following_references() const { return _following_object_refs; }
duke@0 2675
duke@0 2676 bool is_reporting_primitive_fields() const { return _reporting_primitive_fields; }
duke@0 2677 bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
duke@0 2678 bool is_reporting_string_values() const { return _reporting_string_values; }
duke@0 2679
duke@0 2680 GrowableArray<oop>* visit_stack() const { return _visit_stack; }
duke@0 2681
duke@0 2682 // iterate over the various object types
duke@0 2683 inline bool iterate_over_array(oop o);
duke@0 2684 inline bool iterate_over_type_array(oop o);
coleenp@6725 2685 inline bool iterate_over_class(oop o);
duke@0 2686 inline bool iterate_over_object(oop o);
duke@0 2687
duke@0 2688 // root collection
duke@0 2689 inline bool collect_simple_roots();
duke@0 2690 inline bool collect_stack_roots();
duke@0 2691 inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);
duke@0 2692
duke@0 2693 // visit an object
duke@0 2694 inline bool visit(oop o);
duke@0 2695
duke@0 2696 public:
duke@0 2697 VM_HeapWalkOperation(JvmtiTagMap* tag_map,
duke@0 2698 Handle initial_object,
duke@0 2699 BasicHeapWalkContext callbacks,
duke@0 2700 const void* user_data);
duke@0 2701
duke@0 2702 VM_HeapWalkOperation(JvmtiTagMap* tag_map,
duke@0 2703 Handle initial_object,
duke@0 2704 AdvancedHeapWalkContext callbacks,
duke@0 2705 const void* user_data);
duke@0 2706
duke@0 2707 ~VM_HeapWalkOperation();
duke@0 2708
duke@0 2709 VMOp_Type type() const { return VMOp_HeapWalkOperation; }
duke@0 2710 void doit();
duke@0 2711 };
duke@0 2712
duke@0 2713
duke@0 2714 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
duke@0 2715 Handle initial_object,
duke@0 2716 BasicHeapWalkContext callbacks,
duke@0 2717 const void* user_data) {
duke@0 2718 _is_advanced_heap_walk = false;
duke@0 2719 _tag_map = tag_map;
duke@0 2720 _initial_object = initial_object;
duke@0 2721 _following_object_refs = (callbacks.object_ref_callback() != NULL);
duke@0 2722 _reporting_primitive_fields = false;
duke@0 2723 _reporting_primitive_array_values = false;
duke@0 2724 _reporting_string_values = false;
duke@0 2725 _visit_stack = create_visit_stack();
duke@0 2726
duke@0 2727
duke@0 2728 CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
duke@0 2729 }
duke@0 2730
duke@0 2731 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
duke@0 2732 Handle initial_object,
duke@0 2733 AdvancedHeapWalkContext callbacks,
duke@0 2734 const void* user_data) {
duke@0 2735 _is_advanced_heap_walk = true;
duke@0 2736 _tag_map = tag_map;
duke@0 2737 _initial_object = initial_object;
duke@0 2738 _following_object_refs = true;
duke@0 2739 _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);
duke@0 2740 _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);
duke@0 2741 _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);
duke@0 2742 _visit_stack = create_visit_stack();
duke@0 2743
duke@0 2744 CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
duke@0 2745 }
duke@0 2746
duke@0 2747 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
duke@0 2748 if (_following_object_refs) {
duke@0 2749 assert(_visit_stack != NULL, "checking");
duke@0 2750 delete _visit_stack;
duke@0 2751 _visit_stack = NULL;
duke@0 2752 }
duke@0 2753 }
duke@0 2754
duke@0 2755 // an array references its class and has a reference to
duke@0 2756 // each element in the array
duke@0 2757 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
duke@0 2758 objArrayOop array = objArrayOop(o);
duke@0 2759
duke@0 2760 // array reference to its class
coleenp@6831 2761 oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror();
duke@0 2762 if (!CallbackInvoker::report_class_reference(o, mirror)) {
duke@0 2763 return false;
duke@0 2764 }
duke@0 2765
duke@0 2766 // iterate over the array and report each reference to a
duke@0 2767 // non-null element
duke@0 2768 for (int index=0; index<array->length(); index++) {
duke@0 2769 oop elem = array->obj_at(index);
duke@0 2770 if (elem == NULL) {
duke@0 2771 continue;
duke@0 2772 }
duke@0 2773
duke@0 2774 // report the array reference o[index] = elem
duke@0 2775 if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
duke@0 2776 return false;
duke@0 2777 }
duke@0 2778 }
duke@0 2779 return true;
duke@0 2780 }
duke@0 2781
duke@0 2782 // a type array references its class
duke@0 2783 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
coleenp@6725 2784 Klass* k = o->klass();
hseigel@6983 2785 oop mirror = k->java_mirror();
duke@0 2786 if (!CallbackInvoker::report_class_reference(o, mirror)) {
duke@0 2787 return false;
duke@0 2788 }
duke@0 2789
duke@0 2790 // report the array contents if required
duke@0 2791 if (is_reporting_primitive_array_values()) {
duke@0 2792 if (!CallbackInvoker::report_primitive_array_values(o)) {
duke@0 2793 return false;
duke@0 2794 }
duke@0 2795 }
duke@0 2796 return true;
duke@0 2797 }
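
The primitive array contents are only reported when the agent registered an array_primitive_value_callback (see the constructor flags above). A minimal sketch of such a callback, assuming the caller passes a counter through user_data:

#include <jvmti.h>

// Called once per primitive array when primitive array value reporting is
// enabled; elements points at element_count values of element_type.
static jint JNICALL prim_array_cb(jlong class_tag, jlong size, jlong* tag_ptr,
                                  jint element_count,
                                  jvmtiPrimitiveType element_type,
                                  const void* elements, void* user_data) {
  long* total = (long*)user_data;   // hypothetical accumulator
  *total += element_count;
  return JVMTI_VISIT_OBJECTS;
}
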
duke@0 2798
henryjen@15505 2799 #ifdef ASSERT
duke@0 2800 // verify that a static oop field is in range
coleenp@6725 2801 static inline bool verify_static_oop(InstanceKlass* ik,
never@2376 2802 oop mirror, int offset) {
never@2376 2803 address obj_p = (address)mirror + offset;
coleenp@6735 2804 address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
never@2376 2805 address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
duke@0 2806 assert(end >= start, "sanity check");
duke@0 2807
duke@0 2808 if (obj_p >= start && obj_p < end) {
duke@0 2809 return true;
duke@0 2810 } else {
duke@0 2811 return false;
duke@0 2812 }
duke@0 2813 }
henryjen@15505 2814 #endif // #ifdef ASSERT
duke@0 2815
duke@0 2816 // a class references its super class, interfaces, class loader, ...
duke@0 2817 // and finally its static fields
coleenp@6725 2818 inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
duke@0 2819 int i;
coleenp@6725 2820 Klass* klass = java_lang_Class::as_Klass(java_class);
duke@0 2821
duke@0 2822 if (klass->oop_is_instance()) {
coleenp@6725 2823 InstanceKlass* ik = InstanceKlass::cast(klass);
duke@0 2824
duke@0 2825     // ignore the class if it hasn't been linked yet
duke@0 2826 if (!ik->is_linked()) {
duke@0 2827 return true;
duke@0 2828 }
duke@0 2829
duke@0 2830 // get the java mirror
duke@0 2831 oop mirror = klass->java_mirror();
duke@0 2832
duke@0 2833 // super (only if something more interesting than java.lang.Object)
coleenp@6725 2834 Klass* java_super = ik->java_super();
never@1142 2835 if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
hseigel@6983 2836 oop super = java_super->java_mirror();
duke@0 2837 if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
duke@0 2838 return false;
duke@0 2839 }
duke@0 2840 }
duke@0 2841
duke@0 2842 // class loader
duke@0 2843 oop cl = ik->class_loader();
duke@0 2844 if (cl != NULL) {
duke@0 2845 if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
duke@0 2846 return false;
duke@0 2847 }
duke@0 2848 }
duke@0 2849
duke@0 2850 // protection domain
duke@0 2851 oop pd = ik->protection_domain();
duke@0 2852 if (pd != NULL) {
duke@0 2853 if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
duke@0 2854 return false;
duke@0 2855 }
duke@0 2856 }
duke@0 2857
duke@0 2858 // signers
duke@0 2859 oop signers = ik->signers();
duke@0 2860 if (signers != NULL) {
duke@0 2861 if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
duke@0 2862 return false;
duke@0 2863 }
duke@0 2864 }
duke@0 2865
duke@0 2866 // references from the constant pool
duke@0 2867 {
minqi@10265 2868 ConstantPool* pool = ik->constants();
duke@0 2869 for (int i = 1; i < pool->length(); i++) {
duke@0 2870 constantTag tag = pool->tag_at(i).value();
duke@0 2871 if (tag.is_string() || tag.is_klass()) {
duke@0 2872 oop entry;
duke@0 2873 if (tag.is_string()) {
duke@0 2874 entry = pool->resolved_string_at(i);
jiangli@6855 2875 // If the entry is non-null it is resolved.
coleenp@6725 2876 if (entry == NULL) continue;
duke@0 2877 } else {
hseigel@6983 2878 entry = pool->resolved_klass_at(i)->java_mirror();
duke@0 2879 }
duke@0 2880 if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
duke@0 2881 return false;
duke@0 2882 }
duke@0 2883 }
duke@0 2884 }
duke@0 2885 }
duke@0 2886
duke@0 2887 // interfaces
duke@0 2888 // (These will already have been reported as references from the constant pool
duke@0 2889 // but are specified by IterateOverReachableObjects and must be reported).
coleenp@6725 2890 Array<Klass*>* interfaces = ik->local_interfaces();
duke@0 2891 for (i = 0; i < interfaces->length(); i++) {
hseigel@6983 2892 oop interf = ((Klass*)interfaces->at(i))->java_mirror();
duke@0 2893 if (interf == NULL) {
duke@0 2894 continue;
duke@0 2895 }
duke@0 2896 if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
duke@0 2897 return false;
duke@0 2898 }
duke@0 2899 }
duke@0 2900
duke@0 2901 // iterate over the static fields
duke@0 2902
coleenp@6725 2903 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
duke@0 2904 for (i=0; i<field_map->field_count(); i++) {
duke@0 2905 ClassFieldDescriptor* field = field_map->field_at(i);
duke@0 2906 char type = field->field_type();
duke@0 2907 if (!is_primitive_field_type(type)) {
never@2376 2908 oop fld_o = mirror->obj_field(field->field_offset());
never@2376 2909 assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
duke@0 2910 if (fld_o != NULL) {
duke@0 2911 int slot = field->field_index();
duke@0 2912 if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
duke@0 2913 delete field_map;
duke@0 2914 return false;
duke@0 2915 }
duke@0 2916 }
duke@0 2917 } else {
duke@0 2918 if (is_reporting_primitive_fields()) {
never@2377 2919 address addr = (address)mirror + field->field_offset();
duke@0 2920 int slot = field->field_index();
duke@0 2921 if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
duke@0 2922 delete field_map;
duke@0 2923 return false;
duke@0 2924 }
duke@0 2925 }
duke@0 2926 }
duke@0 2927 }
duke@0 2928 delete field_map;
duke@0 2929
duke@0 2930 return true;
duke@0 2931 }
duke@0 2932
duke@0 2933 return true;
duke@0 2934 }
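
Agent side, each of the reports made by iterate_over_class() arrives with its own jvmtiHeapReferenceKind; static field and constant pool references additionally carry an index in reference_info. A hedged sketch that only distinguishes the kinds emitted above:

#include <jvmti.h>

static jint JNICALL class_ref_cb(jvmtiHeapReferenceKind kind,
                                 const jvmtiHeapReferenceInfo* info,
                                 jlong class_tag, jlong referrer_class_tag,
                                 jlong size, jlong* tag_ptr,
                                 jlong* referrer_tag_ptr, jint length,
                                 void* user_data) {
  switch (kind) {
    case JVMTI_HEAP_REFERENCE_SUPERCLASS:        // report_superclass_reference()
    case JVMTI_HEAP_REFERENCE_CLASS_LOADER:      // report_class_loader_reference()
    case JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN: // report_protection_domain_reference()
    case JVMTI_HEAP_REFERENCE_SIGNERS:           // report_signers_reference()
    case JVMTI_HEAP_REFERENCE_INTERFACE:         // report_interface_reference()
      break;
    case JVMTI_HEAP_REFERENCE_CONSTANT_POOL:
      // info->constant_pool.index is the constant pool index reported above
      break;
    case JVMTI_HEAP_REFERENCE_STATIC_FIELD:
      // info->field.index is the slot passed to report_static_field_reference()
      break;
    default:
      break;
  }
  return JVMTI_VISIT_OBJECTS;
}
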
duke@0 2935
duke@0 2936 // an object references a class and its instance fields
duke@0 2937 // (static fields are ignored here as we report these as
duke@0 2938 // references from the class).
duke@0 2939 inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
duke@0 2940 // reference to the class
hseigel@6983 2941 if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
duke@0 2942 return false;
duke@0 2943 }
duke@0 2944
duke@0 2945 // iterate over instance fields
duke@0 2946 ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
duke@0 2947 for (int i=0; i<field_map->field_count(); i++) {
duke@0 2948 ClassFieldDescriptor* field = field_map->field_at(i);
duke@0 2949 char type = field->field_type();
duke@0 2950 if (!is_primitive_field_type(type)) {
coleenp@113 2951 oop fld_o = o->obj_field(field->field_offset());
sspitsyn@4929 2952 // ignore any objects that aren't visible to profiler
sspitsyn@4929 2953 if (fld_o != NULL && ServiceUtil::visible_oop(fld_o)) {
coleenp@6725 2954 assert(Universe::heap()->is_in_reserved(fld_o), "unsafe code should not "
coleenp@6725 2955 "have references to Klass* anymore");
duke@0 2956 int slot = field->field_index();
duke@0 2957 if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
duke@0 2958 return false;
duke@0 2959 }
duke@0 2960 }
duke@0 2961 } else {
duke@0 2962 if (is_reporting_primitive_fields()) {
duke@0 2963 // primitive instance field
duke@0 2964 address addr = (address)o + field->field_offset();
duke@0 2965 int slot = field->field_index();
duke@0 2966 if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
duke@0 2967 return false;
duke@0 2968 }
duke@0 2969 }
duke@0 2970 }
duke@0 2971 }
duke@0 2972
duke@0 2973 // if the object is a java.lang.String
duke@0 2974 if (is_reporting_string_values() &&
never@1142 2975 o->klass() == SystemDictionary::String_klass()) {
duke@0 2976 if (!CallbackInvoker::report_string_value(o)) {
duke@0 2977 return false;
duke@0 2978 }
duke@0 2979 }
duke@0 2980 return true;
duke@0 2981 }
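
String contents are reported only when the agent registered a string_primitive_value_callback; it receives the characters as a jchar array. A minimal sketch, assuming the caller passes an accumulator through user_data:

#include <jvmti.h>

// Invoked for each java.lang.String visited when string value reporting is
// enabled; value/value_length describe the string's character contents.
static jint JNICALL string_value_cb(jlong class_tag, jlong size, jlong* tag_ptr,
                                    const jchar* value, jint value_length,
                                    void* user_data) {
  jlong* chars_seen = (jlong*)user_data;  // hypothetical accumulator
  *chars_seen += value_length;
  return JVMTI_VISIT_OBJECTS;
}
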
duke@0 2982
duke@0 2983
dcubed@3816 2984 // Collects all simple (non-stack) roots except for threads;
dcubed@3816 2985 // threads are handled in collect_stack_roots() as an optimization.
duke@0 2986 // If a heap root callback is provided then it is invoked for
duke@0 2987 // each simple root.
duke@0 2988 // If an object reference callback is provided then all simple
duke@0 2989 // roots are pushed onto the marking stack so that they can be
duke@0 2990 // processed later.
duke@0 2991 //
duke@0 2992 inline bool VM_HeapWalkOperation::collect_simple_roots() {
duke@0 2993 SimpleRootsClosure blk;
duke@0 2994
duke@0 2995 // JNI globals
duke@0 2996 blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
duke@0 2997 JNIHandles::oops_do(&blk);
duke@0 2998 if (blk.stopped()) {
duke@0 2999 return false;
duke@0 3000 }
duke@0 3001
duke@0 3002 // Preloaded classes and loader from the system dictionary
duke@0 3003 blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
duke@0 3004 SystemDictionary::always_strong_oops_do(&blk);
stefank@6738 3005 KlassToOopClosure klass_blk(&blk);
stefank@6738 3006 ClassLoaderDataGraph::always_strong_oops_do(&blk, &klass_blk, false);
duke@0 3007 if (blk.stopped()) {
duke@0 3008 return false;
duke@0 3009 }
duke@0 3010
duke@0 3011 // Inflated monitors
duke@0 3012 blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
duke@0 3013 ObjectSynchronizer::oops_do(&blk);
duke@0 3014 if (blk.stopped()) {
duke@0 3015 return false;
duke@0 3016 }
duke@0 3017
dcubed@3816 3018 // threads are now handled in collect_stack_roots()
duke@0 3019
duke@0 3020 // Other kinds of roots maintained by HotSpot
duke@0 3021 // Many of these won't be visible but others (such as instances of important
duke@0 3022 // exceptions) will be visible.
duke@0 3023 blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
duke@0 3024 Universe::oops_do(&blk);
jrose@989 3025
jrose@989 3026 // If there are any non-perm roots in the code cache, visit them.
jrose@989 3027 blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
stefank@20278 3028 CodeBlobToOopClosure look_in_blobs(&blk, !CodeBlobToOopClosure::FixRelocations);
jrose@989 3029 CodeCache::scavenge_root_nmethods_do(&look_in_blobs);
jrose@989 3030
duke@0 3031 return true;
duke@0 3032 }
duke@0 3033
duke@0 3034 // Walk the stack of a given thread and find all references (locals
duke@0 3035 // and JNI calls) and report these as stack references
duke@0 3036 inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
duke@0 3037 JNILocalRootsClosure* blk)
duke@0 3038 {
duke@0 3039 oop threadObj = java_thread->threadObj();
duke@0 3040 assert(threadObj != NULL, "sanity check");
duke@0 3041
duke@0 3042 // only need to get the thread's tag once per thread
duke@0 3043 jlong thread_tag = tag_for(_tag_map, threadObj);
duke@0 3044
duke@0 3045 // also need the thread id
duke@0 3046 jlong tid = java_lang_Thread::thread_id(threadObj);
duke@0 3047
duke@0 3048
duke@0 3049 if (java_thread->has_last_Java_frame()) {
duke@0 3050
duke@0 3051 // vframes are resource allocated
duke@0 3052 Thread* current_thread = Thread::current();
duke@0 3053 ResourceMark rm(current_thread);
duke@0 3054 HandleMark hm(current_thread);
duke@0 3055
duke@0 3056 RegisterMap reg_map(java_thread);
duke@0 3057 frame f = java_thread->last_frame();
duke@0 3058 vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);
duke@0 3059
duke@0 3060 bool is_top_frame = true;
duke@0 3061 int depth = 0;
duke@0 3062 frame* last_entry_frame = NULL;
duke@0 3063
duke@0 3064 while (vf != NULL) {
duke@0 3065 if (vf->is_java_frame()) {
duke@0 3066
duke@0 3067 // java frame (interpreted, compiled, ...)
duke@0 3068 javaVFrame *jvf = javaVFrame::cast(vf);
duke@0 3069
duke@0 3070 // the jmethodID
duke@0 3071 jmethodID method = jvf->method()->jmethod_id();
duke@0 3072
duke@0 3073 if (!(jvf->method()->is_native())) {
duke@0 3074 jlocation bci = (jlocation)jvf->bci();
duke@0 3075 StackValueCollection* locals = jvf->locals();
duke@0 3076 for (int slot=0; slot<locals->size(); slot++) {
duke@0 3077 if (locals->at(slot)->type() == T_OBJECT) {
duke@0 3078 oop o = locals->obj_at(slot)();
duke@0 3079 if (o == NULL) {
duke@0 3080 continue;
duke@0 3081 }
duke@0 3082
duke@0 3083 // stack reference
duke@0 3084 if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
duke@0 3085 bci, slot, o)) {
duke@0 3086 return false;
duke@0 3087 }
duke@0 3088 }
duke@0 3089 }
duke@0 3090 } else {
duke@0 3091 blk->set_context(thread_tag, tid, depth, method);
duke@0 3092 if (is_top_frame) {
duke@0 3093 // JNI locals for the top frame.
duke@0 3094 java_thread->active_handles()->oops_do(blk);
duke@0 3095 } else {
duke@0 3096 if (last_entry_frame != NULL) {
duke@0 3097 // JNI locals for the entry frame
duke@0 3098 assert(last_entry_frame->is_entry_frame(), "checking");
duke@0 3099 last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
duke@0 3100 }
duke@0 3101 }
duke@0 3102 }
duke@0 3103 last_entry_frame = NULL;
duke@0 3104 depth++;
duke@0 3105 } else {
duke@0 3106         // externalVFrame - if this is an entry frame then we report the JNI locals
duke@0 3107 // when we find the corresponding javaVFrame
duke@0 3108 frame* fr = vf->frame_pointer();
duke@0 3109 assert(fr != NULL, "sanity check");
duke@0 3110 if (fr->is_entry_frame()) {
duke@0 3111 last_entry_frame = fr;
duke@0 3112 }
duke@0 3113 }
duke@0 3114
duke@0 3115 vf = vf->sender();
duke@0 3116 is_top_frame = false;
duke@0 3117 }
duke@0 3118 } else {
duke@0 3119 // no last java frame but there may be JNI locals
duke@0 3120 blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
duke@0 3121 java_thread->active_handles()->oops_do(blk);
duke@0 3122 }
duke@0 3123 return true;
duke@0 3124 }
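
The thread tag, thread id, depth, jmethodID, bci and slot collected above are what an agent finds in reference_info->stack_local for JVMTI_HEAP_REFERENCE_STACK_LOCAL; JNI locals arrive as JVMTI_HEAP_REFERENCE_JNI_LOCAL with the smaller jni_local record. A hedged sketch of reading them (printing is illustrative only):

#include <jvmti.h>
#include <stdio.h>

static jint JNICALL stack_root_cb(jvmtiHeapReferenceKind kind,
                                  const jvmtiHeapReferenceInfo* info,
                                  jlong class_tag, jlong referrer_class_tag,
                                  jlong size, jlong* tag_ptr,
                                  jlong* referrer_tag_ptr, jint length,
                                  void* user_data) {
  if (kind == JVMTI_HEAP_REFERENCE_STACK_LOCAL) {
    // Filled from report_stack_ref_root(): thread tag/id, frame depth,
    // jmethodID, bytecode location and local variable slot.
    printf("local: thread_id=%lld depth=%d slot=%d\n",
           (long long)info->stack_local.thread_id,
           (int)info->stack_local.depth,
           (int)info->stack_local.slot);
  } else if (kind == JVMTI_HEAP_REFERENCE_JNI_LOCAL) {
    printf("jni local: depth=%d\n", (int)info->jni_local.depth);
  }
  return JVMTI_VISIT_OBJECTS;
}
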
duke@0 3125
duke@0 3126
dcubed@3816 3127 // Collects the simple roots for all threads and collects all
dcubed@3816 3128 // stack roots - for each thread it walks the execution
duke@0 3129 // stack to find all references and local JNI refs.
duke@0 3130 inline bool VM_HeapWalkOperation::collect_stack_roots() {
duke@0 3131 JNILocalRootsClosure blk;
duke@0 3132 for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
duke@0 3133 oop threadObj = thread->threadObj();
duke@0 3134 if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
dcubed@3816 3135 // Collect the simple root for this thread before we
dcubed@3816 3136 // collect its stack roots
dcubed@3816 3137 if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
dcubed@3816 3138 threadObj)) {
dcubed@3816 3139 return false;
dcubed@3816 3140 }
duke@0 3141 if (!collect_stack_roots(thread, &blk)) {
duke@0 3142 return false;
duke@0 3143 }
duke@0 3144 }
duke@0 3145 }
duke@0 3146 return true;
duke@0 3147 }
duke@0 3148
duke@0 3149 // visit an object
duke@0 3150 // first mark the object as visited
duke@0 3151 // second get all the outbound references from this object (in other words, all
duke@0 3152 // the objects referenced by this object).
duke@0 3153 //
duke@0 3154 bool VM_HeapWalkOperation::visit(oop o) {
duke@0 3155 // mark object as visited
duke@0 3156 assert(!ObjectMarker::visited(o), "can't visit same object more than once");
duke@0 3157 ObjectMarker::mark(o);
duke@0 3158
duke@0 3159 // instance
duke@0 3160 if (o->is_instance()) {
never@1142 3161 if (o->klass() == SystemDictionary::Class_klass()) {
coleenp@6725 3162 if (!java_lang_Class::is_primitive(o)) {
duke@0 3163 // a java.lang.Class
coleenp@6725 3164 return iterate_over_class(o);
duke@0 3165 }
duke@0 3166 } else {
duke@0 3167 return iterate_over_object(o);
duke@0 3168 }
duke@0 3169 }
duke@0 3170
duke@0 3171 // object array
duke@0 3172 if (o->is_objArray()) {
duke@0 3173 return iterate_over_array(o);
duke@0 3174 }
duke@0 3175
duke@0 3176 // type array
duke@0 3177 if (o->is_typeArray()) {
duke@0 3178 return iterate_over_type_array(o);
duke@0 3179 }
duke@0 3180
duke@0 3181 return true;
duke@0 3182 }
duke@0 3183
duke@0 3184 void VM_HeapWalkOperation::doit() {
duke@0 3185 ResourceMark rm;
duke@0 3186 ObjectMarkerController marker;
duke@0 3187 ClassFieldMapCacheMark cm;
duke@0 3188
duke@0 3189 assert(visit_stack()->is_empty(), "visit stack must be empty");
duke@0 3190
duke@0 3191 // the heap walk starts with an initial object or the heap roots
duke@0 3192 if (initial_object().is_null()) {
dcubed@3815 3193 // If either collect_stack_roots() or collect_simple_roots()
dcubed@3815 3194 // returns false at this point, then there are no mark bits
dcubed@3815 3195 // to reset.
dcubed@3815 3196 ObjectMarker::set_needs_reset(false);
dcubed@3815 3197
dcubed@3816 3198 // Calling collect_stack_roots() before collect_simple_roots()
dcubed@3816 3199 // can result in a big performance boost for an agent that is
dcubed@3816 3200 // focused on analyzing references in the thread stacks.
dcubed@3816 3201 if (!collect_stack_roots()) return;
dcubed@3816 3202
duke@0 3203 if (!collect_simple_roots()) return;
dcubed@3815 3204
dcubed@3815 3205 // no early return so enable heap traversal to reset the mark bits
dcubed@3815 3206 ObjectMarker::set_needs_reset(true);
duke@0 3207 } else {
duke@0 3208 visit_stack()->push(initial_object()());
duke@0 3209 }
duke@0 3210
duke@0 3211 // object references required
duke@0 3212 if (is_following_references()) {
duke@0 3213
duke@0 3214 // visit each object until all reachable objects have been
duke@0 3215 // visited or the callback asked to terminate the iteration.
duke@0 3216 while (!visit_stack()->is_empty()) {
duke@0 3217 oop o = visit_stack()->pop();
duke@0 3218 if (!ObjectMarker::visited(o)) {
duke@0 3219 if (!visit(o)) {
duke@0 3220 break;
duke@0 3221 }
duke@0 3222 }
duke@0 3223 }
duke@0 3224 }
duke@0 3225 }
duke@0 3226
duke@0 3227 // iterate over all objects that are reachable from a set of roots
duke@0 3228 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
duke@0 3229 jvmtiStackReferenceCallback stack_ref_callback,
duke@0 3230 jvmtiObjectReferenceCallback object_ref_callback,
duke@0 3231 const void* user_data) {
duke@0 3232 MutexLocker ml(Heap_lock);
duke@0 3233 BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
duke@0 3234 VM_HeapWalkOperation op(this, Handle(), context, user_data);
duke@0 3235 VMThread::execute(&op);
duke@0 3236 }
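
This is the VM side of the original (heap 1.0) IterateOverReachableObjects function. A hedged agent-side call, assuming can_tag_objects was requested at startup; the heap-root and stack-reference callbacks may be left NULL so that only the object reference callback drives reporting:

#include <jvmti.h>

static jvmtiIterationControl JNICALL obj_ref_cb(jvmtiObjectReferenceKind kind,
                                                jlong class_tag, jlong size,
                                                jlong* tag_ptr,
                                                jlong referrer_tag,
                                                jint referrer_index,
                                                void* user_data) {
  return JVMTI_ITERATION_CONTINUE;
}

static jvmtiError walk_from_roots(jvmtiEnv* jvmti) {
  return jvmti->IterateOverReachableObjects(NULL /* heap root cb */,
                                            NULL /* stack ref cb */,
                                            obj_ref_cb, NULL);
}
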
duke@0 3237
duke@0 3238 // iterate over all objects that are reachable from a given object
duke@0 3239 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
duke@0 3240 jvmtiObjectReferenceCallback object_ref_callback,
duke@0 3241 const void* user_data) {
duke@0 3242 oop obj = JNIHandles::resolve(object);
duke@0 3243 Handle initial_object(Thread::current(), obj);
duke@0 3244
duke@0 3245 MutexLocker ml(Heap_lock);
duke@0 3246 BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
duke@0 3247 VM_HeapWalkOperation op(this, initial_object, context, user_data);
duke@0 3248 VMThread::execute(&op);
duke@0 3249 }
duke@0 3250
duke@0 3251 // follow references from an initial object or the GC roots
duke@0 3252 void JvmtiTagMap::follow_references(jint heap_filter,
duke@0 3253 KlassHandle klass,
duke@0 3254 jobject object,
duke@0 3255 const jvmtiHeapCallbacks* callbacks,
duke@0 3256 const void* user_data)
duke@0 3257 {
duke@0 3258 oop obj = JNIHandles::resolve(object);
duke@0 3259 Handle initial_object(Thread::current(), obj);
duke@0 3260
duke@0 3261 MutexLocker ml(Heap_lock);
duke@0 3262 AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
duke@0 3263 VM_HeapWalkOperation op(this, initial_object, context, user_data);
duke@0 3264 VMThread::execute(&op);
duke@0 3265 }
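
follow_references() backs the heap 1.1 FollowReferences entry point, where heap_filter and klass let the agent prune which objects are reported. A short hedged helper, with hypothetical names, that reports only tagged instances of a given class:

#include <jvmti.h>
#include <string.h>

// Hypothetical helper: report only tagged instances of klass, walking from
// the heap roots (initial_object == NULL).
static jvmtiError follow_tagged_instances(jvmtiEnv* jvmti, jclass klass,
                                          jvmtiHeapReferenceCallback ref_cb) {
  jvmtiHeapCallbacks callbacks;
  memset(&callbacks, 0, sizeof(callbacks));
  callbacks.heap_reference_callback = ref_cb;
  return jvmti->FollowReferences(JVMTI_HEAP_FILTER_UNTAGGED, klass,
                                 NULL /* initial_object */, &callbacks, NULL);
}

The filter and class restriction only suppress callback delivery; the traversal itself still follows references through objects that were filtered out.
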
duke@0 3266
duke@0 3267
kamg@2125 3268 void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
dcubed@2145 3269 // No locks during VM bring-up (0 threads) and no safepoints after main
dcubed@2145 3270 // thread creation and before VMThread creation (1 thread); initial GC
dcubed@2145 3271 // verification can happen in that window which gets to here.
dcubed@2145 3272 assert(Threads::number_of_threads() <= 1 ||
dcubed@2145 3273 SafepointSynchronize::is_at_safepoint(),
kamg@2125 3274 "must be executed at a safepoint");
duke@0 3275 if (JvmtiEnv::environments_might_exist()) {
duke@0 3276 JvmtiEnvIterator it;
duke@0 3277 for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
duke@0 3278 JvmtiTagMap* tag_map = env->tag_map();
duke@0 3279 if (tag_map != NULL && !tag_map->is_empty()) {
kamg@2125 3280 tag_map->do_weak_oops(is_alive, f);
duke@0 3281 }
duke@0 3282 }
duke@0 3283 }
duke@0 3284 }
duke@0 3285
kamg@2125 3286 void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) {
duke@0 3287
duke@0 3288 // does this environment have the OBJECT_FREE event enabled
duke@0 3289 bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);
duke@0 3290
duke@0 3291 // counters used for trace message
duke@0 3292 int freed = 0;
duke@0 3293 int moved = 0;
kamg@2125 3294
kamg@2125 3295 JvmtiTagHashmap* hashmap = this->hashmap();
duke@0 3296
duke@0 3297 // reenable sizing (if disabled)
kamg@2125 3298 hashmap->set_resizing_enabled(true);
kamg@2125 3299
kamg@2125 3300 // if the hashmap is empty then we can skip it
kamg@2125 3301 if (hashmap->_entry_count == 0) {
kamg@2125 3302 return;
duke@0 3303 }
duke@0 3304
kamg@2125 3305 // now iterate through each entry in the table
kamg@2125 3306
kamg@2125 3307 JvmtiTagHashmapEntry** table = hashmap->table();
kamg@2125 3308 int size = hashmap->size();
kamg@2125 3309
kamg@2125 3310 JvmtiTagHashmapEntry* delayed_add = NULL;
kamg@2125 3311
kamg@2125 3312 for (int pos = 0; pos < size; ++pos) {
kamg@2125 3313 JvmtiTagHashmapEntry* entry = table[pos];
kamg@2125 3314 JvmtiTagHashmapEntry* prev = NULL;
kamg@2125 3315
kamg@2125 3316 while (entry != NULL) {
kamg@2125 3317 JvmtiTagHashmapEntry* next = entry->next();
kamg@2125 3318
kamg@2125 3319 oop* obj = entry->object_addr();
kamg@2125 3320
kamg@2125 3321 // has object been GC'ed
kamg@2125 3322 if (!is_alive->do_object_b(entry->object())) {
kamg@2125 3323 // grab the tag
kamg@2125 3324 jlong tag = entry->tag();
kamg@2125 3325 guarantee(tag != 0, "checking");
kamg@2125 3326
kamg@2125 3327 // remove GC'ed entry from hashmap and return the
kamg@2125 3328 // entry to the free list
kamg@2125 3329 hashmap->remove(prev, pos, entry);
kamg@2125 3330 destroy_entry(entry);
kamg@2125 3331
kamg@2125 3332 // post the event to the profiler
kamg@2125 3333 if (post_object_free) {
kamg@2125 3334 JvmtiExport::post_object_free(env(), tag);
kamg@2125 3335 }
kamg@2125 3336
kamg@2125 3337 ++freed;
kamg@2125 3338 } else {
kamg@2125 3339 f->do_oop(entry->object_addr());
kamg@2125 3340 oop new_oop = entry->object();
kamg@2125 3341
kamg@2125 3342 // if the object has moved then re-hash it and move its
kamg@2125 3343 // entry to its new location.
kamg@2125 3344 unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size);
kamg@2125 3345 if (new_pos != (unsigned int)pos) {
kamg@2125 3346 if (prev == NULL) {
kamg@2125 3347 table[pos] = next;
kamg@2125 3348 } else {
kamg@2125 3349 prev->set_next(next);
duke@0 3350 }
kamg@2125 3351 if (new_pos < (unsigned int)pos) {
duke@0 3352 entry->set_next(table[new_pos]);
duke@0 3353 table[new_pos] = entry;
duke@0 3354 } else {
kamg@2125 3355           // Delay adding this entry to its new position as we'd end up
kamg@2125 3356 // hitting it again during this iteration.
kamg@2125 3357 entry->set_next(delayed_add);
kamg@2125 3358 delayed_add = entry;
duke@0 3359 }
kamg@2125 3360 moved++;
duke@0 3361 } else {
kamg@2125 3362 // object didn't move
kamg@2125 3363 prev = entry;
duke@0 3364 }
duke@0 3365 }
kamg@2125 3366
kamg@2125 3367 entry = next;
duke@0 3368 }
duke@0 3369 }
duke@0 3370
kamg@2125 3371 // Re-add all the entries which were kept aside
kamg@2125 3372 while (delayed_add != NULL) {
kamg@2125 3373 JvmtiTagHashmapEntry* next = delayed_add->next();
kamg@2125 3374 unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object(), size);
kamg@2125 3375 delayed_add->set_next(table[pos]);
kamg@2125 3376 table[pos] = delayed_add;
kamg@2125 3377 delayed_add = next;
duke@0 3378 }
duke@0 3379
duke@0 3380 // stats
duke@0 3381 if (TraceJVMTIObjectTagging) {
kamg@2125 3382 int post_total = hashmap->_entry_count;
duke@0 3383 int pre_total = post_total + freed;
duke@0 3384
kamg@2125 3385 tty->print_cr("(%d->%d, %d freed, %d total moves)",
kamg@2125 3386 pre_total, post_total, freed, moved);
duke@0 3387 }
duke@0 3388 }
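
post_object_free() above is only reached when the environment enabled JVMTI_EVENT_OBJECT_FREE. A hedged sketch of the matching agent-side setup, assuming the can_generate_object_free_events capability was added in Agent_OnLoad:

#include <jvmti.h>
#include <string.h>

// Receives the tag of each tagged object reclaimed by GC, as posted from
// do_weak_oops() above. JNI and most JVMTI functions must not be used here.
static void JNICALL object_free_cb(jvmtiEnv* jvmti, jlong tag) {
  // bookkeeping only, e.g. remove the tag from an agent-side table
}

static jvmtiError enable_object_free(jvmtiEnv* jvmti) {
  jvmtiEventCallbacks callbacks;
  memset(&callbacks, 0, sizeof(callbacks));
  callbacks.ObjectFree = object_free_cb;
  jvmtiError err = jvmti->SetEventCallbacks(&callbacks, sizeof(callbacks));
  if (err != JVMTI_ERROR_NONE) return err;
  return jvmti->SetEventNotificationMode(JVMTI_ENABLE,
                                         JVMTI_EVENT_OBJECT_FREE, NULL);
}
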