annotate src/share/vm/memory/universe.cpp @ 453:c96030fff130

6684579: SoftReference processing can be made more efficient Summary: For current soft-ref clearing policies, we can decide at marking time if a soft-reference will definitely not be cleared, postponing the decision of whether it will definitely be cleared to the final reference processing phase. This can be especially beneficial in the case of concurrent collectors where the marking is usually concurrent but reference processing is usually not. Reviewed-by: jmasa
author ysr
date Thu, 20 Nov 2008 16:56:09 -0800
parents 1ee8caae33af
children 7d7a7c599c17
rev   line source
duke@0 1 /*
xdono@196 2 * Copyright 1997-2008 Sun Microsystems, Inc. All Rights Reserved.
duke@0 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@0 4 *
duke@0 5 * This code is free software; you can redistribute it and/or modify it
duke@0 6 * under the terms of the GNU General Public License version 2 only, as
duke@0 7 * published by the Free Software Foundation.
duke@0 8 *
duke@0 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@0 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@0 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@0 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@0 13 * accompanied this code).
duke@0 14 *
duke@0 15 * You should have received a copy of the GNU General Public License version
duke@0 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@0 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@0 18 *
duke@0 19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
duke@0 20 * CA 95054 USA or visit www.sun.com if you need additional information or
duke@0 21 * have any questions.
duke@0 22 *
duke@0 23 */
duke@0 24
duke@0 25 # include "incls/_precompiled.incl"
duke@0 26 # include "incls/_universe.cpp.incl"
duke@0 27
duke@0 28 // Known objects
duke@0 29 klassOop Universe::_boolArrayKlassObj = NULL;
duke@0 30 klassOop Universe::_byteArrayKlassObj = NULL;
duke@0 31 klassOop Universe::_charArrayKlassObj = NULL;
duke@0 32 klassOop Universe::_intArrayKlassObj = NULL;
duke@0 33 klassOop Universe::_shortArrayKlassObj = NULL;
duke@0 34 klassOop Universe::_longArrayKlassObj = NULL;
duke@0 35 klassOop Universe::_singleArrayKlassObj = NULL;
duke@0 36 klassOop Universe::_doubleArrayKlassObj = NULL;
duke@0 37 klassOop Universe::_typeArrayKlassObjs[T_VOID+1] = { NULL /*, NULL...*/ };
duke@0 38 klassOop Universe::_objectArrayKlassObj = NULL;
duke@0 39 klassOop Universe::_symbolKlassObj = NULL;
duke@0 40 klassOop Universe::_methodKlassObj = NULL;
duke@0 41 klassOop Universe::_constMethodKlassObj = NULL;
duke@0 42 klassOop Universe::_methodDataKlassObj = NULL;
duke@0 43 klassOop Universe::_klassKlassObj = NULL;
duke@0 44 klassOop Universe::_arrayKlassKlassObj = NULL;
duke@0 45 klassOop Universe::_objArrayKlassKlassObj = NULL;
duke@0 46 klassOop Universe::_typeArrayKlassKlassObj = NULL;
duke@0 47 klassOop Universe::_instanceKlassKlassObj = NULL;
duke@0 48 klassOop Universe::_constantPoolKlassObj = NULL;
duke@0 49 klassOop Universe::_constantPoolCacheKlassObj = NULL;
duke@0 50 klassOop Universe::_compiledICHolderKlassObj = NULL;
duke@0 51 klassOop Universe::_systemObjArrayKlassObj = NULL;
duke@0 52 oop Universe::_int_mirror = NULL;
duke@0 53 oop Universe::_float_mirror = NULL;
duke@0 54 oop Universe::_double_mirror = NULL;
duke@0 55 oop Universe::_byte_mirror = NULL;
duke@0 56 oop Universe::_bool_mirror = NULL;
duke@0 57 oop Universe::_char_mirror = NULL;
duke@0 58 oop Universe::_long_mirror = NULL;
duke@0 59 oop Universe::_short_mirror = NULL;
duke@0 60 oop Universe::_void_mirror = NULL;
duke@0 61 oop Universe::_mirrors[T_VOID+1] = { NULL /*, NULL...*/ };
duke@0 62 oop Universe::_main_thread_group = NULL;
duke@0 63 oop Universe::_system_thread_group = NULL;
duke@0 64 typeArrayOop Universe::_the_empty_byte_array = NULL;
duke@0 65 typeArrayOop Universe::_the_empty_short_array = NULL;
duke@0 66 typeArrayOop Universe::_the_empty_int_array = NULL;
duke@0 67 objArrayOop Universe::_the_empty_system_obj_array = NULL;
duke@0 68 objArrayOop Universe::_the_empty_class_klass_array = NULL;
duke@0 69 objArrayOop Universe::_the_array_interfaces_array = NULL;
duke@0 70 LatestMethodOopCache* Universe::_finalizer_register_cache = NULL;
duke@0 71 LatestMethodOopCache* Universe::_loader_addClass_cache = NULL;
duke@0 72 ActiveMethodOopsCache* Universe::_reflect_invoke_cache = NULL;
duke@0 73 oop Universe::_out_of_memory_error_java_heap = NULL;
duke@0 74 oop Universe::_out_of_memory_error_perm_gen = NULL;
duke@0 75 oop Universe::_out_of_memory_error_array_size = NULL;
duke@0 76 oop Universe::_out_of_memory_error_gc_overhead_limit = NULL;
duke@0 77 objArrayOop Universe::_preallocated_out_of_memory_error_array = NULL;
duke@0 78 volatile jint Universe::_preallocated_out_of_memory_error_avail_count = 0;
duke@0 79 bool Universe::_verify_in_progress = false;
duke@0 80 oop Universe::_null_ptr_exception_instance = NULL;
duke@0 81 oop Universe::_arithmetic_exception_instance = NULL;
duke@0 82 oop Universe::_virtual_machine_error_instance = NULL;
duke@0 83 oop Universe::_vm_exception = NULL;
duke@0 84 oop Universe::_emptySymbol = NULL;
duke@0 85
duke@0 86 // These variables are guarded by FullGCALot_lock.
duke@0 87 debug_only(objArrayOop Universe::_fullgc_alot_dummy_array = NULL;)
duke@0 88 debug_only(int Universe::_fullgc_alot_dummy_next = 0;)
duke@0 89
duke@0 90
duke@0 91 // Heap
duke@0 92 int Universe::_verify_count = 0;
duke@0 93
duke@0 94 int Universe::_base_vtable_size = 0;
duke@0 95 bool Universe::_bootstrapping = false;
duke@0 96 bool Universe::_fully_initialized = false;
duke@0 97
duke@0 98 size_t Universe::_heap_capacity_at_last_gc;
ysr@453 99 size_t Universe::_heap_used_at_last_gc = 0;
duke@0 100
duke@0 101 CollectedHeap* Universe::_collectedHeap = NULL;
coleenp@113 102 address Universe::_heap_base = NULL;
duke@0 103
duke@0 104
duke@0 105 void Universe::basic_type_classes_do(void f(klassOop)) {
duke@0 106 f(boolArrayKlassObj());
duke@0 107 f(byteArrayKlassObj());
duke@0 108 f(charArrayKlassObj());
duke@0 109 f(intArrayKlassObj());
duke@0 110 f(shortArrayKlassObj());
duke@0 111 f(longArrayKlassObj());
duke@0 112 f(singleArrayKlassObj());
duke@0 113 f(doubleArrayKlassObj());
duke@0 114 }
duke@0 115
duke@0 116
duke@0 117 void Universe::system_classes_do(void f(klassOop)) {
duke@0 118 f(symbolKlassObj());
duke@0 119 f(methodKlassObj());
duke@0 120 f(constMethodKlassObj());
duke@0 121 f(methodDataKlassObj());
duke@0 122 f(klassKlassObj());
duke@0 123 f(arrayKlassKlassObj());
duke@0 124 f(objArrayKlassKlassObj());
duke@0 125 f(typeArrayKlassKlassObj());
duke@0 126 f(instanceKlassKlassObj());
duke@0 127 f(constantPoolKlassObj());
duke@0 128 f(systemObjArrayKlassObj());
duke@0 129 }
duke@0 130
duke@0 131 void Universe::oops_do(OopClosure* f, bool do_all) {
duke@0 132
duke@0 133 f->do_oop((oop*) &_int_mirror);
duke@0 134 f->do_oop((oop*) &_float_mirror);
duke@0 135 f->do_oop((oop*) &_double_mirror);
duke@0 136 f->do_oop((oop*) &_byte_mirror);
duke@0 137 f->do_oop((oop*) &_bool_mirror);
duke@0 138 f->do_oop((oop*) &_char_mirror);
duke@0 139 f->do_oop((oop*) &_long_mirror);
duke@0 140 f->do_oop((oop*) &_short_mirror);
duke@0 141 f->do_oop((oop*) &_void_mirror);
duke@0 142
duke@0 143 // It's important to iterate over these guys even if they are null,
duke@0 144 // since that's how shared heaps are restored.
duke@0 145 for (int i = T_BOOLEAN; i < T_VOID+1; i++) {
duke@0 146 f->do_oop((oop*) &_mirrors[i]);
duke@0 147 }
duke@0 148 assert(_mirrors[0] == NULL && _mirrors[T_BOOLEAN - 1] == NULL, "checking");
duke@0 149
duke@0 150 // %%% Consider moving those "shared oops" over here with the others.
duke@0 151 f->do_oop((oop*)&_boolArrayKlassObj);
duke@0 152 f->do_oop((oop*)&_byteArrayKlassObj);
duke@0 153 f->do_oop((oop*)&_charArrayKlassObj);
duke@0 154 f->do_oop((oop*)&_intArrayKlassObj);
duke@0 155 f->do_oop((oop*)&_shortArrayKlassObj);
duke@0 156 f->do_oop((oop*)&_longArrayKlassObj);
duke@0 157 f->do_oop((oop*)&_singleArrayKlassObj);
duke@0 158 f->do_oop((oop*)&_doubleArrayKlassObj);
duke@0 159 f->do_oop((oop*)&_objectArrayKlassObj);
duke@0 160 {
duke@0 161 for (int i = 0; i < T_VOID+1; i++) {
duke@0 162 if (_typeArrayKlassObjs[i] != NULL) {
duke@0 163 assert(i >= T_BOOLEAN, "checking");
duke@0 164 f->do_oop((oop*)&_typeArrayKlassObjs[i]);
duke@0 165 } else if (do_all) {
duke@0 166 f->do_oop((oop*)&_typeArrayKlassObjs[i]);
duke@0 167 }
duke@0 168 }
duke@0 169 }
duke@0 170 f->do_oop((oop*)&_symbolKlassObj);
duke@0 171 f->do_oop((oop*)&_methodKlassObj);
duke@0 172 f->do_oop((oop*)&_constMethodKlassObj);
duke@0 173 f->do_oop((oop*)&_methodDataKlassObj);
duke@0 174 f->do_oop((oop*)&_klassKlassObj);
duke@0 175 f->do_oop((oop*)&_arrayKlassKlassObj);
duke@0 176 f->do_oop((oop*)&_objArrayKlassKlassObj);
duke@0 177 f->do_oop((oop*)&_typeArrayKlassKlassObj);
duke@0 178 f->do_oop((oop*)&_instanceKlassKlassObj);
duke@0 179 f->do_oop((oop*)&_constantPoolKlassObj);
duke@0 180 f->do_oop((oop*)&_constantPoolCacheKlassObj);
duke@0 181 f->do_oop((oop*)&_compiledICHolderKlassObj);
duke@0 182 f->do_oop((oop*)&_systemObjArrayKlassObj);
duke@0 183 f->do_oop((oop*)&_the_empty_byte_array);
duke@0 184 f->do_oop((oop*)&_the_empty_short_array);
duke@0 185 f->do_oop((oop*)&_the_empty_int_array);
duke@0 186 f->do_oop((oop*)&_the_empty_system_obj_array);
duke@0 187 f->do_oop((oop*)&_the_empty_class_klass_array);
duke@0 188 f->do_oop((oop*)&_the_array_interfaces_array);
duke@0 189 _finalizer_register_cache->oops_do(f);
duke@0 190 _loader_addClass_cache->oops_do(f);
duke@0 191 _reflect_invoke_cache->oops_do(f);
duke@0 192 f->do_oop((oop*)&_out_of_memory_error_java_heap);
duke@0 193 f->do_oop((oop*)&_out_of_memory_error_perm_gen);
duke@0 194 f->do_oop((oop*)&_out_of_memory_error_array_size);
duke@0 195 f->do_oop((oop*)&_out_of_memory_error_gc_overhead_limit);
duke@0 196 if (_preallocated_out_of_memory_error_array != (oop)NULL) { // NULL when DumpSharedSpaces
duke@0 197 f->do_oop((oop*)&_preallocated_out_of_memory_error_array);
duke@0 198 }
duke@0 199 f->do_oop((oop*)&_null_ptr_exception_instance);
duke@0 200 f->do_oop((oop*)&_arithmetic_exception_instance);
duke@0 201 f->do_oop((oop*)&_virtual_machine_error_instance);
duke@0 202 f->do_oop((oop*)&_main_thread_group);
duke@0 203 f->do_oop((oop*)&_system_thread_group);
duke@0 204 f->do_oop((oop*)&_vm_exception);
duke@0 205 f->do_oop((oop*)&_emptySymbol);
duke@0 206 debug_only(f->do_oop((oop*)&_fullgc_alot_dummy_array);)
duke@0 207 }
duke@0 208
duke@0 209
duke@0 210 void Universe::check_alignment(uintx size, uintx alignment, const char* name) {
duke@0 211 if (size < alignment || size % alignment != 0) {
duke@0 212 ResourceMark rm;
duke@0 213 stringStream st;
duke@0 214 st.print("Size of %s (%ld bytes) must be aligned to %ld bytes", name, size, alignment);
duke@0 215 char* error = st.as_string();
duke@0 216 vm_exit_during_initialization(error);
duke@0 217 }
duke@0 218 }
duke@0 219
duke@0 220
duke@0 221 void Universe::genesis(TRAPS) {
duke@0 222 ResourceMark rm;
duke@0 223 { FlagSetting fs(_bootstrapping, true);
duke@0 224
duke@0 225 { MutexLocker mc(Compile_lock);
duke@0 226
duke@0 227 // determine base vtable size; without that we cannot create the array klasses
duke@0 228 compute_base_vtable_size();
duke@0 229
duke@0 230 if (!UseSharedSpaces) {
duke@0 231 _klassKlassObj = klassKlass::create_klass(CHECK);
duke@0 232 _arrayKlassKlassObj = arrayKlassKlass::create_klass(CHECK);
duke@0 233
duke@0 234 _objArrayKlassKlassObj = objArrayKlassKlass::create_klass(CHECK);
duke@0 235 _instanceKlassKlassObj = instanceKlassKlass::create_klass(CHECK);
duke@0 236 _typeArrayKlassKlassObj = typeArrayKlassKlass::create_klass(CHECK);
duke@0 237
duke@0 238 _symbolKlassObj = symbolKlass::create_klass(CHECK);
duke@0 239
duke@0 240 _emptySymbol = oopFactory::new_symbol("", CHECK);
duke@0 241
duke@0 242 _boolArrayKlassObj = typeArrayKlass::create_klass(T_BOOLEAN, sizeof(jboolean), CHECK);
duke@0 243 _charArrayKlassObj = typeArrayKlass::create_klass(T_CHAR, sizeof(jchar), CHECK);
duke@0 244 _singleArrayKlassObj = typeArrayKlass::create_klass(T_FLOAT, sizeof(jfloat), CHECK);
duke@0 245 _doubleArrayKlassObj = typeArrayKlass::create_klass(T_DOUBLE, sizeof(jdouble), CHECK);
duke@0 246 _byteArrayKlassObj = typeArrayKlass::create_klass(T_BYTE, sizeof(jbyte), CHECK);
duke@0 247 _shortArrayKlassObj = typeArrayKlass::create_klass(T_SHORT, sizeof(jshort), CHECK);
duke@0 248 _intArrayKlassObj = typeArrayKlass::create_klass(T_INT, sizeof(jint), CHECK);
duke@0 249 _longArrayKlassObj = typeArrayKlass::create_klass(T_LONG, sizeof(jlong), CHECK);
duke@0 250
duke@0 251 _typeArrayKlassObjs[T_BOOLEAN] = _boolArrayKlassObj;
duke@0 252 _typeArrayKlassObjs[T_CHAR] = _charArrayKlassObj;
duke@0 253 _typeArrayKlassObjs[T_FLOAT] = _singleArrayKlassObj;
duke@0 254 _typeArrayKlassObjs[T_DOUBLE] = _doubleArrayKlassObj;
duke@0 255 _typeArrayKlassObjs[T_BYTE] = _byteArrayKlassObj;
duke@0 256 _typeArrayKlassObjs[T_SHORT] = _shortArrayKlassObj;
duke@0 257 _typeArrayKlassObjs[T_INT] = _intArrayKlassObj;
duke@0 258 _typeArrayKlassObjs[T_LONG] = _longArrayKlassObj;
duke@0 259
duke@0 260 _methodKlassObj = methodKlass::create_klass(CHECK);
duke@0 261 _constMethodKlassObj = constMethodKlass::create_klass(CHECK);
duke@0 262 _methodDataKlassObj = methodDataKlass::create_klass(CHECK);
duke@0 263 _constantPoolKlassObj = constantPoolKlass::create_klass(CHECK);
duke@0 264 _constantPoolCacheKlassObj = constantPoolCacheKlass::create_klass(CHECK);
duke@0 265
duke@0 266 _compiledICHolderKlassObj = compiledICHolderKlass::create_klass(CHECK);
duke@0 267 _systemObjArrayKlassObj = objArrayKlassKlass::cast(objArrayKlassKlassObj())->allocate_system_objArray_klass(CHECK);
duke@0 268
duke@0 269 _the_empty_byte_array = oopFactory::new_permanent_byteArray(0, CHECK);
duke@0 270 _the_empty_short_array = oopFactory::new_permanent_shortArray(0, CHECK);
duke@0 271 _the_empty_int_array = oopFactory::new_permanent_intArray(0, CHECK);
duke@0 272 _the_empty_system_obj_array = oopFactory::new_system_objArray(0, CHECK);
duke@0 273
duke@0 274 _the_array_interfaces_array = oopFactory::new_system_objArray(2, CHECK);
duke@0 275 _vm_exception = oopFactory::new_symbol("vm exception holder", CHECK);
duke@0 276 } else {
duke@0 277
duke@0 278 FileMapInfo *mapinfo = FileMapInfo::current_info();
duke@0 279 char* buffer = mapinfo->region_base(CompactingPermGenGen::md);
duke@0 280 void** vtbl_list = (void**)buffer;
duke@0 281 init_self_patching_vtbl_list(vtbl_list,
duke@0 282 CompactingPermGenGen::vtbl_list_size);
duke@0 283 }
duke@0 284 }
duke@0 285
duke@0 286 vmSymbols::initialize(CHECK);
duke@0 287
duke@0 288 SystemDictionary::initialize(CHECK);
duke@0 289
duke@0 290 klassOop ok = SystemDictionary::object_klass();
duke@0 291
duke@0 292 if (UseSharedSpaces) {
duke@0 293 // Verify shared interfaces array.
duke@0 294 assert(_the_array_interfaces_array->obj_at(0) ==
duke@0 295 SystemDictionary::cloneable_klass(), "u3");
duke@0 296 assert(_the_array_interfaces_array->obj_at(1) ==
duke@0 297 SystemDictionary::serializable_klass(), "u3");
duke@0 298
duke@0 299 // Verify element klass for system obj array klass
duke@0 300 assert(objArrayKlass::cast(_systemObjArrayKlassObj)->element_klass() == ok, "u1");
duke@0 301 assert(objArrayKlass::cast(_systemObjArrayKlassObj)->bottom_klass() == ok, "u2");
duke@0 302
duke@0 303 // Verify super class for the classes created above
duke@0 304 assert(Klass::cast(boolArrayKlassObj() )->super() == ok, "u3");
duke@0 305 assert(Klass::cast(charArrayKlassObj() )->super() == ok, "u3");
duke@0 306 assert(Klass::cast(singleArrayKlassObj() )->super() == ok, "u3");
duke@0 307 assert(Klass::cast(doubleArrayKlassObj() )->super() == ok, "u3");
duke@0 308 assert(Klass::cast(byteArrayKlassObj() )->super() == ok, "u3");
duke@0 309 assert(Klass::cast(shortArrayKlassObj() )->super() == ok, "u3");
duke@0 310 assert(Klass::cast(intArrayKlassObj() )->super() == ok, "u3");
duke@0 311 assert(Klass::cast(longArrayKlassObj() )->super() == ok, "u3");
duke@0 312 assert(Klass::cast(constantPoolKlassObj() )->super() == ok, "u3");
duke@0 313 assert(Klass::cast(systemObjArrayKlassObj())->super() == ok, "u3");
duke@0 314 } else {
duke@0 315 // Set up shared interfaces array. (Do this before supers are set up.)
duke@0 316 _the_array_interfaces_array->obj_at_put(0, SystemDictionary::cloneable_klass());
duke@0 317 _the_array_interfaces_array->obj_at_put(1, SystemDictionary::serializable_klass());
duke@0 318
duke@0 319 // Set element klass for system obj array klass
duke@0 320 objArrayKlass::cast(_systemObjArrayKlassObj)->set_element_klass(ok);
duke@0 321 objArrayKlass::cast(_systemObjArrayKlassObj)->set_bottom_klass(ok);
duke@0 322
duke@0 323 // Set super class for the classes created above
duke@0 324 Klass::cast(boolArrayKlassObj() )->initialize_supers(ok, CHECK);
duke@0 325 Klass::cast(charArrayKlassObj() )->initialize_supers(ok, CHECK);
duke@0 326 Klass::cast(singleArrayKlassObj() )->initialize_supers(ok, CHECK);
duke@0 327 Klass::cast(doubleArrayKlassObj() )->initialize_supers(ok, CHECK);
duke@0 328 Klass::cast(byteArrayKlassObj() )->initialize_supers(ok, CHECK);
duke@0 329 Klass::cast(shortArrayKlassObj() )->initialize_supers(ok, CHECK);
duke@0 330 Klass::cast(intArrayKlassObj() )->initialize_supers(ok, CHECK);
duke@0 331 Klass::cast(longArrayKlassObj() )->initialize_supers(ok, CHECK);
duke@0 332 Klass::cast(constantPoolKlassObj() )->initialize_supers(ok, CHECK);
duke@0 333 Klass::cast(systemObjArrayKlassObj())->initialize_supers(ok, CHECK);
duke@0 334 Klass::cast(boolArrayKlassObj() )->set_super(ok);
duke@0 335 Klass::cast(charArrayKlassObj() )->set_super(ok);
duke@0 336 Klass::cast(singleArrayKlassObj() )->set_super(ok);
duke@0 337 Klass::cast(doubleArrayKlassObj() )->set_super(ok);
duke@0 338 Klass::cast(byteArrayKlassObj() )->set_super(ok);
duke@0 339 Klass::cast(shortArrayKlassObj() )->set_super(ok);
duke@0 340 Klass::cast(intArrayKlassObj() )->set_super(ok);
duke@0 341 Klass::cast(longArrayKlassObj() )->set_super(ok);
duke@0 342 Klass::cast(constantPoolKlassObj() )->set_super(ok);
duke@0 343 Klass::cast(systemObjArrayKlassObj())->set_super(ok);
duke@0 344 }
duke@0 345
duke@0 346 Klass::cast(boolArrayKlassObj() )->append_to_sibling_list();
duke@0 347 Klass::cast(charArrayKlassObj() )->append_to_sibling_list();
duke@0 348 Klass::cast(singleArrayKlassObj() )->append_to_sibling_list();
duke@0 349 Klass::cast(doubleArrayKlassObj() )->append_to_sibling_list();
duke@0 350 Klass::cast(byteArrayKlassObj() )->append_to_sibling_list();
duke@0 351 Klass::cast(shortArrayKlassObj() )->append_to_sibling_list();
duke@0 352 Klass::cast(intArrayKlassObj() )->append_to_sibling_list();
duke@0 353 Klass::cast(longArrayKlassObj() )->append_to_sibling_list();
duke@0 354 Klass::cast(constantPoolKlassObj() )->append_to_sibling_list();
duke@0 355 Klass::cast(systemObjArrayKlassObj())->append_to_sibling_list();
duke@0 356 } // end of core bootstrapping
duke@0 357
duke@0 358 // Initialize _objectArrayKlass after core bootstrapping to make
duke@0 359 // sure the super class is set up properly for _objectArrayKlass.
duke@0 360 _objectArrayKlassObj = instanceKlass::
duke@0 361 cast(SystemDictionary::object_klass())->array_klass(1, CHECK);
duke@0 362 // Add the class to the class hierarchy manually to make sure that
duke@0 363 // its vtable is initialized after core bootstrapping is completed.
duke@0 364 Klass::cast(_objectArrayKlassObj)->append_to_sibling_list();
duke@0 365
duke@0 366 // Compute is_jdk version flags.
duke@0 367 // Only 1.3 or later has the java.lang.Shutdown class.
duke@0 368 // Only 1.4 or later has the java.lang.CharSequence interface.
duke@0 369 // Only 1.5 or later has the java.lang.management.MemoryUsage class.
kamg@242 370 if (JDK_Version::is_partially_initialized()) {
kamg@242 371 uint8_t jdk_version;
kamg@242 372 klassOop k = SystemDictionary::resolve_or_null(
kamg@242 373 vmSymbolHandles::java_lang_management_MemoryUsage(), THREAD);
duke@0 374 CLEAR_PENDING_EXCEPTION; // ignore exceptions
duke@0 375 if (k == NULL) {
kamg@242 376 k = SystemDictionary::resolve_or_null(
kamg@242 377 vmSymbolHandles::java_lang_CharSequence(), THREAD);
duke@0 378 CLEAR_PENDING_EXCEPTION; // ignore exceptions
duke@0 379 if (k == NULL) {
kamg@242 380 k = SystemDictionary::resolve_or_null(
kamg@242 381 vmSymbolHandles::java_lang_Shutdown(), THREAD);
duke@0 382 CLEAR_PENDING_EXCEPTION; // ignore exceptions
duke@0 383 if (k == NULL) {
kamg@242 384 jdk_version = 2;
duke@0 385 } else {
kamg@242 386 jdk_version = 3;
duke@0 387 }
duke@0 388 } else {
kamg@242 389 jdk_version = 4;
duke@0 390 }
duke@0 391 } else {
kamg@242 392 jdk_version = 5;
duke@0 393 }
kamg@242 394 JDK_Version::fully_initialize(jdk_version);
duke@0 395 }
duke@0 396
duke@0 397 #ifdef ASSERT
duke@0 398 if (FullGCALot) {
duke@0 399 // Allocate an array of dummy objects.
duke@0 400 // We'd like these to be at the bottom of the old generation,
duke@0 401 // so that when we free one and then collect,
duke@0 402 // (almost) the whole heap moves
duke@0 403 // and we find out if we actually update all the oops correctly.
duke@0 404 // But we can't allocate directly in the old generation,
duke@0 405 // so we allocate wherever, and hope that the first collection
duke@0 406 // moves these objects to the bottom of the old generation.
duke@0 407 // We can allocate directly in the permanent generation, so we do.
duke@0 408 int size;
duke@0 409 if (UseConcMarkSweepGC) {
duke@0 410 warning("Using +FullGCALot with concurrent mark sweep gc "
duke@0 411 "will not force all objects to relocate");
duke@0 412 size = FullGCALotDummies;
duke@0 413 } else {
duke@0 414 size = FullGCALotDummies * 2;
duke@0 415 }
duke@0 416 objArrayOop naked_array = oopFactory::new_system_objArray(size, CHECK);
duke@0 417 objArrayHandle dummy_array(THREAD, naked_array);
duke@0 418 int i = 0;
duke@0 419 while (i < size) {
duke@0 420 if (!UseConcMarkSweepGC) {
duke@0 421 // Allocate dummy in old generation
duke@0 422 oop dummy = instanceKlass::cast(SystemDictionary::object_klass())->allocate_instance(CHECK);
duke@0 423 dummy_array->obj_at_put(i++, dummy);
duke@0 424 }
duke@0 425 // Allocate dummy in permanent generation
duke@0 426 oop dummy = instanceKlass::cast(SystemDictionary::object_klass())->allocate_permanent_instance(CHECK);
duke@0 427 dummy_array->obj_at_put(i++, dummy);
duke@0 428 }
duke@0 429 {
duke@0 430 // Only modify the global variable inside the mutex.
duke@0 431 // If we had a race to here, the other dummy_array instances
duke@0 432 // and their elements just get dropped on the floor, which is fine.
duke@0 433 MutexLocker ml(FullGCALot_lock);
duke@0 434 if (_fullgc_alot_dummy_array == NULL) {
duke@0 435 _fullgc_alot_dummy_array = dummy_array();
duke@0 436 }
duke@0 437 }
duke@0 438 assert(i == _fullgc_alot_dummy_array->length(), "just checking");
duke@0 439 }
duke@0 440 #endif
duke@0 441 }
duke@0 442
duke@0 443
duke@0 444 static inline void add_vtable(void** list, int* n, Klass* o, int count) {
duke@0 445 list[(*n)++] = *(void**)&o->vtbl_value();
duke@0 446 guarantee((*n) <= count, "vtable list too small.");
duke@0 447 }
duke@0 448
duke@0 449
duke@0 450 void Universe::init_self_patching_vtbl_list(void** list, int count) {
duke@0 451 int n = 0;
duke@0 452 { klassKlass o; add_vtable(list, &n, &o, count); }
duke@0 453 { arrayKlassKlass o; add_vtable(list, &n, &o, count); }
duke@0 454 { objArrayKlassKlass o; add_vtable(list, &n, &o, count); }
duke@0 455 { instanceKlassKlass o; add_vtable(list, &n, &o, count); }
duke@0 456 { instanceKlass o; add_vtable(list, &n, &o, count); }
duke@0 457 { instanceRefKlass o; add_vtable(list, &n, &o, count); }
duke@0 458 { typeArrayKlassKlass o; add_vtable(list, &n, &o, count); }
duke@0 459 { symbolKlass o; add_vtable(list, &n, &o, count); }
duke@0 460 { typeArrayKlass o; add_vtable(list, &n, &o, count); }
duke@0 461 { methodKlass o; add_vtable(list, &n, &o, count); }
duke@0 462 { constMethodKlass o; add_vtable(list, &n, &o, count); }
duke@0 463 { constantPoolKlass o; add_vtable(list, &n, &o, count); }
duke@0 464 { constantPoolCacheKlass o; add_vtable(list, &n, &o, count); }
duke@0 465 { objArrayKlass o; add_vtable(list, &n, &o, count); }
duke@0 466 { methodDataKlass o; add_vtable(list, &n, &o, count); }
duke@0 467 { compiledICHolderKlass o; add_vtable(list, &n, &o, count); }
duke@0 468 }
duke@0 469
duke@0 470
duke@0 471 class FixupMirrorClosure: public ObjectClosure {
duke@0 472 public:
coleenp@113 473 virtual void do_object(oop obj) {
duke@0 474 if (obj->is_klass()) {
duke@0 475 EXCEPTION_MARK;
duke@0 476 KlassHandle k(THREAD, klassOop(obj));
duke@0 477 // We will never reach the CATCH below since Exceptions::_throw will cause
duke@0 478 // the VM to exit if an exception is thrown during initialization
duke@0 479 java_lang_Class::create_mirror(k, CATCH);
duke@0 480 // This call unconditionally creates a new mirror for k,
duke@0 481 // and links in k's component_mirror field if k is an array.
duke@0 482 // If k is an objArray, k's element type must already have
duke@0 483 // a mirror. In other words, this closure must process
duke@0 484 // the component type of an objArray k before it processes k.
duke@0 485 // This works because the permgen iterator presents arrays
duke@0 486 // and their component types in order of creation.
duke@0 487 }
duke@0 488 }
duke@0 489 };
duke@0 490
duke@0 491 void Universe::initialize_basic_type_mirrors(TRAPS) {
duke@0 492 if (UseSharedSpaces) {
duke@0 493 assert(_int_mirror != NULL, "already loaded");
duke@0 494 assert(_void_mirror == _mirrors[T_VOID], "consistently loaded");
duke@0 495 } else {
duke@0 496
duke@0 497 assert(_int_mirror==NULL, "basic type mirrors already initialized");
duke@0 498 _int_mirror =
duke@0 499 java_lang_Class::create_basic_type_mirror("int", T_INT, CHECK);
duke@0 500 _float_mirror =
duke@0 501 java_lang_Class::create_basic_type_mirror("float", T_FLOAT, CHECK);
duke@0 502 _double_mirror =
duke@0 503 java_lang_Class::create_basic_type_mirror("double", T_DOUBLE, CHECK);
duke@0 504 _byte_mirror =
duke@0 505 java_lang_Class::create_basic_type_mirror("byte", T_BYTE, CHECK);
duke@0 506 _bool_mirror =
duke@0 507 java_lang_Class::create_basic_type_mirror("boolean",T_BOOLEAN, CHECK);
duke@0 508 _char_mirror =
duke@0 509 java_lang_Class::create_basic_type_mirror("char", T_CHAR, CHECK);
duke@0 510 _long_mirror =
duke@0 511 java_lang_Class::create_basic_type_mirror("long", T_LONG, CHECK);
duke@0 512 _short_mirror =
duke@0 513 java_lang_Class::create_basic_type_mirror("short", T_SHORT, CHECK);
duke@0 514 _void_mirror =
duke@0 515 java_lang_Class::create_basic_type_mirror("void", T_VOID, CHECK);
duke@0 516
duke@0 517 _mirrors[T_INT] = _int_mirror;
duke@0 518 _mirrors[T_FLOAT] = _float_mirror;
duke@0 519 _mirrors[T_DOUBLE] = _double_mirror;
duke@0 520 _mirrors[T_BYTE] = _byte_mirror;
duke@0 521 _mirrors[T_BOOLEAN] = _bool_mirror;
duke@0 522 _mirrors[T_CHAR] = _char_mirror;
duke@0 523 _mirrors[T_LONG] = _long_mirror;
duke@0 524 _mirrors[T_SHORT] = _short_mirror;
duke@0 525 _mirrors[T_VOID] = _void_mirror;
duke@0 526 //_mirrors[T_OBJECT] = instanceKlass::cast(_object_klass)->java_mirror();
duke@0 527 //_mirrors[T_ARRAY] = instanceKlass::cast(_object_klass)->java_mirror();
duke@0 528 }
duke@0 529 }
duke@0 530
duke@0 531 void Universe::fixup_mirrors(TRAPS) {
duke@0 532 // Bootstrap problem: all classes gets a mirror (java.lang.Class instance) assigned eagerly,
duke@0 533 // but we cannot do that for classes created before java.lang.Class is loaded. Here we simply
duke@0 534 // walk over permanent objects created so far (mostly classes) and fixup their mirrors. Note
duke@0 535 // that the number of objects allocated at this point is very small.
duke@0 536 assert(SystemDictionary::class_klass_loaded(), "java.lang.Class should be loaded");
duke@0 537 FixupMirrorClosure blk;
duke@0 538 Universe::heap()->permanent_object_iterate(&blk);
duke@0 539 }
duke@0 540
duke@0 541
duke@0 542 static bool has_run_finalizers_on_exit = false;
duke@0 543
duke@0 544 void Universe::run_finalizers_on_exit() {
duke@0 545 if (has_run_finalizers_on_exit) return;
duke@0 546 has_run_finalizers_on_exit = true;
duke@0 547
duke@0 548 // Called on VM exit. This ought to be run in a separate thread.
duke@0 549 if (TraceReferenceGC) tty->print_cr("Callback to run finalizers on exit");
duke@0 550 {
duke@0 551 PRESERVE_EXCEPTION_MARK;
duke@0 552 KlassHandle finalizer_klass(THREAD, SystemDictionary::finalizer_klass());
duke@0 553 JavaValue result(T_VOID);
duke@0 554 JavaCalls::call_static(
duke@0 555 &result,
duke@0 556 finalizer_klass,
duke@0 557 vmSymbolHandles::run_finalizers_on_exit_name(),
duke@0 558 vmSymbolHandles::void_method_signature(),
duke@0 559 THREAD
duke@0 560 );
duke@0 561 // Ignore any pending exceptions
duke@0 562 CLEAR_PENDING_EXCEPTION;
duke@0 563 }
duke@0 564 }
duke@0 565
duke@0 566
duke@0 567 // initialize_vtable could cause gc if
duke@0 568 // 1) we specified true to initialize_vtable and
duke@0 569 // 2) this ran after gc was enabled
duke@0 570 // In case those ever change we use handles for oops
duke@0 571 void Universe::reinitialize_vtable_of(KlassHandle k_h, TRAPS) {
duke@0 572 // init vtable of k and all subclasses
duke@0 573 Klass* ko = k_h()->klass_part();
duke@0 574 klassVtable* vt = ko->vtable();
duke@0 575 if (vt) vt->initialize_vtable(false, CHECK);
duke@0 576 if (ko->oop_is_instance()) {
duke@0 577 instanceKlass* ik = (instanceKlass*)ko;
duke@0 578 for (KlassHandle s_h(THREAD, ik->subklass()); s_h() != NULL; s_h = (THREAD, s_h()->klass_part()->next_sibling())) {
duke@0 579 reinitialize_vtable_of(s_h, CHECK);
duke@0 580 }
duke@0 581 }
duke@0 582 }
duke@0 583
duke@0 584
duke@0 585 void initialize_itable_for_klass(klassOop k, TRAPS) {
duke@0 586 instanceKlass::cast(k)->itable()->initialize_itable(false, CHECK);
duke@0 587 }
duke@0 588
duke@0 589
duke@0 590 void Universe::reinitialize_itables(TRAPS) {
duke@0 591 SystemDictionary::classes_do(initialize_itable_for_klass, CHECK);
duke@0 592
duke@0 593 }
duke@0 594
duke@0 595
duke@0 596 bool Universe::on_page_boundary(void* addr) {
duke@0 597 return ((uintptr_t) addr) % os::vm_page_size() == 0;
duke@0 598 }
duke@0 599
duke@0 600
duke@0 601 bool Universe::should_fill_in_stack_trace(Handle throwable) {
duke@0 602 // never attempt to fill in the stack trace of preallocated errors that do not have
duke@0 603 // backtrace. These errors are kept alive forever and may be "re-used" when all
duke@0 604 // preallocated errors with backtrace have been consumed. Also need to avoid
duke@0 605 // a potential loop which could happen if an out of memory occurs when attempting
duke@0 606 // to allocate the backtrace.
duke@0 607 return ((throwable() != Universe::_out_of_memory_error_java_heap) &&
duke@0 608 (throwable() != Universe::_out_of_memory_error_perm_gen) &&
duke@0 609 (throwable() != Universe::_out_of_memory_error_array_size) &&
duke@0 610 (throwable() != Universe::_out_of_memory_error_gc_overhead_limit));
duke@0 611 }
duke@0 612
duke@0 613
duke@0 614 oop Universe::gen_out_of_memory_error(oop default_err) {
duke@0 615 // generate an out of memory error:
duke@0 616 // - if there is a preallocated error with backtrace available then return it with
duke@0 617 // a filled in stack trace.
duke@0 618 // - if there are no preallocated errors with backtrace available then return
duke@0 619 // an error without backtrace.
duke@0 620 int next;
duke@0 621 if (_preallocated_out_of_memory_error_avail_count > 0) {
duke@0 622 next = (int)Atomic::add(-1, &_preallocated_out_of_memory_error_avail_count);
duke@0 623 assert(next < (int)PreallocatedOutOfMemoryErrorCount, "avail count is corrupt");
duke@0 624 } else {
duke@0 625 next = -1;
duke@0 626 }
duke@0 627 if (next < 0) {
duke@0 628 // all preallocated errors have been used.
duke@0 629 // return default
duke@0 630 return default_err;
duke@0 631 } else {
duke@0 632 // get the error object at the slot and set set it to NULL so that the
duke@0 633 // array isn't keeping it alive anymore.
duke@0 634 oop exc = preallocated_out_of_memory_errors()->obj_at(next);
duke@0 635 assert(exc != NULL, "slot has been used already");
duke@0 636 preallocated_out_of_memory_errors()->obj_at_put(next, NULL);
duke@0 637
duke@0 638 // use the message from the default error
duke@0 639 oop msg = java_lang_Throwable::message(default_err);
duke@0 640 assert(msg != NULL, "no message");
duke@0 641 java_lang_Throwable::set_message(exc, msg);
duke@0 642
duke@0 643 // populate the stack trace and return it.
duke@0 644 java_lang_Throwable::fill_in_stack_trace_of_preallocated_backtrace(exc);
duke@0 645 return exc;
duke@0 646 }
duke@0 647 }
duke@0 648
// Cached value for non_oop_word(); 0 means "not yet computed".
static intptr_t non_oop_bits = 0;

// Returns a word that can never be mistaken for a real oop.
void* Universe::non_oop_word() {
  // Neither the high bits nor the low bits of this value is allowed
  // to look like (respectively) the high or low bits of a real oop.
  //
  // High and low are CPU-specific notions, but low always includes
  // the low-order bit. Since oops are always aligned at least mod 4,
  // setting the low-order bit will ensure that the low half of the
  // word will never look like that of a real oop.
  //
  // Using the OS-supplied non-memory-address word (usually 0 or -1)
  // will take care of the high bits, however many there are.

  // Lazily computed. NOTE(review): the unsynchronized init looks benign
  // because every thread computes the same value — confirm callers agree.
  if (non_oop_bits == 0) {
    non_oop_bits = (intptr_t)os::non_memory_address_word() | 1;
  }

  return (void*)non_oop_bits;
}
duke@0 669
duke@0 670 jint universe_init() {
duke@0 671 assert(!Universe::_fully_initialized, "called after initialize_vtables");
duke@0 672 guarantee(1 << LogHeapWordSize == sizeof(HeapWord),
duke@0 673 "LogHeapWordSize is incorrect.");
duke@0 674 guarantee(sizeof(oop) >= sizeof(HeapWord), "HeapWord larger than oop?");
duke@0 675 guarantee(sizeof(oop) % sizeof(HeapWord) == 0,
coleenp@113 676 "oop size is not not a multiple of HeapWord size");
duke@0 677 TraceTime timer("Genesis", TraceStartupTime);
duke@0 678 GC_locker::lock(); // do not allow gc during bootstrapping
duke@0 679 JavaClasses::compute_hard_coded_offsets();
duke@0 680
duke@0 681 // Get map info from shared archive file.
duke@0 682 if (DumpSharedSpaces)
duke@0 683 UseSharedSpaces = false;
duke@0 684
duke@0 685 FileMapInfo* mapinfo = NULL;
duke@0 686 if (UseSharedSpaces) {
duke@0 687 mapinfo = NEW_C_HEAP_OBJ(FileMapInfo);
duke@0 688 memset(mapinfo, 0, sizeof(FileMapInfo));
duke@0 689
duke@0 690 // Open the shared archive file, read and validate the header. If
duke@0 691 // initialization files, shared spaces [UseSharedSpaces] are
duke@0 692 // disabled and the file is closed.
duke@0 693
duke@0 694 if (mapinfo->initialize()) {
duke@0 695 FileMapInfo::set_current_info(mapinfo);
duke@0 696 } else {
duke@0 697 assert(!mapinfo->is_open() && !UseSharedSpaces,
duke@0 698 "archive file not closed or shared spaces not disabled.");
duke@0 699 }
duke@0 700 }
duke@0 701
duke@0 702 jint status = Universe::initialize_heap();
duke@0 703 if (status != JNI_OK) {
duke@0 704 return status;
duke@0 705 }
duke@0 706
duke@0 707 // We have a heap so create the methodOop caches before
duke@0 708 // CompactingPermGenGen::initialize_oops() tries to populate them.
duke@0 709 Universe::_finalizer_register_cache = new LatestMethodOopCache();
duke@0 710 Universe::_loader_addClass_cache = new LatestMethodOopCache();
duke@0 711 Universe::_reflect_invoke_cache = new ActiveMethodOopsCache();
duke@0 712
duke@0 713 if (UseSharedSpaces) {
duke@0 714
duke@0 715 // Read the data structures supporting the shared spaces (shared
duke@0 716 // system dictionary, symbol table, etc.). After that, access to
duke@0 717 // the file (other than the mapped regions) is no longer needed, and
duke@0 718 // the file is closed. Closing the file does not affect the
duke@0 719 // currently mapped regions.
duke@0 720
duke@0 721 CompactingPermGenGen::initialize_oops();
duke@0 722 mapinfo->close();
duke@0 723
duke@0 724 } else {
duke@0 725 SymbolTable::create_table();
duke@0 726 StringTable::create_table();
duke@0 727 ClassLoader::create_package_info_table();
duke@0 728 }
duke@0 729
duke@0 730 return JNI_OK;
duke@0 731 }
duke@0 732
// Select a collector policy from the -XX:Use*GC flags, construct the matching
// CollectedHeap, initialize it, and perform post-heap setup (compressed-oops
// base, TLAB startup). Returns JNI_OK or the heap's initialization status.
jint Universe::initialize_heap() {

  if (UseParallelGC) {
#ifndef SERIALGC
    Universe::_collectedHeap = new ParallelScavengeHeap();
#else  // SERIALGC
    fatal("UseParallelGC not supported in java kernel vm.");
#endif // SERIALGC

  } else if (UseG1GC) {
#ifndef SERIALGC
    G1CollectorPolicy* g1p = new G1CollectorPolicy_BestRegionsFirst();
    G1CollectedHeap* g1h = new G1CollectedHeap(g1p);
    Universe::_collectedHeap = g1h;
#else  // SERIALGC
    fatal("UseG1GC not supported in java kernel vm.");
#endif // SERIALGC

  } else {
    // Generational heap: choose the policy, then build a GenCollectedHeap.
    GenCollectorPolicy *gc_policy;

    if (UseSerialGC) {
      gc_policy = new MarkSweepPolicy();
    } else if (UseConcMarkSweepGC) {
#ifndef SERIALGC
      if (UseAdaptiveSizePolicy) {
        gc_policy = new ASConcurrentMarkSweepPolicy();
      } else {
        gc_policy = new ConcurrentMarkSweepPolicy();
      }
#else  // SERIALGC
      fatal("UseConcMarkSweepGC not supported in java kernel vm.");
#endif // SERIALGC
    } else { // default old generation
      gc_policy = new MarkSweepPolicy();
    }

    Universe::_collectedHeap = new GenCollectedHeap(gc_policy);
  }

  jint status = Universe::heap()->initialize();
  if (status != JNI_OK) {
    return status;
  }
  if (UseCompressedOops) {
    // Subtract a page because something can get allocated at heap base.
    // This also makes implicit null checking work, because the
    // memory+1 page below heap_base needs to cause a signal.
    // See needs_explicit_null_check.
    // Only set the heap base for compressed oops because it indicates
    // compressed oops for pstack code.
    Universe::_heap_base = Universe::heap()->base() - os::vm_page_size();
  }

  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization

  if (UseTLAB) {
    assert(Universe::heap()->supports_tlab_allocation(),
           "Should support thread-local allocation buffers");
    ThreadLocalAllocBuffer::startup_initialization();
  }
  return JNI_OK;
}
duke@0 797
// Snapshot the heap's capacity and usage (read by soft-ref clearing policy).
// It's the caller's responsibility to ensure glitch-freedom
// (if required), e.g. by holding the Heap_lock as universe_post_init does.
void Universe::update_heap_info_at_gc() {
  _heap_capacity_at_last_gc = heap()->capacity();
  _heap_used_at_last_gc     = heap()->used();
}
duke@0 804
duke@0 805
duke@0 806
// Second phase of universe setup: run genesis (creation of the core mirror
// classes and primordial objects), then optionally verify the fresh heap.
void universe2_init() {
  EXCEPTION_MARK;
  Universe::genesis(CATCH);
  // Although we'd like to verify here that the state of the heap
  // is good, we can't because the main thread has not yet added
  // itself to the threads list (so, using current interfaces
  // we can't "fill" its TLAB), unless TLABs are disabled.
  if (VerifyBeforeGC && !UseTLAB &&
      Universe::heap()->total_collections() >= VerifyGCStartAt) {
    Universe::heap()->prepare_for_verify();
    Universe::verify();   // make sure we're starting with a clean slate
  }
}
duke@0 820
duke@0 821
duke@0 822 // This function is defined in JVM.cpp
duke@0 823 extern void initialize_converter_functions();
duke@0 824
// Final phase of universe setup: initialize the interpreter, preallocate the
// standard error/exception instances and their detail messages, wire up the
// cached methodOops (Finalizer.register, Method.invoke, ClassLoader.addClass),
// initialize serialization converters and reference processing, and re-enable
// GC. Returns false (after printing a message) only if VirtualMachineError
// cannot be linked; throws via THROW_MSG_ on missing cached methods.
bool universe_post_init() {
  Universe::_fully_initialized = true;
  EXCEPTION_MARK;
  { ResourceMark rm;
    Interpreter::initialize();      // needed for interpreter entry points
    if (!UseSharedSpaces) {
      KlassHandle ok_h(THREAD, SystemDictionary::object_klass());
      Universe::reinitialize_vtable_of(ok_h, CHECK_false);
      Universe::reinitialize_itables(CHECK_false);
    }
  }

  klassOop k;
  instanceKlassHandle k_h;
  if (!UseSharedSpaces) {
    // Setup preallocated empty java.lang.Class array
    Universe::_the_empty_class_klass_array = oopFactory::new_objArray(SystemDictionary::class_klass(), 0, CHECK_false);
    // Setup preallocated OutOfMemoryError errors
    k = SystemDictionary::resolve_or_fail(vmSymbolHandles::java_lang_OutOfMemoryError(), true, CHECK_false);
    k_h = instanceKlassHandle(THREAD, k);
    Universe::_out_of_memory_error_java_heap = k_h->allocate_permanent_instance(CHECK_false);
    Universe::_out_of_memory_error_perm_gen = k_h->allocate_permanent_instance(CHECK_false);
    Universe::_out_of_memory_error_array_size = k_h->allocate_permanent_instance(CHECK_false);
    Universe::_out_of_memory_error_gc_overhead_limit =
      k_h->allocate_permanent_instance(CHECK_false);

    // Setup preallocated NullPointerException
    // (this is currently used for a cheap & dirty solution in compiler exception handling)
    k = SystemDictionary::resolve_or_fail(vmSymbolHandles::java_lang_NullPointerException(), true, CHECK_false);
    Universe::_null_ptr_exception_instance = instanceKlass::cast(k)->allocate_permanent_instance(CHECK_false);
    // Setup preallocated ArithmeticException
    // (this is currently used for a cheap & dirty solution in compiler exception handling)
    k = SystemDictionary::resolve_or_fail(vmSymbolHandles::java_lang_ArithmeticException(), true, CHECK_false);
    Universe::_arithmetic_exception_instance = instanceKlass::cast(k)->allocate_permanent_instance(CHECK_false);
    // Virtual Machine Error for when we get into a situation we can't resolve
    k = SystemDictionary::resolve_or_fail(
      vmSymbolHandles::java_lang_VirtualMachineError(), true, CHECK_false);
    bool linked = instanceKlass::cast(k)->link_class_or_fail(CHECK_false);
    if (!linked) {
      tty->print_cr("Unable to link/verify VirtualMachineError class");
      return false; // initialization failed
    }
    Universe::_virtual_machine_error_instance =
      instanceKlass::cast(k)->allocate_permanent_instance(CHECK_false);
  }
  if (!DumpSharedSpaces) {
    // These are the only Java fields that are currently set during shared space dumping.
    // We prefer to not handle this generally, so we always reinitialize these detail messages.
    Handle msg = java_lang_String::create_from_str("Java heap space", CHECK_false);
    java_lang_Throwable::set_message(Universe::_out_of_memory_error_java_heap, msg());

    msg = java_lang_String::create_from_str("PermGen space", CHECK_false);
    java_lang_Throwable::set_message(Universe::_out_of_memory_error_perm_gen, msg());

    msg = java_lang_String::create_from_str("Requested array size exceeds VM limit", CHECK_false);
    java_lang_Throwable::set_message(Universe::_out_of_memory_error_array_size, msg());

    msg = java_lang_String::create_from_str("GC overhead limit exceeded", CHECK_false);
    java_lang_Throwable::set_message(Universe::_out_of_memory_error_gc_overhead_limit, msg());

    msg = java_lang_String::create_from_str("/ by zero", CHECK_false);
    java_lang_Throwable::set_message(Universe::_arithmetic_exception_instance, msg());

    // Setup the array of errors that have preallocated backtrace
    k = Universe::_out_of_memory_error_java_heap->klass();
    assert(k->klass_part()->name() == vmSymbols::java_lang_OutOfMemoryError(), "should be out of memory error");
    k_h = instanceKlassHandle(THREAD, k);

    // Backtraces are only preallocated when stack traces are enabled at all.
    int len = (StackTraceInThrowable) ? (int)PreallocatedOutOfMemoryErrorCount : 0;
    Universe::_preallocated_out_of_memory_error_array = oopFactory::new_objArray(k_h(), len, CHECK_false);
    for (int i=0; i<len; i++) {
      oop err = k_h->allocate_permanent_instance(CHECK_false);
      Handle err_h = Handle(THREAD, err);
      java_lang_Throwable::allocate_backtrace(err_h, CHECK_false);
      Universe::preallocated_out_of_memory_errors()->obj_at_put(i, err_h());
    }
    Universe::_preallocated_out_of_memory_error_avail_count = (jint)len;
  }


  // Setup static method for registering finalizers
  // The finalizer klass must be linked before looking up the method, in
  // case it needs to get rewritten.
  instanceKlass::cast(SystemDictionary::finalizer_klass())->link_class(CHECK_false);
  methodOop m = instanceKlass::cast(SystemDictionary::finalizer_klass())->find_method(
                                  vmSymbols::register_method_name(),
                                  vmSymbols::register_method_signature());
  if (m == NULL || !m->is_static()) {
    THROW_MSG_(vmSymbols::java_lang_NoSuchMethodException(),
      "java.lang.ref.Finalizer.register", false);
  }
  Universe::_finalizer_register_cache->init(
    SystemDictionary::finalizer_klass(), m, CHECK_false);

  // Resolve on first use and initialize class.
  // Note: No race-condition here, since a resolve will always return the same result

  // Setup method for security checks
  k = SystemDictionary::resolve_or_fail(vmSymbolHandles::java_lang_reflect_Method(), true, CHECK_false);
  k_h = instanceKlassHandle(THREAD, k);
  k_h->link_class(CHECK_false);
  m = k_h->find_method(vmSymbols::invoke_name(), vmSymbols::object_array_object_object_signature());
  if (m == NULL || m->is_static()) {
    // Method.invoke must exist and be an instance method.
    THROW_MSG_(vmSymbols::java_lang_NoSuchMethodException(),
      "java.lang.reflect.Method.invoke", false);
  }
  Universe::_reflect_invoke_cache->init(k_h(), m, CHECK_false);

  // Setup method for registering loaded classes in class loader vector
  instanceKlass::cast(SystemDictionary::classloader_klass())->link_class(CHECK_false);
  m = instanceKlass::cast(SystemDictionary::classloader_klass())->find_method(vmSymbols::addClass_name(), vmSymbols::class_void_signature());
  if (m == NULL || m->is_static()) {
    THROW_MSG_(vmSymbols::java_lang_NoSuchMethodException(),
      "java.lang.ClassLoader.addClass", false);
  }
  Universe::_loader_addClass_cache->init(
    SystemDictionary::classloader_klass(), m, CHECK_false);

  // The following is initializing converter functions for serialization in
  // JVM.cpp. If we clean up the StrictMath code above we may want to find
  // a better solution for this as well.
  initialize_converter_functions();

  // This needs to be done before the first scavenge/gc, since
  // it's an input to soft ref clearing policy.
  {
    MutexLocker x(Heap_lock);
    Universe::update_heap_info_at_gc();
  }

  // ("weak") refs processing infrastructure initialization
  Universe::heap()->post_initialize();

  GC_locker::unlock();   // allow gc after bootstrapping

  MemoryService::set_universe_heap(Universe::_collectedHeap);
  return true;
}
duke@0 963
duke@0 964
// Cache the vtable size of java.lang.Object, the base of all vtables.
void Universe::compute_base_vtable_size() {
  _base_vtable_size = ClassLoader::compute_Object_vtable();
}
duke@0 968
duke@0 969
// %%% The Universe::flush_foo methods belong in CodeCache.

// Flushes compiled methods dependent on dependee.
// Marks dependent nmethods and, if any were marked, schedules a
// VM_Deoptimize operation to deoptimize them.
void Universe::flush_dependents_on(instanceKlassHandle dependee) {
  assert_lock_strong(Compile_lock);

  if (CodeCache::number_of_nmethods_with_dependencies() == 0) return;

  // CodeCache can only be updated by a thread_in_VM and they will all be
  // stopped during the safepoint so CodeCache will be safe to update without
  // holding the CodeCache_lock.

  DepChange changes(dependee);

  // Compute the dependent nmethods
  if (CodeCache::mark_for_deoptimization(changes) > 0) {
    // At least one nmethod has been marked for deoptimization
    VM_Deoptimize op;
    VMThread::execute(&op);
  }
}
duke@0 991
#ifdef HOTSWAP
// Flushes compiled methods dependent on dependee in the evolutionary sense,
// i.e. when a class is redefined (JVMTI RedefineClasses). Unlike
// flush_dependents_on, the deoptimization work is done inline since we are
// already inside a VM operation at a safepoint.
void Universe::flush_evol_dependents_on(instanceKlassHandle ev_k_h) {
  // --- Compile_lock is not held. However we are at a safepoint.
  assert_locked_or_safepoint(Compile_lock);
  if (CodeCache::number_of_nmethods_with_dependencies() == 0) return;

  // CodeCache can only be updated by a thread_in_VM and they will all be
  // stopped during the safepoint so CodeCache will be safe to update without
  // holding the CodeCache_lock.

  // Compute the dependent nmethods
  if (CodeCache::mark_for_evol_deoptimization(ev_k_h) > 0) {
    // At least one nmethod has been marked for deoptimization

    // All this already happens inside a VM_Operation, so we'll do all the work here.
    // Stuff copied from VM_Deoptimize and modified slightly.

    // We do not want any GCs to happen while we are in the middle of this VM operation
    ResourceMark rm;
    DeoptimizationMarker dm;

    // Deoptimize all activations depending on marked nmethods
    Deoptimization::deoptimize_dependents();

    // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies)
    CodeCache::make_marked_nmethods_not_entrant();
  }
}
#endif // HOTSWAP
duke@0 1022
duke@0 1023
// Flushes compiled methods dependent on a single method (e.g. when that
// method's code or metadata changes). Runs at a safepoint and performs the
// deoptimization inline, mirroring flush_evol_dependents_on.
void Universe::flush_dependents_on_method(methodHandle m_h) {
  // --- Compile_lock is not held. However we are at a safepoint.
  assert_locked_or_safepoint(Compile_lock);

  // CodeCache can only be updated by a thread_in_VM and they will all be
  // stopped during the safepoint so CodeCache will be safe to update without
  // holding the CodeCache_lock.

  // Compute the dependent nmethods
  if (CodeCache::mark_for_deoptimization(m_h()) > 0) {
    // At least one nmethod has been marked for deoptimization

    // All this already happens inside a VM_Operation, so we'll do all the work here.
    // Stuff copied from VM_Deoptimize and modified slightly.

    // We do not want any GCs to happen while we are in the middle of this VM operation
    ResourceMark rm;
    DeoptimizationMarker dm;

    // Deoptimize all activations depending on marked nmethods
    Deoptimization::deoptimize_dependents();

    // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies)
    CodeCache::make_marked_nmethods_not_entrant();
  }
}
duke@0 1051
// Print the heap summary to the GC log stream (or tty if no log file).
void Universe::print() { print_on(gclog_or_tty); }
duke@0 1053
duke@0 1054 void Universe::print_on(outputStream* st) {
duke@0 1055 st->print_cr("Heap");
duke@0 1056 heap()->print_on(st);
duke@0 1057 }
duke@0 1058
duke@0 1059 void Universe::print_heap_at_SIGBREAK() {
duke@0 1060 if (PrintHeapAtSIGBREAK) {
duke@0 1061 MutexLocker hl(Heap_lock);
duke@0 1062 print_on(tty);
duke@0 1063 tty->cr();
duke@0 1064 tty->flush();
duke@0 1065 }
duke@0 1066 }
duke@0 1067
duke@0 1068 void Universe::print_heap_before_gc(outputStream* st) {
duke@0 1069 st->print_cr("{Heap before GC invocations=%u (full %u):",
duke@0 1070 heap()->total_collections(),
duke@0 1071 heap()->total_full_collections());
duke@0 1072 heap()->print_on(st);
duke@0 1073 }
duke@0 1074
duke@0 1075 void Universe::print_heap_after_gc(outputStream* st) {
duke@0 1076 st->print_cr("Heap after GC invocations=%u (full %u):",
duke@0 1077 heap()->total_collections(),
duke@0 1078 heap()->total_full_collections());
duke@0 1079 heap()->print_on(st);
duke@0 1080 st->print_cr("}");
duke@0 1081 }
duke@0 1082
// Verify internal VM consistency: threads, heap, symbol/string tables, code
// cache, system dictionary, JNI handles and the C heap. Progress tags are
// printed unless silent; SharedSkipVerify suppresses the whole pass.
void Universe::verify(bool allow_dirty, bool silent) {
  if (SharedSkipVerify) {
    return;
  }

  // The use of _verify_in_progress is a temporary work around for
  // 6320749. Don't bother with a creating a class to set and clear
  // it since it is only used in this method and the control flow is
  // straight forward.
  _verify_in_progress = true;

  COMPILER2_PRESENT(
    assert(!DerivedPointerTable::is_active(),
           "DPT should not be active during verification "
           "(of thread stacks below)");
  )

  ResourceMark rm;
  HandleMark hm;  // Handles created during verification can be zapped
  _verify_count++;

  if (!silent) gclog_or_tty->print("[Verifying ");
  if (!silent) gclog_or_tty->print("threads ");
  Threads::verify();
  heap()->verify(allow_dirty, silent);

  if (!silent) gclog_or_tty->print("syms ");
  SymbolTable::verify();
  if (!silent) gclog_or_tty->print("strs ");
  StringTable::verify();
  {
    // CodeCache::verify requires the CodeCache_lock.
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    if (!silent) gclog_or_tty->print("zone ");
    CodeCache::verify();
  }
  if (!silent) gclog_or_tty->print("dict ");
  SystemDictionary::verify();
  if (!silent) gclog_or_tty->print("hand ");
  JNIHandles::verify();
  if (!silent) gclog_or_tty->print("C-heap ");
  os::check_heap();
  if (!silent) gclog_or_tty->print_cr("]");

  _verify_in_progress = false;
}
duke@0 1128
duke@0 1129 // Oop verification (see MacroAssembler::verify_oop)
duke@0 1130
// {mask, bits} pairs for oop/klass verification: an address a is plausible
// iff (a & mask) == bits. {0, -1} means "not yet computed".
static uintptr_t _verify_oop_data[2]   = {0, (uintptr_t)-1};
static uintptr_t _verify_klass_data[2] = {0, (uintptr_t)-1};


// Compute a {mask, bits} pair such that every properly-aligned address in
// [low_boundary, high_boundary - min_object_size] satisfies
// (addr & mask) == bits. Results are written into verify_data and must be
// stable across repeated calls (the asserts check this).
static void calculate_verify_data(uintptr_t verify_data[2],
                                  HeapWord* low_boundary,
                                  HeapWord* high_boundary) {
  assert(low_boundary < high_boundary, "bad interval");

  // decide which low-order bits we require to be clear:
  size_t alignSize = MinObjAlignmentInBytes;
  size_t min_object_size = oopDesc::header_size();

  // make an inclusive limit:
  uintptr_t max = (uintptr_t)high_boundary - min_object_size*wordSize;
  uintptr_t min = (uintptr_t)low_boundary;
  assert(min < max, "bad interval");
  uintptr_t diff = max ^ min;

  // throw away enough low-order bits to make the diff vanish:
  // after the loop, mask keeps only the high-order bits on which
  // min and max agree.
  uintptr_t mask = (uintptr_t)(-1);
  while ((mask & diff) != 0)
    mask <<= 1;
  uintptr_t bits = (min & mask);
  assert(bits == (max & mask), "correct mask");
  // check an intermediate value between min and max, just to make sure:
  assert(bits == ((min + (max-min)/2) & mask), "correct mask");

  // require address alignment, too: the alignment bits of a valid address
  // must be zero, matching the (zero) alignment bits of `bits`.
  mask |= (alignSize - 1);

  if (!(verify_data[0] == 0 && verify_data[1] == (uintptr_t)-1)) {
    assert(verify_data[0] == mask && verify_data[1] == bits, "mask stability");
  }
  verify_data[0] = mask;
  verify_data[1] = bits;
}
duke@0 1168
duke@0 1169
duke@0 1170 // Oop verification (see MacroAssembler::verify_oop)
duke@0 1171 #ifndef PRODUCT
duke@0 1172
duke@0 1173 uintptr_t Universe::verify_oop_mask() {
duke@0 1174 MemRegion m = heap()->reserved_region();
duke@0 1175 calculate_verify_data(_verify_oop_data,
duke@0 1176 m.start(),
duke@0 1177 m.end());
duke@0 1178 return _verify_oop_data[0];
duke@0 1179 }
duke@0 1180
duke@0 1181
duke@0 1182
// Returns the expected-bits component; verify_oop_mask() refreshes
// _verify_oop_data as a side effect.
uintptr_t Universe::verify_oop_bits() {
  verify_oop_mask();
  return _verify_oop_data[1];
}
duke@0 1187
duke@0 1188
// Recompute the klass verify data over the permanent generation's reserved
// region (klasses live in the perm gen) and return the mask component.
uintptr_t Universe::verify_klass_mask() {
  /* $$$
  // A klass can never live in the new space. Since the new and old
  // spaces can change size, we must settle for bounds-checking against
  // the bottom of the world, plus the smallest possible new and old
  // space sizes that may arise during execution.
  size_t min_new_size = Universe::new_size();   // in bytes
  size_t min_old_size = Universe::old_size();   // in bytes
  calculate_verify_data(_verify_klass_data,
          (HeapWord*)((uintptr_t)_new_gen->low_boundary + min_new_size + min_old_size),
          _perm_gen->high_boundary);
  */
  // Why doesn't the above just say that klass's always live in the perm
  // gen?  I'll see if that seems to work...
  MemRegion permanent_reserved;
  switch (Universe::heap()->kind()) {
  default:
    // ???: What if a CollectedHeap doesn't have a permanent generation?
    ShouldNotReachHere();
    break;
  case CollectedHeap::GenCollectedHeap:
  case CollectedHeap::G1CollectedHeap: {
    // Both are SharedHeaps exposing a perm_gen() generation.
    SharedHeap* sh = (SharedHeap*) Universe::heap();
    permanent_reserved = sh->perm_gen()->reserved();
    break;
  }
#ifndef SERIALGC
  case CollectedHeap::ParallelScavengeHeap: {
    ParallelScavengeHeap* psh = (ParallelScavengeHeap*) Universe::heap();
    permanent_reserved = psh->perm_gen()->reserved();
    break;
  }
#endif // SERIALGC
  }
  calculate_verify_data(_verify_klass_data,
                        permanent_reserved.start(),
                        permanent_reserved.end());

  return _verify_klass_data[0];
}
duke@0 1229
duke@0 1230
duke@0 1231
// Returns the expected-bits component; verify_klass_mask() refreshes
// _verify_klass_data as a side effect.
uintptr_t Universe::verify_klass_bits() {
  verify_klass_mask();
  return _verify_klass_data[1];
}
duke@0 1236
duke@0 1237
// Mask of the mark-word bits checked by verify_mark_bits (the lock bits).
uintptr_t Universe::verify_mark_mask() {
  return markOopDesc::lock_mask_in_place;
}
duke@0 1241
duke@0 1242
duke@0 1243
// Expected value of the masked mark-word bits: those of a prototype
// (neutral) mark word. Asserts the prototype has no bits outside the mask.
uintptr_t Universe::verify_mark_bits() {
  intptr_t mask = verify_mark_mask();
  intptr_t bits = (intptr_t)markOopDesc::prototype();
  assert((bits & ~mask) == 0, "no stray header bits");
  return bits;
}
duke@0 1250 #endif // PRODUCT
duke@0 1251
duke@0 1252
// Force computation of all verify masks/bits (each call caches its
// result in the corresponding _verify_*_data array).
void Universe::compute_verify_oop_data() {
  verify_oop_mask();
  verify_oop_bits();
  verify_mark_mask();
  verify_mark_bits();
  verify_klass_mask();
  verify_klass_bits();
}
duke@0 1261
duke@0 1262
// Record the klass and the method's idnum (not the methodOop itself, which
// may move or be redefined); the method is recovered later via the idnum.
void CommonMethodOopCache::init(klassOop k, methodOop m, TRAPS) {
  if (!UseSharedSpaces) {
    _klass = k;
  }
#ifndef PRODUCT
  else {
    // sharing initialization should have already set up _klass
    assert(_klass != NULL, "just checking");
  }
#endif

  _method_idnum = m->method_idnum();
  assert(_method_idnum >= 0, "sanity check");
}
duke@0 1277
duke@0 1278
duke@0 1279 ActiveMethodOopsCache::~ActiveMethodOopsCache() {
duke@0 1280 if (_prev_methods != NULL) {
duke@0 1281 for (int i = _prev_methods->length() - 1; i >= 0; i--) {
duke@0 1282 jweak method_ref = _prev_methods->at(i);
duke@0 1283 if (method_ref != NULL) {
duke@0 1284 JNIHandles::destroy_weak_global(method_ref);
duke@0 1285 }
duke@0 1286 }
duke@0 1287 delete _prev_methods;
duke@0 1288 _prev_methods = NULL;
duke@0 1289 }
duke@0 1290 }
duke@0 1291
duke@0 1292
// Remember a superseded version of the cached method (via a weak global
// JNI handle) so is_same_method can still recognize it, and opportunistically
// prune entries whose methods have been GC'ed. VMThread-only, at a safepoint.
void ActiveMethodOopsCache::add_previous_version(const methodOop method) {
  assert(Thread::current()->is_VM_thread(),
    "only VMThread can add previous versions");

  if (_prev_methods == NULL) {
    // This is the first previous version so make some space.
    // Start with 2 elements under the assumption that the class
    // won't be redefined much.
    _prev_methods = new (ResourceObj::C_HEAP) GrowableArray<jweak>(2, true);
  }

  // RC_TRACE macro has an embedded ResourceMark
  RC_TRACE(0x00000100,
    ("add: %s(%s): adding prev version ref for cached method @%d",
    method->name()->as_C_string(), method->signature()->as_C_string(),
    _prev_methods->length()));

  methodHandle method_h(method);
  jweak method_ref = JNIHandles::make_weak_global(method_h);
  _prev_methods->append(method_ref);

  // Using weak references allows previous versions of the cached
  // method to be GC'ed when they are no longer needed. Since the
  // caller is the VMThread and we are at a safepoint, this is a good
  // time to clear out unused weak references.

  for (int i = _prev_methods->length() - 1; i >= 0; i--) {
    jweak method_ref = _prev_methods->at(i);
    assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
    if (method_ref == NULL) {
      _prev_methods->remove_at(i);
      // Since we are traversing the array backwards, we don't have to
      // do anything special with the index.
      continue;  // robustness
    }

    methodOop m = (methodOop)JNIHandles::resolve(method_ref);
    if (m == NULL) {
      // this method entry has been GC'ed so remove it
      JNIHandles::destroy_weak_global(method_ref);
      _prev_methods->remove_at(i);
    } else {
      // RC_TRACE macro has an embedded ResourceMark
      RC_TRACE(0x00000400, ("add: %s(%s): previous cached method @%d is alive",
        m->name()->as_C_string(), m->signature()->as_C_string(), i));
    }
  }
} // end add_previous_version()
duke@0 1341
duke@0 1342
duke@0 1343 bool ActiveMethodOopsCache::is_same_method(const methodOop method) const {
duke@0 1344 instanceKlass* ik = instanceKlass::cast(klass());
duke@0 1345 methodOop check_method = ik->method_with_idnum(method_idnum());
duke@0 1346 assert(check_method != NULL, "sanity check");
duke@0 1347 if (check_method == method) {
duke@0 1348 // done with the easy case
duke@0 1349 return true;
duke@0 1350 }
duke@0 1351
duke@0 1352 if (_prev_methods != NULL) {
duke@0 1353 // The cached method has been redefined at least once so search
duke@0 1354 // the previous versions for a match.
duke@0 1355 for (int i = 0; i < _prev_methods->length(); i++) {
duke@0 1356 jweak method_ref = _prev_methods->at(i);
duke@0 1357 assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
duke@0 1358 if (method_ref == NULL) {
duke@0 1359 continue; // robustness
duke@0 1360 }
duke@0 1361
duke@0 1362 check_method = (methodOop)JNIHandles::resolve(method_ref);
duke@0 1363 if (check_method == method) {
duke@0 1364 // a previous version matches
duke@0 1365 return true;
duke@0 1366 }
duke@0 1367 }
duke@0 1368 }
duke@0 1369
duke@0 1370 // either no previous versions or no previous version matched
duke@0 1371 return false;
duke@0 1372 }
duke@0 1373
duke@0 1374
duke@0 1375 methodOop LatestMethodOopCache::get_methodOop() {
duke@0 1376 instanceKlass* ik = instanceKlass::cast(klass());
duke@0 1377 methodOop m = ik->method_with_idnum(method_idnum());
duke@0 1378 assert(m != NULL, "sanity check");
duke@0 1379 return m;
duke@0 1380 }
duke@0 1381
duke@0 1382
duke@0 1383 #ifdef ASSERT
duke@0 1384 // Release dummy object(s) at bottom of heap
duke@0 1385 bool Universe::release_fullgc_alot_dummy() {
duke@0 1386 MutexLocker ml(FullGCALot_lock);
duke@0 1387 if (_fullgc_alot_dummy_array != NULL) {
duke@0 1388 if (_fullgc_alot_dummy_next >= _fullgc_alot_dummy_array->length()) {
duke@0 1389 // No more dummies to release, release entire array instead
duke@0 1390 _fullgc_alot_dummy_array = NULL;
duke@0 1391 return false;
duke@0 1392 }
duke@0 1393 if (!UseConcMarkSweepGC) {
duke@0 1394 // Release dummy at bottom of old generation
duke@0 1395 _fullgc_alot_dummy_array->obj_at_put(_fullgc_alot_dummy_next++, NULL);
duke@0 1396 }
duke@0 1397 // Release dummy at bottom of permanent generation
duke@0 1398 _fullgc_alot_dummy_array->obj_at_put(_fullgc_alot_dummy_next++, NULL);
duke@0 1399 }
duke@0 1400 return true;
duke@0 1401 }
duke@0 1402
duke@0 1403 #endif // ASSERT