annotate src/share/vm/prims/unsafe.cpp @ 13526:f5dd157e3889

8186439: [MVT] ClassFileParser should ignore JVM_ACC_VALUE for class file version < 53.1 Reviewed-by: dsimms
author thartmann
date Mon, 21 Aug 2017 12:26:21 +0200
parents a2753984d2c1
children
rev   line source
duke@0 1 /*
psandoz@12991 2 * Copyright (c) 2000, 2017, Oracle and/or its affiliates. All rights reserved.
duke@0 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@0 4 *
duke@0 5 * This code is free software; you can redistribute it and/or modify it
duke@0 6 * under the terms of the GNU General Public License version 2 only, as
duke@0 7 * published by the Free Software Foundation.
duke@0 8 *
duke@0 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@0 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@0 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@0 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@0 13 * accompanied this code).
duke@0 14 *
duke@0 15 * You should have received a copy of the GNU General Public License version
duke@0 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@0 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@0 18 *
trims@1472 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1472 20 * or visit www.oracle.com if you need additional information or have any
trims@1472 21 * questions.
duke@0 22 *
duke@0 23 */
duke@0 24
stefank@1879 25 #include "precompiled.hpp"
mgronlun@9684 26 #include "classfile/classFileStream.hpp"
stefank@1879 27 #include "classfile/vmSymbols.hpp"
stefank@1879 28 #include "memory/allocation.inline.hpp"
jprovino@10762 29 #include "memory/resourceArea.hpp"
redestad@13212 30 #include "oops/fieldStreams.hpp"
stefank@7864 31 #include "oops/objArrayOop.inline.hpp"
stefank@7864 32 #include "oops/oop.inline.hpp"
stefank@1879 33 #include "prims/jni.h"
stefank@1879 34 #include "prims/jvm.h"
mikael@10425 35 #include "prims/unsafe.hpp"
dholmes@11857 36 #include "runtime/atomic.hpp"
stefank@1879 37 #include "runtime/globals.hpp"
stefank@1879 38 #include "runtime/interfaceSupport.hpp"
goetz@6402 39 #include "runtime/orderAccess.inline.hpp"
stefank@1879 40 #include "runtime/reflection.hpp"
goetz@6760 41 #include "runtime/vm_version.hpp"
stefank@1879 42 #include "services/threadService.hpp"
sla@4802 43 #include "trace/tracing.hpp"
stefank@13269 44 #include "utilities/align.hpp"
stefank@1879 45 #include "utilities/copy.hpp"
stefank@1879 46 #include "utilities/dtrace.hpp"
stefank@7425 47 #include "utilities/macros.hpp"
stefank@7425 48 #if INCLUDE_ALL_GCS
pliden@8413 49 #include "gc/g1/g1SATBCardTableModRefBS.hpp"
stefank@7425 50 #endif // INCLUDE_ALL_GCS
stefank@1879 51
mikael@10425 52 /**
mikael@10425 53 * Implementation of the jdk.internal.misc.Unsafe class
duke@0 54 */
duke@0 55
fparain@1324 56
duke@0 57 #define MAX_OBJECT_SIZE \
duke@0 58 ( arrayOopDesc::header_size(T_DOUBLE) * HeapWordSize \
duke@0 59 + ((julong)max_jint * sizeof(double)) )
duke@0 60
duke@0 61
duke@0 62 #define UNSAFE_ENTRY(result_type, header) \
mikael@10425 63 JVM_ENTRY(static result_type, header)
duke@0 64
mikael@10425 65 #define UNSAFE_LEAF(result_type, header) \
mikael@10425 66 JVM_LEAF(static result_type, header)
duke@0 67
duke@0 68 #define UNSAFE_END JVM_END
duke@0 69
duke@0 70
mikael@10425 71 static inline void* addr_from_java(jlong addr) {
duke@0 72 // This assert fails in a variety of ways on 32-bit systems.
duke@0 73 // It is impossible to predict whether native code that converts
duke@0 74 // pointers to longs will sign-extend or zero-extend the addresses.
duke@0 75 //assert(addr == (uintptr_t)addr, "must not be odd high bits");
duke@0 76 return (void*)(uintptr_t)addr;
duke@0 77 }
duke@0 78
mikael@10425 79 static inline jlong addr_to_java(void* p) {
duke@0 80 assert(p == (void*)(uintptr_t)p, "must not be odd high bits");
duke@0 81 return (uintptr_t)p;
duke@0 82 }
duke@0 83
duke@0 84
duke@0 85 // Note: The VM's obj_field and related accessors use byte-scaled
duke@0 86 // ("unscaled") offsets, just as the unsafe methods do.
duke@0 87
duke@0 88 // However, the method Unsafe.fieldOffset explicitly declines to
duke@0 89 // guarantee this. The field offset values manipulated by the Java user
duke@0 90 // through the Unsafe API are opaque cookies that just happen to be byte
duke@0 91 // offsets. We represent this state of affairs by passing the cookies
duke@0 92 // through conversion functions when going between the VM and the Unsafe API.
duke@0 93 // The conversion functions just happen to be no-ops at present.
duke@0 94
mikael@10425 95 static inline jlong field_offset_to_byte_offset(jlong field_offset) {
duke@0 96 return field_offset;
duke@0 97 }
duke@0 98
mikael@10425 99 static inline jlong field_offset_from_byte_offset(jlong byte_offset) {
duke@0 100 return byte_offset;
duke@0 101 }
duke@0 102
mikael@10425 103 static inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
mikael@10425 104 jlong byte_offset = field_offset_to_byte_offset(field_offset);
duke@0 105
duke@0 106 #ifdef ASSERT
duke@0 107 if (p != NULL) {
duke@0 108 assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
duke@0 109 if (byte_offset == (jint)byte_offset) {
duke@0 110 void* ptr_plus_disp = (address)p + byte_offset;
coleenp@113 111 assert((void*)p->obj_field_addr<oop>((jint)byte_offset) == ptr_plus_disp,
duke@0 112 "raw [ptr+disp] must be consistent with oop::field_base");
duke@0 113 }
kvn@3765 114 jlong p_size = HeapWordSize * (jlong)(p->size());
jwilhelm@13274 115 assert(byte_offset < p_size, "Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, (int64_t)byte_offset, (int64_t)p_size);
duke@0 116 }
duke@0 117 #endif
mikael@10425 118
mikael@10425 119 if (sizeof(char*) == sizeof(jint)) { // (this constant folds!)
duke@0 120 return (address)p + (jint) byte_offset;
mikael@10425 121 } else {
duke@0 122 return (address)p + byte_offset;
mikael@10425 123 }
duke@0 124 }
duke@0 125
duke@0 126 // Externally callable versions:
duke@0 127 // (Use these in compiler intrinsics which emulate unsafe primitives.)
duke@0 128 jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
duke@0 129 return field_offset;
duke@0 130 }
duke@0 131 jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
duke@0 132 return byte_offset;
duke@0 133 }
duke@0 134
duke@0 135
mikael@11157 136 ///// Data read/writes on the Java heap and in native (off-heap) memory
duke@0 137
mikael@11157 138 /**
mikael@11157 139 * Helper class for accessing memory.
mikael@11157 140 *
mikael@11157 141 * Normalizes values and wraps accesses in
mikael@11157 142 * JavaThread::doing_unsafe_access() if needed.
mikael@11157 143 */
mikael@11157 144 class MemoryAccess : StackObj {
mikael@11157 145 JavaThread* _thread;
mikael@11157 146 jobject _obj;
mikael@11157 147 jlong _offset;
coleenp@10885 148
mikael@11157 149 // Resolves and returns the address of the memory access
mikael@11157 150 void* addr() {
mikael@11157 151 return index_oop_from_field_offset_long(JNIHandles::resolve(_obj), _offset);
mikael@11157 152 }
duke@0 153
mikael@11157 154 template <typename T>
zmajo@11961 155 T normalize_for_write(T x) {
mikael@11157 156 return x;
mikael@11157 157 }
duke@0 158
zmajo@11961 159 jboolean normalize_for_write(jboolean x) {
mikael@11157 160 return x & 1;
mikael@11157 161 }
duke@0 162
zmajo@11961 163 template <typename T>
zmajo@11961 164 T normalize_for_read(T x) {
zmajo@11961 165 return x;
zmajo@11961 166 }
zmajo@11961 167
zmajo@11961 168 jboolean normalize_for_read(jboolean x) {
zmajo@11961 169 return x != 0;
zmajo@11961 170 }
zmajo@11961 171
mikael@11157 172 /**
mikael@11157 173 * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
mikael@11157 174 */
mikael@11157 175 class GuardUnsafeAccess {
mikael@11157 176 JavaThread* _thread;
mikael@11157 177 bool _active;
duke@0 178
mikael@11157 179 public:
mikael@11157 180 GuardUnsafeAccess(JavaThread* thread, jobject _obj) : _thread(thread) {
mikael@11157 181 if (JNIHandles::resolve(_obj) == NULL) {
mikael@11157 182 // native/off-heap access which may raise SIGBUS if accessing
mikael@11157 183 // memory mapped file data in a region of the file which has
mikael@11157 184 // been truncated and is now invalid
mikael@11157 185 _thread->set_doing_unsafe_access(true);
mikael@11157 186 _active = true;
mikael@11157 187 } else {
mikael@11157 188 _active = false;
mikael@11157 189 }
mikael@11157 190 }
coleenp@113 191
mikael@11157 192 ~GuardUnsafeAccess() {
mikael@11157 193 if (_active) {
mikael@11157 194 _thread->set_doing_unsafe_access(false);
mikael@11157 195 }
mikael@11157 196 }
mikael@11157 197 };
mikael@11157 198
mikael@11157 199 public:
mikael@11157 200 MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
mikael@11157 201 : _thread(thread), _obj(obj), _offset(offset) {
mikael@11157 202 }
mikael@11157 203
mikael@11157 204 template <typename T>
mikael@11157 205 T get() {
mikael@11157 206 GuardUnsafeAccess guard(_thread, _obj);
mikael@11157 207
mikael@11157 208 T* p = (T*)addr();
mikael@11157 209
zmajo@11961 210 T x = normalize_for_read(*p);
mikael@11157 211
mikael@11157 212 return x;
mikael@11157 213 }
mikael@11157 214
mikael@11157 215 template <typename T>
mikael@11157 216 void put(T x) {
mikael@11157 217 GuardUnsafeAccess guard(_thread, _obj);
mikael@11157 218
mikael@11157 219 T* p = (T*)addr();
mikael@11157 220
zmajo@11961 221 *p = normalize_for_write(x);
mikael@11157 222 }
mikael@11157 223
mikael@11157 224
mikael@11157 225 template <typename T>
mikael@11157 226 T get_volatile() {
mikael@11157 227 GuardUnsafeAccess guard(_thread, _obj);
mikael@11157 228
mikael@11157 229 T* p = (T*)addr();
mikael@11157 230
mikael@11157 231 if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
mikael@11157 232 OrderAccess::fence();
mikael@11157 233 }
mikael@11157 234
mikael@11157 235 T x = OrderAccess::load_acquire((volatile T*)p);
mikael@11157 236
zmajo@11961 237 return normalize_for_read(x);
mikael@11157 238 }
mikael@11157 239
mikael@11157 240 template <typename T>
mikael@11157 241 void put_volatile(T x) {
mikael@11157 242 GuardUnsafeAccess guard(_thread, _obj);
mikael@11157 243
mikael@11157 244 T* p = (T*)addr();
mikael@11157 245
zmajo@11961 246 OrderAccess::release_store_fence((volatile T*)p, normalize_for_write(x));
mikael@11157 247 }
mikael@11157 248
mikael@11157 249
mikael@11157 250 #ifndef SUPPORTS_NATIVE_CX8
mikael@11157 251 jlong get_jlong_locked() {
mikael@11157 252 GuardUnsafeAccess guard(_thread, _obj);
mikael@11157 253
mikael@11157 254 MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
mikael@11157 255
mikael@11157 256 jlong* p = (jlong*)addr();
mikael@11157 257
mikael@11157 258 jlong x = Atomic::load(p);
mikael@11157 259
mikael@11157 260 return x;
mikael@11157 261 }
mikael@11157 262
mikael@11157 263 void put_jlong_locked(jlong x) {
mikael@11157 264 GuardUnsafeAccess guard(_thread, _obj);
mikael@11157 265
mikael@11157 266 MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
mikael@11157 267
mikael@11157 268 jlong* p = (jlong*)addr();
mikael@11157 269
zmajo@11961 270 Atomic::store(normalize_for_write(x), p);
mikael@11157 271 }
mikael@11157 272 #endif
mikael@11157 273 };
mikael@11157 274
mikael@11157 275 // Get/PutObject must be special-cased, since they work with handles.
duke@0 276
mdoerr@12016 277 // We could be accessing the referent field in a reference
mdoerr@12016 278 // object. If G1 is enabled then we need to register non-null
mdoerr@12016 279 // referent with the SATB barrier.
mdoerr@12016 280
mdoerr@12016 281 #if INCLUDE_ALL_GCS
mdoerr@12016 282 static bool is_java_lang_ref_Reference_access(oop o, jlong offset) {
mdoerr@12016 283 if (offset == java_lang_ref_Reference::referent_offset && o != NULL) {
mdoerr@12016 284 Klass* k = o->klass();
mdoerr@12016 285 if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
mdoerr@12016 286 assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
mdoerr@12016 287 return true;
mdoerr@12016 288 }
mdoerr@12016 289 }
mdoerr@12016 290 return false;
mdoerr@12016 291 }
mdoerr@12016 292 #endif
mdoerr@12016 293
mdoerr@12016 294 static void ensure_satb_referent_alive(oop o, jlong offset, oop v) {
mdoerr@12016 295 #if INCLUDE_ALL_GCS
mdoerr@12016 296 if (UseG1GC && v != NULL && is_java_lang_ref_Reference_access(o, offset)) {
mdoerr@12016 297 G1SATBCardTableModRefBS::enqueue(v);
mdoerr@12016 298 }
mdoerr@12016 299 #endif
mdoerr@12016 300 }
mdoerr@12016 301
hseigel@6592 302 // These functions allow a null base pointer with an arbitrary address.
duke@0 303 // But if the base pointer is non-null, the offset should make some sense.
duke@0 304 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
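// For example (an illustrative Java-side sketch; 'U', 'obj' and the offsets are
// assumptions, not part of this file):
//   U.getObject(obj, fieldOffset);    // on-heap: non-null base plus a field/element offset
//   U.getObject(null, rawAddress);    // off-heap: null base, offset used as a raw address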
mikael@10425 305 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
twisti@8273 306 oop p = JNIHandles::resolve(obj);
twisti@8273 307 oop v;
mikael@10425 308
twisti@8273 309 if (UseCompressedOops) {
twisti@8273 310 narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
twisti@8273 311 v = oopDesc::decode_heap_oop(n);
twisti@8273 312 } else {
twisti@8273 313 v = *(oop*)index_oop_from_field_offset_long(p, offset);
twisti@8273 314 }
mikael@10425 315
mdoerr@12016 316 ensure_satb_referent_alive(p, offset, v);
mikael@10425 317
mdoerr@12016 318 return JNIHandles::make_local(env, v);
mikael@10425 319 } UNSAFE_END
duke@0 320
mikael@11157 321 UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
duke@0 322 oop x = JNIHandles::resolve(x_h);
duke@0 323 oop p = JNIHandles::resolve(obj);
mikael@10425 324
coleenp@113 325 if (UseCompressedOops) {
coleenp@113 326 oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
coleenp@113 327 } else {
coleenp@113 328 oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
coleenp@113 329 }
mikael@10425 330 } UNSAFE_END
duke@0 331
mikael@10425 332 UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
twisti@3493 333 oop p = JNIHandles::resolve(obj);
twisti@3493 334 void* addr = index_oop_from_field_offset_long(p, offset);
mikael@10425 335
twisti@3493 336 volatile oop v;
mikael@10425 337
mdoerr@11941 338 if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
mdoerr@11941 339 OrderAccess::fence();
mdoerr@11941 340 }
mdoerr@11941 341
twisti@3493 342 if (UseCompressedOops) {
twisti@3493 343 volatile narrowOop n = *(volatile narrowOop*) addr;
hseigel@5349 344 (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
twisti@3493 345 } else {
hseigel@5349 346 (void)const_cast<oop&>(v = *(volatile oop*) addr);
twisti@3493 347 }
mikael@10425 348
mdoerr@12016 349 ensure_satb_referent_alive(p, offset, v);
mdoerr@12016 350
twisti@3493 351 OrderAccess::acquire();
duke@0 352 return JNIHandles::make_local(env, v);
mikael@10425 353 } UNSAFE_END
duke@0 354
mikael@11157 355 UNSAFE_ENTRY(void, Unsafe_PutObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
duke@0 356 oop x = JNIHandles::resolve(x_h);
duke@0 357 oop p = JNIHandles::resolve(obj);
kvn@1999 358 void* addr = index_oop_from_field_offset_long(p, offset);
kvn@1999 359 OrderAccess::release();
mikael@10425 360
coleenp@113 361 if (UseCompressedOops) {
kvn@1999 362 oop_store((narrowOop*)addr, x);
coleenp@113 363 } else {
kvn@1999 364 oop_store((oop*)addr, x);
coleenp@113 365 }
mikael@10425 366
duke@0 367 OrderAccess::fence();
mikael@10425 368 } UNSAFE_END
duke@0 369
mikael@10425 370 UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
twisti@8273 371 oop v = *(oop*) (address) addr;
mikael@10425 372
twisti@8273 373 return JNIHandles::make_local(env, v);
mikael@10425 374 } UNSAFE_END
twisti@8273 375
bpittore@4632 376 #ifndef SUPPORTS_NATIVE_CX8
kvn@1999 377
dholmes@7456 378 // VM_Version::supports_cx8() is a surrogate for 'supports atomic long memory ops'.
dholmes@7456 379 //
dholmes@7456 380 // On platforms which do not support atomic compare-and-swap of jlong (8 byte)
dholmes@7456 381 // values we have to use a lock-based scheme to enforce atomicity. This has to be
dholmes@7456 382 // applied to all Unsafe operations that set the value of a jlong field. Even so
psandoz@12991 383 // the compareAndSetLong operation will not be atomic with respect to direct stores
dholmes@7456 384 // to the field from Java code. It is important therefore that any Java code that
dholmes@7456 385 // utilizes these Unsafe jlong operations does not perform direct stores. To permit
dholmes@7456 386 // direct loads of the field from Java code we must also use Atomic::store within the
dholmes@7456 387 // locked regions. And for good measure, in case there are direct stores, we also
dholmes@7456 388 // employ Atomic::load within those regions. Note that the field in question must be
dholmes@7456 389 // volatile and so must have atomic load/store accesses applied at the Java level.
dholmes@7456 390 //
dholmes@7456 391 // The locking scheme could utilize a range of strategies for controlling the locking
dholmes@7456 392 // granularity: from a lock per-field through to a single global lock. The latter is
dholmes@7456 393 // the simplest and is used for the current implementation. Note that the Java object
dholmes@7456 394 // that contains the field cannot, in general, be used for locking. To do so can lead
dholmes@7456 395 // to deadlocks as we may introduce locking into what appears to the Java code to be a
dholmes@7456 396 // lock-free path.
dholmes@7456 397 //
dholmes@7456 398 // As all the locked-regions are very short and themselves non-blocking we can treat
dholmes@7456 399 // them as leaf routines and elide safepoint checks (ie we don't perform any thread
dholmes@7456 400 // state transitions even when blocking for the lock). Note that if we do choose to
dholmes@7456 401 // add safepoint checks and thread state transitions, we must ensure that we calculate
dholmes@7456 402 // the address of the field _after_ we have acquired the lock, else the object may have
dholmes@7456 403 // been moved by the GC.
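// As an illustrative Java-side sketch of that contract (the holder class, 'U' and
// 'VALUE_OFFSET' are assumptions, not part of this file): the field is declared
// 'volatile long value;' and is only ever written through Unsafe, e.g.
//   U.putLongVolatile(holder, VALUE_OFFSET, 42L);          // routed through the lock on non-CX8 platforms
//   U.compareAndSetLong(holder, VALUE_OFFSET, 42L, 43L);   // likewise routed through the lock
// whereas a direct store such as 'holder.value = 7L;' would bypass the lock and break atomicity.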
duke@0 404
mikael@10425 405 UNSAFE_ENTRY(jlong, Unsafe_GetLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
mikael@10425 406 if (VM_Version::supports_cx8()) {
mikael@11157 407 return MemoryAccess(thread, obj, offset).get_volatile<jlong>();
mikael@10425 408 } else {
mikael@11157 409 return MemoryAccess(thread, obj, offset).get_jlong_locked();
duke@0 410 }
mikael@10425 411 } UNSAFE_END
duke@0 412
mikael@11157 413 UNSAFE_ENTRY(void, Unsafe_PutLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x)) {
mikael@10425 414 if (VM_Version::supports_cx8()) {
mikael@11157 415 MemoryAccess(thread, obj, offset).put_volatile<jlong>(x);
mikael@10425 416 } else {
mikael@11157 417 MemoryAccess(thread, obj, offset).put_jlong_locked(x);
duke@0 418 }
mikael@10425 419 } UNSAFE_END
duke@0 420
bpittore@4632 421 #endif // not SUPPORTS_NATIVE_CX8
duke@0 422
mikael@10425 423 UNSAFE_LEAF(jboolean, Unsafe_isBigEndian0(JNIEnv *env, jobject unsafe)) {
aph@8194 424 #ifdef VM_LITTLE_ENDIAN
mikael@10425 425 return false;
aph@8194 426 #else
mikael@10425 427 return true;
aph@8194 428 #endif
mikael@10425 429 } UNSAFE_END
aph@8194 430
mikael@10425 431 UNSAFE_LEAF(jint, Unsafe_unalignedAccess0(JNIEnv *env, jobject unsafe)) {
mikael@10425 432 return UseUnalignedAccesses;
mikael@10425 433 } UNSAFE_END
aph@8194 434
mikael@11157 435 #define DEFINE_GETSETOOP(java_type, Type) \
duke@0 436 \
mikael@10425 437 UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
mikael@11157 438 return MemoryAccess(thread, obj, offset).get<java_type>(); \
mikael@10425 439 } UNSAFE_END \
duke@0 440 \
mikael@11157 441 UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
mikael@11157 442 MemoryAccess(thread, obj, offset).put<java_type>(x); \
mikael@10425 443 } UNSAFE_END \
duke@0 444 \
duke@0 445 // END DEFINE_GETSETOOP.
duke@0 446
kvn@1999 447 DEFINE_GETSETOOP(jboolean, Boolean)
kvn@1999 448 DEFINE_GETSETOOP(jbyte, Byte)
kvn@1999 449 DEFINE_GETSETOOP(jshort, Short);
kvn@1999 450 DEFINE_GETSETOOP(jchar, Char);
kvn@1999 451 DEFINE_GETSETOOP(jint, Int);
kvn@1999 452 DEFINE_GETSETOOP(jlong, Long);
kvn@1999 453 DEFINE_GETSETOOP(jfloat, Float);
kvn@1999 454 DEFINE_GETSETOOP(jdouble, Double);
kvn@1999 455
kvn@1999 456 #undef DEFINE_GETSETOOP
duke@0 457
mikael@10425 458 #define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
duke@0 459 \
mikael@10425 460 UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
mikael@11157 461 return MemoryAccess(thread, obj, offset).get_volatile<java_type>(); \
mikael@10425 462 } UNSAFE_END \
duke@0 463 \
mikael@11157 464 UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
mikael@11157 465 MemoryAccess(thread, obj, offset).put_volatile<java_type>(x); \
mikael@10425 466 } UNSAFE_END \
duke@0 467 \
duke@0 468 // END DEFINE_GETSETOOP_VOLATILE.
duke@0 469
duke@0 470 DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
duke@0 471 DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
duke@0 472 DEFINE_GETSETOOP_VOLATILE(jshort, Short);
duke@0 473 DEFINE_GETSETOOP_VOLATILE(jchar, Char);
duke@0 474 DEFINE_GETSETOOP_VOLATILE(jint, Int);
duke@0 475 DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
duke@0 476 DEFINE_GETSETOOP_VOLATILE(jdouble, Double);
duke@0 477
bpittore@4632 478 #ifdef SUPPORTS_NATIVE_CX8
kvn@1999 479 DEFINE_GETSETOOP_VOLATILE(jlong, Long);
kvn@1999 480 #endif
kvn@1999 481
kvn@1999 482 #undef DEFINE_GETSETOOP_VOLATILE
duke@0 483
mikael@10425 484 UNSAFE_LEAF(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe)) {
kvn@3926 485 OrderAccess::acquire();
mikael@10425 486 } UNSAFE_END
kvn@3926 487
mikael@10425 488 UNSAFE_LEAF(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe)) {
kvn@3926 489 OrderAccess::release();
mikael@10425 490 } UNSAFE_END
kvn@3926 491
mikael@10425 492 UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
kvn@3926 493 OrderAccess::fence();
mikael@10425 494 } UNSAFE_END
kvn@3926 495
duke@0 496 ////// Allocation requests
duke@0 497
mikael@10425 498 UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
mikael@10425 499 ThreadToNativeFromVM ttnfv(thread);
mikael@10425 500 return env->AllocObject(cls);
mikael@10425 501 } UNSAFE_END
duke@0 502
mikael@10425 503 UNSAFE_ENTRY(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
duke@0 504 size_t sz = (size_t)size;
mikael@10425 505
stefank@13264 506 sz = align_up(sz, HeapWordSize);
zgu@3465 507 void* x = os::malloc(sz, mtInternal);
mikael@10425 508
duke@0 509 return addr_to_java(x);
mikael@10425 510 } UNSAFE_END
duke@0 511
mikael@10425 512 UNSAFE_ENTRY(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
duke@0 513 void* p = addr_from_java(addr);
duke@0 514 size_t sz = (size_t)size;
stefank@13264 515 sz = align_up(sz, HeapWordSize);
mikael@10425 516
mikael@10425 517 void* x = os::realloc(p, sz, mtInternal);
mikael@10425 518
duke@0 519 return addr_to_java(x);
mikael@10425 520 } UNSAFE_END
duke@0 521
mikael@10425 522 UNSAFE_ENTRY(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
duke@0 523 void* p = addr_from_java(addr);
mikael@10425 524
duke@0 525 os::free(p);
mikael@10425 526 } UNSAFE_END
duke@0 527
mikael@10425 528 UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
duke@0 529 size_t sz = (size_t)size;
mikael@10425 530
duke@0 531 oop base = JNIHandles::resolve(obj);
duke@0 532 void* p = index_oop_from_field_offset_long(base, offset);
mikael@10425 533
duke@0 534 Copy::fill_to_memory_atomic(p, sz, value);
mikael@10425 535 } UNSAFE_END
duke@0 536
mikael@10425 537 UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
duke@0 538 size_t sz = (size_t)size;
mikael@10425 539
duke@0 540 oop srcp = JNIHandles::resolve(srcObj);
duke@0 541 oop dstp = JNIHandles::resolve(dstObj);
mikael@10425 542
duke@0 543 void* src = index_oop_from_field_offset_long(srcp, srcOffset);
duke@0 544 void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
mikael@10425 545
duke@0 546 Copy::conjoint_memory_atomic(src, dst, sz);
mikael@10425 547 } UNSAFE_END
duke@0 548
mikael@10248 549 // This function is a leaf because, if the source and destination are both in native memory,
mikael@10248 550 // the copy may potentially be very large and we don't want to disable GC if we can avoid it.
mikael@10248 551 // If either source or destination (or both) are on the heap, the function will enter the VM using
mikael@10248 552 // JVM_ENTRY_FROM_LEAF.
mikael@10425 553 UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
mikael@10248 554 size_t sz = (size_t)size;
mikael@10248 555 size_t esz = (size_t)elemSize;
mikael@10248 556
mikael@10248 557 if (srcObj == NULL && dstObj == NULL) {
mikael@10248 558 // Both src & dst are in native memory
mikael@10248 559 address src = (address)srcOffset;
mikael@10248 560 address dst = (address)dstOffset;
mikael@10248 561
mikael@10248 562 Copy::conjoint_swap(src, dst, sz, esz);
mikael@10248 563 } else {
mikael@10248 564 // At least one of src/dst are on heap, transition to VM to access raw pointers
mikael@10248 565
mikael@10248 566 JVM_ENTRY_FROM_LEAF(env, void, Unsafe_CopySwapMemory0) {
mikael@10248 567 oop srcp = JNIHandles::resolve(srcObj);
mikael@10248 568 oop dstp = JNIHandles::resolve(dstObj);
mikael@10248 569
mikael@10248 570 address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
mikael@10248 571 address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);
mikael@10248 572
mikael@10248 573 Copy::conjoint_swap(src, dst, sz, esz);
mikael@10248 574 } JVM_END
mikael@10248 575 }
mikael@10425 576 } UNSAFE_END
duke@0 577
duke@0 578 ////// Random queries
duke@0 579
mikael@10425 580 UNSAFE_LEAF(jint, Unsafe_AddressSize0(JNIEnv *env, jobject unsafe)) {
duke@0 581 return sizeof(void*);
mikael@10425 582 } UNSAFE_END
duke@0 583
mikael@10425 584 UNSAFE_LEAF(jint, Unsafe_PageSize()) {
duke@0 585 return os::vm_page_size();
mikael@10425 586 } UNSAFE_END
duke@0 587
redestad@13212 588 static jlong find_field_offset(jclass clazz, jstring name, TRAPS) {
redestad@13212 589 assert(clazz != NULL, "clazz must not be NULL");
redestad@13212 590 assert(name != NULL, "name must not be NULL");
redestad@13212 591
redestad@13212 592 ResourceMark rm(THREAD);
redestad@13212 593 char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));
redestad@13212 594
redestad@13212 595 InstanceKlass* k = InstanceKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(clazz)));
redestad@13212 596
redestad@13212 597 jint offset = -1;
redestad@13212 598 for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
redestad@13212 599 Symbol *name = fs.name();
redestad@13212 600 if (name->equals(utf_name)) {
redestad@13212 601 offset = fs.offset();
redestad@13212 602 break;
redestad@13212 603 }
redestad@13212 604 }
redestad@13212 605 if (offset < 0) {
redestad@13212 606 THROW_0(vmSymbols::java_lang_InternalError());
redestad@13212 607 }
redestad@13212 608 return field_offset_from_byte_offset(offset);
redestad@13212 609 }
redestad@13212 610
redestad@13212 611 static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
mikael@10425 612 assert(field != NULL, "field must not be NULL");
duke@0 613
duke@0 614 oop reflected = JNIHandles::resolve_non_null(field);
duke@0 615 oop mirror = java_lang_reflect_Field::clazz(reflected);
mikael@10425 616 Klass* k = java_lang_Class::as_Klass(mirror);
duke@0 617 int slot = java_lang_reflect_Field::slot(reflected);
duke@0 618 int modifiers = java_lang_reflect_Field::modifiers(reflected);
duke@0 619
duke@0 620 if (must_be_static >= 0) {
duke@0 621 int really_is_static = ((modifiers & JVM_ACC_STATIC) != 0);
duke@0 622 if (must_be_static != really_is_static) {
duke@0 623 THROW_0(vmSymbols::java_lang_IllegalArgumentException());
duke@0 624 }
duke@0 625 }
duke@0 626
coleenp@3602 627 int offset = InstanceKlass::cast(k)->field_offset(slot);
duke@0 628 return field_offset_from_byte_offset(offset);
duke@0 629 }
duke@0 630
mikael@10425 631 UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
duke@0 632 return find_field_offset(field, 0, THREAD);
mikael@10425 633 } UNSAFE_END
duke@0 634
redestad@13212 635 UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset1(JNIEnv *env, jobject unsafe, jclass c, jstring name)) {
redestad@13212 636 return find_field_offset(c, name, THREAD);
redestad@13212 637 } UNSAFE_END
redestad@13212 638
mikael@10425 639 UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
duke@0 640 return find_field_offset(field, 1, THREAD);
mikael@10425 641 } UNSAFE_END
duke@0 642
mikael@10425 643 UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
mikael@10425 644 assert(field != NULL, "field must not be NULL");
mikael@10425 645
duke@0 646 // Note: In this VM implementation, a field address is always a short
duke@0 647 // offset from the base of a klass metaobject. Thus, the full dynamic
duke@0 648 // range of the return type is never used. However, some implementations
duke@0 649 // might put the static field inside an array shared by many classes,
duke@0 650 // or even at a fixed address, in which case the address could be quite
duke@0 651 // large. In that last case, this function would return NULL, since
duke@0 652 // the address would operate alone, without any base pointer.
duke@0 653
duke@0 654 oop reflected = JNIHandles::resolve_non_null(field);
duke@0 655 oop mirror = java_lang_reflect_Field::clazz(reflected);
duke@0 656 int modifiers = java_lang_reflect_Field::modifiers(reflected);
duke@0 657
duke@0 658 if ((modifiers & JVM_ACC_STATIC) == 0) {
duke@0 659 THROW_0(vmSymbols::java_lang_IllegalArgumentException());
duke@0 660 }
duke@0 661
never@2223 662 return JNIHandles::make_local(env, mirror);
mikael@10425 663 } UNSAFE_END
duke@0 664
mikael@10425 665 UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
mikael@10425 666 assert(clazz != NULL, "clazz must not be NULL");
mikael@10425 667
duke@0 668 oop mirror = JNIHandles::resolve_non_null(clazz);
twisti@3534 669
coleenp@3602 670 Klass* klass = java_lang_Class::as_Klass(mirror);
hseigel@3843 671 if (klass != NULL && klass->should_be_initialized()) {
coleenp@3602 672 InstanceKlass* k = InstanceKlass::cast(klass);
duke@0 673 k->initialize(CHECK);
duke@0 674 }
twisti@3534 675 }
twisti@3534 676 UNSAFE_END
twisti@3534 677
mikael@10425 678 UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
mikael@10425 679 assert(clazz != NULL, "clazz must not be NULL");
mikael@10425 680
twisti@3534 681 oop mirror = JNIHandles::resolve_non_null(clazz);
coleenp@3602 682 Klass* klass = java_lang_Class::as_Klass(mirror);
mikael@10425 683
hseigel@3843 684 if (klass != NULL && klass->should_be_initialized()) {
twisti@3534 685 return true;
twisti@3534 686 }
mikael@10425 687
twisti@3534 688 return false;
twisti@3534 689 }
duke@0 690 UNSAFE_END
duke@0 691
mikael@10425 692 static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
mikael@10425 693 assert(clazz != NULL, "clazz must not be NULL");
mikael@10425 694
mikael@10425 695 oop mirror = JNIHandles::resolve_non_null(clazz);
mikael@10425 696 Klass* k = java_lang_Class::as_Klass(mirror);
mikael@10425 697
coleenp@9266 698 if (k == NULL || !k->is_array_klass()) {
duke@0 699 THROW(vmSymbols::java_lang_InvalidClassException());
coleenp@9266 700 } else if (k->is_objArray_klass()) {
duke@0 701 base = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
coleenp@113 702 scale = heapOopSize;
coleenp@9266 703 } else if (k->is_typeArray_klass()) {
coleenp@3707 704 TypeArrayKlass* tak = TypeArrayKlass::cast(k);
duke@0 705 base = tak->array_header_in_bytes();
duke@0 706 assert(base == arrayOopDesc::base_offset_in_bytes(tak->element_type()), "array_header_size semantics ok");
duke@0 707 scale = (1 << tak->log2_element_size());
duke@0 708 } else {
duke@0 709 ShouldNotReachHere();
duke@0 710 }
duke@0 711 }
duke@0 712
mikael@10425 713 UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
goetz@9244 714 int base = 0, scale = 0;
mikael@10425 715 getBaseAndScale(base, scale, clazz, CHECK_0);
mikael@10425 716
duke@0 717 return field_offset_from_byte_offset(base);
mikael@10425 718 } UNSAFE_END
duke@0 719
duke@0 720
mikael@10425 721 UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
goetz@9244 722 int base = 0, scale = 0;
mikael@10425 723 getBaseAndScale(base, scale, clazz, CHECK_0);
mikael@10425 724
duke@0 725 // This VM packs both fields and array elements down to the byte.
duke@0 726 // But watch out: If this changes, so that array references for
duke@0 727 // a given primitive type (say, T_BOOLEAN) use different memory units
duke@0 728 // than fields, this method MUST return zero for such arrays.
duke@0 729 // For example, the VM used to store sub-word sized fields in full
duke@0 730 // words in the object layout, so that accessors like getByte(Object,int)
duke@0 731 // did not really do what one might expect for arrays. Therefore,
duke@0 732 // this function used to report a zero scale factor, so that the user
duke@0 733 // would know not to attempt to access sub-word array elements.
duke@0 734 // // Code for unpacked fields:
duke@0 735 // if (scale < wordSize) return 0;
duke@0 736
duke@0 737 // The following allows for a pretty general fieldOffset cookie scheme,
duke@0 738 // but requires it to be linear in byte offset.
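// An illustrative Java-side use of the two cookies ('U', 'cls' and 'i' are assumptions):
//   long elementOffset = U.arrayBaseOffset(cls) + (long)i * U.arrayIndexScale(cls);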
duke@0 739 return field_offset_from_byte_offset(scale) - field_offset_from_byte_offset(0);
mikael@10425 740 } UNSAFE_END
duke@0 741
duke@0 742
duke@0 743 static inline void throw_new(JNIEnv *env, const char *ename) {
redestad@13212 744 jclass cls = env->FindClass(ename);
ccheung@5924 745 if (env->ExceptionCheck()) {
ccheung@5924 746 env->ExceptionClear();
redestad@13212 747 tty->print_cr("Unsafe: cannot throw %s because FindClass has failed", ename);
ccheung@5924 748 return;
ccheung@5924 749 }
mikael@10425 750
mikael@10425 751 env->ThrowNew(cls, NULL);
duke@0 752 }
duke@0 753
twisti@4431 754 static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
mikael@10425 755 // Code lifted from JDK 1.3 ClassLoader.c
duke@0 756
mikael@10425 757 jbyte *body;
mikael@10425 758 char *utfName = NULL;
mikael@10425 759 jclass result = 0;
mikael@10425 760 char buf[128];
duke@0 761
mikael@10425 762 assert(data != NULL, "Class bytes must not be NULL");
mikael@10425 763 assert(length >= 0, "length must not be negative: %d", length);
mikael@10425 764
mikael@10425 765 if (UsePerfData) {
mikael@10425 766 ClassLoader::unsafe_defineClassCallCounter()->inc();
mikael@10425 767 }
mikael@10425 768
mikael@10425 769 body = NEW_C_HEAP_ARRAY(jbyte, length, mtInternal);
mikael@10425 770 if (body == NULL) {
redestad@13212 771 throw_new(env, "java/lang/OutOfMemoryError");
mikael@10425 772 return 0;
mikael@10425 773 }
mikael@10425 774
mikael@10425 775 env->GetByteArrayRegion(data, offset, length, body);
mikael@10425 776 if (env->ExceptionOccurred()) {
mikael@10425 777 goto free_body;
mikael@10425 778 }
mikael@10425 779
mikael@10425 780 if (name != NULL) {
mikael@10425 781 uint len = env->GetStringUTFLength(name);
mikael@10425 782 int unicode_len = env->GetStringLength(name);
mikael@10425 783
mikael@10425 784 if (len >= sizeof(buf)) {
mikael@10425 785 utfName = NEW_C_HEAP_ARRAY(char, len + 1, mtInternal);
mikael@10425 786 if (utfName == NULL) {
redestad@13212 787 throw_new(env, "java/lang/OutOfMemoryError");
mikael@10425 788 goto free_body;
mikael@10425 789 }
mikael@10425 790 } else {
mikael@10425 791 utfName = buf;
duke@0 792 }
duke@0 793
mikael@10425 794 env->GetStringUTFRegion(name, 0, unicode_len, utfName);
mikael@10425 795
mikael@10425 796 for (uint i = 0; i < len; i++) {
mikael@10425 797 if (utfName[i] == '.') utfName[i] = '/';
duke@0 798 }
mikael@10425 799 }
duke@0 800
mikael@10425 801 result = JVM_DefineClass(env, utfName, loader, body, length, pd);
duke@0 802
mikael@10425 803 if (utfName && utfName != buf) {
mikael@10425 804 FREE_C_HEAP_ARRAY(char, utfName);
mikael@10425 805 }
duke@0 806
duke@0 807 free_body:
mikael@10425 808 FREE_C_HEAP_ARRAY(jbyte, body);
mikael@10425 809 return result;
duke@0 810 }
duke@0 811
duke@0 812
mikael@10425 813 UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
mikael@10425 814 ThreadToNativeFromVM ttnfv(thread);
mikael@10425 815
mikael@10425 816 return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
mikael@10425 817 } UNSAFE_END
twisti@4431 818
twisti@4431 819
jrose@431 820 // define a class but do not make it known to the class loader or system dictionary
jrose@431 821 // - host_class: supplies context for linkage, access control, protection domain, and class loader
hseigel@11946 822 // if host_class is itself anonymous then it is replaced with its host class.
jrose@431 823 // - data: bytes of a class file, a raw memory address (length gives the number of bytes)
jrose@431 824 // - cp_patches: where non-null entries exist, they replace corresponding CP entries in data
jrose@431 825
jrose@431 826 // When you load an anonymous class U, it works as if you changed its name just before loading,
jrose@431 827 // to a name that you will never use again. Since the name is lost, no other class can directly
jrose@431 828 // link to any member of U. Just after U is loaded, the only way to use it is reflectively,
jrose@431 829 // through java.lang.Class methods like Class.newInstance.
jrose@431 830
hseigel@11946 831 // The package of an anonymous class must either match its host class's package or be in the
hseigel@11946 832 // unnamed package. If it is in the unnamed package then it will be put in its host class's
hseigel@11946 833 // package.
hseigel@11946 834 //
hseigel@11946 835
jrose@431 836 // Access checks for linkage sites within U continue to follow the same rules as for named classes.
jrose@431 837 // An anonymous class also has special privileges to access any member of its host class.
jrose@431 838 // This is the main reason why this loading operation is unsafe. The purpose of this is to
jrose@431 839 // allow language implementations to simulate "open classes"; a host class in effect gets
jrose@431 840 // new code when an anonymous class is loaded alongside it. A less convenient but more
jrose@431 841 // standard way to do this is with reflection, which can also be set to ignore access
jrose@431 842 // restrictions.
jrose@431 843
jrose@431 844 // Access into an anonymous class is possible only through reflection. Therefore, there
jrose@431 845 // are no special access rules for calling into an anonymous class. The relaxed access
jrose@431 846 // rule for the host class is applied in the opposite direction: A host class can reflectively
jrose@431 847 // access one of its anonymous classes.
jrose@431 848
jrose@431 849 // If you load the same bytecodes twice, you get two different classes. You can reload
jrose@431 850 // the same bytecodes with or without varying CP patches.
jrose@431 851
jrose@431 852 // By using the CP patching array, you can have a new anonymous class U2 refer to an older one U1.
jrose@431 853 // The bytecodes for U2 should refer to U1 by a symbolic name (doesn't matter what the name is).
jrose@431 854 // The CONSTANT_Class entry for that name can be patched to refer directly to U1.
jrose@431 855
jrose@431 856 // This allows, for example, U2 to use U1 as a superclass or super-interface, or as
jrose@431 857 // an outer class (so that U2 is an anonymous inner class of anonymous U1).
jrose@431 858 // It is not possible for a named class, or an older anonymous class, to refer by
jrose@431 859 // name (via its CP) to a newer anonymous class.
jrose@431 860
jrose@431 861 // CP patching may also be used to modify (i.e., hack) the names of methods, classes,
jrose@431 862 // or type descriptors used in the loaded anonymous class.
jrose@431 863
jrose@431 864 // Finally, CP patching may be used to introduce "live" objects into the constant pool,
jrose@431 865 // instead of "dead" strings. A compiled statement like println((Object)"hello") can
jrose@431 866 // be changed to println(greeting), where greeting is an arbitrary object created before
jrose@431 867 // the anonymous class is loaded. This is useful in dynamic languages, in which
jrose@431 868 // various kinds of metaobjects must be introduced as constants into bytecode.
jrose@431 869 // Note the cast (Object), which tells the verifier to expect an arbitrary object,
jrose@431 870 // not just a literal string. For such ldc instructions, the verifier uses the
jrose@431 871 // type Object instead of String, if the loaded constant is not in fact a String.
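//
// As an illustrative Java-side sketch of such a patch ('unsafe', 'hostClass', 'classBytes'
// and the constant pool index are assumptions, not part of this file):
//   Object greeting = new StringBuilder("hello");   // a "live" object created beforehand
//   Object[] patches = new Object[cpEntryCount];    // entry i patches constant pool entry i
//   patches[stringEntryIndex] = greeting;           // replaces the "dead" string constant
//   Class<?> u = unsafe.defineAnonymousClass(hostClass, classBytes, patches);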
jrose@431 872
coleenp@12742 873 static InstanceKlass*
jrose@431 874 Unsafe_DefineAnonymousClass_impl(JNIEnv *env,
jrose@431 875 jclass host_class, jbyteArray data, jobjectArray cp_patches_jh,
mikael@10425 876 u1** temp_alloc,
jrose@431 877 TRAPS) {
mikael@10425 878 assert(host_class != NULL, "host_class must not be NULL");
mikael@10425 879 assert(data != NULL, "data must not be NULL");
jrose@431 880
jrose@431 881 if (UsePerfData) {
jrose@431 882 ClassLoader::unsafe_defineClassCallCounter()->inc();
jrose@431 883 }
jrose@431 884
mikael@10425 885 jint length = typeArrayOop(JNIHandles::resolve_non_null(data))->length();
mikael@10425 886 assert(length >= 0, "class_bytes_length must not be negative: %d", length);
jrose@431 887
mikael@10425 888 int class_bytes_length = (int) length;
mikael@10425 889
mikael@10425 890 u1* class_bytes = NEW_C_HEAP_ARRAY(u1, length, mtInternal);
mikael@10425 891 if (class_bytes == NULL) {
jrose@431 892 THROW_0(vmSymbols::java_lang_OutOfMemoryError());
jrose@431 893 }
jrose@431 894
jrose@431 895 // caller responsible to free it:
mikael@10425 896 *temp_alloc = class_bytes;
jrose@431 897
mikael@10425 898 jbyte* array_base = typeArrayOop(JNIHandles::resolve_non_null(data))->byte_at_addr(0);
mikael@10425 899 Copy::conjoint_jbytes(array_base, class_bytes, length);
jrose@431 900
jrose@431 901 objArrayHandle cp_patches_h;
jrose@431 902 if (cp_patches_jh != NULL) {
jrose@431 903 oop p = JNIHandles::resolve_non_null(cp_patches_jh);
mikael@10425 904 assert(p->is_objArray(), "cp_patches must be an object[]");
jrose@431 905 cp_patches_h = objArrayHandle(THREAD, (objArrayOop)p);
jrose@431 906 }
jrose@431 907
mgronlun@9684 908 const Klass* host_klass = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(host_class));
hseigel@11819 909
hseigel@11819 910 // Make sure it's the real host class, not another anonymous class.
hseigel@11819 911 while (host_klass != NULL && host_klass->is_instance_klass() &&
hseigel@11819 912 InstanceKlass::cast(host_klass)->is_anonymous()) {
hseigel@11819 913 host_klass = InstanceKlass::cast(host_klass)->host_klass();
hseigel@11819 914 }
hseigel@11819 915
hseigel@10698 916 // Primitive types have NULL Klass* fields in their java.lang.Class instances.
hseigel@10698 917 if (host_klass == NULL) {
hseigel@11946 918 THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(), "Host class is null");
hseigel@10698 919 }
mgronlun@9684 920
hseigel@11946 921 assert(host_klass->is_instance_klass(), "Host class must be an instance class");
hseigel@11946 922
jrose@431 923 const char* host_source = host_klass->external_name();
jrose@431 924 Handle host_loader(THREAD, host_klass->class_loader());
jrose@431 925 Handle host_domain(THREAD, host_klass->protection_domain());
jrose@431 926
jrose@431 927 GrowableArray<Handle>* cp_patches = NULL;
mikael@10425 928
jrose@431 929 if (cp_patches_h.not_null()) {
jrose@431 930 int alen = cp_patches_h->length();
mikael@10425 931
jrose@431 932 for (int i = alen-1; i >= 0; i--) {
jrose@431 933 oop p = cp_patches_h->obj_at(i);
jrose@431 934 if (p != NULL) {
jrose@431 935 Handle patch(THREAD, p);
mikael@10425 936
mikael@10425 937 if (cp_patches == NULL) {
jrose@431 938 cp_patches = new GrowableArray<Handle>(i+1, i+1, Handle());
mikael@10425 939 }
mikael@10425 940
jrose@431 941 cp_patches->at_put(i, patch);
jrose@431 942 }
jrose@431 943 }
jrose@431 944 }
jrose@431 945
mikael@10425 946 ClassFileStream st(class_bytes, class_bytes_length, host_source, ClassFileStream::verify);
mikael@10425 947
mikael@10425 948 Symbol* no_class_name = NULL;
mikael@10425 949 Klass* anonk = SystemDictionary::parse_stream(no_class_name,
mikael@10425 950 host_loader,
mikael@10425 951 host_domain,
mikael@10425 952 &st,
hseigel@11946 953 InstanceKlass::cast(host_klass),
mikael@10425 954 cp_patches,
mikael@10425 955 CHECK_NULL);
mikael@10425 956 if (anonk == NULL) {
mikael@10425 957 return NULL;
mikael@10425 958 }
mikael@10425 959
coleenp@12742 960 return InstanceKlass::cast(anonk);
mikael@10425 961 }
mikael@10425 962
mikael@10425 963 UNSAFE_ENTRY(jclass, Unsafe_DefineAnonymousClass0(JNIEnv *env, jobject unsafe, jclass host_class, jbyteArray data, jobjectArray cp_patches_jh)) {
mikael@10425 964 ResourceMark rm(THREAD);
jrose@431 965
mikael@10425 966 jobject res_jh = NULL;
mikael@10425 967 u1* temp_alloc = NULL;
mikael@10425 968
coleenp@12742 969 InstanceKlass* anon_klass = Unsafe_DefineAnonymousClass_impl(env, host_class, data, cp_patches_jh, &temp_alloc, THREAD);
coleenp@12742 970 if (anon_klass != NULL) {
mikael@10425 971 res_jh = JNIHandles::make_local(env, anon_klass->java_mirror());
jrose@431 972 }
jrose@431 973
jrose@431 974 // try/finally clause:
jrose@431 975 if (temp_alloc != NULL) {
mikael@10425 976 FREE_C_HEAP_ARRAY(u1, temp_alloc);
jrose@431 977 }
jrose@431 978
coleenp@3869 979 // The anonymous class loader data has artificially been kept alive to
coleenp@3869 980 // this point. The mirror and any instances of this class have to keep
coleenp@3869 981 // it alive afterwards.
coleenp@12742 982 if (anon_klass != NULL) {
lfoltan@10960 983 anon_klass->class_loader_data()->dec_keep_alive();
coleenp@3869 984 }
coleenp@3869 985
coleenp@3869 986 // let caller initialize it as needed...
coleenp@3869 987
jrose@431 988 return (jclass) res_jh;
mikael@10425 989 } UNSAFE_END
jrose@431 990
jrose@431 991
duke@0 992
mikael@10425 993 UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
mikael@10425 994 ThreadToNativeFromVM ttnfv(thread);
mikael@10425 995 env->Throw(thr);
mikael@10425 996 } UNSAFE_END
duke@0 997
duke@0 998 // JSR166 ------------------------------------------------------------------
duke@0 999
mikael@10425 1000 UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
shade@10328 1001 oop x = JNIHandles::resolve(x_h);
shade@10328 1002 oop e = JNIHandles::resolve(e_h);
shade@10328 1003 oop p = JNIHandles::resolve(obj);
shade@10328 1004 HeapWord* addr = (HeapWord *)index_oop_from_field_offset_long(p, offset);
shade@10328 1005 oop res = oopDesc::atomic_compare_exchange_oop(x, addr, e, true);
mikael@10425 1006 if (res == e) {
shade@10328 1007 update_barrier_set((void*)addr, x);
mikael@10425 1008 }
shade@10328 1009 return JNIHandles::make_local(env, res);
mikael@10425 1010 } UNSAFE_END
shade@10328 1011
mikael@10425 1012 UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
shade@10328 1013 oop p = JNIHandles::resolve(obj);
shade@10328 1014 jint* addr = (jint *) index_oop_from_field_offset_long(p, offset);
mikael@10425 1015
shade@10328 1016 return (jint)(Atomic::cmpxchg(x, addr, e));
mikael@10425 1017 } UNSAFE_END
shade@10328 1018
mikael@10425 1019 UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
mikael@11157 1020 Handle p(THREAD, JNIHandles::resolve(obj));
mikael@11157 1021 jlong* addr = (jlong*)index_oop_from_field_offset_long(p(), offset);
mikael@10425 1022
shade@10328 1023 #ifdef SUPPORTS_NATIVE_CX8
shade@10328 1024 return (jlong)(Atomic::cmpxchg(x, addr, e));
shade@10328 1025 #else
mikael@10425 1026 if (VM_Version::supports_cx8()) {
shade@10328 1027 return (jlong)(Atomic::cmpxchg(x, addr, e));
mikael@10425 1028 } else {
shade@10328 1029 MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
mikael@10425 1030
shade@10328 1031 jlong val = Atomic::load(addr);
mikael@10425 1032 if (val == e) {
shade@10328 1033 Atomic::store(x, addr);
mikael@10425 1034 }
shade@10328 1035 return val;
shade@10328 1036 }
shade@10328 1037 #endif
mikael@10425 1038 } UNSAFE_END
shade@10328 1039
psandoz@12991 1040 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
duke@0 1041 oop x = JNIHandles::resolve(x_h);
duke@0 1042 oop e = JNIHandles::resolve(e_h);
duke@0 1043 oop p = JNIHandles::resolve(obj);
coleenp@113 1044 HeapWord* addr = (HeapWord *)index_oop_from_field_offset_long(p, offset);
coleenp@3602 1045 oop res = oopDesc::atomic_compare_exchange_oop(x, addr, e, true);
mikael@10425 1046 if (res != e) {
mikael@10425 1047 return false;
mikael@10425 1048 }
duke@0 1049
mikael@10425 1050 update_barrier_set((void*)addr, x);
mikael@10425 1051
mikael@10425 1052 return true;
mikael@10425 1053 } UNSAFE_END
mikael@10425 1054
psandoz@12991 1055 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
duke@0 1056 oop p = JNIHandles::resolve(obj);
mikael@11157 1057 jint* addr = (jint *)index_oop_from_field_offset_long(p, offset);
mikael@10425 1058
duke@0 1059 return (jint)(Atomic::cmpxchg(x, addr, e)) == e;
mikael@10425 1060 } UNSAFE_END
duke@0 1061
psandoz@12991 1062 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
mikael@10425 1063 Handle p(THREAD, JNIHandles::resolve(obj));
mikael@10425 1064 jlong* addr = (jlong*)index_oop_from_field_offset_long(p(), offset);
mikael@10425 1065
dholmes@7456 1066 #ifdef SUPPORTS_NATIVE_CX8
dholmes@7456 1067 return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
dholmes@7456 1068 #else
mikael@10425 1069 if (VM_Version::supports_cx8()) {
duke@0 1070 return (jlong)(Atomic::cmpxchg(x, addr, e)) == e;
mikael@10425 1071 } else {
dholmes@7456 1072 MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
mikael@10425 1073
dholmes@7456 1074 jlong val = Atomic::load(addr);
mikael@10425 1075 if (val != e) {
mikael@10425 1076 return false;
mikael@10425 1077 }
mikael@10425 1078
mikael@10425 1079 Atomic::store(x, addr);
mikael@10425 1080 return true;
duke@0 1081 }
dholmes@7456 1082 #endif
mikael@10425 1083 } UNSAFE_END
duke@0 1084
mikael@10425 1085 UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
sla@4802 1086 EventThreadPark event;
sla@5934 1087 HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
sla@5934 1088
duke@0 1089 JavaThreadParkedState jtps(thread, time != 0);
duke@0 1090 thread->parker()->park(isAbsolute != 0, time);
sla@5934 1091
sla@5934 1092 HOTSPOT_THREAD_PARK_END((uintptr_t) thread->parker());
mikael@10425 1093
sla@4802 1094 if (event.should_commit()) {
sla@4802 1095 oop obj = thread->current_park_blocker();
egahlin@11866 1096 event.set_parkedClass((obj != NULL) ? obj->klass() : NULL);
sla@4802 1097 event.set_timeout(time);
hseigel@5349 1098 event.set_address((obj != NULL) ? (TYPE_ADDRESS) cast_from_oop<uintptr_t>(obj) : 0);
sla@4802 1099 event.commit();
sla@4802 1100 }
mikael@10425 1101 } UNSAFE_END
duke@0 1102
mikael@10425 1103 UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
duke@0 1104 Parker* p = NULL;
mikael@10425 1105
duke@0 1106 if (jthread != NULL) {
duke@0 1107 oop java_thread = JNIHandles::resolve_non_null(jthread);
duke@0 1108 if (java_thread != NULL) {
duke@0 1109 jlong lp = java_lang_Thread::park_event(java_thread);
duke@0 1110 if (lp != 0) {
duke@0 1111 // This cast is OK even though the jlong might have been read
duke@0 1112 // non-atomically on 32bit systems, since there, one word will
duke@0 1113 // always be zero anyway and the value set is always the same
duke@0 1114 p = (Parker*)addr_from_java(lp);
duke@0 1115 } else {
duke@0 1116 // Grab lock if apparently null or using older version of library
duke@0 1117 MutexLocker mu(Threads_lock);
duke@0 1118 java_thread = JNIHandles::resolve_non_null(jthread);
mikael@10425 1119
duke@0 1120 if (java_thread != NULL) {
duke@0 1121 JavaThread* thr = java_lang_Thread::thread(java_thread);
duke@0 1122 if (thr != NULL) {
duke@0 1123 p = thr->parker();
duke@0 1124 if (p != NULL) { // Bind to Java thread for next time.
duke@0 1125 java_lang_Thread::set_park_event(java_thread, addr_to_java(p));
duke@0 1126 }
duke@0 1127 }
duke@0 1128 }
duke@0 1129 }
duke@0 1130 }
duke@0 1131 }
mikael@10425 1132
duke@0 1133 if (p != NULL) {
sla@5934 1134 HOTSPOT_THREAD_UNPARK((uintptr_t) p);
duke@0 1135 p->unpark();
duke@0 1136 }
mikael@10425 1137 } UNSAFE_END
duke@0 1138
mikael@10425 1139 UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
duke@0 1140 const int max_nelem = 3;
duke@0 1141 double la[max_nelem];
duke@0 1142 jint ret;
duke@0 1143
duke@0 1144 typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
duke@0 1145 assert(a->is_typeArray(), "must be type array");
duke@0 1146
mikael@10425 1147 ret = os::loadavg(la, nelem);
mikael@10425 1148 if (ret == -1) {
duke@0 1149 return -1;
duke@0 1150 }
duke@0 1151
duke@0 1152 // if successful, ret is the number of samples actually retrieved.
duke@0 1153 assert(ret >= 0 && ret <= max_nelem, "Unexpected loadavg return value");
duke@0 1154 switch(ret) {
duke@0 1155 case 3: a->double_at_put(2, (jdouble)la[2]); // fall through
duke@0 1156 case 2: a->double_at_put(1, (jdouble)la[1]); // fall through
duke@0 1157 case 1: a->double_at_put(0, (jdouble)la[0]); break;
duke@0 1158 }
mikael@10425 1159
duke@0 1160 return ret;
mikael@10425 1161 } UNSAFE_END
duke@0 1162
duke@0 1163
duke@0 1164 /// JVM_RegisterUnsafeMethods
duke@0 1165
duke@0 1166 #define ADR "J"
duke@0 1167
duke@0 1168 #define LANG "Ljava/lang/"
duke@0 1169
bpittore@8638 1170 #define OBJ LANG "Object;"
bpittore@8638 1171 #define CLS LANG "Class;"
bpittore@8638 1172 #define FLD LANG "reflect/Field;"
bpittore@8638 1173 #define THR LANG "Throwable;"
duke@0 1174
bpittore@8638 1175 #define DC_Args LANG "String;[BII" LANG "ClassLoader;" "Ljava/security/ProtectionDomain;"
bpittore@8638 1176 #define DAC_Args CLS "[B[" OBJ
duke@0 1177
duke@0 1178 #define CC (char*) /*cast a literal from (const char*)*/
duke@0 1179 #define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)
duke@0 1180
mikael@10425 1181 #define DECLARE_GETPUTOOP(Type, Desc) \
mikael@10425 1182 {CC "get" #Type, CC "(" OBJ "J)" #Desc, FN_PTR(Unsafe_Get##Type)}, \
mikael@11157 1183 {CC "put" #Type, CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Put##Type)}, \
mikael@10425 1184 {CC "get" #Type "Volatile", CC "(" OBJ "J)" #Desc, FN_PTR(Unsafe_Get##Type##Volatile)}, \
mikael@11157 1185 {CC "put" #Type "Volatile", CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Put##Type##Volatile)}
duke@0 1186
duke@0 1187
psandoz@9769 1188 static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
bpittore@8638 1189 {CC "getObject", CC "(" OBJ "J)" OBJ "", FN_PTR(Unsafe_GetObject)},
mikael@11157 1190 {CC "putObject", CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_PutObject)},
bpittore@8638 1191 {CC "getObjectVolatile",CC "(" OBJ "J)" OBJ "", FN_PTR(Unsafe_GetObjectVolatile)},
mikael@11157 1192 {CC "putObjectVolatile",CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_PutObjectVolatile)},
duke@0 1193
bpittore@8638 1194 {CC "getUncompressedObject", CC "(" ADR ")" OBJ, FN_PTR(Unsafe_GetUncompressedObject)},
twisti@8273 1195
psandoz@7841 1196 DECLARE_GETPUTOOP(Boolean, Z),
psandoz@7841 1197 DECLARE_GETPUTOOP(Byte, B),
psandoz@7841 1198 DECLARE_GETPUTOOP(Short, S),
psandoz@7841 1199 DECLARE_GETPUTOOP(Char, C),
psandoz@7841 1200 DECLARE_GETPUTOOP(Int, I),
psandoz@7841 1201 DECLARE_GETPUTOOP(Long, J),
psandoz@7841 1202 DECLARE_GETPUTOOP(Float, F),
psandoz@7841 1203 DECLARE_GETPUTOOP(Double, D),
duke@0 1204
mikael@10425 1205 {CC "allocateMemory0", CC "(J)" ADR, FN_PTR(Unsafe_AllocateMemory0)},
mikael@10425 1206 {CC "reallocateMemory0", CC "(" ADR "J)" ADR, FN_PTR(Unsafe_ReallocateMemory0)},
mikael@10425 1207 {CC "freeMemory0", CC "(" ADR ")V", FN_PTR(Unsafe_FreeMemory0)},
twisti@4431 1208
mikael@10425 1209 {CC "objectFieldOffset0", CC "(" FLD ")J", FN_PTR(Unsafe_ObjectFieldOffset0)},
redestad@13212 1210 {CC "objectFieldOffset1", CC "(" CLS LANG "String;)J", FN_PTR(Unsafe_ObjectFieldOffset1)},
mikael@10425 1211 {CC "staticFieldOffset0", CC "(" FLD ")J", FN_PTR(Unsafe_StaticFieldOffset0)},
mikael@10425 1212 {CC "staticFieldBase0", CC "(" FLD ")" OBJ, FN_PTR(Unsafe_StaticFieldBase0)},
mikael@10425 1213 {CC "ensureClassInitialized0", CC "(" CLS ")V", FN_PTR(Unsafe_EnsureClassInitialized0)},
mikael@10425 1214 {CC "arrayBaseOffset0", CC "(" CLS ")I", FN_PTR(Unsafe_ArrayBaseOffset0)},
mikael@10425 1215 {CC "arrayIndexScale0", CC "(" CLS ")I", FN_PTR(Unsafe_ArrayIndexScale0)},
mikael@10425 1216 {CC "addressSize0", CC "()I", FN_PTR(Unsafe_AddressSize0)},
bpittore@8638 1217 {CC "pageSize", CC "()I", FN_PTR(Unsafe_PageSize)},
twisti@4431 1218
mikael@10425 1219 {CC "defineClass0", CC "(" DC_Args ")" CLS, FN_PTR(Unsafe_DefineClass0)},
bpittore@8638 1220 {CC "allocateInstance", CC "(" CLS ")" OBJ, FN_PTR(Unsafe_AllocateInstance)},
bpittore@8638 1221 {CC "throwException", CC "(" THR ")V", FN_PTR(Unsafe_ThrowException)},
psandoz@12991 1222 {CC "compareAndSetObject",CC "(" OBJ "J" OBJ "" OBJ ")Z", FN_PTR(Unsafe_CompareAndSetObject)},
psandoz@12991 1223 {CC "compareAndSetInt", CC "(" OBJ "J""I""I"")Z", FN_PTR(Unsafe_CompareAndSetInt)},
psandoz@12991 1224 {CC "compareAndSetLong", CC "(" OBJ "J""J""J"")Z", FN_PTR(Unsafe_CompareAndSetLong)},
psandoz@12991 1225 {CC "compareAndExchangeObject", CC "(" OBJ "J" OBJ "" OBJ ")" OBJ, FN_PTR(Unsafe_CompareAndExchangeObject)},
psandoz@12991 1226 {CC "compareAndExchangeInt", CC "(" OBJ "J""I""I"")I", FN_PTR(Unsafe_CompareAndExchangeInt)},
psandoz@12991 1227 {CC "compareAndExchangeLong", CC "(" OBJ "J""J""J"")J", FN_PTR(Unsafe_CompareAndExchangeLong)},
shade@10328 1228
bpittore@8638 1229 {CC "park", CC "(ZJ)V", FN_PTR(Unsafe_Park)},
bpittore@8638 1230 {CC "unpark", CC "(" OBJ ")V", FN_PTR(Unsafe_Unpark)},
duke@0 1231
mikael@10425 1232 {CC "getLoadAverage0", CC "([DI)I", FN_PTR(Unsafe_GetLoadAverage0)},
duke@0 1233
mikael@10425 1234 {CC "copyMemory0", CC "(" OBJ "J" OBJ "JJ)V", FN_PTR(Unsafe_CopyMemory0)},
mikael@10248 1235 {CC "copySwapMemory0", CC "(" OBJ "J" OBJ "JJJ)V", FN_PTR(Unsafe_CopySwapMemory0)},
mikael@10425 1236 {CC "setMemory0", CC "(" OBJ "JJB)V", FN_PTR(Unsafe_SetMemory0)},
duke@0 1237
mikael@10425 1238 {CC "defineAnonymousClass0", CC "(" DAC_Args ")" CLS, FN_PTR(Unsafe_DefineAnonymousClass0)},
duke@0 1239
mikael@10425 1240 {CC "shouldBeInitialized0", CC "(" CLS ")Z", FN_PTR(Unsafe_ShouldBeInitialized0)},
duke@0 1241
bpittore@8638 1242 {CC "loadFence", CC "()V", FN_PTR(Unsafe_LoadFence)},
bpittore@8638 1243 {CC "storeFence", CC "()V", FN_PTR(Unsafe_StoreFence)},
bpittore@8638 1244 {CC "fullFence", CC "()V", FN_PTR(Unsafe_FullFence)},
aph@8194 1245
bpittore@8638 1246 {CC "isBigEndian0", CC "()Z", FN_PTR(Unsafe_isBigEndian0)},
bpittore@8638 1247 {CC "unalignedAccess0", CC "()Z", FN_PTR(Unsafe_unalignedAccess0)}
twisti@4431 1248 };
twisti@4431 1249
duke@0 1250 #undef CC
duke@0 1251 #undef FN_PTR
duke@0 1252
duke@0 1253 #undef ADR
duke@0 1254 #undef LANG
duke@0 1255 #undef OBJ
duke@0 1256 #undef CLS
duke@0 1257 #undef FLD
duke@0 1258 #undef THR
twisti@4431 1259 #undef DC_Args
psandoz@7841 1260 #undef DAC_Args
duke@0 1261
psandoz@7841 1262 #undef DECLARE_GETPUTOOP
twisti@4431 1263
twisti@4431 1264
mikael@10425 1265 // This function is exported, used by NativeLookup.
duke@0 1266 // The Unsafe_xxx functions above are called only from the interpreter.
duke@0 1267 // The optimizer looks at names and signatures to recognize
duke@0 1268 // individual functions.
duke@0 1269
mikael@10425 1270 JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
mikael@10425 1271 ThreadToNativeFromVM ttnfv(thread);
twisti@4431 1272
mikael@10425 1273 int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
mikael@10425 1274 guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
mikael@10425 1275 } JVM_END