annotate src/cpu/s390/vm/methodHandles_s390.cpp @ 12547:0c6ed760800c

8172049: [s390] Implement "JEP 270: Reserved Stack Areas for Critical Sections". Reviewed-by: mdoerr
author goetz
date Tue, 27 Dec 2016 16:10:59 +0100
parents
children d342027d6321
rev   line source
goetz@12256 1 /*
goetz@12256 2 * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
goetz@12256 3 * Copyright (c) 2016 SAP SE. All rights reserved.
goetz@12256 4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
goetz@12256 5 *
goetz@12256 6 * This code is free software; you can redistribute it and/or modify it
goetz@12256 7 * under the terms of the GNU General Public License version 2 only, as
goetz@12256 8 * published by the Free Software Foundation.
goetz@12256 9 *
goetz@12256 10 * This code is distributed in the hope that it will be useful, but WITHOUT
goetz@12256 11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
goetz@12256 12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
goetz@12256 13 * version 2 for more details (a copy is included in the LICENSE file that
goetz@12256 14 * accompanied this code).
goetz@12256 15 *
goetz@12256 16 * You should have received a copy of the GNU General Public License version
goetz@12256 17 * 2 along with this work; if not, write to the Free Software Foundation,
goetz@12256 18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
goetz@12256 19 *
goetz@12256 20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
goetz@12256 21 * or visit www.oracle.com if you need additional information or have any
goetz@12256 22 * questions.
goetz@12256 23 *
goetz@12256 24 */
goetz@12256 25
goetz@12256 26 #include "precompiled.hpp"
goetz@12256 27 #include "asm/macroAssembler.inline.hpp"
goetz@12256 28 #include "classfile/javaClasses.inline.hpp"
goetz@12256 29 #include "interpreter/interpreter.hpp"
goetz@12256 30 #include "memory/allocation.inline.hpp"
goetz@12256 31 #include "memory/resourceArea.hpp"
goetz@12256 32 #include "prims/methodHandles.hpp"
goetz@12256 33
goetz@12256 34 #ifdef PRODUCT
goetz@12256 35 #define __ _masm->
goetz@12256 36 #define BLOCK_COMMENT(str) /* nothing */
goetz@12256 37 #else
goetz@12256 38 #define __ (Verbose ? (_masm->block_comment(FILE_AND_LINE),_masm):_masm)->
goetz@12256 39 #define BLOCK_COMMENT(str) __ block_comment(str)
goetz@12256 40 #endif
goetz@12256 41
goetz@12256 42 #define BIND(label) bind(label); BLOCK_COMMENT(#label ":")
goetz@12256 43
goetz@12256 44 // Workaround for C++ overloading nastiness on '0' for RegisterOrConstant.
goetz@12256 45 static RegisterOrConstant constant(int value) {
goetz@12256 46 return RegisterOrConstant(value);
goetz@12256 47 }
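// A rough illustration of the overload problem worked around above (sketch only, not part of
// the generated code): a literal 0 in a call such as
//   __ compare32_and_branch(temp, 0, Assembler::bcondEqual, L);
// can be ambiguous, because 0 converts both to Register (as a null pointer constant) and to
// RegisterOrConstant. Writing constant(0), as the dispatch code below does, pins the call to
// the RegisterOrConstant overload.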
goetz@12256 48
goetz@12256 49 void MethodHandles::load_klass_from_Class(MacroAssembler* _masm, Register klass_reg,
goetz@12256 50 Register temp_reg, Register temp2_reg) {
goetz@12256 51 if (VerifyMethodHandles) {
goetz@12256 52 verify_klass(_masm, klass_reg, SystemDictionary::WK_KLASS_ENUM_NAME(java_lang_Class),
goetz@12256 53 temp_reg, temp2_reg, "MH argument is a Class");
goetz@12256 54 }
goetz@12256 55 __ z_lg(klass_reg, Address(klass_reg, java_lang_Class::klass_offset_in_bytes()));
goetz@12256 56 }
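// In other words: klass_reg holds a java.lang.Class mirror on entry and, on exit, the Klass*
// stored in that mirror at java_lang_Class::klass_offset_in_bytes().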
goetz@12256 57
goetz@12256 58
goetz@12256 59 #ifdef ASSERT
goetz@12256 60 static int check_nonzero(const char* xname, int x) {
goetz@12256 61 assert(x != 0, "%s should be nonzero", xname);
goetz@12256 62 return x;
goetz@12256 63 }
goetz@12256 64 #define NONZERO(x) check_nonzero(#x, x)
goetz@12256 65 #else
goetz@12256 66 #define NONZERO(x) (x)
goetz@12256 67 #endif
goetz@12256 68
goetz@12256 69 #ifdef ASSERT
goetz@12256 70 void MethodHandles::verify_klass(MacroAssembler* _masm,
goetz@12256 71 Register obj_reg, SystemDictionary::WKID klass_id,
goetz@12256 72 Register temp_reg, Register temp2_reg,
goetz@12256 73 const char* error_message) {
goetz@12256 74
goetz@12256 75 InstanceKlass** klass_addr = SystemDictionary::well_known_klass_addr(klass_id);
goetz@12256 76 KlassHandle klass = SystemDictionary::well_known_klass(klass_id);
goetz@12256 77
goetz@12256 78 assert(temp_reg != Z_R0 && // Is used as base register!
goetz@12256 79 temp_reg != noreg && temp2_reg != noreg, "need valid registers!");
goetz@12256 80
goetz@12256 81 NearLabel L_ok, L_bad;
goetz@12256 82
goetz@12256 83 BLOCK_COMMENT("verify_klass {");
goetz@12256 84
goetz@12256 85 __ verify_oop(obj_reg);
goetz@12256 86 __ compareU64_and_branch(obj_reg, (intptr_t)0L, Assembler::bcondEqual, L_bad);
goetz@12256 87 __ load_klass(temp_reg, obj_reg);
goetz@12256 88 // klass_addr points to a klass entry in the AllStatic SystemDictionaryHandles; it can't get GCed.
goetz@12256 89 __ load_const_optimized(temp2_reg, (address)klass_addr);
goetz@12256 90 __ z_lg(temp2_reg, Address(temp2_reg));
goetz@12256 91 __ compareU64_and_branch(temp_reg, temp2_reg, Assembler::bcondEqual, L_ok);
goetz@12256 92
goetz@12256 93 intptr_t super_check_offset = klass->super_check_offset();
goetz@12256 94 __ z_lg(temp_reg, Address(temp_reg, super_check_offset));
goetz@12256 95 __ compareU64_and_branch(temp_reg, temp2_reg, Assembler::bcondEqual, L_ok);
goetz@12256 96 __ BIND(L_bad);
goetz@12256 97 __ stop(error_message);
goetz@12256 98 __ BIND(L_ok);
goetz@12256 99
goetz@12256 100 BLOCK_COMMENT("} verify_klass");
goetz@12256 101 }
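// Roughly: the object is accepted if load_klass(obj) equals the well-known klass loaded from
// klass_addr, or if the word at that klass's super_check_offset equals it (the one-klass fast
// subtype check); otherwise execution stops with error_message.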
goetz@12256 102
goetz@12256 103 void MethodHandles::verify_ref_kind(MacroAssembler* _masm, int ref_kind,
goetz@12256 104 Register member_reg, Register temp) {
goetz@12256 105 NearLabel L;
goetz@12256 106 BLOCK_COMMENT("verify_ref_kind {");
goetz@12256 107
goetz@12256 108 __ z_llgf(temp,
goetz@12256 109 Address(member_reg,
goetz@12256 110 NONZERO(java_lang_invoke_MemberName::flags_offset_in_bytes())));
goetz@12256 111 __ z_srl(temp, java_lang_invoke_MemberName::MN_REFERENCE_KIND_SHIFT);
goetz@12256 112 __ z_nilf(temp, java_lang_invoke_MemberName::MN_REFERENCE_KIND_MASK);
goetz@12256 113 __ compare32_and_branch(temp, constant(ref_kind), Assembler::bcondEqual, L);
goetz@12256 114
goetz@12256 115 {
goetz@12256 116 char *buf = NEW_C_HEAP_ARRAY(char, 100, mtInternal);
goetz@12256 117
goetz@12256 118 jio_snprintf(buf, 100, "verify_ref_kind expected %x", ref_kind);
goetz@12256 119 if (ref_kind == JVM_REF_invokeVirtual || ref_kind == JVM_REF_invokeSpecial) {
goetz@12256 120 // Could do this for all ref_kinds, but would explode assembly code size.
goetz@12256 121 trace_method_handle(_masm, buf);
goetz@12256 122 }
goetz@12256 123 __ stop(buf);
goetz@12256 124 }
goetz@12256 125
goetz@12256 126 BLOCK_COMMENT("} verify_ref_kind");
goetz@12256 127
goetz@12256 128 __ bind(L);
goetz@12256 129 }
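// The check above recomputes the reference kind from the MemberName flags word, roughly
// (illustrative only):
//   int kind = (flags >> MN_REFERENCE_KIND_SHIFT) & MN_REFERENCE_KIND_MASK;
//   if (kind != ref_kind) stop(...);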
goetz@12256 130 #endif // ASSERT
goetz@12256 131
goetz@12256 132 void MethodHandles::jump_from_method_handle(MacroAssembler* _masm, Register method, Register target,
goetz@12256 133 Register temp, bool for_compiler_entry) {
goetz@12256 134 assert(method == Z_method, "interpreter calling convention");
goetz@12256 135 __ verify_method_ptr(method);
goetz@12256 136
goetz@12256 137 assert(target != method, "don't you kill the method reg!");
goetz@12256 138
goetz@12256 139 Label L_no_such_method;
goetz@12256 140
goetz@12256 141 if (!for_compiler_entry && JvmtiExport::can_post_interpreter_events()) {
goetz@12256 142 // JVMTI events, such as single-stepping, are implemented partly
goetz@12256 143 // by avoiding running compiled code in threads for which the
goetz@12256 144 // event is enabled. Check here for interp_only_mode if these
goetz@12256 145 // events CAN be enabled.
goetz@12256 146 __ verify_thread();
goetz@12256 147
goetz@12256 148 Label run_compiled_code;
goetz@12256 149
goetz@12256 150 __ load_and_test_int(temp, Address(Z_thread, JavaThread::interp_only_mode_offset()));
goetz@12256 151 __ z_bre(run_compiled_code);
goetz@12256 152
goetz@12256 153 // Null method test is replicated below in compiled case,
goetz@12256 154 // it might be able to address across the verify_thread().
goetz@12256 155 __ z_ltgr(temp, method);
goetz@12256 156 __ z_bre(L_no_such_method);
goetz@12256 157
goetz@12256 158 __ z_lg(target, Address(method, Method::interpreter_entry_offset()));
goetz@12256 159 __ z_br(target);
goetz@12256 160
goetz@12256 161 __ bind(run_compiled_code);
goetz@12256 162 }
goetz@12256 163
goetz@12256 164 // Compiled case, either static or fall-through from runtime conditional.
goetz@12256 165 __ z_ltgr(temp, method);
goetz@12256 166 __ z_bre(L_no_such_method);
goetz@12256 167
goetz@12256 168 ByteSize offset = for_compiler_entry ?
goetz@12256 169 Method::from_compiled_offset() : Method::from_interpreted_offset();
goetz@12256 170 Address method_from(method, offset);
goetz@12256 171
goetz@12256 172 __ z_lg(target, method_from);
goetz@12256 173 __ z_br(target);
goetz@12256 174
goetz@12256 175 __ bind(L_no_such_method);
goetz@12256 176 assert(StubRoutines::throw_AbstractMethodError_entry() != NULL, "not yet generated!");
goetz@12256 177 __ load_const_optimized(target, StubRoutines::throw_AbstractMethodError_entry());
goetz@12256 178 __ z_br(target);
goetz@12256 179 }
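// The dispatch above, summarized as pseudocode (illustrative; the accessor names are the usual
// Method entry points implied by the offsets used):
//   if (!for_compiler_entry && thread->interp_only_mode()) {
//     if (method == NULL) goto L_no_such_method;
//     jump to method->interpreter_entry();
//   }
//   if (method == NULL) goto L_no_such_method;
//   jump to (for_compiler_entry ? method->from_compiled_entry()
//                               : method->from_interpreted_entry());
// L_no_such_method:
//   jump to StubRoutines::throw_AbstractMethodError_entry();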
goetz@12256 180
goetz@12256 181 void MethodHandles::jump_to_lambda_form(MacroAssembler* _masm,
goetz@12256 182 Register recv, Register method_temp,
goetz@12256 183 Register temp2, Register temp3,
goetz@12256 184 bool for_compiler_entry) {
goetz@12256 185
goetz@12256 186 // This is the initial entry point of a lazy method handle.
goetz@12256 187 // After type checking, it picks up the invoker from the LambdaForm.
goetz@12256 188 assert_different_registers(recv, method_temp, temp2, temp3);
goetz@12256 189 assert(method_temp == Z_method, "required register for loading method");
goetz@12256 190
goetz@12256 191 BLOCK_COMMENT("jump_to_lambda_form {");
goetz@12256 192
goetz@12256 193 // Load the invoker, as MH -> MH.form -> LF.vmentry
goetz@12256 194 __ verify_oop(recv);
goetz@12256 195 __ load_heap_oop(method_temp,
goetz@12256 196 Address(recv,
goetz@12256 197 NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes())));
goetz@12256 198 __ verify_oop(method_temp);
goetz@12256 199 __ load_heap_oop(method_temp,
goetz@12256 200 Address(method_temp,
goetz@12256 201 NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes())));
goetz@12256 202 __ verify_oop(method_temp);
goetz@12256 203 // The following assumes that a method is normally compressed in the vmtarget field.
goetz@12256 204 __ z_lg(method_temp,
goetz@12256 205 Address(method_temp,
goetz@12256 206 NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes())));
goetz@12256 207
goetz@12256 208 if (VerifyMethodHandles && !for_compiler_entry) {
goetz@12256 209 // Make sure recv is already on stack.
goetz@12256 210 NearLabel L;
goetz@12256 211 Address paramSize(temp2, ConstMethod::size_of_parameters_offset());
goetz@12256 212
goetz@12256 213 __ z_lg(temp2, Address(method_temp, Method::const_offset()));
goetz@12256 214 __ load_sized_value(temp2, paramSize, sizeof(u2), /*is_signed*/ false);
goetz@12256 215 // if (temp2 != recv) stop
goetz@12256 216 __ z_lg(temp2, __ argument_address(temp2, temp2, 0));
goetz@12256 217 __ compare64_and_branch(temp2, recv, Assembler::bcondEqual, L);
goetz@12256 218 __ stop("receiver not on stack");
goetz@12256 219 __ BIND(L);
goetz@12256 220 }
goetz@12256 221
goetz@12256 222 jump_from_method_handle(_masm, method_temp, temp2, Z_R0, for_compiler_entry);
goetz@12256 223
goetz@12256 224 BLOCK_COMMENT("} jump_to_lambda_form");
goetz@12256 225 }
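// The oop chain traversed above, spelled out (illustrative):
//   method_temp = recv.form              // java.lang.invoke.MethodHandle.form
//   method_temp = method_temp.vmentry    // java.lang.invoke.LambdaForm.vmentry (a MemberName)
//   method_temp = method_temp.vmtarget   // the Method* held by that MemberName
// followed by jump_from_method_handle() on the resulting Method*.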
goetz@12256 226
goetz@12256 227 // code generation
goetz@12256 228 address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler* _masm,
goetz@12256 229 vmIntrinsics::ID iid) {
goetz@12256 230 const bool not_for_compiler_entry = false; // This is the interpreter entry.
goetz@12256 231 assert(is_signature_polymorphic(iid), "expected invoke iid");
goetz@12256 232
goetz@12256 233 if (iid == vmIntrinsics::_invokeGeneric || iid == vmIntrinsics::_compiledLambdaForm) {
goetz@12256 234 // Perhaps surprisingly, the symbolic references visible to Java
goetz@12256 235 // are not directly used. They are linked to Java-generated
goetz@12256 236 // adapters via MethodHandleNatives.linkMethod. They all allow an
goetz@12256 237 // appendix argument.
goetz@12256 238 __ should_not_reach_here(); // Empty stubs make SG sick.
goetz@12256 239 return NULL;
goetz@12256 240 }
goetz@12256 241
goetz@12256 242 // Z_R10: sender SP (must preserve; see prepare_to_jump_from_interpreted)
goetz@12256 243 // Z_method: method
goetz@12256 244 // Z_ARG1 (Gargs): incoming argument list (must preserve)
goetz@12256 245 Register Z_R4_param_size = Z_R4; // size of parameters
goetz@12256 246 address code_start = __ pc();
goetz@12256 247
goetz@12256 248 // Here is where control starts out:
goetz@12256 249 __ align(CodeEntryAlignment);
goetz@12256 250
goetz@12256 251 address entry_point = __ pc();
goetz@12256 252
goetz@12256 253 if (VerifyMethodHandles) {
goetz@12256 254 Label L;
goetz@12256 255 BLOCK_COMMENT("verify_intrinsic_id {");
goetz@12256 256
goetz@12256 257 // Supplement to 8139891: _intrinsic_id exceeded 1-byte size limit.
goetz@12256 258 if (Method::intrinsic_id_size_in_bytes() == 1) {
goetz@12256 259 __ z_cli(Address(Z_method, Method::intrinsic_id_offset_in_bytes()), (int)iid);
goetz@12256 260 } else {
goetz@12256 261 assert(Method::intrinsic_id_size_in_bytes() == 2, "size error: check Method::_intrinsic_id");
goetz@12256 262 __ z_lh(Z_R0_scratch, Address(Z_method, Method::intrinsic_id_offset_in_bytes()));
goetz@12256 263 __ z_chi(Z_R0_scratch, (int)iid);
goetz@12256 264 }
goetz@12256 265 __ z_bre(L);
goetz@12256 266
goetz@12256 267 if (iid == vmIntrinsics::_linkToVirtual || iid == vmIntrinsics::_linkToSpecial) {
goetz@12256 268 // Could do this for all kinds, but would explode assembly code size.
goetz@12256 269 trace_method_handle(_masm, "bad Method::intrinsic_id");
goetz@12256 270 }
goetz@12256 271
goetz@12256 272 __ stop("bad Method::intrinsic_id");
goetz@12256 273 __ bind(L);
goetz@12256 274
goetz@12256 275 BLOCK_COMMENT("} verify_intrinsic_id");
goetz@12256 276 }
goetz@12256 277
goetz@12256 278 // First task: Find out how big the argument list is.
goetz@12256 279 Address Z_R4_first_arg_addr;
goetz@12256 280 int ref_kind = signature_polymorphic_intrinsic_ref_kind(iid);
goetz@12256 281
goetz@12256 282 assert(ref_kind != 0 || iid == vmIntrinsics::_invokeBasic,
goetz@12256 283 "must be _invokeBasic or a linkTo intrinsic");
goetz@12256 284
goetz@12256 285 if (ref_kind == 0 || MethodHandles::ref_kind_has_receiver(ref_kind)) {
goetz@12256 286 Address paramSize(Z_R1_scratch, ConstMethod::size_of_parameters_offset());
goetz@12256 287
goetz@12256 288 __ z_lg(Z_R1_scratch, Address(Z_method, Method::const_offset()));
goetz@12256 289 __ load_sized_value(Z_R4_param_size, paramSize, sizeof(u2), /*is_signed*/ false);
goetz@12256 290 Z_R4_first_arg_addr = __ argument_address(Z_R4_param_size, Z_R4_param_size, 0);
goetz@12256 291 } else {
goetz@12256 292 DEBUG_ONLY(Z_R4_param_size = noreg);
goetz@12256 293 }
goetz@12256 294
goetz@12256 295 Register Z_mh = noreg;
goetz@12256 296 if (!is_signature_polymorphic_static(iid)) {
goetz@12256 297 Z_mh = Z_ARG4;
goetz@12256 298 __ z_lg(Z_mh, Z_R4_first_arg_addr);
goetz@12256 299 DEBUG_ONLY(Z_R4_param_size = noreg);
goetz@12256 300 }
goetz@12256 301
goetz@12256 302 // Z_R4_first_arg_addr is live!
goetz@12256 303
goetz@12256 304 trace_method_handle_interpreter_entry(_masm, iid);
goetz@12256 305
goetz@12256 306 if (iid == vmIntrinsics::_invokeBasic) {
goetz@12256 307 __ pc(); // just for the block comment
goetz@12256 308 generate_method_handle_dispatch(_masm, iid, Z_mh, noreg, not_for_compiler_entry);
goetz@12256 309 } else {
goetz@12256 310 // Adjust argument list by popping the trailing MemberName argument.
goetz@12256 311 Register Z_recv = noreg;
goetz@12256 312
goetz@12256 313 if (MethodHandles::ref_kind_has_receiver(ref_kind)) {
goetz@12256 314 // Load the receiver (not the MH; the actual MemberName's receiver)
goetz@12256 315 // up from the interpreter stack.
goetz@12256 316 __ z_lg(Z_recv = Z_R5, Z_R4_first_arg_addr);
goetz@12256 317 DEBUG_ONLY(Z_R4_param_size = noreg);
goetz@12256 318 }
goetz@12256 319
goetz@12256 320 Register Z_member = Z_method; // MemberName ptr; incoming method ptr is dead now
goetz@12256 321
goetz@12256 322 __ z_lg(Z_member, __ argument_address(constant(1)));
goetz@12256 323 __ add2reg(Z_esp, Interpreter::stackElementSize);
goetz@12256 324 generate_method_handle_dispatch(_masm, iid, Z_recv, Z_member, not_for_compiler_entry);
goetz@12256 325 }
goetz@12256 326
goetz@12256 327 return entry_point;
goetz@12256 328 }
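// Shape of the entry generated above (illustrative pseudocode):
//   if (iid == _invokeBasic) {
//     mh = first stack argument;                       // the MethodHandle itself
//     generate_method_handle_dispatch(iid, mh, noreg);
//   } else {                                           // linkTo* intrinsics
//     recv   = first stack argument, if the ref kind has a receiver;
//     member = trailing MemberName argument, then popped from the interpreter stack;
//     generate_method_handle_dispatch(iid, recv, member);
//   }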
goetz@12256 329
goetz@12256 330 void MethodHandles::generate_method_handle_dispatch(MacroAssembler* _masm,
goetz@12256 331 vmIntrinsics::ID iid,
goetz@12256 332 Register receiver_reg,
goetz@12256 333 Register member_reg,
goetz@12256 334 bool for_compiler_entry) {
goetz@12256 335 assert(is_signature_polymorphic(iid), "expected invoke iid");
goetz@12256 336
goetz@12256 337 Register temp1 = for_compiler_entry ? Z_R10 : Z_R6;
goetz@12256 338 Register temp2 = Z_R12;
goetz@12256 339 Register temp3 = Z_R11;
goetz@12256 340 Register temp4 = Z_R13;
goetz@12256 341
goetz@12256 342 if (for_compiler_entry) {
goetz@12256 343 assert(receiver_reg == (iid == vmIntrinsics::_linkToStatic ? noreg : Z_ARG1),
goetz@12256 344 "only valid assignment");
goetz@12256 345 }
goetz@12256 346 if (receiver_reg != noreg) {
goetz@12256 347 assert_different_registers(temp1, temp2, temp3, temp4, receiver_reg);
goetz@12256 348 }
goetz@12256 349 if (member_reg != noreg) {
goetz@12256 350 assert_different_registers(temp1, temp2, temp3, temp4, member_reg);
goetz@12256 351 }
goetz@12256 352 if (!for_compiler_entry) { // Don't trash last SP.
goetz@12256 353 assert_different_registers(temp1, temp2, temp3, temp4, Z_R10);
goetz@12256 354 }
goetz@12256 355
goetz@12256 356 if (iid == vmIntrinsics::_invokeBasic) {
goetz@12256 357 __ pc(); // Just for the block comment.
goetz@12256 358 // Indirect through MH.form.vmentry.vmtarget.
goetz@12256 359 jump_to_lambda_form(_masm, receiver_reg, Z_method, Z_R1, temp3, for_compiler_entry);
goetz@12256 360 return;
goetz@12256 361 }
goetz@12256 362
goetz@12256 363 // The method is a member invoker used by direct method handles.
goetz@12256 364 if (VerifyMethodHandles) {
goetz@12256 365 // Make sure the trailing argument really is a MemberName (caller responsibility).
goetz@12256 366 verify_klass(_masm, member_reg,
goetz@12256 367 SystemDictionary::WK_KLASS_ENUM_NAME(MemberName_klass),
goetz@12256 368 temp1, temp2,
goetz@12256 369 "MemberName required for invokeVirtual etc.");
goetz@12256 370 }
goetz@12256 371
goetz@12256 372 Address member_clazz( member_reg, NONZERO(java_lang_invoke_MemberName::clazz_offset_in_bytes()));
goetz@12256 373 Address member_vmindex( member_reg, NONZERO(java_lang_invoke_MemberName::vmindex_offset_in_bytes()));
goetz@12256 374 Address member_vmtarget(member_reg, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes()));
goetz@12256 375 Register temp1_recv_klass = temp1;
goetz@12256 376
goetz@12256 377 if (iid != vmIntrinsics::_linkToStatic) {
goetz@12256 378 __ verify_oop(receiver_reg);
goetz@12256 379 if (iid == vmIntrinsics::_linkToSpecial) {
goetz@12256 380 // Don't actually load the klass; just null-check the receiver.
goetz@12256 381 __ null_check(receiver_reg);
goetz@12256 382 } else {
goetz@12256 383 // Load receiver klass itself.
goetz@12256 384 __ null_check(receiver_reg, Z_R0, oopDesc::klass_offset_in_bytes());
goetz@12256 385 __ load_klass(temp1_recv_klass, receiver_reg);
goetz@12256 386 __ verify_klass_ptr(temp1_recv_klass);
goetz@12256 387 }
goetz@12256 388 BLOCK_COMMENT("check_receiver {");
goetz@12256 389 // The receiver for the MemberName must be in receiver_reg.
goetz@12256 390 // Check the receiver against the MemberName.clazz.
goetz@12256 391 if (VerifyMethodHandles && iid == vmIntrinsics::_linkToSpecial) {
goetz@12256 392 // Did not load it above...
goetz@12256 393 __ load_klass(temp1_recv_klass, receiver_reg);
goetz@12256 394 __ verify_klass_ptr(temp1_recv_klass);
goetz@12256 395 }
goetz@12256 396
goetz@12256 397 if (VerifyMethodHandles && iid != vmIntrinsics::_linkToInterface) {
goetz@12256 398 NearLabel L_ok;
goetz@12256 399 Register temp2_defc = temp2;
goetz@12256 400
goetz@12256 401 __ load_heap_oop(temp2_defc, member_clazz);
goetz@12256 402 load_klass_from_Class(_masm, temp2_defc, temp3, temp4);
goetz@12256 403 __ verify_klass_ptr(temp2_defc);
goetz@12256 404 __ check_klass_subtype(temp1_recv_klass, temp2_defc, temp3, temp4, L_ok);
goetz@12256 405 // If we get here, the type check failed!
goetz@12256 406 __ stop("receiver class disagrees with MemberName.clazz");
goetz@12256 407 __ bind(L_ok);
goetz@12256 408 }
goetz@12256 409 BLOCK_COMMENT("} check_receiver");
goetz@12256 410 }
goetz@12256 411 if (iid == vmIntrinsics::_linkToSpecial || iid == vmIntrinsics::_linkToStatic) {
goetz@12256 412 DEBUG_ONLY(temp1_recv_klass = noreg); // These guys didn't load the recv_klass.
goetz@12256 413 }
goetz@12256 414
goetz@12256 415 // Live registers at this point:
goetz@12256 416 // member_reg - MemberName that was the trailing argument.
goetz@12256 417 // temp1_recv_klass - Klass of stacked receiver, if needed.
goetz@12256 418 // Z_R10 - Interpreter linkage if interpreted.
goetz@12256 419
goetz@12256 420 bool method_is_live = false;
goetz@12256 421
goetz@12256 422 switch (iid) {
goetz@12256 423 case vmIntrinsics::_linkToSpecial:
goetz@12256 424 if (VerifyMethodHandles) {
goetz@12256 425 verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp3);
goetz@12256 426 }
goetz@12256 427 __ z_lg(Z_method, member_vmtarget);
goetz@12256 428 method_is_live = true;
goetz@12256 429 break;
goetz@12256 430
goetz@12256 431 case vmIntrinsics::_linkToStatic:
goetz@12256 432 if (VerifyMethodHandles) {
goetz@12256 433 verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp3);
goetz@12256 434 }
goetz@12256 435 __ z_lg(Z_method, member_vmtarget);
goetz@12256 436 method_is_live = true;
goetz@12256 437 break;
goetz@12256 438
goetz@12256 439 case vmIntrinsics::_linkToVirtual: {
goetz@12256 440 // Same as TemplateTable::invokevirtual, minus the CP setup and profiling.
goetz@12256 441 if (VerifyMethodHandles) {
goetz@12256 442 verify_ref_kind(_masm, JVM_REF_invokeVirtual, member_reg, temp3);
goetz@12256 443 }
goetz@12256 444
goetz@12256 445 // Pick out the vtable index from the MemberName, and then we can discard it.
goetz@12256 446 Register temp2_index = temp2;
goetz@12256 447 __ z_lg(temp2_index, member_vmindex);
goetz@12256 448
goetz@12256 449 if (VerifyMethodHandles) {
goetz@12256 450 // if (member_vmindex < 0) stop
goetz@12256 451 NearLabel L_index_ok;
goetz@12256 452 __ compare32_and_branch(temp2_index, constant(0), Assembler::bcondNotLow, L_index_ok);
goetz@12256 453 __ stop("no virtual index");
goetz@12256 454 __ BIND(L_index_ok);
goetz@12256 455 }
goetz@12256 456
goetz@12256 457 // Note: The verifier invariants allow us to ignore MemberName.clazz and vmtarget
goetz@12256 458 // at this point. And VerifyMethodHandles has already checked clazz, if needed.
goetz@12256 459
goetz@12256 460 // Get target method and entry point.
goetz@12256 461 __ lookup_virtual_method(temp1_recv_klass, temp2_index, Z_method);
goetz@12256 462 method_is_live = true;
goetz@12256 463 break;
goetz@12256 464 }
goetz@12256 465
goetz@12256 466 case vmIntrinsics::_linkToInterface: {
goetz@12256 467 // Same as TemplateTable::invokeinterface, minus the CP setup
goetz@12256 468 // and profiling, with different argument motion.
goetz@12256 469 if (VerifyMethodHandles) {
goetz@12256 470 verify_ref_kind(_masm, JVM_REF_invokeInterface, member_reg, temp3);
goetz@12256 471 }
goetz@12256 472
goetz@12256 473 Register temp3_intf = temp3;
goetz@12256 474
goetz@12256 475 __ load_heap_oop(temp3_intf, member_clazz);
goetz@12256 476 load_klass_from_Class(_masm, temp3_intf, temp2, temp4);
goetz@12256 477
goetz@12256 478 Register Z_index = Z_method;
goetz@12256 479
goetz@12256 480 __ z_lg(Z_index, member_vmindex);
goetz@12256 481
goetz@12256 482 if (VerifyMethodHandles) {
goetz@12256 483 NearLabel L;
goetz@12256 484 // if (member_vmindex < 0) stop
goetz@12256 485 __ compare32_and_branch(Z_index, constant(0), Assembler::bcondNotLow, L);
goetz@12256 486 __ stop("invalid vtable index for MH.invokeInterface");
goetz@12256 487 __ bind(L);
goetz@12256 488 }
goetz@12256 489
goetz@12256 490 // Given interface, index, and recv klass, dispatch to the implementation method.
goetz@12256 491 Label L_no_such_interface;
goetz@12256 492 __ lookup_interface_method(temp1_recv_klass, temp3_intf,
goetz@12256 493 // Note: next two args must be the same:
goetz@12256 494 Z_index, Z_method, temp2, noreg,
goetz@12256 495 L_no_such_interface);
goetz@12256 496 jump_from_method_handle(_masm, Z_method, temp2, Z_R0, for_compiler_entry);
goetz@12256 497
goetz@12256 498 __ bind(L_no_such_interface);
goetz@12256 499
goetz@12256 500 // Throw exception.
goetz@12256 501 __ load_const_optimized(Z_R1, StubRoutines::throw_IncompatibleClassChangeError_entry());
goetz@12256 502 __ z_br(Z_R1);
goetz@12256 503 break;
goetz@12256 504 }
goetz@12256 505
goetz@12256 506 default:
goetz@12256 507 fatal("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid));
goetz@12256 508 break;
goetz@12256 509 }
goetz@12256 510
goetz@12256 511 if (method_is_live) {
goetz@12256 512 // Live at this point: Z_method, Z_R10 (sender SP, if interpreted).
goetz@12256 513
goetz@12256 514 // After figuring out which concrete method to call, jump into it.
goetz@12256 515 // Note that this works in the interpreter with no data motion.
goetz@12256 516 // But the compiled version will require that receiver_reg be shifted out.
goetz@12256 517 jump_from_method_handle(_masm, Z_method, temp1, Z_R0, for_compiler_entry);
goetz@12256 518 }
goetz@12256 519 }
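// The linkTo* cases above reduce to (illustrative):
//   _linkToSpecial, _linkToStatic: Z_method = member.vmtarget
//   _linkToVirtual:                Z_method = vtable entry of recv_klass at member.vmindex
//   _linkToInterface:              Z_method = itable lookup in recv_klass for
//                                             (member.clazz, member.vmindex)
// each followed by jump_from_method_handle(), except on _linkToInterface's failure path,
// which jumps to throw_IncompatibleClassChangeError instead.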
goetz@12256 520
goetz@12256 521 #ifndef PRODUCT
goetz@12256 522 void trace_method_handle_stub(const char* adaptername,
goetz@12256 523 oopDesc* mh,
goetz@12256 524 intptr_t* sender_sp,
goetz@12256 525 intptr_t* args,
goetz@12256 526 intptr_t* tracing_fp) {
goetz@12256 527 bool has_mh = (strstr(adaptername, "/static") == NULL &&
goetz@12256 528 strstr(adaptername, "linkTo") == NULL); // Static linkers don't have MH.
goetz@12256 529 const char* mh_reg_name = has_mh ? "Z_R4_mh" : "Z_R4";
goetz@12256 530 tty->print_cr("MH %s %s=" INTPTR_FORMAT " sender_sp=" INTPTR_FORMAT " args=" INTPTR_FORMAT,
goetz@12256 531 adaptername, mh_reg_name,
goetz@12256 532 p2i(mh), p2i(sender_sp), p2i(args));
goetz@12256 533
goetz@12256 534 if (Verbose) {
goetz@12256 535 // Dumping last frame with frame::describe.
goetz@12256 536
goetz@12256 537 JavaThread* p = JavaThread::active();
goetz@12256 538
goetz@12256 539 ResourceMark rm;
goetz@12256 540 PRESERVE_EXCEPTION_MARK; // May not be needed but safer and inexpensive here.
goetz@12256 541 FrameValues values;
goetz@12256 542
goetz@12256 543 // Note: We want to allow trace_method_handle from any call site.
goetz@12256 544 // While trace_method_handle creates a frame, it may be entered
goetz@12256 545 // without a valid return PC in Z_R14 (e.g. not just after a call).
goetz@12256 546 // Walking that frame could lead to failures due to that invalid PC.
goetz@12256 547 // => carefully detect that frame when doing the stack walking.
goetz@12256 548
goetz@12256 549 // Walk up to the right frame using the "tracing_fp" argument.
goetz@12256 550 frame cur_frame = os::current_frame(); // Current C frame.
goetz@12256 551
goetz@12256 552 while (cur_frame.fp() != tracing_fp) {
goetz@12256 553 cur_frame = os::get_sender_for_C_frame(&cur_frame);
goetz@12256 554 }
goetz@12256 555
goetz@12256 556 // Safely create a frame and call frame::describe.
goetz@12256 557 intptr_t *dump_sp = cur_frame.sender_sp();
goetz@12256 558 intptr_t *dump_fp = cur_frame.link();
goetz@12256 559
goetz@12256 560 bool walkable = has_mh; // Whether the traced frame should be walkable.
goetz@12256 561
goetz@12256 562 // The sender for cur_frame is the caller of trace_method_handle.
goetz@12256 563 if (walkable) {
goetz@12256 564 // The previous definition of walkable may have to be refined
goetz@12256 565 // if new call sites cause the next frame constructor to start
goetz@12256 566 // failing. Alternatively, frame constructors could be
goetz@12256 567 // modified to support the current or future non walkable
goetz@12256 568 // frames (but this is more intrusive and is not considered as
goetz@12256 569 // part of this RFE, which will instead use a simpler output).
goetz@12256 570 frame dump_frame = frame(dump_sp);
goetz@12256 571 dump_frame.describe(values, 1);
goetz@12256 572 } else {
goetz@12256 573 // Robust dump for frames which cannot be constructed from sp/younger_sp
goetz@12256 574 // Add descriptions without building a Java frame to avoid issues.
goetz@12256 575 values.describe(-1, dump_fp, "fp for #1 <not parsed, cannot trust pc>");
goetz@12256 576 values.describe(-1, dump_sp, "sp");
goetz@12256 577 }
goetz@12256 578
goetz@12256 579 bool has_args = has_mh; // Whether Z_esp is meaningful.
goetz@12256 580
goetz@12256 581 // Mark args, if seems valid (may not be valid for some adapters).
goetz@12256 582 if (has_args) {
goetz@12256 583 if ((args >= dump_sp) && (args < dump_fp)) {
goetz@12256 584 values.describe(-1, args, "*Z_esp");
goetz@12256 585 }
goetz@12256 586 }
goetz@12256 587
goetz@12256 588 // Note: the unextended_sp may not be correct.
goetz@12256 589 tty->print_cr(" stack layout:");
goetz@12256 590 values.print(p);
goetz@12256 591 if (has_mh && mh->is_oop()) {
goetz@12256 592 mh->print();
goetz@12256 593 if (java_lang_invoke_MethodHandle::is_instance(mh)) {
goetz@12256 594 if (java_lang_invoke_MethodHandle::form_offset_in_bytes() != 0) {
goetz@12256 595 java_lang_invoke_MethodHandle::form(mh)->print();
goetz@12256 596 }
goetz@12256 597 }
goetz@12256 598 }
goetz@12256 599 }
goetz@12256 600 }
goetz@12256 601
goetz@12256 602 void MethodHandles::trace_method_handle(MacroAssembler* _masm, const char* adaptername) {
goetz@12256 603 if (!TraceMethodHandles) { return; }
goetz@12256 604
goetz@12256 605 BLOCK_COMMENT("trace_method_handle {");
goetz@12256 606
goetz@12256 607 // Save argument registers (they are used in raise exception stub).
goetz@12256 608 __ z_stg(Z_ARG1, Address(Z_SP, 16));
goetz@12256 609 __ z_stg(Z_ARG2, Address(Z_SP, 24));
goetz@12256 610 __ z_stg(Z_ARG3, Address(Z_SP, 32));
goetz@12256 611 __ z_stg(Z_ARG4, Address(Z_SP, 40));
goetz@12256 612 __ z_stg(Z_ARG5, Address(Z_SP, 48));
goetz@12256 613
goetz@12256 614 // Setup arguments.
goetz@12256 615 __ z_lgr(Z_ARG2, Z_ARG4); // mh, see generate_method_handle_interpreter_entry()
goetz@12256 616 __ z_lgr(Z_ARG3, Z_R10); // sender_sp
goetz@12256 617 __ z_lgr(Z_ARG4, Z_esp);
goetz@12256 618 __ load_const_optimized(Z_ARG1, (void *)adaptername);
goetz@12256 619 __ z_lgr(Z_ARG5, Z_SP); // tracing_fp
goetz@12256 620 __ save_return_pc(); // saves Z_R14
goetz@12256 621 __ push_frame_abi160(0);
goetz@12256 622 __ call_VM_leaf(CAST_FROM_FN_PTR(address, trace_method_handle_stub));
goetz@12256 623 __ pop_frame();
goetz@12256 624 __ restore_return_pc(); // restores to Z_R14
goetz@12256 625 __ z_lg(Z_ARG1, Address(Z_SP, 16));
goetz@12256 626 __ z_lg(Z_ARG2, Address(Z_SP, 24));
goetz@12256 627 __ z_lg(Z_ARG3, Address(Z_SP, 32));
goetz@12256 628 __ z_lg(Z_ARG4, Address(Z_SP, 40));
goetz@12256 629 __ z_lg(Z_ARG5, Address(Z_SP, 48));
goetz@12256 630 __ zap_from_to(Z_SP, Z_SP, Z_R0, Z_R1, 50, -1);
goetz@12256 631 __ zap_from_to(Z_SP, Z_SP, Z_R0, Z_R1, -1, 5);
goetz@12256 632
goetz@12256 633 BLOCK_COMMENT("} trace_method_handle");
goetz@12256 634 }
goetz@12256 635 #endif // !PRODUCT