annotate src/hotspot/cpu/x86/gc/z/z_x86_64.ad @ 54643:5c80e6994d8a

8217856: ZGC: Break out C2 matching rules into separate AD file Reviewed-by: neliasso, kvn
author pliden
date Tue, 29 Jan 2019 10:23:38 +0100
parents
children 8323fdac6da5
rev   line source
pliden@54643 1 //
pliden@54643 2 // Copyright (c) 2015, 2019, Oracle and/or its affiliates. All rights reserved.
pliden@54643 3 // DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
pliden@54643 4 //
pliden@54643 5 // This code is free software; you can redistribute it and/or modify it
pliden@54643 6 // under the terms of the GNU General Public License version 2 only, as
pliden@54643 7 // published by the Free Software Foundation.
pliden@54643 8 //
pliden@54643 9 // This code is distributed in the hope that it will be useful, but WITHOUT
pliden@54643 10 // ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
pliden@54643 11 // FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
pliden@54643 12 // version 2 for more details (a copy is included in the LICENSE file that
pliden@54643 13 // accompanied this code).
pliden@54643 14 //
pliden@54643 15 // You should have received a copy of the GNU General Public License version
pliden@54643 16 // 2 along with this work; if not, write to the Free Software Foundation,
pliden@54643 17 // Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
pliden@54643 18 //
pliden@54643 19 // Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
pliden@54643 20 // or visit www.oracle.com if you need additional information or have any
pliden@54643 21 // questions.
pliden@54643 22 //
pliden@54643 23
pliden@54643 24 source %{
pliden@54643 25
pliden@54643 26 #include "gc/z/zBarrierSetAssembler.hpp"
pliden@54643 27
// Emit the ZGC load-barrier slow-path call sequence for an oop that was
// loaded into `dst` from memory location `src`.  Selects a pre-generated
// per-destination-register stub (weak vs. strong flavor depending on
// `weak`), loads the effective address of the field into `dst` with lea,
// then calls the stub.
// NOTE(review): `dst` appears to double as the address argument to the
// stub (lea overwrites the loaded oop before the call) — confirm against
// ZBarrierSetAssembler's stub calling convention.
pliden@54643 28 static void z_load_barrier_slow_reg(MacroAssembler& _masm, Register dst, Address src, bool weak) {
// These three registers are rejected as destinations; presumably r12 and
// r15 are reserved (heap-base / thread register) on x86_64 HotSpot and
// rsp is the stack pointer — TODO confirm against the register allocator.
pliden@54643 29 assert(dst != r12, "Invalid register");
pliden@54643 30 assert(dst != r15, "Invalid register");
pliden@54643 31 assert(dst != rsp, "Invalid register");
pliden@54643 32
// One stub exists per general-purpose destination register, with a
// separate variant for weak references.
pliden@54643 33 const address stub = weak ? ZBarrierSet::assembler()->load_barrier_weak_slow_stub(dst)
pliden@54643 34 : ZBarrierSet::assembler()->load_barrier_slow_stub(dst);
// `__` is the MacroAssembler shorthand macro bound to `_masm` (defined
// outside this file's visible scope).
pliden@54643 35 __ lea(dst, src);
pliden@54643 36 __ call(RuntimeAddress(stub));
pliden@54643 37 }
pliden@54643 38
pliden@54643 39 %}
pliden@54643 40
pliden@54643 41 // For XMM and YMM enabled processors
// Strong (non-weak) load-barrier slow path for pre-AVX-512 hardware:
// matches C2's LoadBarrierSlowReg node and emits a call into the
// per-register barrier stub via z_load_barrier_slow_reg().
pliden@54643 42 instruct zLoadBarrierSlowRegXmmAndYmm(rRegP dst, memory src, rFlagsReg cr,
pliden@54643 43 rxmm0 x0, rxmm1 x1, rxmm2 x2,rxmm3 x3,
pliden@54643 44 rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
pliden@54643 45 rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
pliden@54643 46 rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
pliden@54643 47
pliden@54643 48 match(Set dst (LoadBarrierSlowReg src));
// UseAVX <= 2: only xmm0-xmm15 exist, so only those need to be listed.
pliden@54643 49 predicate(UseAVX <= 2);
pliden@54643 50
// All 16 vector registers (and the flags) are KILLed — presumably
// because the slow-path call may clobber vector state; TODO confirm
// against the stub's save/restore behavior.
pliden@54643 51 effect(DEF dst, KILL cr,
pliden@54643 52 KILL x0, KILL x1, KILL x2, KILL x3,
pliden@54643 53 KILL x4, KILL x5, KILL x6, KILL x7,
pliden@54643 54 KILL x8, KILL x9, KILL x10, KILL x11,
pliden@54643 55 KILL x12, KILL x13, KILL x14, KILL x15);
pliden@54643 56
pliden@54643 57 format %{ "zLoadBarrierSlowRegXmmAndYmm $dst, $src" %}
pliden@54643 58
pliden@54643 59 ins_encode %{
// false => strong barrier flavor (non-weak stub).
pliden@54643 60 z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, false /* weak */);
pliden@54643 61 %}
pliden@54643 62
pliden@54643 63 ins_pipe(pipe_slow);
pliden@54643 64 %}
pliden@54643 65
pliden@54643 66 // For ZMM enabled processors
// Strong (non-weak) load-barrier slow path for AVX-512 hardware: same as
// the XmmAndYmm variant above, but must also list xmm16-xmm31, which
// exist only when AVX-512 is available.
pliden@54643 67 instruct zLoadBarrierSlowRegZmm(rRegP dst, memory src, rFlagsReg cr,
pliden@54643 68 rxmm0 x0, rxmm1 x1, rxmm2 x2,rxmm3 x3,
pliden@54643 69 rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
pliden@54643 70 rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
pliden@54643 71 rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15,
pliden@54643 72 rxmm16 x16, rxmm17 x17, rxmm18 x18, rxmm19 x19,
pliden@54643 73 rxmm20 x20, rxmm21 x21, rxmm22 x22, rxmm23 x23,
pliden@54643 74 rxmm24 x24, rxmm25 x25, rxmm26 x26, rxmm27 x27,
pliden@54643 75 rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{
pliden@54643 76
pliden@54643 77 match(Set dst (LoadBarrierSlowReg src));
// UseAVX == 3 selects this rule; complements the `UseAVX <= 2`
// predicate of the XmmAndYmm variant so exactly one rule matches.
pliden@54643 78 predicate(UseAVX == 3);
pliden@54643 79
// All 32 vector registers (and the flags) are KILLed across the call.
pliden@54643 80 effect(DEF dst, KILL cr,
pliden@54643 81 KILL x0, KILL x1, KILL x2, KILL x3,
pliden@54643 82 KILL x4, KILL x5, KILL x6, KILL x7,
pliden@54643 83 KILL x8, KILL x9, KILL x10, KILL x11,
pliden@54643 84 KILL x12, KILL x13, KILL x14, KILL x15,
pliden@54643 85 KILL x16, KILL x17, KILL x18, KILL x19,
pliden@54643 86 KILL x20, KILL x21, KILL x22, KILL x23,
pliden@54643 87 KILL x24, KILL x25, KILL x26, KILL x27,
pliden@54643 88 KILL x28, KILL x29, KILL x30, KILL x31);
pliden@54643 89
pliden@54643 90 format %{ "zLoadBarrierSlowRegZmm $dst, $src" %}
pliden@54643 91
pliden@54643 92 ins_encode %{
// false => strong barrier flavor (non-weak stub).
pliden@54643 93 z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, false /* weak */);
pliden@54643 94 %}
pliden@54643 95
pliden@54643 96 ins_pipe(pipe_slow);
pliden@54643 97 %}
pliden@54643 98
pliden@54643 99 // For XMM and YMM enabled processors
// Weak-reference load-barrier slow path for pre-AVX-512 hardware:
// identical shape to zLoadBarrierSlowRegXmmAndYmm, but matches the
// LoadBarrierWeakSlowReg node and passes weak=true so the weak stub
// variant is called.
pliden@54643 100 instruct zLoadBarrierWeakSlowRegXmmAndYmm(rRegP dst, memory src, rFlagsReg cr,
pliden@54643 101 rxmm0 x0, rxmm1 x1, rxmm2 x2,rxmm3 x3,
pliden@54643 102 rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
pliden@54643 103 rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
pliden@54643 104 rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
pliden@54643 105
pliden@54643 106 match(Set dst (LoadBarrierWeakSlowReg src));
// UseAVX <= 2: only xmm0-xmm15 exist, so only those need to be listed.
pliden@54643 107 predicate(UseAVX <= 2);
pliden@54643 108
// All 16 vector registers (and the flags) are KILLed across the call.
pliden@54643 109 effect(DEF dst, KILL cr,
pliden@54643 110 KILL x0, KILL x1, KILL x2, KILL x3,
pliden@54643 111 KILL x4, KILL x5, KILL x6, KILL x7,
pliden@54643 112 KILL x8, KILL x9, KILL x10, KILL x11,
pliden@54643 113 KILL x12, KILL x13, KILL x14, KILL x15);
pliden@54643 114
pliden@54643 115 format %{ "zLoadBarrierWeakSlowRegXmmAndYmm $dst, $src" %}
pliden@54643 116
pliden@54643 117 ins_encode %{
// true => weak barrier flavor (weak stub).
pliden@54643 118 z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, true /* weak */);
pliden@54643 119 %}
pliden@54643 120
pliden@54643 121 ins_pipe(pipe_slow);
pliden@54643 122 %}
pliden@54643 123
pliden@54643 124 // For ZMM enabled processors
// Weak-reference load-barrier slow path for AVX-512 hardware: matches
// the LoadBarrierWeakSlowReg node, kills xmm0-xmm31, and calls the weak
// stub variant (weak=true).
pliden@54643 125 instruct zLoadBarrierWeakSlowRegZmm(rRegP dst, memory src, rFlagsReg cr,
pliden@54643 126 rxmm0 x0, rxmm1 x1, rxmm2 x2,rxmm3 x3,
pliden@54643 127 rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
pliden@54643 128 rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
pliden@54643 129 rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15,
pliden@54643 130 rxmm16 x16, rxmm17 x17, rxmm18 x18, rxmm19 x19,
pliden@54643 131 rxmm20 x20, rxmm21 x21, rxmm22 x22, rxmm23 x23,
pliden@54643 132 rxmm24 x24, rxmm25 x25, rxmm26 x26, rxmm27 x27,
pliden@54643 133 rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{
pliden@54643 134
pliden@54643 135 match(Set dst (LoadBarrierWeakSlowReg src));
// UseAVX == 3 selects this rule; complements the `UseAVX <= 2`
// predicate of the XmmAndYmm variant so exactly one rule matches.
pliden@54643 136 predicate(UseAVX == 3);
pliden@54643 137
// All 32 vector registers (and the flags) are KILLed across the call.
pliden@54643 138 effect(DEF dst, KILL cr,
pliden@54643 139 KILL x0, KILL x1, KILL x2, KILL x3,
pliden@54643 140 KILL x4, KILL x5, KILL x6, KILL x7,
pliden@54643 141 KILL x8, KILL x9, KILL x10, KILL x11,
pliden@54643 142 KILL x12, KILL x13, KILL x14, KILL x15,
pliden@54643 143 KILL x16, KILL x17, KILL x18, KILL x19,
pliden@54643 144 KILL x20, KILL x21, KILL x22, KILL x23,
pliden@54643 145 KILL x24, KILL x25, KILL x26, KILL x27,
pliden@54643 146 KILL x28, KILL x29, KILL x30, KILL x31);
pliden@54643 147
pliden@54643 148 format %{ "zLoadBarrierWeakSlowRegZmm $dst, $src" %}
pliden@54643 149
pliden@54643 150 ins_encode %{
// true => weak barrier flavor (weak stub).
pliden@54643 151 z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, true /* weak */);
pliden@54643 152 %}
pliden@54643 153
pliden@54643 154 ins_pipe(pipe_slow);
pliden@54643 155 %}