annotate modules/javafx.web/src/main/native/Source/JavaScriptCore/dfg/DFGArgumentsEliminationPhase.cpp @ 11208:db2c977a840b

8220147: Cherry pick GTK WebKit 2.22.7 changes
Reviewed-by: mbilla, kcr
author arajkumar
date Fri, 08 Mar 2019 14:03:47 +0530
parents ab4db0272524
children a1fb556cdd7d
rev   line source
kcr@9800 1 /*
arajkumar@10954 2 * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
kcr@9800 3 *
kcr@9800 4 * Redistribution and use in source and binary forms, with or without
kcr@9800 5 * modification, are permitted provided that the following conditions
kcr@9800 6 * are met:
kcr@9800 7 * 1. Redistributions of source code must retain the above copyright
kcr@9800 8 * notice, this list of conditions and the following disclaimer.
kcr@9800 9 * 2. Redistributions in binary form must reproduce the above copyright
kcr@9800 10 * notice, this list of conditions and the following disclaimer in the
kcr@9800 11 * documentation and/or other materials provided with the distribution.
kcr@9800 12 *
kcr@9800 13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
kcr@9800 14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
kcr@9800 15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
kcr@9800 16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
kcr@9800 17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
kcr@9800 18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
kcr@9800 19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
kcr@9800 20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
kcr@9800 21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
kcr@9800 22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
kcr@9800 23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
kcr@9800 24 */
kcr@9800 25
kcr@9800 26 #include "config.h"
kcr@9800 27 #include "DFGArgumentsEliminationPhase.h"
kcr@9800 28
kcr@9800 29 #if ENABLE(DFG_JIT)
kcr@9800 30
arajkumar@10587 31 #include "ArrayPrototype.h"
kcr@9800 32 #include "BytecodeLivenessAnalysisInlines.h"
arajkumar@10587 33 #include "ClonedArguments.h"
kcr@9800 34 #include "DFGArgumentsUtilities.h"
kcr@9800 35 #include "DFGBasicBlockInlines.h"
kcr@9800 36 #include "DFGBlockMapInlines.h"
kcr@9800 37 #include "DFGClobberize.h"
kcr@9800 38 #include "DFGCombinedLiveness.h"
kcr@9800 39 #include "DFGForAllKills.h"
kcr@9800 40 #include "DFGGraph.h"
kcr@9800 41 #include "DFGInsertionSet.h"
kcr@9800 42 #include "DFGLivenessAnalysisPhase.h"
kcr@9800 43 #include "DFGOSRAvailabilityAnalysisPhase.h"
kcr@9800 44 #include "DFGPhase.h"
kcr@9800 45 #include "JSCInlines.h"
kcr@9800 46 #include <wtf/HashSet.h>
kcr@9800 47 #include <wtf/ListDump.h>
arajkumar@10954 48 #include <wtf/RecursableLambda.h>
kcr@9800 49
kcr@9800 50 namespace JSC { namespace DFG {
kcr@9800 51
kcr@9800 52 namespace {
kcr@9800 53
arajkumar@10954 54 namespace DFGArgumentsEliminationPhaseInternal {
arajkumar@10954 55 static const bool verbose = false;
arajkumar@10954 56 }
kcr@9800 57
kcr@9800 58 class ArgumentsEliminationPhase : public Phase {
kcr@9800 59 public:
kcr@9800 60 ArgumentsEliminationPhase(Graph& graph)
kcr@9800 61 : Phase(graph, "arguments elimination")
kcr@9800 62 {
kcr@9800 63 }
kcr@9800 64
kcr@9800 65 bool run()
kcr@9800 66 {
kcr@9800 67 // For now this phase only works on SSA. This could be changed; we could have a block-local
kcr@9800 68 // version over LoadStore.
kcr@9800 69 DFG_ASSERT(m_graph, nullptr, m_graph.m_form == SSA);
kcr@9800 70
arajkumar@10954 71 if (DFGArgumentsEliminationPhaseInternal::verbose) {
kcr@9800 72 dataLog("Graph before arguments elimination:\n");
kcr@9800 73 m_graph.dump();
kcr@9800 74 }
kcr@9800 75
kcr@9800 76 identifyCandidates();
kcr@9800 77 if (m_candidates.isEmpty())
kcr@9800 78 return false;
kcr@9800 79
kcr@9800 80 eliminateCandidatesThatEscape();
kcr@9800 81 if (m_candidates.isEmpty())
kcr@9800 82 return false;
kcr@9800 83
kcr@9800 84 eliminateCandidatesThatInterfere();
kcr@9800 85 if (m_candidates.isEmpty())
kcr@9800 86 return false;
kcr@9800 87
kcr@9800 88 transform();
kcr@9800 89
kcr@9800 90 return true;
kcr@9800 91 }
kcr@9800 92
kcr@9800 93 private:
kcr@9800 94 // Just finds nodes that we know how to work with.
kcr@9800 95 void identifyCandidates()
kcr@9800 96 {
arajkumar@10587 97 for (BasicBlock* block : m_graph.blocksInPreOrder()) {
kcr@9800 98 for (Node* node : *block) {
kcr@9800 99 switch (node->op()) {
kcr@9800 100 case CreateDirectArguments:
kcr@9800 101 case CreateClonedArguments:
kcr@9800 102 m_candidates.add(node);
kcr@9800 103 break;
kcr@9800 104
arajkumar@10587 105 case CreateRest:
arajkumar@10587 106 if (m_graph.isWatchingHavingABadTimeWatchpoint(node)) {
arajkumar@10587 107 // If we're watching the HavingABadTime watchpoint it means that we will be invalidated
arajkumar@10587 108 // when it fires (it may or may not have actually fired yet). We don't try to eliminate
arajkumar@10587 109 // this allocation when we're not watching the watchpoint because it could entail calling
arajkumar@10587 110 // indexed accessors (and probably more crazy things) on out of bound accesses to the
arajkumar@10587 111 // rest parameter. It's also much easier to reason about this way.
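// A minimal sketch of the hazard (hypothetical JS; sideEffect() stands in for
// arbitrary user code):
//
//     Object.defineProperty(Array.prototype, 0, { get() { return sideEffect(); } });
//     function f(...rest) { return rest[0]; }
//     f(); // the out-of-bounds read on the empty rest array would hit the getter
//
// Installing such an accessor fires HavingABadTime, which invalidates this code.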
arajkumar@10587 112 m_candidates.add(node);
arajkumar@10587 113 }
arajkumar@10587 114 break;
arajkumar@10587 115
arajkumar@10587 116 case Spread:
arajkumar@10587 117 if (m_graph.isWatchingHavingABadTimeWatchpoint(node)) {
arajkumar@10587 118 // We check ArrayUse here because ArrayUse indicates that the iterator
arajkumar@10587 119 // protocol for Arrays is non-observable by user code (e.g., it hasn't
arajkumar@10587 120 // been changed).
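// A minimal sketch of what would make it observable (hypothetical JS):
//
//     Array.prototype[Symbol.iterator] = function* () { yield 42; };
//     function g(...rest) { return Math.max(...rest); } // the spread now runs the custom iterator
//
// ArrayUse plus the watchpoint lets us assume the default iteration behaviour.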
arajkumar@10954 121 if (node->child1().useKind() == ArrayUse) {
arajkumar@10954 122 if ((node->child1()->op() == CreateRest || node->child1()->op() == NewArrayBuffer) && m_candidates.contains(node->child1().node()))
arajkumar@10954 123 m_candidates.add(node);
arajkumar@10954 124 }
arajkumar@10587 125 }
arajkumar@10587 126 break;
arajkumar@10587 127
arajkumar@10587 128 case NewArrayWithSpread: {
arajkumar@10587 129 if (m_graph.isWatchingHavingABadTimeWatchpoint(node)) {
arajkumar@10587 130 BitVector* bitVector = node->bitVector();
arajkumar@11139 131 // We only allow Spreads of CreateRest or NewArrayBuffer nodes for now.
arajkumar@10587 132 bool isOK = true;
arajkumar@10587 133 for (unsigned i = 0; i < node->numChildren(); i++) {
arajkumar@10587 134 if (bitVector->get(i)) {
arajkumar@10587 135 Node* child = m_graph.varArgChild(node, i).node();
arajkumar@10954 136 isOK = child->op() == Spread && (child->child1()->op() == CreateRest || child->child1()->op() == NewArrayBuffer) && m_candidates.contains(child);
arajkumar@10587 137 if (!isOK)
arajkumar@10587 138 break;
arajkumar@10587 139 }
arajkumar@10587 140 }
arajkumar@10587 141
arajkumar@10587 142 if (!isOK)
arajkumar@10587 143 break;
arajkumar@10587 144
arajkumar@10587 145 m_candidates.add(node);
arajkumar@10587 146 }
arajkumar@10587 147 break;
arajkumar@10587 148 }
arajkumar@10587 149
arajkumar@10954 150 case NewArrayBuffer: {
arajkumar@11139 151 if (m_graph.isWatchingHavingABadTimeWatchpoint(node) && !hasAnyArrayStorage(node->indexingMode()))
arajkumar@10954 152 m_candidates.add(node);
arajkumar@10954 153 break;
arajkumar@10954 154 }
arajkumar@10954 155
kcr@9800 156 case CreateScopedArguments:
kcr@9800 157 // FIXME: We could handle this if it wasn't for the fact that scoped arguments are
kcr@9800 158 // always stored into the activation.
kcr@9800 159 // https://bugs.webkit.org/show_bug.cgi?id=143072 and
kcr@9800 160 // https://bugs.webkit.org/show_bug.cgi?id=143073
kcr@9800 161 break;
kcr@9800 162
kcr@9800 163 default:
kcr@9800 164 break;
kcr@9800 165 }
mbilla@10730 166 if (node->isPseudoTerminal())
mbilla@10730 167 break;
kcr@9800 168 }
kcr@9800 169 }
kcr@9800 170
arajkumar@10954 171 if (DFGArgumentsEliminationPhaseInternal::verbose)
kcr@9800 172 dataLog("Candidates: ", listDump(m_candidates), "\n");
kcr@9800 173 }
kcr@9800 174
arajkumar@10587 175 bool isStillValidCandidate(Node* candidate)
arajkumar@10587 176 {
arajkumar@10587 177 switch (candidate->op()) {
arajkumar@10587 178 case Spread:
arajkumar@10587 179 return m_candidates.contains(candidate->child1().node());
arajkumar@10587 180
arajkumar@10587 181 case NewArrayWithSpread: {
arajkumar@10587 182 BitVector* bitVector = candidate->bitVector();
arajkumar@10587 183 for (unsigned i = 0; i < candidate->numChildren(); i++) {
arajkumar@10587 184 if (bitVector->get(i)) {
arajkumar@10587 185 if (!m_candidates.contains(m_graph.varArgChild(candidate, i).node()))
arajkumar@10587 186 return false;
arajkumar@10587 187 }
arajkumar@10587 188 }
arajkumar@10587 189 return true;
arajkumar@10587 190 }
arajkumar@10587 191
arajkumar@10587 192 default:
arajkumar@10587 193 return true;
arajkumar@10587 194 }
arajkumar@10587 195
arajkumar@10587 196 RELEASE_ASSERT_NOT_REACHED();
arajkumar@10587 197 return false;
arajkumar@10587 198 }
arajkumar@10587 199
arajkumar@10587 200 void removeInvalidCandidates()
arajkumar@10587 201 {
arajkumar@10587 202 bool changed;
arajkumar@10587 203 do {
arajkumar@10587 204 changed = false;
arajkumar@10587 205 Vector<Node*, 1> toRemove;
arajkumar@10587 206
arajkumar@10587 207 for (Node* candidate : m_candidates) {
arajkumar@10587 208 if (!isStillValidCandidate(candidate))
arajkumar@10587 209 toRemove.append(candidate);
arajkumar@10587 210 }
arajkumar@10587 211
arajkumar@10587 212 if (toRemove.size()) {
arajkumar@10587 213 changed = true;
arajkumar@10587 214 for (Node* node : toRemove)
arajkumar@10587 215 m_candidates.remove(node);
arajkumar@10587 216 }
arajkumar@10587 217
arajkumar@10587 218 } while (changed);
arajkumar@10587 219 }
arajkumar@10587 220
arajkumar@10587 221 void transitivelyRemoveCandidate(Node* node, Node* source = nullptr)
arajkumar@10587 222 {
arajkumar@10587 223 bool removed = m_candidates.remove(node);
arajkumar@10954 224 if (removed && DFGArgumentsEliminationPhaseInternal::verbose && source)
arajkumar@10587 225 dataLog("eliminating candidate: ", node, " because it escapes from: ", source, "\n");
arajkumar@10587 226
arajkumar@10587 227 if (removed)
arajkumar@10587 228 removeInvalidCandidates();
arajkumar@10587 229 }
arajkumar@10587 230
kcr@9800 231 // Look for escaping sites, and remove from the candidates set if we see an escape.
kcr@9800 232 void eliminateCandidatesThatEscape()
kcr@9800 233 {
arajkumar@10587 234 auto escape = [&] (Edge edge, Node* source) {
kcr@9800 235 if (!edge)
kcr@9800 236 return;
arajkumar@10587 237 transitivelyRemoveCandidate(edge.node(), source);
kcr@9800 238 };
kcr@9800 239
arajkumar@10587 240 auto escapeBasedOnArrayMode = [&] (ArrayMode mode, Edge edge, Node* source) {
kcr@9800 241 switch (mode.type()) {
arajkumar@10954 242 case Array::DirectArguments: {
arajkumar@10954 243 if (edge->op() != CreateDirectArguments) {
arajkumar@10587 244 escape(edge, source);
arajkumar@10954 245 break;
arajkumar@10954 246 }
arajkumar@10954 247
arajkumar@10954 248 // Everything is fine if we're doing an in-bounds access.
arajkumar@10954 249 if (mode.isInBounds())
arajkumar@10954 250 break;
arajkumar@10954 251
arajkumar@10954 252 // If we're out-of-bounds then we proceed only if the prototype chain
arajkumar@10954 253 // for the allocation is sane (i.e. doesn't have indexed properties).
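// For instance (hypothetical JS), with an indexed getter installed on
// Object.prototype, an out-of-bounds read such as
//
//     function h(a) { return arguments[5]; } // called with a single argument
//
// would have to consult the prototype chain, so it is only safe while the
// Object.prototype transition watchpoint holds and the prototype stays sane.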
arajkumar@10954 254 JSGlobalObject* globalObject = m_graph.globalObjectFor(edge->origin.semantic);
arajkumar@11139 255 Structure* objectPrototypeStructure = globalObject->objectPrototype()->structure(m_graph.m_vm);
arajkumar@10954 256 if (objectPrototypeStructure->transitionWatchpointSetIsStillValid()
arajkumar@10954 257 && globalObject->objectPrototypeIsSane()) {
arajkumar@10954 258 m_graph.registerAndWatchStructureTransition(objectPrototypeStructure);
arajkumar@10954 259 break;
arajkumar@10954 260 }
arajkumar@10954 261 escape(edge, source);
kcr@9800 262 break;
arajkumar@10954 263 }
kcr@9800 264
arajkumar@10587 265 case Array::Contiguous: {
arajkumar@10587 266 if (edge->op() != CreateClonedArguments && edge->op() != CreateRest) {
arajkumar@10587 267 escape(edge, source);
arajkumar@10954 268 break;
arajkumar@10587 269 }
arajkumar@10587 270
arajkumar@10587 271 // Everything is fine if we're doing an in-bounds access.
arajkumar@10587 272 if (mode.isInBounds())
arajkumar@10587 273 break;
arajkumar@10587 274
arajkumar@10587 275 // If we're out-of-bounds then we proceed only if the prototype chain
arajkumar@10587 276 // for the allocation is sane (i.e. doesn't have indexed properties).
arajkumar@10587 277 JSGlobalObject* globalObject = m_graph.globalObjectFor(edge->origin.semantic);
arajkumar@11139 278 Structure* objectPrototypeStructure = globalObject->objectPrototype()->structure(m_graph.m_vm);
arajkumar@10587 279 if (edge->op() == CreateRest) {
arajkumar@11139 280 Structure* arrayPrototypeStructure = globalObject->arrayPrototype()->structure(m_graph.m_vm);
arajkumar@10770 281 if (arrayPrototypeStructure->transitionWatchpointSetIsStillValid()
arajkumar@10770 282 && objectPrototypeStructure->transitionWatchpointSetIsStillValid()
arajkumar@10587 283 && globalObject->arrayPrototypeChainIsSane()) {
arajkumar@10770 284 m_graph.registerAndWatchStructureTransition(arrayPrototypeStructure);
arajkumar@10770 285 m_graph.registerAndWatchStructureTransition(objectPrototypeStructure);
arajkumar@10587 286 break;
arajkumar@10587 287 }
arajkumar@10587 288 } else {
arajkumar@10770 289 if (objectPrototypeStructure->transitionWatchpointSetIsStillValid()
arajkumar@10587 290 && globalObject->objectPrototypeIsSane()) {
arajkumar@10770 291 m_graph.registerAndWatchStructureTransition(objectPrototypeStructure);
arajkumar@10587 292 break;
arajkumar@10587 293 }
arajkumar@10587 294 }
arajkumar@10587 295 escape(edge, source);
arajkumar@10587 296 break;
arajkumar@10587 297 }
arajkumar@10587 298
arajkumar@10587 299 case Array::ForceExit:
kcr@9800 300 break;
kcr@9800 301
kcr@9800 302 default:
arajkumar@10587 303 escape(edge, source);
kcr@9800 304 break;
kcr@9800 305 }
kcr@9800 306 };
kcr@9800 307
arajkumar@10587 308 removeInvalidCandidates();
arajkumar@10587 309
kcr@9800 310 for (BasicBlock* block : m_graph.blocksInNaturalOrder()) {
kcr@9800 311 for (Node* node : *block) {
kcr@9800 312 switch (node->op()) {
kcr@9800 313 case GetFromArguments:
kcr@9800 314 break;
kcr@9800 315
kcr@9800 316 case GetByVal:
arajkumar@10954 317 escapeBasedOnArrayMode(node->arrayMode(), m_graph.varArgChild(node, 0), node);
arajkumar@10954 318 escape(m_graph.varArgChild(node, 1), node);
arajkumar@10954 319 escape(m_graph.varArgChild(node, 2), node);
kcr@9800 320 break;
kcr@9800 321
kcr@9800 322 case GetArrayLength:
arajkumar@10587 323 escape(node->child2(), node);
arajkumar@11139 324 // Computing the length of a NewArrayWithSpread can require some additions.
arajkumar@11139 325 // These additions can overflow if the array is sufficiently enormous, and in that case we will need to exit.
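// For example, the length of [...a, ...b] is a.length + b.length, which can
// exceed INT32_MAX for sufficiently large arrays.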
arajkumar@11139 326 if ((node->child1()->op() == NewArrayWithSpread) && !node->origin.exitOK)
arajkumar@11139 327 escape(node->child1(), node);
arajkumar@10954 328 break;
arajkumar@10954 329
arajkumar@10587 330 case NewArrayWithSpread: {
arajkumar@10587 331 BitVector* bitVector = node->bitVector();
arajkumar@10587 332 bool isWatchingHavingABadTimeWatchpoint = m_graph.isWatchingHavingABadTimeWatchpoint(node);
arajkumar@10587 333 for (unsigned i = 0; i < node->numChildren(); i++) {
arajkumar@10587 334 Edge child = m_graph.varArgChild(node, i);
arajkumar@10587 335 bool dontEscape;
arajkumar@10587 336 if (bitVector->get(i)) {
arajkumar@10587 337 dontEscape = child->op() == Spread
arajkumar@10587 338 && child->child1().useKind() == ArrayUse
arajkumar@10954 339 && (child->child1()->op() == CreateRest || child->child1()->op() == NewArrayBuffer)
arajkumar@10587 340 && isWatchingHavingABadTimeWatchpoint;
arajkumar@10587 341 } else
arajkumar@10587 342 dontEscape = false;
arajkumar@10587 343
arajkumar@10587 344 if (!dontEscape)
arajkumar@10587 345 escape(child, node);
arajkumar@10587 346 }
arajkumar@10587 347
arajkumar@10587 348 break;
arajkumar@10587 349 }
arajkumar@10587 350
arajkumar@10587 351 case Spread: {
arajkumar@10954 352 bool isOK = node->child1().useKind() == ArrayUse && (node->child1()->op() == CreateRest || node->child1()->op() == NewArrayBuffer);
arajkumar@10587 353 if (!isOK)
arajkumar@10587 354 escape(node->child1(), node);
arajkumar@10587 355 break;
arajkumar@10587 356 }
arajkumar@10587 357
arajkumar@10954 358 case NewArrayBuffer:
arajkumar@10954 359 break;
arajkumar@10587 360
kcr@9800 361 case LoadVarargs:
arajkumar@10954 362 if (node->loadVarargsData()->offset && (node->child1()->op() == NewArrayWithSpread || node->child1()->op() == Spread || node->child1()->op() == NewArrayBuffer))
arajkumar@10587 363 escape(node->child1(), node);
kcr@9800 364 break;
kcr@9800 365
kcr@9800 366 case CallVarargs:
kcr@9800 367 case ConstructVarargs:
kcr@10196 368 case TailCallVarargs:
kcr@10196 369 case TailCallVarargsInlinedCaller:
arajkumar@10587 370 escape(node->child1(), node);
arajkumar@10587 371 escape(node->child2(), node);
arajkumar@10954 372 if (node->callVarargsData()->firstVarArgOffset && (node->child3()->op() == NewArrayWithSpread || node->child3()->op() == Spread || node->child1()->op() == NewArrayBuffer))
arajkumar@10587 373 escape(node->child3(), node);
kcr@9800 374 break;
kcr@9800 375
kcr@9800 376 case Check:
arajkumar@10954 377 case CheckVarargs:
kcr@9800 378 m_graph.doToChildren(
kcr@9800 379 node,
kcr@9800 380 [&] (Edge edge) {
kcr@9800 381 if (edge.willNotHaveCheck())
kcr@9800 382 return;
kcr@9800 383
kcr@9800 384 if (alreadyChecked(edge.useKind(), SpecObject))
kcr@9800 385 return;
kcr@9800 386
arajkumar@10587 387 escape(edge, node);
kcr@9800 388 });
kcr@9800 389 break;
kcr@9800 390
kcr@9800 391 case MovHint:
kcr@9800 392 case PutHint:
kcr@9800 393 break;
kcr@9800 394
kcr@9800 395 case GetButterfly:
kcr@9800 396 // This barely works. The danger is that the GetButterfly is used by something that
kcr@9800 397 // does something escaping to a candidate. Fortunately, the only butterfly-using ops
kcr@9800 398 // that we exempt here also use the candidate directly. If there ever was a
kcr@9800 399 // butterfly-using op that we wanted to exempt, then we'd have to look at the
kcr@9800 400 // butterfly's child and check if it's a candidate.
kcr@9800 401 break;
kcr@9800 402
arajkumar@11139 403 case FilterGetByIdStatus:
arajkumar@11139 404 case FilterPutByIdStatus:
arajkumar@11139 405 case FilterCallLinkStatus:
arajkumar@11139 406 case FilterInByIdStatus:
arajkumar@11139 407 break;
arajkumar@11139 408
kcr@9800 409 case CheckArray:
arajkumar@10587 410 escapeBasedOnArrayMode(node->arrayMode(), node->child1(), node);
kcr@9800 411 break;
kcr@9800 412
arajkumar@10954 413 case CheckStructureOrEmpty:
arajkumar@10587 414 case CheckStructure: {
arajkumar@10954 415 Node* target = node->child1().node();
arajkumar@10954 416 if (!m_candidates.contains(target))
arajkumar@10587 417 break;
arajkumar@10587 418
arajkumar@10587 419 Structure* structure = nullptr;
arajkumar@10954 420 JSGlobalObject* globalObject = m_graph.globalObjectFor(target->origin.semantic);
arajkumar@10954 421 switch (target->op()) {
arajkumar@10587 422 case CreateDirectArguments:
arajkumar@10587 423 structure = globalObject->directArgumentsStructure();
arajkumar@10587 424 break;
arajkumar@10587 425 case CreateClonedArguments:
arajkumar@10587 426 structure = globalObject->clonedArgumentsStructure();
arajkumar@10587 427 break;
arajkumar@10587 428 case CreateRest:
arajkumar@10954 429 ASSERT(m_graph.isWatchingHavingABadTimeWatchpoint(target));
arajkumar@10587 430 structure = globalObject->restParameterStructure();
arajkumar@10587 431 break;
arajkumar@10587 432 case NewArrayWithSpread:
arajkumar@10954 433 ASSERT(m_graph.isWatchingHavingABadTimeWatchpoint(target));
arajkumar@10587 434 structure = globalObject->originalArrayStructureForIndexingType(ArrayWithContiguous);
arajkumar@10587 435 break;
arajkumar@10954 436 case NewArrayBuffer: {
arajkumar@10954 437 ASSERT(m_graph.isWatchingHavingABadTimeWatchpoint(target));
arajkumar@11139 438 IndexingType indexingMode = target->indexingMode();
arajkumar@11139 439 ASSERT(!hasAnyArrayStorage(indexingMode));
arajkumar@11139 440 structure = globalObject->originalArrayStructureForIndexingType(indexingMode);
arajkumar@10954 441 break;
arajkumar@10954 442 }
arajkumar@10587 443 default:
arajkumar@10587 444 RELEASE_ASSERT_NOT_REACHED();
arajkumar@10587 445 }
arajkumar@10587 446 ASSERT(structure);
arajkumar@10587 447
arajkumar@10587 448 if (!node->structureSet().contains(m_graph.registerStructure(structure)))
arajkumar@10587 449 escape(node->child1(), node);
arajkumar@10587 450 break;
arajkumar@10587 451 }
kcr@9800 452
kcr@9800 453 // FIXME: We should be able to handle GetById/GetByOffset on callee.
kcr@9800 454 // https://bugs.webkit.org/show_bug.cgi?id=143075
kcr@9800 455
arajkumar@10587 456 case GetByOffset:
arajkumar@10587 457 if (node->child2()->op() == CreateClonedArguments && node->storageAccessData().offset == clonedArgumentsLengthPropertyOffset)
arajkumar@10587 458 break;
arajkumar@10587 459 FALLTHROUGH;
kcr@9800 460 default:
arajkumar@10587 461 m_graph.doToChildren(node, [&] (Edge edge) { return escape(edge, node); });
kcr@9800 462 break;
kcr@9800 463 }
mbilla@10730 464 if (node->isPseudoTerminal())
mbilla@10730 465 break;
kcr@9800 466 }
kcr@9800 467 }
kcr@9800 468
arajkumar@10954 469 if (DFGArgumentsEliminationPhaseInternal::verbose)
kcr@9800 470 dataLog("After escape analysis: ", listDump(m_candidates), "\n");
kcr@9800 471 }
kcr@9800 472
kcr@9800 473 // Anywhere that a candidate is live (in bytecode or in DFG), check if there is a chance of
kcr@9800 474 // interference between the stack area that the arguments object copies from and the arguments
kcr@9800 475 // object's payload. Conservatively this means that the stack region doesn't get stored to.
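// A minimal sketch of the interference being guarded against (hypothetical JS,
// strict mode so `arguments` is a snapshot rather than mapped):
//
//     "use strict";
//     function k(x) { x = 42; return arguments[0]; } // must return the original x
//
// If the store to x clobbers the argument's stack slot while the (eliminated)
// arguments object is still live, forwarding reads from the stack would wrongly
// observe 42, so such candidates are removed.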
kcr@9800 476 void eliminateCandidatesThatInterfere()
kcr@9800 477 {
kcr@9800 478 performLivenessAnalysis(m_graph);
kcr@9800 479 performOSRAvailabilityAnalysis(m_graph);
kcr@9800 480 m_graph.initializeNodeOwners();
kcr@9800 481 CombinedLiveness combinedLiveness(m_graph);
kcr@9800 482
kcr@9800 483 BlockMap<Operands<bool>> clobberedByBlock(m_graph);
kcr@9800 484 for (BasicBlock* block : m_graph.blocksInNaturalOrder()) {
kcr@9800 485 Operands<bool>& clobberedByThisBlock = clobberedByBlock[block];
kcr@9800 486 clobberedByThisBlock = Operands<bool>(OperandsLike, m_graph.block(0)->variablesAtHead);
kcr@9800 487 for (Node* node : *block) {
kcr@9800 488 clobberize(
kcr@9800 489 m_graph, node, NoOpClobberize(),
kcr@9800 490 [&] (AbstractHeap heap) {
kcr@9800 491 if (heap.kind() != Stack) {
kcr@9800 492 ASSERT(!heap.overlaps(Stack));
kcr@9800 493 return;
kcr@9800 494 }
kcr@9800 495 ASSERT(!heap.payload().isTop());
kcr@9800 496 VirtualRegister reg(heap.payload().value32());
arajkumar@10954 497 // The register may not point to an argument or local, for example if we are looking at SetArgumentCountIncludingThis.
arajkumar@10954 498 if (!reg.isHeader())
arajkumar@10954 499 clobberedByThisBlock.operand(reg) = true;
kcr@9800 500 },
kcr@9800 501 NoOpClobberize());
kcr@9800 502 }
kcr@9800 503 }
kcr@9800 504
kcr@9800 505 for (BasicBlock* block : m_graph.blocksInNaturalOrder()) {
kcr@9800 506 // Stop if we've already removed all candidates.
kcr@9800 507 if (m_candidates.isEmpty())
kcr@9800 508 return;
kcr@9800 509
kcr@9800 510 // Ignore blocks that don't write to the stack.
kcr@9800 511 bool writesToStack = false;
kcr@9800 512 for (unsigned i = clobberedByBlock[block].size(); i--;) {
kcr@9800 513 if (clobberedByBlock[block][i]) {
kcr@9800 514 writesToStack = true;
kcr@9800 515 break;
kcr@9800 516 }
kcr@9800 517 }
kcr@9800 518 if (!writesToStack)
kcr@9800 519 continue;
kcr@9800 520
kcr@9800 521 forAllKillsInBlock(
kcr@9800 522 m_graph, combinedLiveness, block,
kcr@9800 523 [&] (unsigned nodeIndex, Node* candidate) {
kcr@9800 524 if (!m_candidates.contains(candidate))
kcr@9800 525 return;
kcr@9800 526
kcr@9800 527 // Check if this block has any clobbers that affect this candidate. This is a fairly
kcr@9800 528 // fast check.
kcr@9800 529 bool isClobberedByBlock = false;
kcr@9800 530 Operands<bool>& clobberedByThisBlock = clobberedByBlock[block];
kcr@9800 531
kcr@9800 532 if (InlineCallFrame* inlineCallFrame = candidate->origin.semantic.inlineCallFrame) {
kcr@9800 533 if (inlineCallFrame->isVarargs()) {
kcr@9800 534 isClobberedByBlock |= clobberedByThisBlock.operand(
arajkumar@10587 535 inlineCallFrame->stackOffset + CallFrameSlot::argumentCount);
kcr@9800 536 }
kcr@9800 537
kcr@9800 538 if (!isClobberedByBlock || inlineCallFrame->isClosureCall) {
kcr@9800 539 isClobberedByBlock |= clobberedByThisBlock.operand(
arajkumar@10587 540 inlineCallFrame->stackOffset + CallFrameSlot::callee);
kcr@9800 541 }
kcr@9800 542
kcr@9800 543 if (!isClobberedByBlock) {
arajkumar@10954 544 for (unsigned i = 0; i < inlineCallFrame->argumentCountIncludingThis - 1; ++i) {
kcr@9800 545 VirtualRegister reg =
kcr@9800 546 VirtualRegister(inlineCallFrame->stackOffset) +
kcr@9800 547 CallFrame::argumentOffset(i);
kcr@9800 548 if (clobberedByThisBlock.operand(reg)) {
kcr@9800 549 isClobberedByBlock = true;
kcr@9800 550 break;
kcr@9800 551 }
kcr@9800 552 }
kcr@9800 553 }
kcr@9800 554 } else {
kcr@9800 555 // We don't include the ArgumentCount or Callee in this case because we can be
kcr@9800 556 // damn sure that this won't be clobbered.
kcr@9800 557 for (unsigned i = 1; i < static_cast<unsigned>(codeBlock()->numParameters()); ++i) {
kcr@9800 558 if (clobberedByThisBlock.argument(i)) {
kcr@9800 559 isClobberedByBlock = true;
kcr@9800 560 break;
kcr@9800 561 }
kcr@9800 562 }
kcr@9800 563 }
kcr@9800 564
kcr@9800 565 if (!isClobberedByBlock)
kcr@9800 566 return;
kcr@9800 567
kcr@9800 568 // Check if we can immediately eliminate this candidate. If the block has a clobber
kcr@9800 569 // for this arguments allocation, and we'd have to examine every node in the block,
kcr@9800 570 // then we can just eliminate the candidate.
kcr@9800 571 if (nodeIndex == block->size() && candidate->owner != block) {
arajkumar@10954 572 if (DFGArgumentsEliminationPhaseInternal::verbose)
arajkumar@10587 573 dataLog("eliminating candidate: ", candidate, " because it is clobbered by: ", block->at(nodeIndex), "\n");
arajkumar@10587 574 transitivelyRemoveCandidate(candidate);
kcr@9800 575 return;
kcr@9800 576 }
kcr@9800 577
kcr@9800 578 // This loop considers all nodes up to the nodeIndex, excluding the nodeIndex.
kcr@9800 579 while (nodeIndex--) {
kcr@9800 580 Node* node = block->at(nodeIndex);
kcr@9800 581 if (node == candidate)
kcr@9800 582 break;
kcr@9800 583
kcr@9800 584 bool found = false;
kcr@9800 585 clobberize(
kcr@9800 586 m_graph, node, NoOpClobberize(),
kcr@9800 587 [&] (AbstractHeap heap) {
kcr@9800 588 if (heap.kind() == Stack && !heap.payload().isTop()) {
kcr@9800 589 if (argumentsInvolveStackSlot(candidate, VirtualRegister(heap.payload().value32())))
kcr@9800 590 found = true;
kcr@9800 591 return;
kcr@9800 592 }
kcr@9800 593 if (heap.overlaps(Stack))
kcr@9800 594 found = true;
kcr@9800 595 },
kcr@9800 596 NoOpClobberize());
kcr@9800 597
kcr@9800 598 if (found) {
arajkumar@10954 599 if (DFGArgumentsEliminationPhaseInternal::verbose)
arajkumar@10587 600 dataLog("eliminating candidate: ", candidate, " because it is clobbered by ", block->at(nodeIndex), "\n");
arajkumar@10587 601 transitivelyRemoveCandidate(candidate);
kcr@9800 602 return;
kcr@9800 603 }
kcr@9800 604 }
kcr@9800 605 });
kcr@9800 606 }
kcr@9800 607
kcr@9800 608 // Q: How do we handle OSR exit with a live PhantomArguments at a point where the inline call
kcr@9800 609 // frame is dead? A: Naively we could say that PhantomArguments must escape the stack slots. But
kcr@9800 610 // that would break PutStack sinking, which in turn would break object allocation sinking, in
kcr@9800 611 // cases where we have a varargs call to an otherwise pure method. So, we need something smarter.
kcr@9800 612 // For the outermost arguments, we just have a PhantomArguments that magically knows that it
kcr@9800 613 // should load the arguments from the call frame. For the inline arguments, we have the heap map
kcr@9800 614 // in the availability map track each possible inline argument as a promoted heap location. If the
kcr@9800 615 // PutStacks for those arguments aren't sunk, those heap locations will map to very trivial
kcr@9800 616 // availabilities (they will be flush availabilities). But if sinking happens then those
kcr@9800 617 // availabilities may become whatever. OSR exit should be able to handle this quite naturally,
kcr@9800 618 // since those availabilities speak of the stack before the optimizing compiler stack frame is
kcr@9800 619 // torn down.
kcr@9800 620
arajkumar@10954 621 if (DFGArgumentsEliminationPhaseInternal::verbose)
kcr@9800 622 dataLog("After interference analysis: ", listDump(m_candidates), "\n");
kcr@9800 623 }
kcr@9800 624
kcr@9800 625 void transform()
kcr@9800 626 {
kcr@9800 627 InsertionSet insertionSet(m_graph);
kcr@9800 628
arajkumar@10587 629 for (BasicBlock* block : m_graph.blocksInPreOrder()) {
kcr@9800 630 for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
kcr@9800 631 Node* node = block->at(nodeIndex);
kcr@9800 632
kcr@9800 633 auto getArrayLength = [&] (Node* candidate) -> Node* {
kcr@9800 634 return emitCodeToGetArgumentsArrayLength(
kcr@9800 635 insertionSet, candidate, nodeIndex, node->origin);
kcr@9800 636 };
kcr@9800 637
arajkumar@10587 638 auto isEliminatedAllocation = [&] (Node* candidate) -> bool {
arajkumar@10587 639 if (!m_candidates.contains(candidate))
arajkumar@10587 640 return false;
arajkumar@10587 641 // We traverse in such a way that we are guaranteed to see a def before a use.
arajkumar@10587 642 // Therefore, we should have already transformed the allocation before the use
arajkumar@10587 643 // of an allocation.
arajkumar@10587 644 ASSERT(candidate->op() == PhantomCreateRest || candidate->op() == PhantomDirectArguments || candidate->op() == PhantomClonedArguments
arajkumar@10954 645 || candidate->op() == PhantomSpread || candidate->op() == PhantomNewArrayWithSpread || candidate->op() == PhantomNewArrayBuffer);
arajkumar@10587 646 return true;
arajkumar@10587 647 };
arajkumar@10587 648
kcr@9800 649 switch (node->op()) {
kcr@9800 650 case CreateDirectArguments:
kcr@9800 651 if (!m_candidates.contains(node))
kcr@9800 652 break;
kcr@9800 653
kcr@9800 654 node->setOpAndDefaultFlags(PhantomDirectArguments);
kcr@9800 655 break;
kcr@9800 656
arajkumar@10587 657 case CreateRest:
arajkumar@10587 658 if (!m_candidates.contains(node))
arajkumar@10587 659 break;
arajkumar@10587 660
arajkumar@10587 661 node->setOpAndDefaultFlags(PhantomCreateRest);
arajkumar@10587 662 // We don't need this parameter for OSR exit, we can find out all the information
arajkumar@10587 663 // we need via the static parameter count and the dynamic argument count.
arajkumar@10587 664 node->child1() = Edge();
arajkumar@10587 665 break;
arajkumar@10587 666
kcr@9800 667 case CreateClonedArguments:
kcr@9800 668 if (!m_candidates.contains(node))
kcr@9800 669 break;
kcr@9800 670
kcr@9800 671 node->setOpAndDefaultFlags(PhantomClonedArguments);
kcr@9800 672 break;
kcr@9800 673
arajkumar@10587 674 case Spread:
arajkumar@10587 675 if (!m_candidates.contains(node))
arajkumar@10587 676 break;
arajkumar@10587 677
arajkumar@10587 678 node->setOpAndDefaultFlags(PhantomSpread);
arajkumar@10587 679 break;
arajkumar@10587 680
arajkumar@10587 681 case NewArrayWithSpread:
arajkumar@10587 682 if (!m_candidates.contains(node))
arajkumar@10587 683 break;
arajkumar@10587 684
arajkumar@10587 685 node->setOpAndDefaultFlags(PhantomNewArrayWithSpread);
arajkumar@10587 686 break;
arajkumar@10587 687
arajkumar@10954 688 case NewArrayBuffer:
arajkumar@10954 689 if (!m_candidates.contains(node))
arajkumar@10954 690 break;
arajkumar@10954 691
arajkumar@10954 692 node->setOpAndDefaultFlags(PhantomNewArrayBuffer);
arajkumar@10954 693 node->child1() = Edge(insertionSet.insertConstant(nodeIndex, node->origin, node->cellOperand()));
arajkumar@10954 694 break;
arajkumar@10954 695
kcr@9800 696 case GetFromArguments: {
kcr@9800 697 Node* candidate = node->child1().node();
arajkumar@10587 698 if (!isEliminatedAllocation(candidate))
kcr@9800 699 break;
kcr@9800 700
kcr@9800 701 DFG_ASSERT(
arajkumar@10954 702 m_graph, node, node->child1()->op() == PhantomDirectArguments, node->child1()->op());
kcr@9800 703 VirtualRegister reg =
kcr@9800 704 virtualRegisterForArgument(node->capturedArgumentsOffset().offset() + 1) +
kcr@9800 705 node->origin.semantic.stackOffset();
kcr@9800 706 StackAccessData* data = m_graph.m_stackAccessData.add(reg, FlushedJSValue);
kcr@9800 707 node->convertToGetStack(data);
kcr@9800 708 break;
kcr@9800 709 }
kcr@9800 710
arajkumar@10587 711 case GetByOffset: {
arajkumar@10587 712 Node* candidate = node->child2().node();
arajkumar@10587 713 if (!isEliminatedAllocation(candidate))
arajkumar@10587 714 break;
arajkumar@10587 715
arajkumar@10954 716 ASSERT(candidate->op() == PhantomClonedArguments);
arajkumar@10587 717 ASSERT(node->storageAccessData().offset == clonedArgumentsLengthPropertyOffset);
arajkumar@10587 718
arajkumar@10587 719 // Meh, this is kind of hackish - we use an Identity so that we can reuse the
arajkumar@10587 720 // getArrayLength() helper.
arajkumar@10587 721 node->convertToIdentityOn(getArrayLength(candidate));
arajkumar@10587 722 break;
arajkumar@10587 723 }
arajkumar@10587 724
kcr@9800 725 case GetArrayLength: {
kcr@9800 726 Node* candidate = node->child1().node();
arajkumar@10587 727 if (!isEliminatedAllocation(candidate))
kcr@9800 728 break;
kcr@9800 729
kcr@9800 730 node->convertToIdentityOn(getArrayLength(candidate));
kcr@9800 731 break;
kcr@9800 732 }
kcr@9800 733
kcr@9800 734 case GetByVal: {
kcr@9800 735 // FIXME: For ClonedArguments, we would have already done a separate bounds check.
kcr@9800 736 // This code will cause us to have two bounds checks - the original one that we
kcr@9800 737 // already factored out in SSALoweringPhase, and the new one we insert here, which is
kcr@10196 738 // often implicitly part of GetMyArgumentByVal. B3 will probably eliminate the
kcr@9800 739 // second bounds check, but still - that's just silly.
kcr@9800 740 // https://bugs.webkit.org/show_bug.cgi?id=143076
kcr@9800 741
arajkumar@10954 742 Node* candidate = m_graph.varArgChild(node, 0).node();
arajkumar@10587 743 if (!isEliminatedAllocation(candidate))
kcr@9800 744 break;
kcr@9800 745
arajkumar@10587 746 unsigned numberOfArgumentsToSkip = 0;
arajkumar@10587 747 if (candidate->op() == PhantomCreateRest)
arajkumar@10587 748 numberOfArgumentsToSkip = candidate->numberOfArgumentsToSkip();
arajkumar@10587 749
kcr@9800 750 Node* result = nullptr;
arajkumar@10954 751 if (m_graph.varArgChild(node, 1)->isInt32Constant()) {
arajkumar@10954 752 unsigned index = m_graph.varArgChild(node, 1)->asUInt32();
kcr@9800 753 InlineCallFrame* inlineCallFrame = candidate->origin.semantic.inlineCallFrame;
arajkumar@10587 754 index += numberOfArgumentsToSkip;
kcr@9800 755
kcr@9800 756 bool safeToGetStack;
kcr@9800 757 if (inlineCallFrame)
arajkumar@10954 758 safeToGetStack = index < inlineCallFrame->argumentCountIncludingThis - 1;
kcr@9800 759 else {
kcr@9800 760 safeToGetStack =
kcr@9800 761 index < static_cast<unsigned>(codeBlock()->numParameters()) - 1;
kcr@9800 762 }
kcr@9800 763 if (safeToGetStack) {
kcr@9800 764 StackAccessData* data;
kcr@9800 765 VirtualRegister arg = virtualRegisterForArgument(index + 1);
kcr@9800 766 if (inlineCallFrame)
kcr@9800 767 arg += inlineCallFrame->stackOffset;
kcr@9800 768 data = m_graph.m_stackAccessData.add(arg, FlushedJSValue);
kcr@9800 769
arajkumar@11208 770 Node* check = nullptr;
kcr@10196 771 if (!inlineCallFrame || inlineCallFrame->isVarargs()) {
arajkumar@11208 772 check = insertionSet.insertNode(
kcr@9800 773 nodeIndex, SpecNone, CheckInBounds, node->origin,
arajkumar@10954 774 m_graph.varArgChild(node, 1), Edge(getArrayLength(candidate), Int32Use));
kcr@9800 775 }
kcr@9800 776
kcr@9800 777 result = insertionSet.insertNode(
arajkumar@11208 778 nodeIndex, node->prediction(), GetStack, node->origin, OpInfo(data), Edge(check, UntypedUse));
kcr@9800 779 }
kcr@9800 780 }
kcr@9800 781
kcr@9800 782 if (!result) {
arajkumar@10587 783 NodeType op;
arajkumar@10587 784 if (node->arrayMode().isInBounds())
arajkumar@10587 785 op = GetMyArgumentByVal;
arajkumar@10587 786 else
arajkumar@10587 787 op = GetMyArgumentByValOutOfBounds;
kcr@9800 788 result = insertionSet.insertNode(
arajkumar@10587 789 nodeIndex, node->prediction(), op, node->origin, OpInfo(numberOfArgumentsToSkip),
arajkumar@10954 790 m_graph.varArgChild(node, 0), m_graph.varArgChild(node, 1));
kcr@9800 791 }
kcr@9800 792
kcr@9800 793 // Need to do this because we may have a data format conversion here.
kcr@9800 794 node->convertToIdentityOn(result);
kcr@9800 795 break;
kcr@9800 796 }
kcr@9800 797
kcr@9800 798 case LoadVarargs: {
kcr@9800 799 Node* candidate = node->child1().node();
arajkumar@10587 800 if (!isEliminatedAllocation(candidate))
kcr@9800 801 break;
kcr@9800 802
arajkumar@10587 803 // LoadVarargs can exit, so it better be exitOK.
arajkumar@10587 804 DFG_ASSERT(m_graph, node, node->origin.exitOK);
arajkumar@10587 805 bool canExit = true;
kcr@9800 806 LoadVarargsData* varargsData = node->loadVarargsData();
kcr@10196 807
arajkumar@10587 808 auto storeArgumentCountIncludingThis = [&] (unsigned argumentCountIncludingThis) {
arajkumar@10587 809 Node* argumentCountIncludingThisNode = insertionSet.insertConstant(
kcr@10196 810 nodeIndex, node->origin.withExitOK(canExit),
arajkumar@10587 811 jsNumber(argumentCountIncludingThis));
kcr@9800 812 insertionSet.insertNode(
kcr@10196 813 nodeIndex, SpecNone, MovHint, node->origin.takeValidExit(canExit),
arajkumar@10587 814 OpInfo(varargsData->count.offset()), Edge(argumentCountIncludingThisNode));
kcr@9800 815 insertionSet.insertNode(
kcr@10196 816 nodeIndex, SpecNone, PutStack, node->origin.withExitOK(canExit),
kcr@9800 817 OpInfo(m_graph.m_stackAccessData.add(varargsData->count, FlushedInt32)),
arajkumar@10587 818 Edge(argumentCountIncludingThisNode, KnownInt32Use));
arajkumar@10587 819 };
kcr@9800 820
arajkumar@10587 821 auto storeValue = [&] (Node* value, unsigned storeIndex) {
arajkumar@10587 822 VirtualRegister reg = varargsData->start + storeIndex;
arajkumar@10587 823 StackAccessData* data =
arajkumar@10587 824 m_graph.m_stackAccessData.add(reg, FlushedJSValue);
kcr@9800 825
arajkumar@10587 826 insertionSet.insertNode(
arajkumar@10587 827 nodeIndex, SpecNone, MovHint, node->origin.takeValidExit(canExit),
arajkumar@10587 828 OpInfo(reg.offset()), Edge(value));
arajkumar@10587 829 insertionSet.insertNode(
arajkumar@10587 830 nodeIndex, SpecNone, PutStack, node->origin.withExitOK(canExit),
arajkumar@10587 831 OpInfo(data), Edge(value));
arajkumar@10587 832 };
kcr@9800 833
arajkumar@10954 834 if (candidate->op() == PhantomNewArrayWithSpread || candidate->op() == PhantomNewArrayBuffer || candidate->op() == PhantomSpread) {
arajkumar@10954 835 auto canConvertToStaticLoadStores = recursableLambda([&] (auto self, Node* candidate) -> bool {
arajkumar@10954 836 if (candidate->op() == PhantomSpread)
arajkumar@10954 837 return self(candidate->child1().node());
mbilla@10730 838
mbilla@10730 839 if (candidate->op() == PhantomNewArrayWithSpread) {
mbilla@10730 840 BitVector* bitVector = candidate->bitVector();
mbilla@10730 841 for (unsigned i = 0; i < candidate->numChildren(); i++) {
arajkumar@10954 842 if (bitVector->get(i)) {
arajkumar@10954 843 if (!self(m_graph.varArgChild(candidate, i).node()))
arajkumar@10954 844 return false;
arajkumar@10954 845 }
mbilla@10730 846 }
arajkumar@10954 847 return true;
arajkumar@10954 848 }
kcr@9800 849
arajkumar@10954 850 // PhantomNewArrayBuffer only contains constants. It can always convert LoadVarargs to static load stores.
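// For example, f(...[1, 2, 3]) spreads a literal whose elements are
// compile-time constants, so the forwarded stores can be emitted directly.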
arajkumar@10954 851 if (candidate->op() == PhantomNewArrayBuffer)
arajkumar@10954 852 return true;
arajkumar@10954 853
arajkumar@10954 854 ASSERT(candidate->op() == PhantomCreateRest);
arajkumar@10954 855 InlineCallFrame* inlineCallFrame = candidate->origin.semantic.inlineCallFrame;
arajkumar@10954 856 return inlineCallFrame && !inlineCallFrame->isVarargs();
arajkumar@10954 857 });
arajkumar@10954 858
arajkumar@10954 859 if (canConvertToStaticLoadStores(candidate)) {
arajkumar@10954 860 auto countNumberOfArguments = recursableLambda([&](auto self, Node* candidate) -> unsigned {
arajkumar@10954 861 if (candidate->op() == PhantomSpread)
arajkumar@10954 862 return self(candidate->child1().node());
arajkumar@10954 863
arajkumar@10954 864 if (candidate->op() == PhantomNewArrayWithSpread) {
arajkumar@10954 865 BitVector* bitVector = candidate->bitVector();
arajkumar@10954 866 unsigned result = 0;
arajkumar@10954 867 for (unsigned i = 0; i < candidate->numChildren(); i++) {
arajkumar@10954 868 if (bitVector->get(i))
arajkumar@10954 869 result += self(m_graph.varArgChild(candidate, i).node());
arajkumar@10954 870 else
arajkumar@10954 871 ++result;
arajkumar@10954 872 }
arajkumar@10954 873 return result;
arajkumar@10954 874 }
arajkumar@10954 875
arajkumar@10954 876 if (candidate->op() == PhantomNewArrayBuffer)
arajkumar@11139 877 return candidate->castOperand<JSImmutableButterfly*>()->length();
arajkumar@10954 878
arajkumar@10954 879 ASSERT(candidate->op() == PhantomCreateRest);
arajkumar@10954 880 unsigned numberOfArgumentsToSkip = candidate->numberOfArgumentsToSkip();
arajkumar@10954 881 InlineCallFrame* inlineCallFrame = candidate->origin.semantic.inlineCallFrame;
arajkumar@10954 882 unsigned frameArgumentCount = inlineCallFrame->argumentCountIncludingThis - 1;
arajkumar@10954 883 if (frameArgumentCount >= numberOfArgumentsToSkip)
arajkumar@10954 884 return frameArgumentCount - numberOfArgumentsToSkip;
arajkumar@10954 885 return 0;
arajkumar@10954 886 });
arajkumar@10954 887
arajkumar@10954 888 unsigned argumentCountIncludingThis = 1 + countNumberOfArguments(candidate); // |this|
arajkumar@10587 889 if (argumentCountIncludingThis <= varargsData->limit) {
arajkumar@10587 890 storeArgumentCountIncludingThis(argumentCountIncludingThis);
kcr@9800 891
arajkumar@10954 892 DFG_ASSERT(m_graph, node, varargsData->limit - 1 >= varargsData->mandatoryMinimum, varargsData->limit, varargsData->mandatoryMinimum);
arajkumar@10587 893 // Define our limit to exclude "this", since that's a bit easier to reason about.
arajkumar@10587 894 unsigned limit = varargsData->limit - 1;
mbilla@10730 895
arajkumar@10954 896 auto forwardNode = recursableLambda([&](auto self, Node* candidate, unsigned storeIndex) -> unsigned {
arajkumar@10954 897 if (candidate->op() == PhantomSpread)
arajkumar@10954 898 return self(candidate->child1().node(), storeIndex);
arajkumar@10954 899
arajkumar@10954 900 if (candidate->op() == PhantomNewArrayWithSpread) {
arajkumar@10954 901 BitVector* bitVector = candidate->bitVector();
arajkumar@10954 902 for (unsigned i = 0; i < candidate->numChildren(); i++) {
arajkumar@10954 903 if (bitVector->get(i))
arajkumar@10954 904 storeIndex = self(m_graph.varArgChild(candidate, i).node(), storeIndex);
arajkumar@10954 905 else {
arajkumar@10954 906 Node* value = m_graph.varArgChild(candidate, i).node();
arajkumar@10954 907 storeValue(value, storeIndex++);
arajkumar@10954 908 }
arajkumar@10954 909 }
arajkumar@10954 910 return storeIndex;
arajkumar@10954 911 }
arajkumar@10954 912
arajkumar@10954 913 if (candidate->op() == PhantomNewArrayBuffer) {
arajkumar@11139 914 auto* array = candidate->castOperand<JSImmutableButterfly*>();
arajkumar@10954 915 for (unsigned index = 0; index < array->length(); ++index) {
arajkumar@10954 916 JSValue constant;
arajkumar@10954 917 if (candidate->indexingType() == ArrayWithDouble)
arajkumar@10954 918 constant = jsDoubleNumber(array->get(index).asNumber());
arajkumar@10954 919 else
arajkumar@10954 920 constant = array->get(index);
arajkumar@10954 921 Node* value = insertionSet.insertConstant(nodeIndex, node->origin.withExitOK(canExit), constant);
arajkumar@10954 922 storeValue(value, storeIndex++);
arajkumar@10954 923 }
arajkumar@10954 924 return storeIndex;
arajkumar@10954 925 }
arajkumar@10954 926
arajkumar@10954 927 ASSERT(candidate->op() == PhantomCreateRest);
arajkumar@10954 928 unsigned numberOfArgumentsToSkip = candidate->numberOfArgumentsToSkip();
arajkumar@10954 929 InlineCallFrame* inlineCallFrame = candidate->origin.semantic.inlineCallFrame;
arajkumar@10954 930 unsigned frameArgumentCount = inlineCallFrame->argumentCountIncludingThis - 1;
mbilla@10730 931 for (unsigned loadIndex = numberOfArgumentsToSkip; loadIndex < frameArgumentCount; ++loadIndex) {
mbilla@10730 932 VirtualRegister reg = virtualRegisterForArgument(loadIndex + 1) + inlineCallFrame->stackOffset;
mbilla@10730 933 StackAccessData* data = m_graph.m_stackAccessData.add(reg, FlushedJSValue);
mbilla@10730 934 Node* value = insertionSet.insertNode(
mbilla@10730 935 nodeIndex, SpecNone, GetStack, node->origin.withExitOK(canExit),
mbilla@10730 936 OpInfo(data));
arajkumar@10954 937 storeValue(value, storeIndex++);
mbilla@10730 938 }
mbilla@10730 939 return storeIndex;
arajkumar@10954 940 });
mbilla@10730 941
arajkumar@10954 942 unsigned storeIndex = forwardNode(candidate, 0);
arajkumar@10587 943 RELEASE_ASSERT(storeIndex <= limit);
arajkumar@10587 944 Node* undefined = nullptr;
arajkumar@10587 945 for (; storeIndex < limit; ++storeIndex) {
arajkumar@10587 946 if (!undefined) {
arajkumar@10587 947 undefined = insertionSet.insertConstant(
arajkumar@10587 948 nodeIndex, node->origin.withExitOK(canExit), jsUndefined());
arajkumar@10587 949 }
arajkumar@10587 950 storeValue(undefined, storeIndex);
arajkumar@10587 951 }
mbilla@10730 952
arajkumar@10954 953 node->remove(m_graph);
mbilla@10730 954 node->origin.exitOK = canExit;
mbilla@10730 955 break;
arajkumar@10587 956 }
kcr@9800 957 }
arajkumar@10587 958 } else {
arajkumar@10587 959 unsigned numberOfArgumentsToSkip = 0;
arajkumar@10587 960 if (candidate->op() == PhantomCreateRest)
arajkumar@10587 961 numberOfArgumentsToSkip = candidate->numberOfArgumentsToSkip();
arajkumar@10587 962 varargsData->offset += numberOfArgumentsToSkip;
kcr@9800 963
arajkumar@10587 964 InlineCallFrame* inlineCallFrame = candidate->origin.semantic.inlineCallFrame;
arajkumar@10587 965
arajkumar@10587 966 if (inlineCallFrame
arajkumar@10587 967 && !inlineCallFrame->isVarargs()) {
arajkumar@10587 968
arajkumar@10954 969 unsigned argumentCountIncludingThis = inlineCallFrame->argumentCountIncludingThis;
arajkumar@10587 970 if (argumentCountIncludingThis > varargsData->offset)
arajkumar@10587 971 argumentCountIncludingThis -= varargsData->offset;
arajkumar@10587 972 else
arajkumar@10587 973 argumentCountIncludingThis = 1;
arajkumar@10587 974 RELEASE_ASSERT(argumentCountIncludingThis >= 1);
arajkumar@10587 975
arajkumar@10587 976 if (argumentCountIncludingThis <= varargsData->limit) {
arajkumar@10587 977
arajkumar@10587 978 storeArgumentCountIncludingThis(argumentCountIncludingThis);
arajkumar@10587 979
arajkumar@10954 980 DFG_ASSERT(m_graph, node, varargsData->limit - 1 >= varargsData->mandatoryMinimum, varargsData->limit, varargsData->mandatoryMinimum);
arajkumar@10587 981 // Define our limit to exclude "this", since that's a bit easier to reason about.
arajkumar@10587 982 unsigned limit = varargsData->limit - 1;
arajkumar@10587 983 Node* undefined = nullptr;
arajkumar@10587 984 for (unsigned storeIndex = 0; storeIndex < limit; ++storeIndex) {
arajkumar@10587 985 // First determine if we have an element we can load, and load it if
arajkumar@10587 986 // possible.
arajkumar@10587 987
arajkumar@10587 988 Node* value = nullptr;
arajkumar@10587 989 unsigned loadIndex = storeIndex + varargsData->offset;
arajkumar@10587 990
arajkumar@10954 991 if (loadIndex + 1 < inlineCallFrame->argumentCountIncludingThis) {
arajkumar@10587 992 VirtualRegister reg = virtualRegisterForArgument(loadIndex + 1) + inlineCallFrame->stackOffset;
arajkumar@10587 993 StackAccessData* data = m_graph.m_stackAccessData.add(
arajkumar@10587 994 reg, FlushedJSValue);
arajkumar@10587 995
arajkumar@10587 996 value = insertionSet.insertNode(
arajkumar@10587 997 nodeIndex, SpecNone, GetStack, node->origin.withExitOK(canExit),
arajkumar@10587 998 OpInfo(data));
arajkumar@10587 999 } else {
arajkumar@10587 1000 // FIXME: We shouldn't have to store anything if
arajkumar@10587 1001 // storeIndex >= varargsData->mandatoryMinimum, but we will still
arajkumar@10587 1002 // have GetStacks in that range. So if we don't do the stores, we'll
arajkumar@10587 1003 // have degenerate IR: we'll have GetStacks of something that didn't
arajkumar@10587 1004 // have PutStacks.
arajkumar@10587 1005 // https://bugs.webkit.org/show_bug.cgi?id=147434
arajkumar@10587 1006
arajkumar@10587 1007 if (!undefined) {
arajkumar@10587 1008 undefined = insertionSet.insertConstant(
arajkumar@10587 1009 nodeIndex, node->origin.withExitOK(canExit), jsUndefined());
arajkumar@10587 1010 }
arajkumar@10587 1011 value = undefined;
arajkumar@10587 1012 }
arajkumar@10587 1013
arajkumar@10587 1014 // Now that we have a value, store it.
arajkumar@10587 1015 storeValue(value, storeIndex);
arajkumar@10587 1016 }
arajkumar@10587 1017
arajkumar@10954 1018 node->remove(m_graph);
arajkumar@10587 1019 node->origin.exitOK = canExit;
arajkumar@10587 1020 break;
arajkumar@10587 1021 }
arajkumar@10587 1022 }
kcr@9800 1023 }
kcr@9800 1024
kcr@9800 1025 node->setOpAndDefaultFlags(ForwardVarargs);
kcr@9800 1026 break;
kcr@9800 1027 }
kcr@9800 1028
kcr@9800 1029 case CallVarargs:
kcr@10196 1030 case ConstructVarargs:
kcr@10196 1031 case TailCallVarargs:
kcr@10196 1032 case TailCallVarargsInlinedCaller: {
arajkumar@10587 1033 Node* candidate = node->child3().node();
arajkumar@10587 1034 if (!isEliminatedAllocation(candidate))
kcr@9800 1035 break;
kcr@9800 1036
arajkumar@10587 1037 auto convertToStaticArgumentCountCall = [&] (const Vector<Node*>& arguments) {
kcr@9800 1038 unsigned firstChild = m_graph.m_varArgChildren.size();
kcr@9800 1039 m_graph.m_varArgChildren.append(node->child1());
arajkumar@10587 1040 m_graph.m_varArgChildren.append(node->child2());
kcr@9800 1041 for (Node* argument : arguments)
kcr@9800 1042 m_graph.m_varArgChildren.append(Edge(argument));
kcr@10196 1043 switch (node->op()) {
kcr@10196 1044 case CallVarargs:
kcr@10196 1045 node->setOpAndDefaultFlags(Call);
kcr@10196 1046 break;
kcr@10196 1047 case ConstructVarargs:
kcr@10196 1048 node->setOpAndDefaultFlags(Construct);
kcr@10196 1049 break;
kcr@10196 1050 case TailCallVarargs:
kcr@10196 1051 node->setOpAndDefaultFlags(TailCall);
kcr@10196 1052 break;
kcr@10196 1053 case TailCallVarargsInlinedCaller:
kcr@10196 1054 node->setOpAndDefaultFlags(TailCallInlinedCaller);
kcr@10196 1055 break;
kcr@10196 1056 default:
kcr@10196 1057 RELEASE_ASSERT_NOT_REACHED();
kcr@10196 1058 }
kcr@9800 1059 node->children = AdjacencyList(
kcr@9800 1060 AdjacencyList::Variable,
kcr@9800 1061 firstChild, m_graph.m_varArgChildren.size() - firstChild);
arajkumar@10587 1062 };
arajkumar@10587 1063
arajkumar@10587 1064 auto convertToForwardsCall = [&] () {
arajkumar@10587 1065 switch (node->op()) {
arajkumar@10587 1066 case CallVarargs:
arajkumar@10587 1067 node->setOpAndDefaultFlags(CallForwardVarargs);
arajkumar@10587 1068 break;
arajkumar@10587 1069 case ConstructVarargs:
arajkumar@10587 1070 node->setOpAndDefaultFlags(ConstructForwardVarargs);
arajkumar@10587 1071 break;
arajkumar@10587 1072 case TailCallVarargs:
arajkumar@10587 1073 node->setOpAndDefaultFlags(TailCallForwardVarargs);
arajkumar@10587 1074 break;
arajkumar@10587 1075 case TailCallVarargsInlinedCaller:
arajkumar@10587 1076 node->setOpAndDefaultFlags(TailCallForwardVarargsInlinedCaller);
arajkumar@10587 1077 break;
arajkumar@10587 1078 default:
arajkumar@10587 1079 RELEASE_ASSERT_NOT_REACHED();
arajkumar@10587 1080 }
arajkumar@10587 1081 };
arajkumar@10587 1082
arajkumar@10954 1083 if (candidate->op() == PhantomNewArrayWithSpread || candidate->op() == PhantomNewArrayBuffer || candidate->op() == PhantomSpread) {
arajkumar@10954 1084 auto canTransformToStaticArgumentCountCall = recursableLambda([&](auto self, Node* candidate) -> bool {
arajkumar@10954 1085 if (candidate->op() == PhantomSpread)
arajkumar@10954 1086 return self(candidate->child1().node());
mbilla@10730 1087
arajkumar@10954 1088 if (candidate->op() == PhantomNewArrayWithSpread) {
arajkumar@10954 1089 BitVector* bitVector = candidate->bitVector();
arajkumar@10954 1090 for (unsigned i = 0; i < candidate->numChildren(); i++) {
arajkumar@10954 1091 if (bitVector->get(i)) {
arajkumar@10954 1092 Node* spread = m_graph.varArgChild(candidate, i).node();
arajkumar@10954 1093 if (!self(spread))
arajkumar@10954 1094 return false;
mbilla@10730 1095 }
arajkumar@10587 1096 }
arajkumar@10954 1097 return true;
arajkumar@10587 1098 }
arajkumar@10587 1099
arajkumar@10954 1100 // PhantomNewArrayBuffer only contains constants, so it can always be converted to a call with a static argument count.
arajkumar@10954 1101 if (candidate->op() == PhantomNewArrayBuffer)
arajkumar@10954 1102 return true;
arajkumar@10954 1103
arajkumar@10954 1104 ASSERT(candidate->op() == PhantomCreateRest);
arajkumar@10954 1105 InlineCallFrame* inlineCallFrame = candidate->origin.semantic.inlineCallFrame;
arajkumar@10954 1106 return inlineCallFrame && !inlineCallFrame->isVarargs();
arajkumar@10954 1107 });
arajkumar@10954 1108
arajkumar@10954 1109 if (canTransformToStaticArgumentCountCall(candidate)) {
arajkumar@10587 1110 Vector<Node*> arguments;
arajkumar@10954 1111 auto appendNode = recursableLambda([&](auto self, Node* candidate) -> void {
arajkumar@10954 1112 if (candidate->op() == PhantomSpread) {
arajkumar@10954 1113 self(candidate->child1().node());
arajkumar@10954 1114 return;
arajkumar@10954 1115 }
arajkumar@10954 1116
arajkumar@10954 1117 if (candidate->op() == PhantomNewArrayWithSpread) {
arajkumar@10954 1118 BitVector* bitVector = candidate->bitVector();
arajkumar@10954 1119 for (unsigned i = 0; i < candidate->numChildren(); i++) {
arajkumar@10954 1120 Node* child = m_graph.varArgChild(candidate, i).node();
arajkumar@10954 1121 if (bitVector->get(i))
arajkumar@10954 1122 self(child);
arajkumar@10954 1123 else
arajkumar@10954 1124 arguments.append(child);
arajkumar@10954 1125 }
arajkumar@10954 1126 return;
arajkumar@10954 1127 }
arajkumar@10954 1128
arajkumar@10954 1129 if (candidate->op() == PhantomNewArrayBuffer) {
arajkumar@10954 1130 bool canExit = true;
arajkumar@11139 1131 auto* array = candidate->castOperand<JSImmutableButterfly*>();
arajkumar@10954 1132 for (unsigned index = 0; index < array->length(); ++index) {
arajkumar@10954 1133 JSValue constant;
arajkumar@10954 1134 if (candidate->indexingType() == ArrayWithDouble)
arajkumar@10954 1135 constant = jsDoubleNumber(array->get(index).asNumber());
arajkumar@10954 1136 else
arajkumar@10954 1137 constant = array->get(index);
arajkumar@10954 1138 arguments.append(insertionSet.insertConstant(nodeIndex, node->origin.withExitOK(canExit), constant));
arajkumar@10954 1139 }
arajkumar@10954 1140 return;
arajkumar@10954 1141 }
arajkumar@10954 1142
arajkumar@10954 1143 ASSERT(candidate->op() == PhantomCreateRest);
arajkumar@10954 1144 InlineCallFrame* inlineCallFrame = candidate->origin.semantic.inlineCallFrame;
arajkumar@10954 1145 unsigned numberOfArgumentsToSkip = candidate->numberOfArgumentsToSkip();
arajkumar@10954 1146 for (unsigned i = 1 + numberOfArgumentsToSkip; i < inlineCallFrame->argumentCountIncludingThis; ++i) {
mbilla@10730 1147 StackAccessData* data = m_graph.m_stackAccessData.add(
mbilla@10730 1148 virtualRegisterForArgument(i) + inlineCallFrame->stackOffset,
mbilla@10730 1149 FlushedJSValue);
arajkumar@10587 1150
mbilla@10730 1151 Node* value = insertionSet.insertNode(
mbilla@10730 1152 nodeIndex, SpecNone, GetStack, node->origin, OpInfo(data));
arajkumar@10587 1153
mbilla@10730 1154 arguments.append(value);
mbilla@10730 1155 }
arajkumar@10954 1156 });
mbilla@10730 1157
arajkumar@10954 1158 appendNode(candidate);
arajkumar@10587 1159 convertToStaticArgumentCountCall(arguments);
arajkumar@10587 1160 } else
arajkumar@10587 1161 convertToForwardsCall();
arajkumar@10587 1162 } else {
arajkumar@10587 1163 unsigned numberOfArgumentsToSkip = 0;
arajkumar@10587 1164 if (candidate->op() == PhantomCreateRest)
arajkumar@10587 1165 numberOfArgumentsToSkip = candidate->numberOfArgumentsToSkip();
arajkumar@10587 1166 CallVarargsData* varargsData = node->callVarargsData();
arajkumar@10587 1167 varargsData->firstVarArgOffset += numberOfArgumentsToSkip;
arajkumar@10587 1168
arajkumar@10587 1169 InlineCallFrame* inlineCallFrame = candidate->origin.semantic.inlineCallFrame;
arajkumar@10587 1170 if (inlineCallFrame && !inlineCallFrame->isVarargs()) {
arajkumar@10587 1171 Vector<Node*> arguments;
arajkumar@10954 1172 for (unsigned i = 1 + varargsData->firstVarArgOffset; i < inlineCallFrame->argumentCountIncludingThis; ++i) {
arajkumar@10587 1173 StackAccessData* data = m_graph.m_stackAccessData.add(
arajkumar@10587 1174 virtualRegisterForArgument(i) + inlineCallFrame->stackOffset,
arajkumar@10587 1175 FlushedJSValue);
arajkumar@10587 1176
arajkumar@10587 1177 Node* value = insertionSet.insertNode(
arajkumar@10587 1178 nodeIndex, SpecNone, GetStack, node->origin, OpInfo(data));
arajkumar@10587 1179
arajkumar@10587 1180 arguments.append(value);
arajkumar@10587 1181 }
arajkumar@10587 1182
arajkumar@10587 1183 convertToStaticArgumentCountCall(arguments);
arajkumar@10587 1184 } else
arajkumar@10587 1185 convertToForwardsCall();
kcr@9800 1186 }
kcr@9800 1187
kcr@9800 1188 break;
kcr@9800 1189 }
kcr@9800 1190
kcr@9800 1191 case CheckArray:
arajkumar@11139 1192 case GetButterfly:
arajkumar@11139 1193 case FilterGetByIdStatus:
arajkumar@11139 1194 case FilterPutByIdStatus:
arajkumar@11139 1195 case FilterCallLinkStatus:
arajkumar@11139 1196 case FilterInByIdStatus: {
arajkumar@10587 1197 if (!isEliminatedAllocation(node->child1().node()))
kcr@9800 1198 break;
arajkumar@10954 1199 node->remove(m_graph);
kcr@9800 1200 break;
kcr@9800 1201 }
kcr@9800 1202
arajkumar@10954 1203 case CheckStructureOrEmpty:
arajkumar@10587 1204 case CheckStructure:
arajkumar@10587 1205 if (!isEliminatedAllocation(node->child1().node()))
arajkumar@10587 1206 break;
arajkumar@10587 1207 node->child1() = Edge(); // Remove the cell check since we've proven it's not needed and FTL lowering might botch this.
arajkumar@10954 1208 node->remove(m_graph);
arajkumar@10587 1209 break;
arajkumar@10587 1210
kcr@9800 1211 default:
kcr@9800 1212 break;
kcr@9800 1213 }
mbilla@10730 1214 if (node->isPseudoTerminal())
mbilla@10730 1215 break;
kcr@9800 1216 }
kcr@9800 1217
kcr@9800 1218 insertionSet.execute(block);
kcr@9800 1219 }
kcr@9800 1220 }
kcr@9800 1221
kcr@9800 1222 HashSet<Node*> m_candidates;
kcr@9800 1223 };
kcr@9800 1224
kcr@9800 1225 } // anonymous namespace
kcr@9800 1226
kcr@9800 1227 bool performArgumentsElimination(Graph& graph)
kcr@9800 1228 {
kcr@9800 1229 return runPhase<ArgumentsEliminationPhase>(graph);
kcr@9800 1230 }
kcr@9800 1231
kcr@9800 1232 } } // namespace JSC::DFG
kcr@9800 1233
kcr@9800 1234 #endif // ENABLE(DFG_JIT)
kcr@9800 1235