/*
 * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "DFGCSEPhase.h"

#if ENABLE(DFG_JIT)

#include "DFGGraph.h"
#include "DFGPhase.h"

namespace JSC { namespace DFG {

class CSEPhase : public Phase {
public:
    CSEPhase(Graph& graph, OptimizationFixpointState fixpointState)
        : Phase(graph, "common subexpression elimination")
        , m_fixpointState(fixpointState)
    {
        // Replacements are used to implement local common subexpression elimination.
        m_replacements.resize(m_graph.size());

        for (unsigned i = 0; i < m_graph.size(); ++i)
            m_replacements[i] = NoNode;
    }

    bool run()
    {
        m_changed = false;

        for (unsigned block = 0; block < m_graph.m_blocks.size(); ++block)
            performBlockCSE(m_graph.m_blocks[block].get());

        return m_changed;
    }

private:
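    // canonicalize() treats a ValueToInt32 conversion as equivalent to its operand,
    // so that CSE matching sees through the conversion.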
    NodeIndex canonicalize(NodeIndex nodeIndex)
    {
        if (nodeIndex == NoNode)
            return NoNode;

        if (m_graph[nodeIndex].op() == ValueToInt32)
            nodeIndex = m_graph[nodeIndex].child1().index();

        return nodeIndex;
    }
    NodeIndex canonicalize(Edge nodeUse)
    {
        return canonicalize(nodeUse.indexUnchecked());
    }

    unsigned endIndexForPureCSE()
    {
        unsigned result = m_lastSeen[m_graph[m_compileIndex].op()];
        if (result == UINT_MAX)
            result = 0;
        else
            result++;
        ASSERT(result <= m_indexInBlock);
#if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
        dataLog("  limit %u: ", result);
#endif
        return result;
    }

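    // pureCSE() searches backwards, up to the most recent node of the same kind
    // (endIndexForPureCSE), for a node with the same opcode, arithmetic flags, and
    // canonicalized children. The search stops if it reaches one of this node's children.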
    NodeIndex pureCSE(Node& node)
    {
        NodeIndex child1 = canonicalize(node.child1());
        NodeIndex child2 = canonicalize(node.child2());
        NodeIndex child3 = canonicalize(node.child3());

        for (unsigned i = endIndexForPureCSE(); i--;) {
            NodeIndex index = m_currentBlock->at(i);
            if (index == child1 || index == child2 || index == child3)
                break;

            Node& otherNode = m_graph[index];
            if (node.op() != otherNode.op())
                continue;

            if (node.arithNodeFlags() != otherNode.arithNodeFlags())
                continue;

            NodeIndex otherChild = canonicalize(otherNode.child1());
            if (otherChild == NoNode)
                return index;
            if (otherChild != child1)
                continue;

            otherChild = canonicalize(otherNode.child2());
            if (otherChild == NoNode)
                return index;
            if (otherChild != child2)
                continue;

            otherChild = canonicalize(otherNode.child3());
            if (otherChild == NoNode)
                return index;
            if (otherChild != child3)
                continue;

            return index;
        }
        return NoNode;
    }

    NodeIndex constantCSE(Node& node)
    {
        for (unsigned i = endIndexForPureCSE(); i--;) {
            NodeIndex index = m_currentBlock->at(i);
            Node& otherNode = m_graph[index];
            if (otherNode.op() != JSConstant)
                continue;

            if (otherNode.constantNumber() != node.constantNumber())
                continue;

            return index;
        }
        return NoNode;
    }

    NodeIndex weakConstantCSE(Node& node)
    {
        for (unsigned i = endIndexForPureCSE(); i--;) {
            NodeIndex index = m_currentBlock->at(i);
            Node& otherNode = m_graph[index];
            if (otherNode.op() != WeakJSConstant)
                continue;

            if (otherNode.weakConstant() != node.weakConstant())
                continue;

            return index;
        }
        return NoNode;
    }

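    // impureCSE() is like pureCSE(), but also gives up at any node that clobbers the
    // world, since the value being matched may depend on the heap.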
    NodeIndex impureCSE(Node& node)
    {
        NodeIndex child1 = canonicalize(node.child1());
        NodeIndex child2 = canonicalize(node.child2());
        NodeIndex child3 = canonicalize(node.child3());

        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            if (index == child1 || index == child2 || index == child3)
                break;

            Node& otherNode = m_graph[index];
            if (node.op() == otherNode.op()
                && node.arithNodeFlags() == otherNode.arithNodeFlags()) {
                NodeIndex otherChild = canonicalize(otherNode.child1());
                if (otherChild == NoNode)
                    return index;
                if (otherChild == child1) {
                    otherChild = canonicalize(otherNode.child2());
                    if (otherChild == NoNode)
                        return index;
                    if (otherChild == child2) {
                        otherChild = canonicalize(otherNode.child3());
                        if (otherChild == NoNode)
                            return index;
                        if (otherChild == child3)
                            return index;
                    }
                }
            }
            if (m_graph.clobbersWorld(index))
                break;
        }
        return NoNode;
    }

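    // The load-elimination helpers below return an earlier node (or stored value) that
    // already produces the value being loaded; the store-elimination helpers return an
    // earlier, now-dead store. Both return NoNode if something that might interfere
    // intervenes.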
    NodeIndex globalVarLoadElimination(WriteBarrier<Unknown>* registerPointer)
    {
        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            Node& node = m_graph[index];
            switch (node.op()) {
            case GetGlobalVar:
                if (node.registerPointer() == registerPointer)
                    return index;
                break;
            case PutGlobalVar:
                if (node.registerPointer() == registerPointer)
                    return node.child1().index();
                break;
            default:
                break;
            }
            if (m_graph.clobbersWorld(index))
                break;
        }
        return NoNode;
    }

    NodeIndex globalVarStoreElimination(WriteBarrier<Unknown>* registerPointer)
    {
        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            Node& node = m_graph[index];
            if (!node.shouldGenerate())
                continue;
            switch (node.op()) {
            case PutGlobalVar:
                if (node.registerPointer() == registerPointer)
                    return index;
                break;

            case GetGlobalVar:
                if (node.registerPointer() == registerPointer)
                    return NoNode;
                break;

            default:
                break;
            }
            if (m_graph.clobbersWorld(index) || node.canExit())
                break;
        }
        return NoNode;
    }

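    // Find an earlier node producing the same indexed element: a GetByVal with the same
    // base and index, or the value stored by a matching PutByVal.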
    NodeIndex getByValLoadElimination(NodeIndex child1, NodeIndex child2)
    {
        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            if (index == child1 || index == canonicalize(child2))
                break;

            Node& node = m_graph[index];
            switch (node.op()) {
            case GetByVal:
                if (!m_graph.byValIsPure(node))
                    return NoNode;
                if (node.child1() == child1 && canonicalize(node.child2()) == canonicalize(child2))
                    return index;
                break;
            case PutByVal:
            case PutByValAlias:
                if (!m_graph.byValIsPure(node))
                    return NoNode;
                if (node.child1() == child1 && canonicalize(node.child2()) == canonicalize(child2))
                    return node.child3().index();
                // We must assume that the PutByVal will clobber the location we're getting from.
                // FIXME: We can do better; if we know that the PutByVal is accessing an array of a
                // different type than the GetByVal, then we know that they won't clobber each other.
                return NoNode;
            case PutStructure:
            case PutByOffset:
                // GetByVal currently always speculates that it's accessing an
                // array with an integer index, which means that it's impossible
                // for a structure change or a put to property storage to affect
                // the GetByVal.
                break;
            case ArrayPush:
                // A push cannot affect previously existing elements in the array.
                break;
            default:
                if (m_graph.clobbersWorld(index))
                    return NoNode;
                break;
            }
        }
        return NoNode;
    }

    bool checkFunctionElimination(JSFunction* function, NodeIndex child1)
    {
        for (unsigned i = endIndexForPureCSE(); i--;) {
            NodeIndex index = m_currentBlock->at(i);
            if (index == child1)
                break;

            Node& node = m_graph[index];
            if (node.op() == CheckFunction && node.child1() == child1 && node.function() == function)
                return true;
        }
        return false;
    }

    bool checkStructureLoadElimination(const StructureSet& structureSet, NodeIndex child1)
    {
        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            if (index == child1)
                break;

            Node& node = m_graph[index];
            switch (node.op()) {
            case CheckStructure:
                if (node.child1() == child1
                    && structureSet.isSupersetOf(node.structureSet()))
                    return true;
                break;

            case PutStructure:
                if (node.child1() == child1
                    && structureSet.contains(node.structureTransitionData().newStructure))
                    return true;
                if (structureSet.contains(node.structureTransitionData().previousStructure))
                    return false;
                break;

            case PutByOffset:
                // Setting a property cannot change the structure.
                break;

            case PutByVal:
            case PutByValAlias:
                if (m_graph.byValIsPure(node)) {
                    // If PutByVal speculates that it's accessing an array with an
                    // integer index, then it's impossible for it to cause a structure
                    // change.
                    break;
                }
                return false;

            default:
                if (m_graph.clobbersWorld(index))
                    return false;
                break;
            }
        }
        return false;
    }

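    // Find an earlier PutStructure on the same object that the current PutStructure makes
    // redundant, i.e. nothing in between observes the structure, can GC, or can exit.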
    NodeIndex putStructureStoreElimination(NodeIndex child1)
    {
        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            if (index == child1)
                break;
            Node& node = m_graph[index];
            if (!node.shouldGenerate())
                continue;
            switch (node.op()) {
            case CheckStructure:
                return NoNode;

            case PhantomPutStructure:
                if (node.child1() == child1) // No need to retrace our steps.
                    return NoNode;
                break;

            case PutStructure:
                if (node.child1() == child1)
                    return index;
                break;

            // PutStructure needs to execute if we GC. Hence this needs to
            // be careful with respect to nodes that GC.
            case CreateArguments:
            case TearOffArguments:
            case NewFunctionNoCheck:
            case NewFunction:
            case NewFunctionExpression:
            case CreateActivation:
            case TearOffActivation:
                return NoNode;

            default:
                break;
            }
            if (m_graph.clobbersWorld(index) || node.canExit())
                return NoNode;
        }
        return NoNode;
    }

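    // Find an earlier node producing the same named property: a GetByOffset on the same
    // storage for the same identifier, or the value stored by a matching PutByOffset.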
    NodeIndex getByOffsetLoadElimination(unsigned identifierNumber, NodeIndex child1)
    {
        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            if (index == child1)
                break;

            Node& node = m_graph[index];
            switch (node.op()) {
            case GetByOffset:
                if (node.child1() == child1
                    && m_graph.m_storageAccessData[node.storageAccessDataIndex()].identifierNumber == identifierNumber)
                    return index;
                break;

            case PutByOffset:
                if (m_graph.m_storageAccessData[node.storageAccessDataIndex()].identifierNumber == identifierNumber) {
                    if (node.child1() == child1) // Must be same property storage.
                        return node.child3().index();
                    return NoNode;
                }
                break;

            case PutStructure:
                // Changing the structure cannot change the outcome of a property get.
                break;

            case PutByVal:
            case PutByValAlias:
                if (m_graph.byValIsPure(node)) {
                    // If PutByVal speculates that it's accessing an array with an
                    // integer index, then it's impossible for it to cause a structure
                    // change or a put to property storage.
                    break;
                }
                return NoNode;

            default:
                if (m_graph.clobbersWorld(index))
                    return NoNode;
                break;
            }
        }
        return NoNode;
    }

    NodeIndex putByOffsetStoreElimination(unsigned identifierNumber, NodeIndex child1)
    {
        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            if (index == child1)
                break;

            Node& node = m_graph[index];
            if (!node.shouldGenerate())
                continue;
            switch (node.op()) {
            case GetByOffset:
                if (m_graph.m_storageAccessData[node.storageAccessDataIndex()].identifierNumber == identifierNumber)
                    return NoNode;
                break;

            case PutByOffset:
                if (m_graph.m_storageAccessData[node.storageAccessDataIndex()].identifierNumber == identifierNumber) {
                    if (node.child1() == child1) // Must be same property storage.
                        return index;
                    return NoNode;
                }
                break;

            case PutByVal:
            case PutByValAlias:
            case GetByVal:
                if (m_graph.byValIsPure(node)) {
                    // If PutByVal speculates that it's accessing an array with an
                    // integer index, then it's impossible for it to cause a structure
                    // change or a put to property storage.
                    break;
                }
                return NoNode;

            default:
                if (m_graph.clobbersWorld(index))
                    return NoNode;
                break;
            }
            if (node.canExit())
                return NoNode;
        }
        return NoNode;
    }

    NodeIndex getPropertyStorageLoadElimination(NodeIndex child1)
    {
        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            if (index == child1)
                break;

            Node& node = m_graph[index];
            switch (node.op()) {
            case GetPropertyStorage:
                if (node.child1() == child1)
                    return index;
                break;

            case PutByOffset:
            case PutStructure:
                // Changing the structure or putting to the storage cannot
                // change the property storage pointer.
                break;

            case PutByVal:
            case PutByValAlias:
                if (m_graph.byValIsPure(node)) {
                    // If PutByVal speculates that it's accessing an array with an
                    // integer index, then it's impossible for it to cause a structure
                    // change or a put to property storage.
                    break;
                }
                return NoNode;

            default:
                if (m_graph.clobbersWorld(index))
                    return NoNode;
                break;
            }
        }
        return NoNode;
    }

    NodeIndex getIndexedPropertyStorageLoadElimination(NodeIndex child1, bool hasIntegerIndexPrediction)
    {
        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            if (index == child1)
                break;

            Node& node = m_graph[index];
            switch (node.op()) {
            case GetIndexedPropertyStorage: {
                SpeculatedType basePrediction = m_graph[node.child2()].prediction();
                bool nodeHasIntegerIndexPrediction = !(!(basePrediction & SpecInt32) && basePrediction);
                if (node.child1() == child1 && hasIntegerIndexPrediction == nodeHasIntegerIndexPrediction)
                    return index;
                break;
            }

            case PutByOffset:
            case PutStructure:
                // Changing the structure or putting to the storage cannot
                // change the property storage pointer.
                break;

            case PutByValAlias:
                // PutByValAlias can't change the indexed storage pointer.
                break;

            case PutByVal:
                if (isFixedIndexedStorageObjectSpeculation(m_graph[node.child1()].prediction()) && m_graph.byValIsPure(node))
                    break;
                return NoNode;

            default:
                if (m_graph.clobbersWorld(index))
                    return NoNode;
                break;
            }
        }
        return NoNode;
    }

    NodeIndex getScopeChainLoadElimination(unsigned depth)
    {
        for (unsigned i = endIndexForPureCSE(); i--;) {
            NodeIndex index = m_currentBlock->at(i);
            Node& node = m_graph[index];
            if (node.op() == GetScopeChain
                && node.scopeChainDepth() == depth)
                return index;
        }
        return NoNode;
    }

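    // Find an earlier GetLocal/GetLocalUnlinked/SetLocal for the given virtual register.
    // relevantLocalOp receives that node so the caller can fix up variablesAtTail.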
    NodeIndex getLocalLoadElimination(VirtualRegister local, NodeIndex& relevantLocalOp)
    {
        relevantLocalOp = NoNode;

        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            Node& node = m_graph[index];
            switch (node.op()) {
            case GetLocal:
                if (node.local() == local) {
                    relevantLocalOp = index;
                    return index;
                }
                break;

            case GetLocalUnlinked:
                if (node.unlinkedLocal() == local) {
                    relevantLocalOp = index;
                    return index;
                }
                break;

            case SetLocal:
                if (node.local() == local) {
                    relevantLocalOp = index;
                    return node.child1().index();
                }
                break;

            default:
                if (m_graph.clobbersWorld(index))
                    return NoNode;
                break;
            }
        }
        return NoNode;
    }

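    // setLocalStoreElimination() summarizes whether anything between the candidate SetLocal
    // and the current node may observe the local, exit, or clobber the world.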
    struct SetLocalStoreEliminationResult {
        SetLocalStoreEliminationResult()
            : mayBeAccessed(false)
            , mayExit(false)
            , mayClobberWorld(false)
        {
        }

        bool mayBeAccessed;
        bool mayExit;
        bool mayClobberWorld;
    };
    SetLocalStoreEliminationResult setLocalStoreElimination(
        VirtualRegister local, NodeIndex expectedNodeIndex)
    {
        SetLocalStoreEliminationResult result;
        for (unsigned i = m_indexInBlock; i--;) {
            NodeIndex index = m_currentBlock->at(i);
            Node& node = m_graph[index];
            if (!node.shouldGenerate())
                continue;
            switch (node.op()) {
            case GetLocal:
            case Flush:
                if (node.local() == local)
                    result.mayBeAccessed = true;
                break;

            case GetLocalUnlinked:
                if (node.unlinkedLocal() == local)
                    result.mayBeAccessed = true;
                break;

            case SetLocal: {
                if (node.local() != local)
                    break;
                if (index != expectedNodeIndex)
                    result.mayBeAccessed = true;
                if (m_graph[index].refCount() > 1)
                    result.mayBeAccessed = true;
                return result;
            }

            case GetScopeChain:
                if (m_graph.uncheckedActivationRegisterFor(node.codeOrigin) == local)
                    result.mayBeAccessed = true;
                break;

            case CheckArgumentsNotCreated:
            case GetMyArgumentsLength:
            case GetMyArgumentsLengthSafe:
                if (m_graph.uncheckedArgumentsRegisterFor(node.codeOrigin) == local)
                    result.mayBeAccessed = true;
                break;

            case GetMyArgumentByVal:
            case GetMyArgumentByValSafe:
                result.mayBeAccessed = true;
                break;

            case GetByVal:
                // If this is accessing arguments then it's potentially accessing locals.
                if (m_graph[node.child1()].shouldSpeculateArguments())
                    result.mayBeAccessed = true;
                break;

            case CreateArguments:
            case TearOffActivation:
            case TearOffArguments:
                // If an activation is being torn off then it means that captured variables
                // are live. We could be clever here and check if the local qualifies as an
                // argument register. But that seems like it would buy us very little since
                // any kind of tear offs are rare to begin with.
                result.mayBeAccessed = true;
                break;

            default:
                break;
            }
            result.mayExit |= node.canExit();
            result.mayClobberWorld |= m_graph.clobbersWorld(index);
        }
        ASSERT_NOT_REACHED();
        // Be safe in release mode.
        result.mayBeAccessed = true;
        return result;
    }

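    // Redirect an edge to the recorded replacement of its current target, if any.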
    void performSubstitution(Edge& child, bool addRef = true)
    {
        // Check if this operand is actually unused.
        if (!child)
            return;

        // Check if there is any replacement.
        NodeIndex replacement = m_replacements[child.index()];
        if (replacement == NoNode)
            return;

        child.setIndex(replacement);

        // There is definitely a replacement. Assert that the replacement does not
        // have a replacement.
        ASSERT(m_replacements[child.index()] == NoNode);

        if (addRef)
            m_graph[child].ref();
    }

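    // Record that the current node is subsumed by 'replacement': the node becomes a Phantom
    // and later uses of it are redirected through m_replacements.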
    enum PredictionHandlingMode { RequireSamePrediction, AllowPredictionMismatch };
    bool setReplacement(NodeIndex replacement, PredictionHandlingMode predictionHandlingMode = RequireSamePrediction)
    {
        if (replacement == NoNode)
            return false;

        // Be safe. Don't try to perform replacements if the predictions don't
        // match.
        if (predictionHandlingMode == RequireSamePrediction
            && m_graph[m_compileIndex].prediction() != m_graph[replacement].prediction())
            return false;

#if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
        dataLog("   Replacing @%u -> @%u", m_compileIndex, replacement);
#endif

        Node& node = m_graph[m_compileIndex];
        node.setOpAndDefaultFlags(Phantom);

        // At this point we will eliminate all references to this node.
        m_replacements[m_compileIndex] = replacement;

        return true;
    }

    void eliminate()
    {
#if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
        dataLog("   Eliminating @%u", m_compileIndex);
#endif

        Node& node = m_graph[m_compileIndex];
        ASSERT(node.refCount() == 1);
        ASSERT(node.mustGenerate());
        node.setOpAndDefaultFlags(Phantom);
    }

    void eliminate(NodeIndex nodeIndex, NodeType phantomType = Phantom)
    {
        if (nodeIndex == NoNode)
            return;

        Node& node = m_graph[nodeIndex];
        if (node.refCount() != 1)
            return;
        ASSERT(node.mustGenerate());
        node.setOpAndDefaultFlags(phantomType);
    }

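    // Apply CSE to a single node: substitute its children with any recorded replacements,
    // then try to eliminate the node itself based on its opcode.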
    void performNodeCSE(Node& node)
    {
        bool shouldGenerate = node.shouldGenerate();

        if (node.flags() & NodeHasVarArgs) {
            for (unsigned childIdx = node.firstChild(); childIdx < node.firstChild() + node.numChildren(); childIdx++)
                performSubstitution(m_graph.m_varArgChildren[childIdx], shouldGenerate);
        } else {
            performSubstitution(node.children.child1(), shouldGenerate);
            performSubstitution(node.children.child2(), shouldGenerate);
            performSubstitution(node.children.child3(), shouldGenerate);
        }

        if (!shouldGenerate)
            return;

#if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
        dataLog("   %s @%u: ", Graph::opName(m_graph[m_compileIndex].op()), m_compileIndex);
#endif

        // NOTE: there are some nodes that we deliberately don't CSE even though we
        // probably could, like StrCat and ToPrimitive. That's because there is no
        // evidence that doing CSE on these nodes would result in a performance
        // progression. Hence considering these nodes in CSE would just mean that this
        // code does more work with no win. Of course, we may want to reconsider this,
        // since StrCat is trivially CSE-able. It's not trivially doable for
        // ToPrimitive, but we could change that with some speculations if we really
        // needed to.

        switch (node.op()) {

        // Handle the pure nodes. These nodes never have any side-effects.
        case GetInt8ArrayLength:
        case GetInt16ArrayLength:
        case GetInt32ArrayLength:
        case GetUint8ArrayLength:
        case GetUint8ClampedArrayLength:
        case GetUint16ArrayLength:
        case GetUint32ArrayLength:
        case GetFloat32ArrayLength:
        case GetFloat64ArrayLength:
        case GetStringLength:
        case StringCharCodeAt:
            setReplacement(pureCSE(node));
            break;

        case GetLocal: {
            VariableAccessData* variableAccessData = node.variableAccessData();
            if (!variableAccessData->isCaptured())
                break;
            NodeIndex relevantLocalOp;
            NodeIndex possibleReplacement = getLocalLoadElimination(variableAccessData->local(), relevantLocalOp);
            ASSERT(relevantLocalOp == NoNode
                || m_graph[relevantLocalOp].op() == GetLocalUnlinked
                || m_graph[relevantLocalOp].variableAccessData() == variableAccessData);
            NodeIndex phiIndex = node.child1().index();
            if (!setReplacement(possibleReplacement))
                break;
            // If the GetLocal we replaced used to refer to a SetLocal, then it now
            // should refer to the child of the SetLocal instead.
            if (m_graph[phiIndex].op() == SetLocal) {
                ASSERT(node.child1().index() == phiIndex);
                m_graph.changeEdge(node.children.child1(), m_graph[phiIndex].child1());
            }
            NodeIndex oldTailIndex = m_currentBlock->variablesAtTail.operand(
                variableAccessData->local());
            if (oldTailIndex == m_compileIndex) {
                m_currentBlock->variablesAtTail.operand(variableAccessData->local()) =
                    relevantLocalOp;

                // Maintain graph integrity: since we're replacing a GetLocal with a GetLocalUnlinked,
                // make sure that the GetLocalUnlinked is now linked.
                if (m_graph[relevantLocalOp].op() == GetLocalUnlinked) {
                    m_graph[relevantLocalOp].setOp(GetLocal);
                    m_graph[relevantLocalOp].children.child1() = Edge(phiIndex);
                    m_graph.ref(phiIndex);
                }
            }
            m_changed = true;
            break;
        }

        case GetLocalUnlinked: {
            NodeIndex relevantLocalOpIgnored;
            m_changed |= setReplacement(getLocalLoadElimination(node.unlinkedLocal(), relevantLocalOpIgnored));
            break;
        }

        // A Flush of a local whose most recent definition is a SetLocal can sometimes be
        // turned into a Phantom of the stored value, killing the store.
        case Flush: {
            VariableAccessData* variableAccessData = node.variableAccessData();
            VirtualRegister local = variableAccessData->local();
            NodeIndex replacementIndex = node.child1().index();
            Node& replacement = m_graph[replacementIndex];
            if (replacement.op() != SetLocal)
                break;
            ASSERT(replacement.variableAccessData() == variableAccessData);
            // FIXME: We should be able to remove SetLocals that can exit; we just need
            // to replace them with appropriate type checks.
            if (m_fixpointState == FixpointNotConverged) {
                // Need to be conservative at this time; if the SetLocal has any chance of performing
                // any speculations then we cannot do anything.
                if (variableAccessData->isCaptured()) {
                    // Captured SetLocals never speculate and hence never exit.
                } else {
                    if (variableAccessData->shouldUseDoubleFormat())
                        break;
                    SpeculatedType prediction = variableAccessData->argumentAwarePrediction();
                    if (isInt32Speculation(prediction))
                        break;
                    if (isArraySpeculation(prediction))
                        break;
                    if (isBooleanSpeculation(prediction))
                        break;
                }
            } else {
                if (replacement.canExit())
                    break;
            }
            SetLocalStoreEliminationResult result =
                setLocalStoreElimination(local, replacementIndex);
            if (result.mayBeAccessed || result.mayClobberWorld)
                break;
            ASSERT(replacement.op() == SetLocal);
            ASSERT(replacement.refCount() == 1);
            ASSERT(replacement.shouldGenerate());
            // FIXME: Investigate using mayExit as a further optimization.
            node.setOpAndDefaultFlags(Phantom);
            NodeIndex dataNodeIndex = replacement.child1().index();
            ASSERT(m_graph[dataNodeIndex].hasResult());
            m_graph.clearAndDerefChild1(node);
            node.children.child1() = Edge(dataNodeIndex);
            m_graph.ref(dataNodeIndex);
            NodeIndex oldTailIndex = m_currentBlock->variablesAtTail.operand(local);
            if (oldTailIndex == m_compileIndex)
                m_currentBlock->variablesAtTail.operand(local) = replacementIndex;
            m_changed = true;
            break;
        }

        case JSConstant:
            // This is strange, but necessary. Some phases will convert nodes to constants,
            // which may result in duplicated constants. We use CSE to clean this up.
            setReplacement(constantCSE(node), AllowPredictionMismatch);
            break;

        case WeakJSConstant:
            // FIXME: have CSE for weak constants against strong constants and vice-versa.
            setReplacement(weakConstantCSE(node));
            break;

        case GetArrayLength:
            // Array length is mutable, so this uses the impure variant of CSE.
            setReplacement(impureCSE(node));
            break;

        case GetScopeChain:
            setReplacement(getScopeChainLoadElimination(node.scopeChainDepth()));
            break;

        // Handle nodes that are conditionally pure: these are pure, and can
        // be CSE'd, so long as the prediction is the one we want.
        case ValueAdd:
        case CompareLess:
        case CompareLessEq:
        case CompareGreater:
        case CompareGreaterEq:
        case CompareEq: {
            if (m_graph.isPredictedNumerical(node)) {
                NodeIndex replacementIndex = pureCSE(node);
                if (replacementIndex != NoNode && m_graph.isPredictedNumerical(m_graph[replacementIndex]))
                    setReplacement(replacementIndex);
            }
            break;
        }

        // Finally handle heap accesses. These are not quite pure, but we can still
        // optimize them provided that some subtle conditions are met.
        case GetGlobalVar:
            setReplacement(globalVarLoadElimination(node.registerPointer()));
            break;

        case PutGlobalVar:
            if (m_fixpointState == FixpointNotConverged)
                break;
            eliminate(globalVarStoreElimination(node.registerPointer()));
            break;

        case GetByVal:
            if (m_graph.byValIsPure(node))
                setReplacement(getByValLoadElimination(node.child1().index(), node.child2().index()));
            break;

        case PutByVal:
            if (m_graph.byValIsPure(node)
                && !m_graph[node.child1()].shouldSpeculateArguments()
                && getByValLoadElimination(node.child1().index(), node.child2().index()) != NoNode)
                node.setOp(PutByValAlias);
            break;

        case CheckStructure:
            if (checkStructureLoadElimination(node.structureSet(), node.child1().index()))
                eliminate();
            break;

        case PutStructure:
            if (m_fixpointState == FixpointNotConverged)
                break;
            eliminate(putStructureStoreElimination(node.child1().index()), PhantomPutStructure);
            break;

        case CheckFunction:
            if (checkFunctionElimination(node.function(), node.child1().index()))
                eliminate();
            break;

        case GetIndexedPropertyStorage: {
            SpeculatedType basePrediction = m_graph[node.child2()].prediction();
            bool nodeHasIntegerIndexPrediction = !(!(basePrediction & SpecInt32) && basePrediction);
            setReplacement(getIndexedPropertyStorageLoadElimination(node.child1().index(), nodeHasIntegerIndexPrediction));
            break;
        }

        case GetPropertyStorage:
            setReplacement(getPropertyStorageLoadElimination(node.child1().index()));
            break;

        case GetByOffset:
            setReplacement(getByOffsetLoadElimination(m_graph.m_storageAccessData[node.storageAccessDataIndex()].identifierNumber, node.child1().index()));
            break;

        case PutByOffset:
            if (m_fixpointState == FixpointNotConverged)
                break;
            eliminate(putByOffsetStoreElimination(m_graph.m_storageAccessData[node.storageAccessDataIndex()].identifierNumber, node.child1().index()));
            break;

        default:
            break;
        }

        m_lastSeen[node.op()] = m_indexInBlock;
#if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
        dataLog("\n");
#endif
    }

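    // CSE is local: each reachable block is processed independently, with the last-seen
    // table reset at the start of every block.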
    void performBlockCSE(BasicBlock* block)
    {
        if (!block)
            return;
        if (!block->isReachable)
            return;

        m_currentBlock = block;
        for (unsigned i = 0; i < LastNodeType; ++i)
            m_lastSeen[i] = UINT_MAX;

        for (m_indexInBlock = 0; m_indexInBlock < block->size(); ++m_indexInBlock) {
            m_compileIndex = block->at(m_indexInBlock);
            performNodeCSE(m_graph[m_compileIndex]);
        }
    }

    BasicBlock* m_currentBlock;
    NodeIndex m_compileIndex;
    unsigned m_indexInBlock;
    Vector<NodeIndex, 16> m_replacements;
    FixedArray<unsigned, LastNodeType> m_lastSeen;
    OptimizationFixpointState m_fixpointState;
    bool m_changed; // Only tracks changes that have a substantive effect on other optimizations.
};

bool performCSE(Graph& graph, OptimizationFixpointState fixpointState)
{
    return runPhase<CSEPhase>(graph, fixpointState);
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)