2 * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 #ifndef DFGClobberize_h
27 #define DFGClobberize_h
31 #include "DFGAbstractHeap.h"
32 #include "DFGEdgeUsesStructure.h"
34 #include "DFGHeapLocation.h"
35 #include "DFGLazyNode.h"
36 #include "DFGPureValue.h"
38 namespace JSC { namespace DFG {
// clobberize(): the single source of truth for which abstract heaps a DFG node
// reads and writes, and which HeapLocations it defines. Clients supply three
// functors -- read(AbstractHeap), write(AbstractHeap), and def(PureValue) /
// def(HeapLocation, LazyNode) -- and this function invokes them according to
// the semantics of node->op(). The long commentary below states the soundness
// rules that tie write() and def() together for CSE/GCSE clients.
//
// NOTE(review): this listing appears to have lines elided (the embedded line
// numbers jump; several case labels, break statements, and closing braces are
// missing). Verify any change against the complete DFGClobberize.h.
40 template<typename ReadFunctor, typename WriteFunctor, typename DefFunctor>
41 void clobberize(Graph& graph, Node* node, const ReadFunctor& read, const WriteFunctor& write, const DefFunctor& def)
45 // - The canonical way of clobbering the world is to read world and write
46 // heap. This is because World subsumes Heap and Stack, and Stack can be
47 // read by anyone but only written to by explicit stack writing operations.
48 // Of course, claiming to also write World is not wrong; it'll just
49 // pessimise some important optimizations.
51 // - We cannot hoist, or sink, anything that has effects. This means that the
52 // easiest way of indicating that something cannot be hoisted is to claim
53 // that it side-effects some miscellaneous thing.
55 // - We cannot hoist forward-exiting nodes without some additional effort. I
56 // believe that what it comes down to is that forward-exiting generally have
57 // their NodeExitsForward cleared upon hoist, except for forward-exiting
58 // nodes that take bogus state as their input. Those are substantially
59 // harder. We disable it for now. In the future we could enable it by having
60 // versions of those nodes that backward-exit instead, but I'm not convinced
63 // - Some nodes lie, and claim that they do not read the JSCell_structureID,
64 // JSCell_typeInfoFlags, etc. These are nodes that use the structure in a way
65 // that does not depend on things that change under structure transitions.
67 // - It's implicitly understood that OSR exits read the world. This is why we
68 // generally don't move or eliminate stores. Every node can exit, so the
69 // read set does not reflect things that would be read if we exited.
70 // Instead, the read set reflects what the node will have to read if it
73 // - Broadly, we don't say that we're reading something if that something is
76 // - We try to make this work even prior to type inference, just so that we
77 // can use it for IR dumps. No promises on whether the answers are sound
78 // prior to type inference - though they probably could be if we did some
81 // - If you do read(Stack) or read(World), then make sure that readTop() in
82 // PreciseLocalClobberize is correct.
84 // While read() and write() are fairly self-explanatory - they track what sorts of things the
85 // node may read or write - the def() functor is more tricky. It tells you the heap locations
86 // (not just abstract heaps) that are defined by a node. A heap location comprises an abstract
87 // heap, some nodes, and a LocationKind. Briefly, a location defined by a node is a location
88 // whose value can be deduced from looking at the node itself. The locations returned must obey
89 // the following properties:
91 // - If someone wants to CSE a load from the heap, then a HeapLocation object should be
92 // sufficient to find a single matching node.
94 // - The abstract heap is the only abstract heap that could be clobbered to invalidate any such
95 // CSE attempt. I.e. if clobberize() reports that on every path between some node and a node
96 // that defines a HeapLocation that it wanted, there were no writes to any abstract heap that
97 // overlap the location's heap, then we have a sound match. Effectively, the semantics of
98 // write() and def() are intertwined such that for them to be sound they must agree on what
101 // read(), write(), and def() for heap locations is enough to do GCSE on effectful things. To
102 // keep things simple, this code will also def() pure things. def() must be overloaded to also
103 // accept PureValue. This way, a client of clobberize() can implement GCSE entirely using the
104 // information that clobberize() passes to write() and def(). Other clients of clobberize() can
105 // just ignore def() by using a NoOpClobberize functor.
107 if (edgesUseStructure(graph, node))
108 read(JSCell_structureID);
110 // We allow the runtime to perform a stack scan at any time. We don't model which nodes get implemented
111 // by calls into the runtime. For debugging we might replace the implementation of any node with a call
112 // to the runtime, and that call may walk stack. Therefore, each node must read() anything that a stack
113 // scan would read. That's what this does.
114 for (InlineCallFrame* inlineCallFrame = node->origin.semantic.inlineCallFrame; inlineCallFrame; inlineCallFrame = inlineCallFrame->directCaller.inlineCallFrame) {
115 if (inlineCallFrame->isClosureCall)
116 read(AbstractHeap(Stack, inlineCallFrame->stackOffset + JSStack::Callee));
117 if (inlineCallFrame->isVarargs())
118 read(AbstractHeap(Stack, inlineCallFrame->stackOffset + JSStack::ArgumentCount));
121 // We don't want to specifically account which nodes can read from the scope
122 // when the debugger is enabled. It's helpful to just claim all nodes do.
123 // Specifically, if a node allocates, this may call into the debugger's machinery.
124 // The debugger's machinery is free to take a stack trace and try to read from
125 // a scope which is expected to be flushed to the stack.
126 if (graph.hasDebuggerEnabled()) {
127 ASSERT(!node->origin.semantic.inlineCallFrame);
128 read(AbstractHeap(Stack, graph.m_codeBlock->scopeRegister()));
// Per-opcode classification: each case below reports this node's reads,
// writes, and defined locations to the client functors.
132 switch (node->op()) {
136 def(PureValue(node, node->constant()));
142 case ExtractOSREntryLocal:
143 case CheckStructureImmediate:
147 // We should enable CSE of LazyJSConstant. It's a little annoying since LazyJSValue has
148 // more bits than we currently have in PureValue.
164 case GetGlobalObject:
165 case StringCharCodeAt:
166 case CompareStrictEq:
168 case IsArrayConstructor:
181 case BooleanToNumber:
189 def(PureValue(node));
198 if (node->child1().useKind() == UntypedUse || node->child2().useKind() == UntypedUse) {
203 def(PureValue(node));
// Math.random mutates the per-global random-number-generator state.
207 read(MathDotRandomState);
208 write(MathDotRandomState);
212 case HasGenericProperty:
213 case HasStructureProperty:
214 case GetEnumerableLength:
215 case GetPropertyEnumerator: {
221 case GetDirectPname: {
222 // This reads and writes heap because it can end up calling a generic getByVal
223 // if the Structure changed, which could in turn end up calling a getter.
230 case GetEnumeratorStructurePname:
231 case GetEnumeratorGenericPname: {
232 def(PureValue(node));
236 case HasIndexedProperty: {
237 read(JSObject_butterfly);
238 ArrayMode mode = node->arrayMode();
239 switch (mode.type()) {
241 if (mode.isInBounds()) {
242 read(Butterfly_publicLength);
243 read(IndexedInt32Properties);
244 def(HeapLocation(HasIndexedPropertyLoc, IndexedInt32Properties, node->child1(), node->child2()), LazyNode(node));
251 case Array::Double: {
252 if (mode.isInBounds()) {
253 read(Butterfly_publicLength);
254 read(IndexedDoubleProperties);
255 def(HeapLocation(HasIndexedPropertyLoc, IndexedDoubleProperties, node->child1(), node->child2()), LazyNode(node));
262 case Array::Contiguous: {
263 if (mode.isInBounds()) {
264 read(Butterfly_publicLength);
265 read(IndexedContiguousProperties);
266 def(HeapLocation(HasIndexedPropertyLoc, IndexedContiguousProperties, node->child1(), node->child2()), LazyNode(node));
273 case Array::ArrayStorage: {
274 if (mode.isInBounds()) {
275 read(Butterfly_vectorLength);
276 read(IndexedArrayStorageProperties);
289 RELEASE_ASSERT_NOT_REACHED();
293 case StringFromCharCode:
294 switch (node->child1().useKind()) {
296 def(PureValue(node));
303 DFG_CRASH(graph, node, "Bad use kind");
// Arith nodes key their PureValue on arithMode() so that e.g. checked and
// unchecked variants are not CSEd with one another.
312 def(PureValue(node, node->arithMode()));
318 switch (node->binaryUseKind()) {
322 def(PureValue(node, node->arithMode()));
329 DFG_CRASH(graph, node, "Bad use kind");
336 def(PureValue(node, static_cast<uintptr_t>(node->arithRoundingMode())));
340 def(PureValue(CheckCell, AdjacencyList(AdjacencyList::Fixed, node->child1()), node->cellOperand()));
344 def(PureValue(CheckNotEmpty, AdjacencyList(AdjacencyList::Fixed, node->child1())));
348 def(PureValue(CheckIdent, AdjacencyList(AdjacencyList::Fixed, node->child1()), node->uidOperand()));
351 case ConstantStoragePointer:
352 def(PureValue(node, node->storagePointer()));
371 case CheckTierUpInLoop:
372 case CheckTierUpAtReturn:
373 case CheckTierUpAndOSREnter:
375 case ProfileWillCall:
378 case ProfileControlFlow:
384 case InvalidationPoint:
386 def(HeapLocation(InvalidationPointLoc, Watchpoint_fire), LazyNode(node));
390 read(AbstractHeap(Stack, node->local()));
395 write(Watchpoint_fire);
399 case CreateActivation: {
400 SymbolTable* table = node->castOperand<SymbolTable*>();
401 if (table->singletonScope()->isStillValid())
402 write(Watchpoint_fire);
403 read(HeapObjectCount);
404 write(HeapObjectCount);
408 case CreateDirectArguments:
409 case CreateScopedArguments:
410 case CreateClonedArguments:
412 read(HeapObjectCount);
413 write(HeapObjectCount);
416 case PhantomDirectArguments:
417 case PhantomClonedArguments:
418 // DFG backend requires that the locals that this reads are flushed. FTL backend can handle those
419 // locals being promoted.
420 if (!isFTL(graph.m_plan.mode))
423 // Even though it's phantom, it still has the property that one can't be replaced with another.
424 read(HeapObjectCount);
425 write(HeapObjectCount);
428 case CallObjectConstructor:
432 read(HeapObjectCount);
433 write(HeapObjectCount);
436 case VarInjectionWatchpoint:
438 def(HeapLocation(VarInjectionWatchpointLoc, MiscFields), LazyNode(node));
443 def(HeapLocation(IsObjectOrNullLoc, MiscFields, node->child1()), LazyNode(node));
448 def(HeapLocation(IsFunctionLoc, MiscFields, node->child1()), LazyNode(node));
453 case GetByIdWithThis:
454 case GetByValWithThis:
456 case PutByIdWithThis:
457 case PutByValWithThis:
462 case PutGetterSetterById:
470 case TailCallInlinedCaller:
473 case CallForwardVarargs:
474 case TailCallVarargsInlinedCaller:
475 case TailCallForwardVarargsInlinedCaller:
476 case ConstructVarargs:
477 case ConstructForwardVarargs:
481 case SetFunctionName:
490 case TailCallVarargs:
491 case TailCallForwardVarargs:
497 read(GetterSetter_getter);
498 def(HeapLocation(GetterLoc, GetterSetter_getter, node->child1()), LazyNode(node));
502 read(GetterSetter_setter);
503 def(HeapLocation(SetterLoc, GetterSetter_setter, node->child1()), LazyNode(node));
507 read(AbstractHeap(Stack, JSStack::Callee));
508 def(HeapLocation(StackLoc, AbstractHeap(Stack, JSStack::Callee)), LazyNode(node));
511 case GetArgumentCount:
512 read(AbstractHeap(Stack, JSStack::ArgumentCount));
513 def(HeapLocation(StackPayloadLoc, AbstractHeap(Stack, JSStack::ArgumentCount)), LazyNode(node));
521 read(AbstractHeap(Stack, node->local()));
522 def(HeapLocation(StackLoc, AbstractHeap(Stack, node->local())), LazyNode(node));
// Stack stores def() the stored value (child1), so later loads of the same
// slot can be replaced with the value that was stored.
526 write(AbstractHeap(Stack, node->local()));
527 def(HeapLocation(StackLoc, AbstractHeap(Stack, node->local())), LazyNode(node->child1().node()));
531 AbstractHeap heap(Stack, node->stackAccessData()->local);
533 def(HeapLocation(StackLoc, heap), LazyNode(node));
538 AbstractHeap heap(Stack, node->stackAccessData()->local);
540 def(HeapLocation(StackLoc, heap), LazyNode(node->child1().node()));
547 LoadVarargsData* data = node->loadVarargsData();
548 write(AbstractHeap(Stack, data->count.offset()));
549 for (unsigned i = data->limit; i--;)
550 write(AbstractHeap(Stack, data->start.offset() + static_cast<int>(i)));
554 case ForwardVarargs: {
555 // We could be way more precise here.
558 LoadVarargsData* data = node->loadVarargsData();
559 write(AbstractHeap(Stack, data->count.offset()));
560 for (unsigned i = data->limit; i--;)
561 write(AbstractHeap(Stack, data->start.offset() + static_cast<int>(i)));
565 case GetLocalUnlinked:
566 read(AbstractHeap(Stack, node->unlinkedLocal()));
567 def(HeapLocation(StackLoc, AbstractHeap(Stack, node->unlinkedLocal())), LazyNode(node));
571 ArrayMode mode = node->arrayMode();
572 switch (mode.type()) {
573 case Array::SelectUsingPredictions:
574 case Array::Unprofiled:
575 case Array::SelectUsingArguments:
576 // Assume the worst since we don't have profiling yet.
581 case Array::ForceExit:
591 if (mode.isOutOfBounds()) {
596 // This appears to read nothing because it's only reading immutable data.
597 def(PureValue(node, mode.asWord()));
600 case Array::DirectArguments:
601 read(DirectArgumentsProperties);
602 def(HeapLocation(IndexedPropertyLoc, DirectArgumentsProperties, node->child1(), node->child2()), LazyNode(node));
605 case Array::ScopedArguments:
606 read(ScopeProperties);
607 def(HeapLocation(IndexedPropertyLoc, ScopeProperties, node->child1(), node->child2()), LazyNode(node));
611 if (mode.isInBounds()) {
612 read(Butterfly_publicLength);
613 read(IndexedInt32Properties);
614 def(HeapLocation(IndexedPropertyLoc, IndexedInt32Properties, node->child1(), node->child2()), LazyNode(node));
622 if (mode.isInBounds()) {
623 read(Butterfly_publicLength);
624 read(IndexedDoubleProperties);
625 def(HeapLocation(IndexedPropertyLoc, IndexedDoubleProperties, node->child1(), node->child2()), LazyNode(node));
632 case Array::Contiguous:
633 if (mode.isInBounds()) {
634 read(Butterfly_publicLength);
635 read(IndexedContiguousProperties);
636 def(HeapLocation(IndexedPropertyLoc, IndexedContiguousProperties, node->child1(), node->child2()), LazyNode(node));
643 case Array::Undecided:
644 def(PureValue(node));
647 case Array::ArrayStorage:
648 case Array::SlowPutArrayStorage:
649 if (mode.isInBounds()) {
650 read(Butterfly_vectorLength);
651 read(IndexedArrayStorageProperties);
658 case Array::Int8Array:
659 case Array::Int16Array:
660 case Array::Int32Array:
661 case Array::Uint8Array:
662 case Array::Uint8ClampedArray:
663 case Array::Uint16Array:
664 case Array::Uint32Array:
665 case Array::Float32Array:
666 case Array::Float64Array:
667 read(TypedArrayProperties);
669 def(HeapLocation(IndexedPropertyLoc, TypedArrayProperties, node->child1(), node->child2()), LazyNode(node));
671 // We should not get an AnyTypedArray in a GetByVal as AnyTypedArray is only created from intrinsics, which
672 // are only added from Inline Caching a GetById.
673 case Array::AnyTypedArray:
674 DFG_CRASH(graph, node, "impossible array mode for get");
677 RELEASE_ASSERT_NOT_REACHED();
681 case GetMyArgumentByVal:
682 case GetMyArgumentByValOutOfBounds: {
684 // FIXME: It would be trivial to have a def here.
685 // https://bugs.webkit.org/show_bug.cgi?id=143077
691 case PutByValAlias: {
692 ArrayMode mode = node->arrayMode();
693 Node* base = graph.varArgChild(node, 0).node();
694 Node* index = graph.varArgChild(node, 1).node();
695 Node* value = graph.varArgChild(node, 2).node();
696 switch (mode.modeForPut().type()) {
697 case Array::SelectUsingPredictions:
698 case Array::SelectUsingArguments:
699 case Array::Unprofiled:
700 case Array::Undecided:
701 // Assume the worst since we don't have profiling yet.
706 case Array::ForceExit:
716 if (node->arrayMode().isOutOfBounds()) {
721 read(Butterfly_publicLength);
722 read(Butterfly_vectorLength);
723 read(IndexedInt32Properties);
724 write(IndexedInt32Properties);
725 if (node->arrayMode().mayStoreToHole())
726 write(Butterfly_publicLength);
727 def(HeapLocation(IndexedPropertyLoc, IndexedInt32Properties, base, index), LazyNode(value));
731 if (node->arrayMode().isOutOfBounds()) {
736 read(Butterfly_publicLength);
737 read(Butterfly_vectorLength);
738 read(IndexedDoubleProperties);
739 write(IndexedDoubleProperties);
740 if (node->arrayMode().mayStoreToHole())
741 write(Butterfly_publicLength);
742 def(HeapLocation(IndexedPropertyLoc, IndexedDoubleProperties, base, index), LazyNode(value));
745 case Array::Contiguous:
746 if (node->arrayMode().isOutOfBounds()) {
751 read(Butterfly_publicLength);
752 read(Butterfly_vectorLength);
753 read(IndexedContiguousProperties);
754 write(IndexedContiguousProperties);
755 if (node->arrayMode().mayStoreToHole())
756 write(Butterfly_publicLength);
757 def(HeapLocation(IndexedPropertyLoc, IndexedContiguousProperties, base, index), LazyNode(value));
760 case Array::ArrayStorage:
761 case Array::SlowPutArrayStorage:
762 // Give up on life for now.
767 case Array::Int8Array:
768 case Array::Int16Array:
769 case Array::Int32Array:
770 case Array::Uint8Array:
771 case Array::Uint8ClampedArray:
772 case Array::Uint16Array:
773 case Array::Uint32Array:
774 case Array::Float32Array:
775 case Array::Float64Array:
777 write(TypedArrayProperties);
778 // FIXME: We can't def() anything here because these operations truncate their inputs.
779 // https://bugs.webkit.org/show_bug.cgi?id=134737
781 case Array::AnyTypedArray:
783 case Array::DirectArguments:
784 case Array::ScopedArguments:
785 DFG_CRASH(graph, node, "impossible array mode for put");
788 RELEASE_ASSERT_NOT_REACHED();
793 read(JSCell_structureID);
797 read(JSCell_indexingType);
798 read(JSCell_typeInfoType);
799 read(JSCell_structureID);
802 case CheckTypeInfoFlags:
803 read(JSCell_typeInfoFlags);
804 def(HeapLocation(CheckTypeInfoFlagsLoc, JSCell_typeInfoFlags, node->child1()), LazyNode(node));
807 case OverridesHasInstance:
808 read(JSCell_typeInfoFlags);
809 def(HeapLocation(OverridesHasInstanceLoc, JSCell_typeInfoFlags, node->child1()), LazyNode(node));
813 read(JSCell_structureID);
814 def(HeapLocation(InstanceOfLoc, JSCell_structureID, node->child1(), node->child2()), LazyNode(node));
817 case InstanceOfCustom:
823 write(JSCell_structureID);
824 write(JSCell_typeInfoType);
825 write(JSCell_typeInfoFlags);
826 write(JSCell_indexingType);
829 case AllocatePropertyStorage:
830 write(JSObject_butterfly);
831 def(HeapLocation(ButterflyLoc, JSObject_butterfly, node->child1()), LazyNode(node));
834 case ReallocatePropertyStorage:
835 read(JSObject_butterfly);
836 write(JSObject_butterfly);
837 def(HeapLocation(ButterflyLoc, JSObject_butterfly, node->child1()), LazyNode(node));
841 read(JSObject_butterfly);
842 def(HeapLocation(ButterflyLoc, JSObject_butterfly, node->child1()), LazyNode(node));
846 case ArrayifyToStructure:
847 read(JSCell_structureID);
848 read(JSCell_indexingType);
849 read(JSObject_butterfly);
850 write(JSCell_structureID);
851 write(JSCell_indexingType);
852 write(JSObject_butterfly);
853 write(Watchpoint_fire);
856 case GetIndexedPropertyStorage:
857 if (node->arrayMode().type() == Array::String) {
858 def(PureValue(node, node->arrayMode().asWord()));
862 def(HeapLocation(IndexedPropertyStorageLoc, MiscFields, node->child1()), LazyNode(node));
865 case GetTypedArrayByteOffset:
867 def(HeapLocation(TypedArrayByteOffsetLoc, MiscFields, node->child1()), LazyNode(node));
871 case GetGetterSetterByOffset: {
872 unsigned identifierNumber = node->storageAccessData().identifierNumber;
873 AbstractHeap heap(NamedProperties, identifierNumber);
875 def(HeapLocation(NamedPropertyLoc, heap, node->child2()), LazyNode(node));
884 case MultiGetByOffset: {
885 read(JSCell_structureID);
886 read(JSObject_butterfly);
887 AbstractHeap heap(NamedProperties, node->multiGetByOffsetData().identifierNumber);
889 def(HeapLocation(NamedPropertyLoc, heap, node->child1()), LazyNode(node));
893 case MultiPutByOffset: {
894 read(JSCell_structureID);
895 read(JSObject_butterfly);
896 AbstractHeap heap(NamedProperties, node->multiPutByOffsetData().identifierNumber);
898 if (node->multiPutByOffsetData().writesStructures())
899 write(JSCell_structureID);
900 if (node->multiPutByOffsetData().reallocatesStorage())
901 write(JSObject_butterfly);
902 def(HeapLocation(NamedPropertyLoc, heap, node->child1()), LazyNode(node->child2().node()));
907 unsigned identifierNumber = node->storageAccessData().identifierNumber;
908 AbstractHeap heap(NamedProperties, identifierNumber);
910 def(HeapLocation(NamedPropertyLoc, heap, node->child2()), LazyNode(node->child3().node()));
914 case GetArrayLength: {
915 ArrayMode mode = node->arrayMode();
916 switch (mode.type()) {
919 case Array::Contiguous:
920 case Array::ArrayStorage:
921 case Array::SlowPutArrayStorage:
922 read(Butterfly_publicLength);
923 def(HeapLocation(ArrayLengthLoc, Butterfly_publicLength, node->child1()), LazyNode(node));
927 def(PureValue(node, mode.asWord()));
930 case Array::DirectArguments:
931 case Array::ScopedArguments:
933 def(HeapLocation(ArrayLengthLoc, MiscFields, node->child1()), LazyNode(node));
937 ASSERT(mode.isSomeTypedArrayView());
939 def(HeapLocation(ArrayLengthLoc, MiscFields, node->child1()), LazyNode(node));
945 read(AbstractHeap(ScopeProperties, node->scopeOffset().offset()));
946 def(HeapLocation(ClosureVariableLoc, AbstractHeap(ScopeProperties, node->scopeOffset().offset()), node->child1()), LazyNode(node));
950 write(AbstractHeap(ScopeProperties, node->scopeOffset().offset()));
951 def(HeapLocation(ClosureVariableLoc, AbstractHeap(ScopeProperties, node->scopeOffset().offset()), node->child1()), LazyNode(node->child2().node()));
954 case GetRegExpObjectLastIndex:
955 read(RegExpObject_lastIndex);
956 def(HeapLocation(RegExpObjectLastIndexLoc, RegExpObject_lastIndex, node->child1()), LazyNode(node));
959 case SetRegExpObjectLastIndex:
960 write(RegExpObject_lastIndex);
961 def(HeapLocation(RegExpObjectLastIndexLoc, RegExpObject_lastIndex, node->child1()), LazyNode(node->child2().node()));
964 case RecordRegExpCachedResult:
968 case GetFromArguments: {
969 AbstractHeap heap(DirectArgumentsProperties, node->capturedArgumentsOffset().offset());
971 def(HeapLocation(DirectArgumentsLoc, heap, node->child1()), LazyNode(node));
975 case PutToArguments: {
976 AbstractHeap heap(DirectArgumentsProperties, node->capturedArgumentsOffset().offset());
978 def(HeapLocation(DirectArgumentsLoc, heap, node->child1()), LazyNode(node->child2().node()));
983 case GetGlobalLexicalVariable:
984 read(AbstractHeap(Absolute, node->variablePointer()));
985 def(HeapLocation(GlobalVariableLoc, AbstractHeap(Absolute, node->variablePointer())), LazyNode(node));
988 case PutGlobalVariable:
989 write(AbstractHeap(Absolute, node->variablePointer()));
990 def(HeapLocation(GlobalVariableLoc, AbstractHeap(Absolute, node->variablePointer())), LazyNode(node->child2().node()));
993 case NewArrayWithSize:
995 read(HeapObjectCount);
996 write(HeapObjectCount);
1000 read(HeapObjectCount);
1001 write(HeapObjectCount);
1003 unsigned numElements = node->numChildren();
// A freshly allocated array has a known length, so def() it as a constant.
1005 def(HeapLocation(ArrayLengthLoc, Butterfly_publicLength, node),
1006 LazyNode(graph.freeze(jsNumber(numElements))));
1012 switch (node->indexingType()) {
1013 case ALL_DOUBLE_INDEXING_TYPES:
1014 heap = IndexedDoubleProperties;
1017 case ALL_INT32_INDEXING_TYPES:
1018 heap = IndexedInt32Properties;
1021 case ALL_CONTIGUOUS_INDEXING_TYPES:
1022 heap = IndexedContiguousProperties;
// Iterate whichever set is smaller: the array's elements, or the set of
// uint32 constants in use (only those indices could ever be CSE keys).
1029 if (numElements < graph.m_uint32ValuesInUse.size()) {
1030 for (unsigned operandIdx = 0; operandIdx < numElements; ++operandIdx) {
1031 Edge use = graph.m_varArgChildren[node->firstChild() + operandIdx];
1032 def(HeapLocation(IndexedPropertyLoc, heap, node, LazyNode(graph.freeze(jsNumber(operandIdx)))),
1033 LazyNode(use.node()));
1036 for (uint32_t operandIdx : graph.m_uint32ValuesInUse) {
1037 if (operandIdx >= numElements)
1039 Edge use = graph.m_varArgChildren[node->firstChild() + operandIdx];
1040 // operandIdx comes from graph.m_uint32ValuesInUse and thus is guaranteed to be already frozen
1041 def(HeapLocation(IndexedPropertyLoc, heap, node, LazyNode(graph.freeze(jsNumber(operandIdx)))),
1042 LazyNode(use.node()));
1048 case NewArrayBuffer: {
1049 read(HeapObjectCount);
1050 write(HeapObjectCount);
1052 unsigned numElements = node->numConstants();
1053 def(HeapLocation(ArrayLengthLoc, Butterfly_publicLength, node),
1054 LazyNode(graph.freeze(jsNumber(numElements))));
1057 NodeType op = JSConstant;
1058 switch (node->indexingType()) {
1059 case ALL_DOUBLE_INDEXING_TYPES:
1060 heap = IndexedDoubleProperties;
1061 op = DoubleConstant;
1064 case ALL_INT32_INDEXING_TYPES:
1065 heap = IndexedInt32Properties;
1068 case ALL_CONTIGUOUS_INDEXING_TYPES:
1069 heap = IndexedContiguousProperties;
1076 JSValue* data = graph.m_codeBlock->constantBuffer(node->startConstant());
1077 if (numElements < graph.m_uint32ValuesInUse.size()) {
1078 for (unsigned index = 0; index < numElements; ++index) {
1079 def(HeapLocation(IndexedPropertyLoc, heap, node, LazyNode(graph.freeze(jsNumber(index)))),
1080 LazyNode(graph.freeze(data[index]), op));
1083 Vector<uint32_t> possibleIndices;
1084 for (uint32_t index : graph.m_uint32ValuesInUse) {
1085 if (index >= numElements)
1087 possibleIndices.append(index);
1089 for (uint32_t index : possibleIndices) {
1090 def(HeapLocation(IndexedPropertyLoc, heap, node, LazyNode(graph.freeze(jsNumber(index)))),
1091 LazyNode(graph.freeze(data[index]), op));
1105 case NewStringObject:
1106 case PhantomNewObject:
1107 case MaterializeNewObject:
1108 case PhantomNewFunction:
1109 case PhantomNewGeneratorFunction:
1110 case PhantomCreateActivation:
1111 case MaterializeCreateActivation:
1112 read(HeapObjectCount);
1113 write(HeapObjectCount);
1117 case NewGeneratorFunction:
1118 if (node->castOperand<FunctionExecutable*>()->singletonFunction()->isStillValid())
1119 write(Watchpoint_fire);
1120 read(HeapObjectCount);
1121 write(HeapObjectCount);
1126 if (node->child2().useKind() == RegExpObjectUse
1127 && node->child3().useKind() == StringUse) {
1129 read(RegExpObject_lastIndex);
1131 write(RegExpObject_lastIndex);
1139 case StringReplaceRegExp:
1140 if (node->child1().useKind() == StringUse
1141 && node->child2().useKind() == RegExpObjectUse
1142 && node->child3().useKind() == StringUse) {
1144 read(RegExpObject_lastIndex);
1146 write(RegExpObject_lastIndex);
1154 if (node->arrayMode().isOutOfBounds()) {
1159 def(PureValue(node));
1165 case CompareGreater:
1166 case CompareGreaterEq:
1167 if (node->isBinaryUseKind(StringUse)) {
1168 read(HeapObjectCount);
1169 write(HeapObjectCount);
1172 if (!node->isBinaryUseKind(UntypedUse)) {
1173 def(PureValue(node));
1181 case CallStringConstructor:
1182 switch (node->child1().useKind()) {
1183 case StringObjectUse:
1184 case StringOrStringObjectUse:
1185 // These don't def a pure value, unfortunately. I'll avoid load-eliminating these for
1196 RELEASE_ASSERT_NOT_REACHED();
1200 case ThrowReferenceError:
1204 case CountExecution:
1205 case CheckWatchdogTimer:
1206 read(InternalState);
1207 write(InternalState);
1210 case LogShadowChickenPrologue:
1211 case LogShadowChickenTail:
1216 RELEASE_ASSERT_NOT_REACHED();
// Catch-all: an op not handled above is a bug in this function.
1220 DFG_CRASH(graph, node, toCString("Unrecognized node type: ", Graph::opName(node->op())).data())
// Functor that ignores every read()/write()/def() callback. Per the commentary
// above clobberize(), clients that only care about a subset of the callbacks
// pass NoOpClobberize for the ones they want to ignore.
// NOTE(review): access specifiers and the closing brace appear elided from
// this listing; verify against the complete file.
1223 class NoOpClobberize {
1225 NoOpClobberize() { }
1226 template<typename... T>
1227 void operator()(T...) const { }
// Functor that records whether it was invoked at all: any call to operator()
// sets m_result, and result() reports it. m_result is mutable so the const
// call operator can flip it.
// NOTE(review): the constructor that initializes m_result to false appears
// elided from this listing; verify against the complete file.
1230 class CheckClobberize {
1237 template<typename... T>
1238 void operator()(T...) const { m_result = true; }
1240 bool result() const { return m_result; }
1243 mutable bool m_result;
1246 bool doesWrites(Graph&, Node*);
// Functor that answers: does any heap reported via operator() overlap the heap
// captured at construction? result() returns the accumulated answer.
// NOTE(review): the constructor body/initializer and part of operator() appear
// elided from this listing; verify against the complete file.
1248 class AbstractHeapOverlaps {
1250 AbstractHeapOverlaps(AbstractHeap heap)
1256 void operator()(AbstractHeap otherHeap) const
1260 m_result = m_heap.overlaps(otherHeap);
1263 bool result() const { return m_result; }
1266 AbstractHeap m_heap;
1267 mutable bool m_result;
1270 bool accessesOverlap(Graph&, Node*, AbstractHeap);
1271 bool writesOverlap(Graph&, Node*, AbstractHeap);
1273 bool clobbersHeap(Graph&, Node*);
1275 // We would have used bind() for these, but because of the overloading that we are doing,
1276 // it's quite a bit clearer to just write this out the traditional way.
// Adapter that turns the read(AbstractHeap) functor callback into a call on a
// single adaptor object's read() method (used by the Adaptor overload of
// clobberize() below).
// NOTE(review): the operator() body and the stored member appear elided from
// this listing; presumably it forwards to m_value.read(heap) -- verify.
1278 template<typename T>
1279 class ReadMethodClobberize {
1281 ReadMethodClobberize(T& value)
1286 void operator()(AbstractHeap heap) const
// Adapter that forwards the write(AbstractHeap) functor callback to the
// wrapped object's write() method.
// NOTE(review): the member declaration and closing brace appear elided from
// this listing; verify against the complete file.
1294 template<typename T>
1295 class WriteMethodClobberize {
1297 WriteMethodClobberize(T& value)
1302 void operator()(AbstractHeap heap) const
1304 m_value.write(heap);
// Adapter that forwards both def() overloads -- def(PureValue) and
// def(HeapLocation, LazyNode) -- to the wrapped object's def() methods.
// NOTE(review): the PureValue operator() body appears elided from this
// listing; presumably it forwards to m_value.def(value) -- verify.
1310 template<typename T>
1311 class DefMethodClobberize {
1313 DefMethodClobberize(T& value)
1318 void operator()(PureValue value) const
1323 void operator()(HeapLocation location, LazyNode node) const
1325 m_value.def(location, node);
// Convenience overload: adapts one object exposing read()/write()/def()
// methods into the three-functor form of clobberize() above, via the
// *MethodClobberize wrapper classes.
1332 template<typename Adaptor>
1333 void clobberize(Graph& graph, Node* node, Adaptor& adaptor)
1335 ReadMethodClobberize<Adaptor> read(adaptor);
1336 WriteMethodClobberize<Adaptor> write(adaptor);
1337 DefMethodClobberize<Adaptor> def(adaptor);
1338 clobberize(graph, node, read, write, def);
1341 } } // namespace JSC::DFG
1343 #endif // ENABLE(DFG_JIT)
1345 #endif // DFGClobberize_h