2 * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #include "DFGStrengthReductionPhase.h"
31 #include "DFGAbstractHeap.h"
32 #include "DFGClobberize.h"
34 #include "DFGInsertionSet.h"
36 #include "DFGPredictionPropagationPhase.h"
37 #include "DFGVariableAccessDataDump.h"
38 #include "JSCInlines.h"
39 #include "MathCommon.h"
40 #include "RegExpConstructor.h"
41 #include "StringPrototype.h"
43 #include <wtf/text/StringBuilder.h>
45 namespace JSC { namespace DFG {
// DFG strength-reduction phase: rewrites nodes into cheaper equivalent forms
// (e.g. x|0 -> x, x*2 -> x+x, constant-folds string/regexp operations).
// NOTE(review): this listing is fragmentary — the embedded original line
// numbers jump (49-50, 54-58, 60-62, 65-66, 69-70 are elided), so access
// specifiers, the run() signature, braces and the per-node dispatch call are
// not visible here.
47 class StrengthReductionPhase : public Phase {
48 static const bool verbose = false;
// Constructor: forwards the graph and the phase name to the base Phase and
// sets up the InsertionSet used to splice new nodes into blocks.
51 StrengthReductionPhase(Graph& graph)
52 : Phase(graph, "strength reduction")
53 , m_insertionSet(graph)
// This phase only runs before the fixpoint has converged.
59 ASSERT(m_graph.m_fixpointState == FixpointNotConverged);
// Iterate blocks in reverse index order; within each block, walk nodes
// forward so m_nodeIndex is a valid insertion point for m_insertionSet.
63 for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
64 m_block = m_graph.block(blockIndex);
67 for (m_nodeIndex = 0; m_nodeIndex < m_block->size(); ++m_nodeIndex) {
68 m_node = m_block->at(m_nodeIndex);
// Flush any nodes queued during this block's pass into the block itself.
71 m_insertionSet.execute(m_block);
// Per-node dispatch. NOTE(review): the case labels (BitOr, BitLShift, etc.)
// fall in elided lines of this listing; the bodies below are matched to ops
// by their guards, so op attributions are inferred — confirm against the
// full file.
80 switch (m_node->op()) {
// Bitwise op: canonicalize operand order, then fold `x <op> 0` to x when the
// inputs are typed (UntypedUse could run valueOf side effects).
82 handleCommutativity();
84 if (m_node->child1().useKind() != UntypedUse && m_node->child2()->isInt32Constant() && !m_node->child2()->asInt32()) {
85 convertToIdentityOverChild1();
// Shift op: a shift amount that is 0 mod 32 is a no-op (matches JS semantics
// of masking shift counts with 0x1f).
92 handleCommutativity();
98 if (m_node->child1().useKind() != UntypedUse && m_node->child2()->isInt32Constant() && !(m_node->child2()->asInt32() & 0x1f)) {
99 convertToIdentityOverChild1();
// If the input is a BitURShift by a nonzero (mod 32) amount, its result is
// already a non-negative int32, so this conversion node is an identity —
// unless the mode demands an overflow check.
105 if (m_node->child1()->op() == BitURShift
106 && m_node->child1()->child2()->isInt32Constant()
107 && (m_node->child1()->child2()->asInt32() & 0x1f)
108 && m_node->arithMode() != Arith::DoOverflow) {
109 m_node->convertToIdentity();
// ArithAdd-like: x + 0 -> x for an int32 zero constant.
// NOTE(review): this is only sound here if the elided guard also rules out
// -0.0/UntypedUse concerns — confirm in the full file.
116 handleCommutativity();
118 if (m_node->child2()->isInt32Constant() && !m_node->child2()->asInt32()) {
119 convertToIdentityOverChild1();
// ArithMul: turn x * 2 into x + x, which avoids loading the constant.
125 handleCommutativity();
126 Edge& child2 = m_node->child2();
127 if (child2->isNumberConstant() && child2->asNumber() == 2) {
128 switch (m_node->binaryUseKind()) {
130 // It is always valuable to get rid of a double multiplication by 2.
131 // We won't have half-register dependencies issues on x86 and we won't have to load the constants.
132 m_node->setOp(ArithAdd);
133 child2.setNode(m_node->child1().node());
140 // For integers, we can only convert compatible modes.
141 // ArithAdd does handle do negative zero check for example.
142 if (m_node->arithMode() == Arith::CheckOverflow || m_node->arithMode() == Arith::Unchecked) {
143 m_node->setOp(ArithAdd);
144 child2.setNode(m_node->child1().node());
// ArithSub: x - c -> x + (-c) when c is an int32 constant. INT32_MIN is
// excluded because -INT32_MIN is not representable as int32.
155 if (m_node->child2()->isInt32Constant()
156 && m_node->isBinaryUseKind(Int32Use)) {
157 int32_t value = m_node->child2()->asInt32();
158 if (value != INT32_MIN) {
159 m_node->setOp(ArithAdd);
160 m_node->child2().setNode(
161 m_insertionSet.insertConstant(
162 m_nodeIndex, m_node->origin, jsNumber(-value)));
// ArithPow: x ** 1 -> x; x ** 2 -> x * x.
170 if (m_node->child2()->isNumberConstant()) {
171 double yOperandValue = m_node->child2()->asNumber();
172 if (yOperandValue == 1) {
173 convertToIdentityOverChild1();
175 } else if (yOperandValue == 2) {
176 m_node->setOp(ArithMul);
177 m_node->child2() = m_node->child1();
185 // In: ArithMod(ArithMod(x, const1), const2)
186 // Out: Identity(ArithMod(x, const1))
187 // if const1 <= const2.
188 if (m_node->binaryUseKind() == Int32Use
189 && m_node->child2()->isInt32Constant()
190 && m_node->child1()->op() == ArithMod
191 && m_node->child1()->binaryUseKind() == Int32Use
192 && m_node->child1()->child2()->isInt32Constant()
193 && std::abs(m_node->child1()->child2()->asInt32()) <= std::abs(m_node->child2()->asInt32())) {
194 convertToIdentityOverChild1();
200 // ArithDiv(x, constant)
202 // ArithMul(x, 1 / constant)
203 // if the operation has the same result.
// Only done when safeReciprocalForDivByConst proves the reciprocal is exact
// (so multiply-by-reciprocal is bit-identical to the divide).
204 if (m_node->isBinaryUseKind(DoubleRepUse)
205 && m_node->child2()->isNumberConstant()) {
207 if (std::optional<double> reciprocal = safeReciprocalForDivByConst(m_node->child2()->asNumber())) {
208 Node* reciprocalNode = m_insertionSet.insertConstant(m_nodeIndex, m_node->origin, jsDoubleNumber(*reciprocal), DoubleConstant);
209 m_node->setOp(ArithMul);
210 m_node->child2() = Edge(reciprocalNode, DoubleRepUse);
219 // This short-circuits circuitous conversions, like ValueRep(Int52Rep(value)).
221 // The only speculation that we would do beyond validating that we have a type that
222 // can be represented a certain way is an Int32 check that would appear on Int52Rep
223 // nodes. For now, if we see this and the final type we want is an Int52, we use it
224 // as an excuse not to fold. The only thing we would need is a Int52RepInt32Use kind.
225 bool hadInt32Check = false;
226 if (m_node->op() == Int52Rep) {
227 if (m_node->child1().useKind() != Int32Use)
229 hadInt32Check = true;
// Walk down the chain of conversion nodes looking for one whose canonical
// result representation already matches this node's.
// NOTE(review): loop-exit breaks/returns are in elided lines here.
231 for (Node* node = m_node->child1().node(); ; node = node->child1().node()) {
232 if (canonicalResultRepresentation(node->result()) ==
233 canonicalResultRepresentation(m_node->result())) {
// Preserve any checks this node performed before folding it away.
234 m_insertionSet.insertCheck(m_graph, m_nodeIndex, m_node);
236 // FIXME: Consider adding Int52RepInt32Use or even DoubleRepInt32Use,
237 // which would be super weird. The latter would only arise in some
238 // seriously circuitous conversions.
239 if (canonicalResultRepresentation(node->result()) != NodeResultJS)
// Re-insert the Int32 check we are eliding by skipping the Int52Rep.
242 m_insertionSet.insertCheck(
243 m_nodeIndex, m_node->origin, Edge(node, Int32Use));
245 m_node->child1() = node->defaultEdge();
246 m_node->convertToIdentity();
// Only continue through nodes that are themselves pure conversions.
251 switch (node->op()) {
253 if (node->child1().useKind() != Int32Use)
255 hadInt32Check = true;
// Flush case: if the local was stored by a SetLocal in this block with no
// intervening operation touching that stack slot, the Flush can weaken to a
// PhantomLocal. Only valid pre-SSA.
270 ASSERT(m_graph.m_form != SSA);
272 if (m_graph.willCatchExceptionInMachineFrame(m_node->origin.semantic)) {
273 // FIXME: We should be able to relax this:
274 // https://bugs.webkit.org/show_bug.cgi?id=150824
278 Node* setLocal = nullptr;
279 VirtualRegister local = m_node->local();
// Scan backwards from this node for the defining SetLocal; bail if anything
// in between may read/write this stack slot.
281 for (unsigned i = m_nodeIndex; i--;) {
282 Node* node = m_block->at(i);
284 if (node->op() == SetLocal && node->local() == local) {
289 if (accessesOverlap(m_graph, node, AbstractHeap(Stack, local)))
297 // The Flush should become a PhantomLocal at this point. This means that we want the
298 // local's value during OSR, but we don't care if the value is stored to the stack. CPS
299 // rethreading can canonicalize PhantomLocals for us.
300 m_node->convertFlushToPhantomLocal();
306 // FIXME: we should probably do this in constant folding but this currently relies on an OSR exit rule.
307 // https://bugs.webkit.org/show_bug.cgi?id=154832
308 case OverridesHasInstance: {
// child2 must be a known cell (the Symbol.hasInstance function) to fold.
309 if (!m_node->child2().node()->isCellConstant())
// A non-default hasInstance function definitely overrides; otherwise assume
// the default and guard with a CheckTypeInfoFlags (exits if wrong).
312 if (m_node->child2().node()->asCell() != m_graph.globalObjectFor(m_node->origin.semantic)->functionProtoHasInstanceSymbolFunction()) {
313 m_graph.convertToConstant(m_node, jsBoolean(true));
316 } else if (!m_graph.hasExitSite(m_node->origin.semantic, BadTypeInfoFlags)) {
317 // We optimistically assume that we will not see a function that has a custom instanceof operation as they should be rare.
318 m_insertionSet.insertNode(m_nodeIndex, SpecNone, CheckTypeInfoFlags, m_node->origin, OpInfo(ImplementsDefaultHasInstance), Edge(m_node->child1().node(), CellUse));
319 m_graph.convertToConstant(m_node, jsBoolean(false));
326 // FIXME: We have a lot of string constant-folding rules here. It would be great to
327 // move these to the abstract interpreter once AbstractValue can support LazyJSValue.
328 // https://bugs.webkit.org/show_bug.cgi?id=155204
// String concatenation of two constants (at least one a string): fold to a
// LazyJSValue string built off-thread.
331 if (m_node->child1()->isConstant()
332 && m_node->child2()->isConstant()
333 && (!!m_node->child1()->tryGetString(m_graph) || !!m_node->child2()->tryGetString(m_graph))) {
// Convert a constant operand to its string form per JS ToString on
// primitives (int32, double, boolean, null, undefined).
334 auto tryGetConstantString = [&] (Node* node) -> String {
335 String string = node->tryGetString(m_graph);
336 if (!string.isEmpty())
338 JSValue value = node->constant()->value();
342 return String::number(value.asInt32());
343 if (value.isNumber())
344 return String::numberToStringECMAScript(value.asNumber());
345 if (value.isBoolean())
346 return value.asBoolean() ? ASCIILiteral("true") : ASCIILiteral("false");
348 return ASCIILiteral("null");
349 if (value.isUndefined())
350 return ASCIILiteral("undefined");
354 String leftString = tryGetConstantString(m_node->child1().node());
357 String rightString = tryGetConstantString(m_node->child2().node());
361 StringBuilder builder;
362 builder.append(leftString);
363 builder.append(rightString);
364 convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, builder.toString()));
// MakeRope-style case: fold two or three constant string children.
// NOTE(review): the bail-outs on empty/unknown strings sit in elided lines.
372 String leftString = m_node->child1()->tryGetString(m_graph);
375 String rightString = m_node->child2()->tryGetString(m_graph);
379 if (m_node->child3()) {
380 extraString = m_node->child3()->tryGetString(m_graph);
385 StringBuilder builder;
386 builder.append(leftString);
387 builder.append(rightString);
389 builder.append(extraString);
391 convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, builder.toString()));
// ToString / CallStringConstructor on a constant primitive: fold to the
// stringified constant.
397 case CallStringConstructor: {
398 Edge& child1 = m_node->child1();
399 switch (child1.useKind()) {
403 if (child1->hasConstant()) {
404 JSValue value = child1->constant()->value();
408 result = String::number(value.asInt32());
409 else if (value.isNumber())
410 result = String::numberToStringECMAScript(value.asNumber());
412 if (!result.isNull()) {
413 convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, result));
// Number.prototype.toString(radix) with constant number and valid constant
// radix: fold the radix conversion at compile time.
427 case NumberToStringWithValidRadixConstant: {
428 Edge& child1 = m_node->child1();
429 if (child1->hasConstant()) {
430 JSValue value = child1->constant()->value();
431 if (value && value.isNumber()) {
432 String result = toStringWithRadix(value.asNumber(), m_node->validRadixConstant());
433 convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, result));
// GetArrayLength on a known constant string: fold to its length.
440 case GetArrayLength: {
441 if (m_node->arrayMode().type() == Array::Generic
442 || m_node->arrayMode().type() == Array::String) {
443 String string = m_node->child1()->tryGetString(m_graph);
445 m_graph.convertToConstant(m_node, jsNumber(string.length()));
// GetGlobalObject of a constant object: fold to that object's global.
453 case GetGlobalObject: {
454 if (JSObject* object = m_node->child1()->dynamicCastConstant<JSObject*>(vm())) {
455 m_graph.convertToConstant(m_node, object->globalObject());
// RegExp ops: try to (1) fold the whole match to a constant result when the
// regexp, subject string and lastIndex are all statically known, or (2) at
// least convert RegExpExec to the cheaper RegExpExecNonGlobalOrSticky form.
// NOTE(review): RegExpExec/RegExpTest case labels, many `return`s/`break`s
// and the ovector/result declarations are in elided lines of this listing.
464 case RegExpMatchFast:
465 case RegExpExecNonGlobalOrSticky: {
466 JSGlobalObject* globalObject = m_node->child1()->dynamicCastConstant<JSGlobalObject*>(vm());
469 dataLog("Giving up because no global object.\n");
473 if (globalObject->isHavingABadTime()) {
475 dataLog("Giving up because bad time.\n");
// Recover the RegExp* either from a constant RegExpObject or from a
// NewRegexp node's operand; for the NonGlobalOrSticky form it is the
// node's own operand.
479 Node* regExpObjectNode = nullptr;
480 RegExp* regExp = nullptr;
481 if (m_node->op() == RegExpExec || m_node->op() == RegExpTest || m_node->op() == RegExpMatchFast) {
482 regExpObjectNode = m_node->child2().node();
483 if (RegExpObject* regExpObject = regExpObjectNode->dynamicCastConstant<RegExpObject*>(vm()))
484 regExp = regExpObject->regExp();
485 else if (regExpObjectNode->op() == NewRegexp)
486 regExp = regExpObjectNode->castOperand<RegExp*>();
489 dataLog("Giving up because the regexp is unknown.\n");
493 regExp = m_node->castOperand<RegExp*>();
// RegExpMatchFast: for a global (non-sticky) regexp, match() resets
// lastIndex to 0 first; model that explicitly, then switch to the
// exit-invalid global variant. Non-global match() behaves like exec().
495 if (m_node->op() == RegExpMatchFast) {
496 if (regExp->global()) {
497 if (regExp->sticky())
499 if (m_node->child3().useKind() != StringUse)
501 NodeOrigin origin = m_node->origin;
502 m_insertionSet.insertNode(
503 m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());
504 m_insertionSet.insertNode(
505 m_nodeIndex, SpecNone, SetRegExpObjectLastIndex, origin,
507 Edge(regExpObjectNode, RegExpObjectUse),
508 m_insertionSet.insertConstantForUse(
509 m_nodeIndex, origin, jsNumber(0), UntypedUse));
510 origin = origin.withInvalidExit();
511 m_node->convertToRegExpMatchFastGlobal(m_graph.freeze(regExp));
512 m_node->origin = origin;
517 m_node->setOp(RegExpExec);
519 // Continue performing strength reduction onto RegExpExec node.
522 ASSERT(m_node->op() != RegExpMatchFast);
// Attempt to execute the match at compile time and replace the node with
// its constant result (plus the required lastIndex / cached-result writes).
524 auto foldToConstant = [&] {
525 Node* stringNode = nullptr;
526 if (m_node->op() == RegExpExecNonGlobalOrSticky)
527 stringNode = m_node->child2().node();
529 stringNode = m_node->child3().node();
531 // NOTE: This mostly already protects us from having the compiler execute a regexp
532 // operation on a ginormous string by preventing us from getting our hands on ginormous
533 // strings in the first place.
534 String string = stringNode->tryGetString(m_graph);
537 dataLog("Giving up because the string is unknown.\n");
541 FrozenValue* regExpFrozenValue = m_graph.freeze(regExp);
543 // Refuse to do things with regular expressions that have a ginormous number of
545 unsigned ginormousNumberOfSubPatterns = 1000;
546 if (regExp->numSubpatterns() > ginormousNumberOfSubPatterns) {
548 dataLog("Giving up because of pattern limit.\n");
552 if ((m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) && regExp->hasNamedCaptures()) {
553 // FIXME: https://bugs.webkit.org/show_bug.cgi?id=176464
554 // Implement strength reduction optimization for named capture groups.
556 dataLog("Giving up because of named capture groups.\n");
561 if (regExp->globalOrSticky()) {
562 // This will only work if we can prove what the value of lastIndex is. To do this
563 // safely, we need to execute the insertion set so that we see any previous strength
564 // reductions. This is needed for soundness since otherwise the effectfulness of any
565 // previous strength reductions would be invisible to us.
566 ASSERT(regExpObjectNode);
567 executeInsertionSet();
568 lastIndex = UINT_MAX;
// Scan backwards for a SetRegExpObjectLastIndex that pins lastIndex to a
// known non-negative int32; abort the scan at anything that could write
// the lastIndex heap location.
569 for (unsigned otherNodeIndex = m_nodeIndex; otherNodeIndex--;) {
570 Node* otherNode = m_block->at(otherNodeIndex);
571 if (otherNode == regExpObjectNode) {
575 if (otherNode->op() == SetRegExpObjectLastIndex
576 && otherNode->child1() == regExpObjectNode
577 && otherNode->child2()->isInt32Constant()
578 && otherNode->child2()->asInt32() >= 0) {
579 lastIndex = static_cast<unsigned>(otherNode->child2()->asInt32());
582 if (writesOverlap(m_graph, otherNode, RegExpObject_lastIndex))
585 if (lastIndex == UINT_MAX) {
587 dataLog("Giving up because the last index is not known.\n");
// Folding bakes in the "not having a bad time" assumption; watch it.
593 m_graph.watchpoints().addLazily(globalObject->havingABadTimeWatchpoint());
595 Structure* structure;
596 if ((m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) && regExp->hasNamedCaptures())
597 structure = globalObject->regExpMatchesArrayWithGroupsStructure();
599 structure = globalObject->regExpMatchesArrayStructure();
601 if (structure->indexingType() != ArrayWithContiguous) {
602 // This is further protection against a race with haveABadTime.
604 dataLog("Giving up because the structure has the wrong indexing type.\n");
607 m_graph.registerStructure(structure);
609 RegExpConstructor* constructor = globalObject->regExpConstructor();
610 FrozenValue* constructorFrozenValue = m_graph.freeze(constructor);
614 // We have to call the kind of match function that the main thread would have called.
615 // Otherwise, we might not have the desired Yarr code compiled, and the match will fail.
616 if (m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) {
618 if (!regExp->matchConcurrently(vm(), string, lastIndex, position, ovector)) {
620 dataLog("Giving up because match failed.\n")
623 result.start = position;
624 result.end = ovector[1];
626 if (!regExp->matchConcurrently(vm(), string, lastIndex, result)) {
628 dataLog("Giving up because match failed.\n");
633 // We've constant-folded the regexp. Now we're committed to replacing RegExpExec/Test.
637 NodeOrigin origin = m_node->origin;
639 m_insertionSet.insertNode(
640 m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());
642 if (m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) {
644 RegisteredStructureSet* structureSet = m_graph.addStructureSet(structure);
646 // Create an array modeling the JS array that we will try to allocate. This is
647 // basically createRegExpMatchesArray but over C++ strings instead of JSStrings.
648 Vector<String> resultArray;
649 resultArray.append(string.substring(result.start, result.end - result.start));
650 for (unsigned i = 1; i <= regExp->numSubpatterns(); ++i) {
651 int start = ovector[2 * i];
653 resultArray.append(string.substring(start, ovector[2 * i + 1] - start));
655 resultArray.append(String());
658 unsigned publicLength = resultArray.size();
659 unsigned vectorLength =
660 Butterfly::optimalContiguousVectorLength(structure, publicLength);
662 UniquedStringImpl* indexUID = vm().propertyNames->index.impl();
663 UniquedStringImpl* inputUID = vm().propertyNames->input.impl();
664 unsigned indexIndex = m_graph.identifiers().ensure(indexUID);
665 unsigned inputIndex = m_graph.identifiers().ensure(inputUID);
// Build the MaterializeNewObject var-arg children: structure, public
// length, vector length, 'index' and 'input' named properties, then the
// indexed match/capture strings.
667 unsigned firstChild = m_graph.m_varArgChildren.size();
668 m_graph.m_varArgChildren.append(
669 m_insertionSet.insertConstantForUse(
670 m_nodeIndex, origin, structure, KnownCellUse));
671 ObjectMaterializationData* data = m_graph.m_objectMaterializationData.add();
673 m_graph.m_varArgChildren.append(
674 m_insertionSet.insertConstantForUse(
675 m_nodeIndex, origin, jsNumber(publicLength), KnownInt32Use));
676 data->m_properties.append(PublicLengthPLoc);
678 m_graph.m_varArgChildren.append(
679 m_insertionSet.insertConstantForUse(
680 m_nodeIndex, origin, jsNumber(vectorLength), KnownInt32Use));
681 data->m_properties.append(VectorLengthPLoc);
683 m_graph.m_varArgChildren.append(
684 m_insertionSet.insertConstantForUse(
685 m_nodeIndex, origin, jsNumber(result.start), UntypedUse));
686 data->m_properties.append(
687 PromotedLocationDescriptor(NamedPropertyPLoc, indexIndex));
689 m_graph.m_varArgChildren.append(Edge(stringNode, UntypedUse));
690 data->m_properties.append(
691 PromotedLocationDescriptor(NamedPropertyPLoc, inputIndex));
// Turn a C++ String into a graph node: empty -> shared empty string
// constant, otherwise a LazyJSConstant built from the string.
693 auto materializeString = [&] (const String& string) -> Node* {
696 if (string.isEmpty()) {
697 return m_insertionSet.insertConstant(
698 m_nodeIndex, origin, vm().smallStrings.emptyString());
700 LazyJSValue value = LazyJSValue::newString(m_graph, string);
701 return m_insertionSet.insertNode(
702 m_nodeIndex, SpecNone, LazyJSConstant, origin,
703 OpInfo(m_graph.m_lazyJSValues.add(value)));
706 for (unsigned i = 0; i < resultArray.size(); ++i) {
707 if (Node* node = materializeString(resultArray[i])) {
708 m_graph.m_varArgChildren.append(Edge(node, UntypedUse));
709 data->m_properties.append(
710 PromotedLocationDescriptor(IndexedPropertyPLoc, i));
714 Node* resultNode = m_insertionSet.insertNode(
715 m_nodeIndex, SpecArray, Node::VarArg, MaterializeNewObject, origin,
716 OpInfo(structureSet), OpInfo(data), firstChild,
717 m_graph.m_varArgChildren.size() - firstChild);
719 m_node->convertToIdentityOn(resultNode);
721 m_graph.convertToConstant(m_node, jsNull());
723 m_graph.convertToConstant(m_node, jsBoolean(!!result));
725 // Whether it's Exec or Test, we need to tell the constructor and RegExpObject what's up.
726 // Because SetRegExpObjectLastIndex may exit and it clobbers exit state, we do that
729 if (regExp->globalOrSticky()) {
730 ASSERT(regExpObjectNode);
731 m_insertionSet.insertNode(
732 m_nodeIndex, SpecNone, SetRegExpObjectLastIndex, origin,
734 Edge(regExpObjectNode, RegExpObjectUse),
735 m_insertionSet.insertConstantForUse(
736 m_nodeIndex, origin, jsNumber(result ? result.end : 0), UntypedUse));
738 origin = origin.withInvalidExit();
// Record the cached match result on the RegExpConstructor (feeds
// RegExp.$1..$9 etc.) exactly as the runtime would.
742 unsigned firstChild = m_graph.m_varArgChildren.size();
743 m_graph.m_varArgChildren.append(
744 m_insertionSet.insertConstantForUse(
745 m_nodeIndex, origin, constructorFrozenValue, KnownCellUse));
746 m_graph.m_varArgChildren.append(
747 m_insertionSet.insertConstantForUse(
748 m_nodeIndex, origin, regExpFrozenValue, KnownCellUse));
749 m_graph.m_varArgChildren.append(Edge(stringNode, KnownCellUse));
750 m_graph.m_varArgChildren.append(
751 m_insertionSet.insertConstantForUse(
752 m_nodeIndex, origin, jsNumber(result.start), KnownInt32Use));
753 m_graph.m_varArgChildren.append(
754 m_insertionSet.insertConstantForUse(
755 m_nodeIndex, origin, jsNumber(result.end), KnownInt32Use));
756 m_insertionSet.insertNode(
757 m_nodeIndex, SpecNone, Node::VarArg, RecordRegExpCachedResult, origin,
758 OpInfo(), OpInfo(), firstChild, m_graph.m_varArgChildren.size() - firstChild);
760 origin = origin.withInvalidExit();
763 m_node->origin = origin;
// Fallback: a RegExpExec on a non-global/sticky regexp with a String
// subject can become the cheaper RegExpExecNonGlobalOrSticky node.
767 auto convertToStatic = [&] {
768 if (m_node->op() != RegExpExec)
770 if (regExp->globalOrSticky())
772 if (m_node->child3().useKind() != StringUse)
774 NodeOrigin origin = m_node->origin;
775 m_insertionSet.insertNode(
776 m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());
777 m_node->convertToRegExpExecNonGlobalOrSticky(m_graph.freeze(regExp));
782 if (foldToConstant())
785 if (convertToStatic())
// StringReplaceRegExp: when the subject string, regexp and replacement are
// all statically known, run the replace loop at compile time and fold the
// node to the resulting constant string.
// NOTE(review): several guards, the `result`/`ovector` declarations and the
// no-match break paths are in elided lines of this listing.
792 case StringReplaceRegExp: {
793 Node* stringNode = m_node->child1().node();
794 String string = stringNode->tryGetString(m_graph);
798 Node* regExpObjectNode = m_node->child2().node();
800 if (RegExpObject* regExpObject = regExpObjectNode->dynamicCastConstant<RegExpObject*>(vm()))
801 regExp = regExpObject->regExp();
802 else if (regExpObjectNode->op() == NewRegexp)
803 regExp = regExpObjectNode->castOperand<RegExp*>();
806 dataLog("Giving up because the regexp is unknown.\n");
810 String replace = m_node->child3()->tryGetString(m_graph);
814 StringBuilder builder;
816 unsigned lastIndex = 0;
817 unsigned startPosition = 0;
// Compile-time replica of the runtime replace loop; repeats for global
// regexps, advancing startPosition past each (possibly empty) match.
822 // Model which version of match() is called by the main thread.
823 if (replace.isEmpty() && regExp->global()) {
824 if (!regExp->matchConcurrently(vm(), string, startPosition, result)) {
830 if (!regExp->matchConcurrently(vm(), string, startPosition, position, ovector)) {
835 result.start = position;
836 result.end = ovector[1];
// Append the unmatched prefix plus the replacement (with $n
// backreferences substituted) for this match.
842 unsigned replLen = replace.length();
843 if (lastIndex < result.start || replLen) {
844 builder.append(string, lastIndex, result.start - lastIndex);
846 builder.append(substituteBackreferences(replace, string, ovector.data(), regExp));
849 lastIndex = result.end;
850 startPosition = lastIndex;
852 // special case of empty match
853 if (result.empty()) {
855 if (startPosition > string.length())
858 } while (regExp->global());
862 // We are committed at this point.
865 NodeOrigin origin = m_node->origin;
867 // Preserve any checks we have.
868 m_insertionSet.insertNode(
869 m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());
// A global replace resets lastIndex to 0, which may exit — model it and
// invalidate exit state afterwards.
871 if (regExp->global()) {
872 m_insertionSet.insertNode(
873 m_nodeIndex, SpecNone, SetRegExpObjectLastIndex, origin,
875 Edge(regExpObjectNode, RegExpObjectUse),
876 m_insertionSet.insertConstantForUse(
877 m_nodeIndex, origin, jsNumber(0), UntypedUse));
879 origin = origin.withInvalidExit();
// No match at all: the result is the original string unchanged.
882 if (!lastIndex && builder.isEmpty())
883 m_node->convertToIdentityOn(stringNode);
885 if (lastIndex < string.length())
886 builder.append(string, lastIndex, string.length() - lastIndex);
888 m_node->convertToLazyJSConstant(m_graph, LazyJSValue::newString(m_graph, builder.toString()));
891 m_node->origin = origin;
// Call/TailCall with a statically known callee: convert to a DirectCall on
// the frozen executable, and pre-reserve parameter stack slots so the
// backend doesn't have to.
897 case TailCallInlinedCaller:
899 ExecutableBase* executable = nullptr;
900 Edge callee = m_graph.varArgChild(m_node, 0);
901 if (JSFunction* function = callee->dynamicCastConstant<JSFunction*>(vm()))
902 executable = function->executable();
903 else if (callee->isFunctionAllocation())
904 executable = callee->castOperand<FunctionExecutable*>();
909 if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(vm(), executable)) {
910 // We need to update m_parameterSlots before we get to the backend, but we don't
911 // want to do too much of this.
// +1 accounts for the `this` argument.
912 unsigned numAllocatedArgs =
913 static_cast<unsigned>(functionExecutable->parameterCount()) + 1;
915 if (numAllocatedArgs <= Options::maximumDirectCallStackSize()) {
916 m_graph.m_parameterSlots = std::max(
917 m_graph.m_parameterSlots,
918 Graph::parameterSlotsForArgCount(numAllocatedArgs));
922 m_node->convertToDirectCall(m_graph.freeze(executable));
// Replace a binary node with Identity over the child at childIndex,
// preserving its speculation checks and dropping the other child
// (childIndex ^ 1 flips 0<->1).
932 void convertToIdentityOverChild(unsigned childIndex)
934 ASSERT(!(m_node->flags() & NodeHasVarArgs));
935 m_insertionSet.insertCheck(m_graph, m_nodeIndex, m_node);
936 m_node->children.removeEdge(childIndex ^ 1);
937 m_node->convertToIdentity();
941 void convertToIdentityOverChild1()
943 convertToIdentityOverChild(0);
946 void convertToIdentityOverChild2()
948 convertToIdentityOverChild(1);
// Fold `node` to a lazily-materialized constant, keeping its checks.
951 void convertToLazyJSValue(Node* node, LazyJSValue value)
953 m_insertionSet.insertCheck(m_graph, m_nodeIndex, node);
954 node->convertToLazyJSConstant(m_graph, value);
// Canonicalize operand order of a commutative binary op: constants go to
// the right, and otherwise operands are ordered by node pointer so CSE
// treats a+b and b+a identically.
957 void handleCommutativity()
959 // It's definitely not sound to swap the lhs and rhs when we may be performing effectful
960 // calls on the lhs/rhs for valueOf.
961 if (m_node->child1().useKind() == UntypedUse || m_node->child2().useKind() == UntypedUse)
964 // If the right side is a constant then there is nothing left to do.
965 if (m_node->child2()->hasConstant())
968 // This case ensures that optimizations that look for x + const don't also have
969 // to look for const + x.
970 if (m_node->child1()->hasConstant() && !m_node->child1()->asJSValue().isCell()) {
971 std::swap(m_node->child1(), m_node->child2());
976 // This case ensures that CSE is commutativity-aware.
977 if (m_node->child1().node() > m_node->child2().node()) {
978 std::swap(m_node->child1(), m_node->child2());
// Flush pending insertions now and keep m_nodeIndex pointing at the same
// node by advancing it past the inserted entries.
984 void executeInsertionSet()
986 m_nodeIndex += m_insertionSet.execute(m_block);
// Set used to splice new nodes into the current block.
989 InsertionSet m_insertionSet;
// Index of m_node within m_block; also the insertion point for new nodes.
991 unsigned m_nodeIndex;
// Entry point: run the strength-reduction phase over the graph; returns
// whether the phase reported making changes (via runPhase).
996 bool performStrengthReduction(Graph& graph)
998 return runPhase<StrengthReductionPhase>(graph);
1001 } } // namespace JSC::DFG
1003 #endif // ENABLE(DFG_JIT)