Enhance the MacroAssembler and LinkBuffer to support pointer profiling.
diff --git a/Source/JavaScriptCore/yarr/YarrJIT.cpp b/Source/JavaScriptCore/yarr/YarrJIT.cpp
index 8a8397b..683edcc 100644
--- a/Source/JavaScriptCore/yarr/YarrJIT.cpp
+++ b/Source/JavaScriptCore/yarr/YarrJIT.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2009-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
 #include "config.h"
 #include "YarrJIT.h"
 
-#include "ASCIICType.h"
+#include <wtf/ASCIICType.h>
 #include "LinkBuffer.h"
+#include "Options.h"
+#include "VM.h"
 #include "Yarr.h"
+#include "YarrCanonicalize.h"
 
 #if ENABLE(YARR_JIT)
 
@@ -36,19 +39,50 @@ using namespace WTF;
 
 namespace JSC { namespace Yarr {
 
+template<YarrJITCompileMode compileMode>
 class YarrGenerator : private MacroAssembler {
-    friend void jitCompile(JSGlobalData*, YarrCodeBlock& jitObject, const UString& pattern, unsigned& numSubpatterns, const char*& error, bool ignoreCase, bool multiline);
+    friend void jitCompile(VM*, YarrCodeBlock&, const String& pattern, unsigned& numSubpatterns, const char*& error, bool ignoreCase, bool multiline);
 
 #if CPU(ARM)
     static const RegisterID input = ARMRegisters::r0;
     static const RegisterID index = ARMRegisters::r1;
     static const RegisterID length = ARMRegisters::r2;
-    static const RegisterID output = ARMRegisters::r4;
+    static const RegisterID output = ARMRegisters::r3;
 
-    static const RegisterID regT0 = ARMRegisters::r5;
-    static const RegisterID regT1 = ARMRegisters::r6;
+    static const RegisterID regT0 = ARMRegisters::r4;
+    static const RegisterID regT1 = ARMRegisters::r5;
+    static const RegisterID initialStart = ARMRegisters::r8;
 
     static const RegisterID returnRegister = ARMRegisters::r0;
+    static const RegisterID returnRegister2 = ARMRegisters::r1;
+
+#define HAVE_INITIAL_START_REG
+#elif CPU(ARM64)
+    // Argument registers
+    static const RegisterID input = ARM64Registers::x0;
+    static const RegisterID index = ARM64Registers::x1;
+    static const RegisterID length = ARM64Registers::x2;
+    static const RegisterID output = ARM64Registers::x3;
+    static const RegisterID freelistRegister = ARM64Registers::x4;
+    static const RegisterID freelistSizeRegister = ARM64Registers::x5;
+
+    // Scratch registers
+    static const RegisterID regT0 = ARM64Registers::x6;
+    static const RegisterID regT1 = ARM64Registers::x7;
+    static const RegisterID regT2 = ARM64Registers::x8;
+    static const RegisterID remainingMatchCount = ARM64Registers::x9;
+    static const RegisterID regUnicodeInputAndTrail = ARM64Registers::x10;
+    static const RegisterID initialStart = ARM64Registers::x11;
+    static const RegisterID supplementaryPlanesBase = ARM64Registers::x12;
+    static const RegisterID surrogateTagMask = ARM64Registers::x13;
+    static const RegisterID leadingSurrogateTag = ARM64Registers::x14;
+    static const RegisterID trailingSurrogateTag = ARM64Registers::x15;
+
+    static const RegisterID returnRegister = ARM64Registers::x0;
+    static const RegisterID returnRegister2 = ARM64Registers::x1;
+
+#define HAVE_INITIAL_START_REG
+#define JIT_UNICODE_EXPRESSIONS
 #elif CPU(MIPS)
     static const RegisterID input = MIPSRegisters::a0;
     static const RegisterID index = MIPSRegisters::a1;
@@ -57,18 +91,12 @@ class YarrGenerator : private MacroAssembler {
 
     static const RegisterID regT0 = MIPSRegisters::t4;
     static const RegisterID regT1 = MIPSRegisters::t5;
+    static const RegisterID initialStart = MIPSRegisters::t6;
 
     static const RegisterID returnRegister = MIPSRegisters::v0;
-#elif CPU(SH4)
-    static const RegisterID input = SH4Registers::r4;
-    static const RegisterID index = SH4Registers::r5;
-    static const RegisterID length = SH4Registers::r6;
-    static const RegisterID output = SH4Registers::r7;
+    static const RegisterID returnRegister2 = MIPSRegisters::v1;
 
-    static const RegisterID regT0 = SH4Registers::r0;
-    static const RegisterID regT1 = SH4Registers::r1;
-
-    static const RegisterID returnRegister = SH4Registers::r0;
+#define HAVE_INITIAL_START_REG
 #elif CPU(X86)
     static const RegisterID input = X86Registers::eax;
     static const RegisterID index = X86Registers::edx;
@@ -79,16 +107,206 @@ class YarrGenerator : private MacroAssembler {
     static const RegisterID regT1 = X86Registers::esi;
 
     static const RegisterID returnRegister = X86Registers::eax;
+    static const RegisterID returnRegister2 = X86Registers::edx;
 #elif CPU(X86_64)
+#if !OS(WINDOWS)
+    // Argument registers
     static const RegisterID input = X86Registers::edi;
     static const RegisterID index = X86Registers::esi;
     static const RegisterID length = X86Registers::edx;
     static const RegisterID output = X86Registers::ecx;
+    static const RegisterID freelistRegister = X86Registers::r8;
+    static const RegisterID freelistSizeRegister = X86Registers::r9; // Only used during initialization.
+#else
+    // If the return value doesn't fit in 64 bits, its destination is pointed to by rcx and the parameters are shifted.
+    // http://msdn.microsoft.com/en-us/library/7572ztz4.aspx
+    COMPILE_ASSERT(sizeof(MatchResult) > sizeof(void*), MatchResult_does_not_fit_in_64bits);
+    static const RegisterID input = X86Registers::edx;
+    static const RegisterID index = X86Registers::r8;
+    static const RegisterID length = X86Registers::r9;
+    static const RegisterID output = X86Registers::r10;
+#endif
 
+    // Scratch registers
     static const RegisterID regT0 = X86Registers::eax;
-    static const RegisterID regT1 = X86Registers::ebx;
+#if !OS(WINDOWS)
+    static const RegisterID regT1 = X86Registers::r9;
+    static const RegisterID regT2 = X86Registers::r10;
+#else
+    static const RegisterID regT1 = X86Registers::ecx;
+    static const RegisterID regT2 = X86Registers::edi;
+#endif
+
+    static const RegisterID initialStart = X86Registers::ebx;
+#if !OS(WINDOWS)
+    static const RegisterID remainingMatchCount = X86Registers::r12;
+#else
+    static const RegisterID remainingMatchCount = X86Registers::esi;
+#endif
+    static const RegisterID regUnicodeInputAndTrail = X86Registers::r13;
+    static const RegisterID leadingSurrogateTag = X86Registers::r14;
+    static const RegisterID trailingSurrogateTag = X86Registers::r15;
 
     static const RegisterID returnRegister = X86Registers::eax;
+    static const RegisterID returnRegister2 = X86Registers::edx;
+
+    const TrustedImm32 supplementaryPlanesBase = TrustedImm32(0x10000);
+    const TrustedImm32 surrogateTagMask = TrustedImm32(0xfffffc00);
+#define HAVE_INITIAL_START_REG
+#define JIT_UNICODE_EXPRESSIONS
+#endif
+
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+    struct ParenContextSizes {
+        size_t m_numSubpatterns;
+        size_t m_frameSlots;
+
+        ParenContextSizes(size_t numSubpatterns, size_t frameSlots)
+            : m_numSubpatterns(numSubpatterns)
+            , m_frameSlots(frameSlots)
+        {
+        }
+
+        size_t numSubpatterns() { return m_numSubpatterns; }
+
+        size_t frameSlots() { return m_frameSlots; }
+    };
+
+    struct ParenContext {
+        struct ParenContext* next;
+        uint32_t begin;
+        uint32_t matchAmount;
+        uintptr_t returnAddress;
+        struct Subpatterns {
+            unsigned start;
+            unsigned end;
+        } subpatterns[0];
+        uintptr_t frameSlots[0];
+
+        static size_t sizeFor(ParenContextSizes& parenContextSizes)
+        {
+            return sizeof(ParenContext) + sizeof(Subpatterns) * parenContextSizes.numSubpatterns() + sizeof(uintptr_t) * parenContextSizes.frameSlots();
+        }
+
+        static ptrdiff_t nextOffset()
+        {
+            return offsetof(ParenContext, next);
+        }
+
+        static ptrdiff_t beginOffset()
+        {
+            return offsetof(ParenContext, begin);
+        }
+
+        static ptrdiff_t matchAmountOffset()
+        {
+            return offsetof(ParenContext, matchAmount);
+        }
+
+        static ptrdiff_t returnAddressOffset()
+        {
+            return offsetof(ParenContext, returnAddress);
+        }
+
+        static ptrdiff_t subpatternOffset(size_t subpattern)
+        {
+            return offsetof(ParenContext, subpatterns) + (subpattern - 1) * sizeof(Subpatterns);
+        }
+
+        static ptrdiff_t savedFrameOffset(ParenContextSizes& parenContextSizes)
+        {
+            return offsetof(ParenContext, subpatterns) + (parenContextSizes.numSubpatterns()) * sizeof(Subpatterns);
+        }
+    };
+
+    void initParenContextFreeList()
+    {
+        RegisterID parenContextPointer = regT0;
+        RegisterID nextParenContextPointer = regT2;
+
+        size_t parenContextSize = ParenContext::sizeFor(m_parenContextSizes);
+
+        parenContextSize = WTF::roundUpToMultipleOf<sizeof(uintptr_t)>(parenContextSize);
+
+        // Check that the paren context is a reasonable size.
+        if (parenContextSize > INT16_MAX)
+            m_abortExecution.append(jump());
+
+        Jump emptyFreeList = branchTestPtr(Zero, freelistRegister);
+        move(freelistRegister, parenContextPointer);
+        addPtr(TrustedImm32(parenContextSize), freelistRegister, nextParenContextPointer);
+        addPtr(freelistRegister, freelistSizeRegister);
+        subPtr(TrustedImm32(parenContextSize), freelistSizeRegister);
+
+        Label loopTop(this);
+        Jump initDone = branchPtr(Above, nextParenContextPointer, freelistSizeRegister);
+        storePtr(nextParenContextPointer, Address(parenContextPointer, ParenContext::nextOffset()));
+        move(nextParenContextPointer, parenContextPointer);
+        addPtr(TrustedImm32(parenContextSize), parenContextPointer, nextParenContextPointer);
+        jump(loopTop);
+
+        initDone.link(this);
+        storePtr(TrustedImmPtr(nullptr), Address(parenContextPointer, ParenContext::nextOffset()));
+        emptyFreeList.link(this);
+    }
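
The code above carves the caller-supplied buffer (freelistRegister / freelistSizeRegister) into a singly linked free list of fixed-size ParenContext records. A rough standalone C++ sketch of the same carving, with plain pointers and assumed names (not part of this patch):

    #include <cstddef>
    #include <cstdint>

    struct FreeListNode { FreeListNode* next; /* ParenContext payload follows */ };

    // Split [buffer, buffer + bufferSize) into nodeSize-byte records and chain them,
    // mirroring initParenContextFreeList(). Assumes a non-null buffer holds at
    // least one record, as the generated code does.
    FreeListNode* buildParenContextFreeList(void* buffer, size_t bufferSize, size_t nodeSize)
    {
        if (!buffer)
            return nullptr;
        uint8_t* cursor = static_cast<uint8_t*>(buffer);
        uint8_t* end = cursor + bufferSize;
        FreeListNode* head = reinterpret_cast<FreeListNode*>(cursor);
        FreeListNode* current = head;
        while (cursor + 2 * nodeSize <= end) {  // the next record still fits entirely
            cursor += nodeSize;
            current->next = reinterpret_cast<FreeListNode*>(cursor);
            current = current->next;
        }
        current->next = nullptr;                // last usable record ends the list
        return head;
    }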
+
+    void allocateParenContext(RegisterID result)
+    {
+        m_abortExecution.append(branchTestPtr(Zero, freelistRegister));
+        sub32(TrustedImm32(1), remainingMatchCount);
+        m_hitMatchLimit.append(branchTestPtr(Zero, remainingMatchCount));
+        move(freelistRegister, result);
+        loadPtr(Address(freelistRegister, ParenContext::nextOffset()), freelistRegister);
+    }
+
+    void freeParenContext(RegisterID headPtrRegister, RegisterID newHeadPtrRegister)
+    {
+        loadPtr(Address(headPtrRegister, ParenContext::nextOffset()), newHeadPtrRegister);
+        storePtr(freelistRegister, Address(headPtrRegister, ParenContext::nextOffset()));
+        move(headPtrRegister, freelistRegister);
+    }
+
+    void saveParenContext(RegisterID parenContextReg, RegisterID tempReg, unsigned firstSubpattern, unsigned lastSubpattern, unsigned subpatternBaseFrameLocation)
+    {
+        store32(index, Address(parenContextReg, ParenContext::beginOffset()));
+        loadFromFrame(subpatternBaseFrameLocation + BackTrackInfoParentheses::matchAmountIndex(), tempReg);
+        store32(tempReg, Address(parenContextReg, ParenContext::matchAmountOffset()));
+        loadFromFrame(subpatternBaseFrameLocation + BackTrackInfoParentheses::returnAddressIndex(), tempReg);
+        storePtr(tempReg, Address(parenContextReg, ParenContext::returnAddressOffset()));
+        if (compileMode == IncludeSubpatterns) {
+            for (unsigned subpattern = firstSubpattern; subpattern <= lastSubpattern; subpattern++) {
+                loadPtr(Address(output, (subpattern << 1) * sizeof(unsigned)), tempReg);
+                storePtr(tempReg, Address(parenContextReg, ParenContext::subpatternOffset(subpattern)));
+                clearSubpatternStart(subpattern);
+            }
+        }
+        subpatternBaseFrameLocation += YarrStackSpaceForBackTrackInfoParentheses;
+        for (unsigned frameLocation = subpatternBaseFrameLocation; frameLocation < m_parenContextSizes.frameSlots(); frameLocation++) {
+            loadFromFrame(frameLocation, tempReg);
+            storePtr(tempReg, Address(parenContextReg, ParenContext::savedFrameOffset(m_parenContextSizes) + frameLocation * sizeof(uintptr_t)));
+        }
+    }
+
+    void restoreParenContext(RegisterID parenContextReg, RegisterID tempReg, unsigned firstSubpattern, unsigned lastSubpattern, unsigned subpatternBaseFrameLocation)
+    {
+        load32(Address(parenContextReg, ParenContext::beginOffset()), index);
+        storeToFrame(index, subpatternBaseFrameLocation + BackTrackInfoParentheses::beginIndex());
+        load32(Address(parenContextReg, ParenContext::matchAmountOffset()), tempReg);
+        storeToFrame(tempReg, subpatternBaseFrameLocation + BackTrackInfoParentheses::matchAmountIndex());
+        loadPtr(Address(parenContextReg, ParenContext::returnAddressOffset()), tempReg);
+        storeToFrame(tempReg, subpatternBaseFrameLocation + BackTrackInfoParentheses::returnAddressIndex());
+        if (compileMode == IncludeSubpatterns) {
+            for (unsigned subpattern = firstSubpattern; subpattern <= lastSubpattern; subpattern++) {
+                loadPtr(Address(parenContextReg, ParenContext::subpatternOffset(subpattern)), tempReg);
+                storePtr(tempReg, Address(output, (subpattern << 1) * sizeof(unsigned)));
+            }
+        }
+        subpatternBaseFrameLocation += YarrStackSpaceForBackTrackInfoParentheses;
+        for (unsigned frameLocation = subpatternBaseFrameLocation; frameLocation < m_parenContextSizes.frameSlots(); frameLocation++) {
+            loadPtr(Address(parenContextReg, ParenContext::savedFrameOffset(m_parenContextSizes) + frameLocation * sizeof(uintptr_t)), tempReg);
+            storeToFrame(tempReg, frameLocation);
+        }
+    }
 #endif
 
     void optimizeAlternative(PatternAlternative* alternative)
@@ -100,8 +318,10 @@ class YarrGenerator : private MacroAssembler {
             PatternTerm& term = alternative->m_terms[i];
             PatternTerm& nextTerm = alternative->m_terms[i + 1];
 
+            // We can move BMP-only character classes after fixed character terms.
             if ((term.type == PatternTerm::TypeCharacterClass)
                 && (term.quantityType == QuantifierFixedCount)
+                && (!m_decodeSurrogatePairs || !term.characterClass->m_hasNonBMPCharacters)
                 && (nextTerm.type == PatternTerm::TypePatternCharacter)
                 && (nextTerm.quantityType == QuantifierFixedCount)) {
                 PatternTerm termCopy = term;
@@ -111,7 +331,7 @@ class YarrGenerator : private MacroAssembler {
         }
     }
 
-    void matchCharacterClassRange(RegisterID character, JumpList& failures, JumpList& matchDest, const CharacterRange* ranges, unsigned count, unsigned* matchIndex, const UChar* matches, unsigned matchCount)
+    void matchCharacterClassRange(RegisterID character, JumpList& failures, JumpList& matchDest, const CharacterRange* ranges, unsigned count, unsigned* matchIndex, const UChar32* matches, unsigned matchCount)
     {
         do {
             // pick which range we're going to generate
@@ -160,26 +380,28 @@ class YarrGenerator : private MacroAssembler {
 
     void matchCharacterClass(RegisterID character, JumpList& matchDest, const CharacterClass* charClass)
     {
-        if (charClass->m_table) {
-            ExtendedAddress tableEntry(character, reinterpret_cast<intptr_t>(charClass->m_table->m_table));
-            matchDest.append(branchTest8(charClass->m_table->m_inverted ? Zero : NonZero, tableEntry));
+        if (charClass->m_table && !m_decodeSurrogatePairs) {
+            ExtendedAddress tableEntry(character, reinterpret_cast<intptr_t>(charClass->m_table));
+            matchDest.append(branchTest8(charClass->m_tableInverted ? Zero : NonZero, tableEntry));
             return;
         }
-        Jump unicodeFail;
+        JumpList unicodeFail;
         if (charClass->m_matchesUnicode.size() || charClass->m_rangesUnicode.size()) {
-            Jump isAscii = branch32(LessThanOrEqual, character, TrustedImm32(0x7f));
+            JumpList isAscii;
+            if (charClass->m_matches.size() || charClass->m_ranges.size())
+                isAscii.append(branch32(LessThanOrEqual, character, TrustedImm32(0x7f)));
 
             if (charClass->m_matchesUnicode.size()) {
                 for (unsigned i = 0; i < charClass->m_matchesUnicode.size(); ++i) {
-                    UChar ch = charClass->m_matchesUnicode[i];
+                    UChar32 ch = charClass->m_matchesUnicode[i];
                     matchDest.append(branch32(Equal, character, Imm32(ch)));
                 }
             }
 
             if (charClass->m_rangesUnicode.size()) {
                 for (unsigned i = 0; i < charClass->m_rangesUnicode.size(); ++i) {
-                    UChar lo = charClass->m_rangesUnicode[i].begin;
-                    UChar hi = charClass->m_rangesUnicode[i].end;
+                    UChar32 lo = charClass->m_rangesUnicode[i].begin;
+                    UChar32 hi = charClass->m_rangesUnicode[i].end;
 
                     Jump below = branch32(LessThan, character, Imm32(lo));
                     matchDest.append(branch32(LessThanOrEqual, character, Imm32(hi)));
@@ -187,7 +409,8 @@ class YarrGenerator : private MacroAssembler {
                 }
             }
 
-            unicodeFail = jump();
+            if (charClass->m_matches.size() || charClass->m_ranges.size())
+                unicodeFail = jump();
             isAscii.link(this);
         }
 
@@ -205,7 +428,7 @@ class YarrGenerator : private MacroAssembler {
 
             for (unsigned i = 0; i < charClass->m_matches.size(); ++i) {
                 char ch = charClass->m_matches[i];
-                if (m_pattern.m_ignoreCase) {
+                if (m_pattern.ignoreCase()) {
                     if (isASCIILower(ch)) {
                         matchesAZaz.append(ch);
                         continue;
@@ -228,9 +451,10 @@ class YarrGenerator : private MacroAssembler {
     }
 
     // Jumps if input not available; will have (incorrectly) incremented already!
-    Jump jumpIfNoAvailableInput(unsigned countToCheck)
+    Jump jumpIfNoAvailableInput(unsigned countToCheck = 0)
     {
-        add32(Imm32(countToCheck), index);
+        if (countToCheck)
+            add32(Imm32(countToCheck), index);
         return branch32(Above, index, length);
     }
 
@@ -255,21 +479,102 @@ class YarrGenerator : private MacroAssembler {
         return branch32(NotEqual, index, length);
     }
 
-    Jump jumpIfCharEquals(UChar ch, int inputPosition)
+    BaseIndex negativeOffsetIndexedAddress(Checked<unsigned> negativeCharacterOffset, RegisterID tempReg, RegisterID indexReg = index)
+    {
+        RegisterID base = input;
+
+        // BaseIndex() addressing can take an int32_t offset. Given that we can have a regular
+        // expression that has unsigned character offsets, BaseIndex's signed offset is insufficient
+        // for addressing in extreme cases where we might underflow. Therefore we check to see if
+        // negativeCharacterOffset will underflow directly or after converting for 16-bit characters.
+        // If so, we do our own address calculation by adjusting the base, using the result register
+        // as a temp address register.
+        unsigned maximumNegativeOffsetForCharacterSize = m_charSize == Char8 ? 0x7fffffff : 0x3fffffff;
+        unsigned offsetAdjustAmount = 0x40000000;
+        if (negativeCharacterOffset.unsafeGet() > maximumNegativeOffsetForCharacterSize) {
+            base = tempReg;
+            move(input, base);
+            while (negativeCharacterOffset.unsafeGet() > maximumNegativeOffsetForCharacterSize) {
+                subPtr(TrustedImm32(offsetAdjustAmount), base);
+                if (m_charSize != Char8)
+                    subPtr(TrustedImm32(offsetAdjustAmount), base);
+                negativeCharacterOffset -= offsetAdjustAmount;
+            }
+        }
+
+        Checked<int32_t> characterOffset(-static_cast<int32_t>(negativeCharacterOffset.unsafeGet()));
+
+        if (m_charSize == Char8)
+            return BaseIndex(input, indexReg, TimesOne, (characterOffset * static_cast<int32_t>(sizeof(char))).unsafeGet());
+
+        return BaseIndex(input, indexReg, TimesTwo, (characterOffset * static_cast<int32_t>(sizeof(UChar))).unsafeGet());
+    }
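
In plain C++ terms, the helper above computes &input[index - negativeCharacterOffset] while keeping the final BaseIndex displacement within int32_t range; when the offset is too large, the base pointer is pre-adjusted in 0x40000000-character steps first. A sketch with assumed names (illustration only, not the patch's code):

    #include <cstdint>

    // charSize is 1 for 8-bit strings, 2 for 16-bit strings.
    const uint8_t* negativeOffsetAddress(const uint8_t* input, uint64_t index,
                                         uint64_t negativeCharacterOffset, unsigned charSize)
    {
        const uint64_t maxOffset = charSize == 1 ? 0x7fffffffu : 0x3fffffffu;
        const uint64_t adjust = 0x40000000u;
        const uint8_t* base = input;
        while (negativeCharacterOffset > maxOffset) {
            base -= adjust * charSize;               // walk the base back first
            negativeCharacterOffset -= adjust;
        }
        int32_t displacement = -static_cast<int32_t>(negativeCharacterOffset)
            * static_cast<int32_t>(charSize);        // now guaranteed to fit
        return base + index * charSize + displacement;
    }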
+
+#ifdef JIT_UNICODE_EXPRESSIONS
+    void tryReadUnicodeCharImpl(RegisterID resultReg)
     {
-        return branch16(Equal, BaseIndex(input, index, TimesTwo, inputPosition * sizeof(UChar)), Imm32(ch));
+        ASSERT(m_charSize == Char16);
+
+        JumpList notUnicode;
+        load16Unaligned(regUnicodeInputAndTrail, resultReg);
+        and32(surrogateTagMask, resultReg, regT2);
+        notUnicode.append(branch32(NotEqual, regT2, leadingSurrogateTag));
+        addPtr(TrustedImm32(2), regUnicodeInputAndTrail);
+        getEffectiveAddress(BaseIndex(input, length, TimesTwo), regT2);
+        notUnicode.append(branch32(AboveOrEqual, regUnicodeInputAndTrail, regT2));
+        load16Unaligned(Address(regUnicodeInputAndTrail), regUnicodeInputAndTrail);
+        and32(surrogateTagMask, regUnicodeInputAndTrail, regT2);
+        notUnicode.append(branch32(NotEqual, regT2, trailingSurrogateTag));
+        sub32(leadingSurrogateTag, resultReg);
+        sub32(trailingSurrogateTag, regUnicodeInputAndTrail);
+        lshift32(TrustedImm32(10), resultReg);
+        or32(regUnicodeInputAndTrail, resultReg);
+        add32(supplementaryPlanesBase, resultReg);
+        notUnicode.link(this);
     }
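
The same surrogate-pair decoding in scalar C++: a lead surrogate (tag 0xd800) followed by a trail surrogate (tag 0xdc00) combines into a supplementary-plane code point, and anything else falls through with the lead unit unchanged, matching the notUnicode exits above. Sketch only; the signature is assumed:

    #include <cstdint>

    // `cursor` points at the unit to decode; `end` is one past the last input unit.
    uint32_t tryReadUnicodeChar(const uint16_t* cursor, const uint16_t* end)
    {
        uint32_t lead = *cursor;
        if ((lead & 0xfffffc00) != 0xd800)   // not a leading surrogate
            return lead;
        if (cursor + 1 >= end)               // no room for a trailing unit
            return lead;
        uint32_t trail = cursor[1];
        if ((trail & 0xfffffc00) != 0xdc00)  // not a trailing surrogate
            return lead;
        return (((lead - 0xd800) << 10) | (trail - 0xdc00)) + 0x10000;
    }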
 
-    Jump jumpIfCharNotEquals(UChar ch, int inputPosition)
+    void tryReadUnicodeChar(BaseIndex address, RegisterID resultReg)
     {
-        return branch16(NotEqual, BaseIndex(input, index, TimesTwo, inputPosition * sizeof(UChar)), Imm32(ch));
+        ASSERT(m_charSize == Char16);
+
+        getEffectiveAddress(address, regUnicodeInputAndTrail);
+
+        if (resultReg == regT0)
+            m_tryReadUnicodeCharacterCalls.append(nearCall());
+        else
+            tryReadUnicodeCharImpl(resultReg);
     }
+#endif
 
-    void readCharacter(int inputPosition, RegisterID reg)
+    void readCharacter(Checked<unsigned> negativeCharacterOffset, RegisterID resultReg, RegisterID indexReg = index)
     {
-        load16(BaseIndex(input, index, TimesTwo, inputPosition * sizeof(UChar)), reg);
+        BaseIndex address = negativeOffsetIndexedAddress(negativeCharacterOffset, resultReg, indexReg);
+
+        if (m_charSize == Char8)
+            load8(address, resultReg);
+#ifdef JIT_UNICODE_EXPRESSIONS
+        else if (m_decodeSurrogatePairs)
+            tryReadUnicodeChar(address, resultReg);
+#endif
+        else
+            load16Unaligned(address, resultReg);
     }
 
+    Jump jumpIfCharNotEquals(UChar32 ch, Checked<unsigned> negativeCharacterOffset, RegisterID character)
+    {
+        readCharacter(negativeCharacterOffset, character);
+
+        // For case-insensitive compares, non-ASCII characters that have different
+        // upper & lower case representations are converted to a character class.
+        ASSERT(!m_pattern.ignoreCase() || isASCIIAlpha(ch) || isCanonicallyUnique(ch, m_canonicalMode));
+        if (m_pattern.ignoreCase() && isASCIIAlpha(ch)) {
+            or32(TrustedImm32(0x20), character);
+            ch |= 0x20;
+        }
+
+        return branch32(NotEqual, character, Imm32(ch));
+    }
+    
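
The OR-with-0x20 trick above folds an ASCII letter to lower case on both sides of the comparison; it is safe only because, per the ASSERT, any non-ASCII character with distinct case forms was already turned into a character class. A scalar equivalent (illustrative names):

    // Compare a pattern character with an input character, folding ASCII case
    // the same way jumpIfCharNotEquals() does.
    bool patternCharMatches(unsigned ch, unsigned inputChar, bool ignoreCase)
    {
        bool asciiAlpha = (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z');
        if (ignoreCase && asciiAlpha) {
            inputChar |= 0x20;   // 'A'..'Z' -> 'a'..'z'; lower case is unchanged
            ch |= 0x20;
        }
        return inputChar == ch;
    }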
     void storeToFrame(RegisterID reg, unsigned frameLocation)
     {
         poke(reg, frameLocation);
@@ -280,9 +585,16 @@ class YarrGenerator : private MacroAssembler {
         poke(imm, frameLocation);
     }
 
+#if CPU(ARM64) || CPU(X86_64)
+    void storeToFrame(TrustedImmPtr imm, unsigned frameLocation)
+    {
+        poke(imm, frameLocation);
+    }
+#endif
+
     DataLabelPtr storeToFrameWithPatch(unsigned frameLocation)
     {
-        return storePtrWithPatch(TrustedImmPtr(0), Address(stackPointerRegister, frameLocation * sizeof(void*)));
+        return storePtrWithPatch(TrustedImmPtr(nullptr), Address(stackPointerRegister, frameLocation * sizeof(void*)));
     }
 
     void loadFromFrame(unsigned frameLocation, RegisterID reg)
@@ -292,1873 +604,2692 @@ class YarrGenerator : private MacroAssembler {
 
     void loadFromFrameAndJump(unsigned frameLocation)
     {
-        jump(Address(stackPointerRegister, frameLocation * sizeof(void*)));
+        jump(Address(stackPointerRegister, frameLocation * sizeof(void*)), ptrTag(YarrBacktrackPtrTag, &m_codeBlock));
     }
 
-    struct IndirectJumpEntry {
-        IndirectJumpEntry(int32_t stackOffset)
-            : m_stackOffset(stackOffset)
-        {
-        }
+    unsigned alignCallFrameSizeInBytes(unsigned callFrameSize)
+    {
+        if (!callFrameSize)
+            return 0;
+
+        callFrameSize *= sizeof(void*);
+        if (callFrameSize / sizeof(void*) != m_pattern.m_body->m_callFrameSize)
+            CRASH();
+        callFrameSize = (callFrameSize + 0x3f) & ~0x3f;
+        return callFrameSize;
+    }
+    void initCallFrame()
+    {
+        unsigned callFrameSizeInBytes = alignCallFrameSizeInBytes(m_pattern.m_body->m_callFrameSize);
+        if (callFrameSizeInBytes) {
+#if CPU(X86_64) || CPU(ARM64)
+            if (Options::zeroStackFrame()) {
+                // We need to start from the stack pointer, because we could have spilled callee saves
+                move(stackPointerRegister, regT0);
+                subPtr(Imm32(callFrameSizeInBytes), stackPointerRegister);
+                if (callFrameSizeInBytes <= 128) {
+                    for (unsigned offset = 0; offset < callFrameSizeInBytes; offset += sizeof(intptr_t))
+                        storePtr(TrustedImm32(0), Address(regT0, -8 - offset));
+                } else {
+                    Label zeroLoop = label();
+                    subPtr(TrustedImm32(sizeof(intptr_t) * 2), regT0);
+#if CPU(ARM64)
+                    storePair64(ARM64Registers::zr, ARM64Registers::zr, regT0);
+#else
+                    storePtr(TrustedImm32(0), Address(regT0));
+                    storePtr(TrustedImm32(0), Address(regT0, sizeof(intptr_t)));
+#endif
+                    branchPtr(NotEqual, regT0, stackPointerRegister).linkTo(zeroLoop, this);
+                }
+            } else
+#endif
+                subPtr(Imm32(callFrameSizeInBytes), stackPointerRegister);
 
-        IndirectJumpEntry(int32_t stackOffset, Jump jump)
-            : m_stackOffset(stackOffset)
-        {
-            addJump(jump);
         }
+    }
+    void removeCallFrame()
+    {
+        unsigned callFrameSizeInBytes = alignCallFrameSizeInBytes(m_pattern.m_body->m_callFrameSize);
+        if (callFrameSizeInBytes)
+            addPtr(Imm32(callFrameSizeInBytes), stackPointerRegister);
+    }
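
alignCallFrameSizeInBytes() converts the pattern's frame-slot count into bytes and rounds up to a 64-byte boundary, with a CRASH() on multiplication overflow; initCallFrame()/removeCallFrame() then move the stack pointer by that amount. The rounding in isolation (sketch, assumed free function):

    #include <cstddef>

    // Frame size in pointer-sized slots -> byte size rounded up to 64 bytes.
    size_t alignCallFrameSizeInBytes(size_t frameSlots)
    {
        if (!frameSlots)
            return 0;
        size_t bytes = frameSlots * sizeof(void*);
        return (bytes + 0x3f) & ~static_cast<size_t>(0x3f);
    }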
 
-        IndirectJumpEntry(int32_t stackOffset, DataLabelPtr dataLabel)
-        : m_stackOffset(stackOffset)
-        {
-            addDataLabel(dataLabel);
-        }
+    void generateFailReturn()
+    {
+        move(TrustedImmPtr((void*)WTF::notFound), returnRegister);
+        move(TrustedImm32(0), returnRegister2);
+        generateReturn();
+    }
 
-        void addJump(Jump jump)
-        {
-            m_relJumps.append(jump);
+    void generateJITFailReturn()
+    {
+        if (m_abortExecution.empty() && m_hitMatchLimit.empty())
+            return;
+
+        JumpList finishExiting;
+        if (!m_abortExecution.empty()) {
+            m_abortExecution.link(this);
+            move(TrustedImmPtr((void*)static_cast<size_t>(-2)), returnRegister);
+            finishExiting.append(jump());
         }
-        
-        void addDataLabel(DataLabelPtr dataLabel)
-        {
-            m_dataLabelPtrVector.append(dataLabel);
+
+        if (!m_hitMatchLimit.empty()) {
+            m_hitMatchLimit.link(this);
+            move(TrustedImmPtr((void*)static_cast<size_t>(-1)), returnRegister);
         }
 
-        int32_t m_stackOffset;
-        JumpList m_relJumps;
-        Vector<DataLabelPtr, 16> m_dataLabelPtrVector;
-    };
+        finishExiting.link(this);
+        removeCallFrame();
+        move(TrustedImm32(0), returnRegister2);
+        generateReturn();
+    }
 
-    struct AlternativeBacktrackRecord {
-        DataLabelPtr dataLabel;
-        Label backtrackLocation;
+    // Used to record subpatterns, should only be called if compileMode is IncludeSubpatterns.
+    void setSubpatternStart(RegisterID reg, unsigned subpattern)
+    {
+        ASSERT(subpattern);
+        // FIXME: should be able to ASSERT(compileMode == IncludeSubpatterns), but then this function is conditionally NORETURN. :-(
+        store32(reg, Address(output, (subpattern << 1) * sizeof(int)));
+    }
+    void setSubpatternEnd(RegisterID reg, unsigned subpattern)
+    {
+        ASSERT(subpattern);
+        // FIXME: should be able to ASSERT(compileMode == IncludeSubpatterns), but then this function is conditionally NORETURN. :-(
+        store32(reg, Address(output, ((subpattern << 1) + 1) * sizeof(int)));
+    }
+    void clearSubpatternStart(unsigned subpattern)
+    {
+        ASSERT(subpattern);
+        // FIXME: should be able to ASSERT(compileMode == IncludeSubpatterns), but then this function is conditionally NORETURN. :-(
+        store32(TrustedImm32(-1), Address(output, (subpattern << 1) * sizeof(int)));
+    }
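
These helpers assume the output vector is laid out as consecutive (start, end) int pairs indexed by subpattern number, which is why the stores address slots (subpattern << 1) and (subpattern << 1) + 1. A plain-array illustration of that layout (assumed, matching the stores above):

    #include <cstdint>

    // output[2 * n]     holds the start index of subpattern n (-1 when cleared),
    // output[2 * n + 1] holds the end index of subpattern n.
    void setSubpatternRange(int32_t* output, unsigned subpattern, int32_t start, int32_t end)
    {
        output[subpattern << 1] = start;
        output[(subpattern << 1) + 1] = end;
    }

    void clearSubpatternStart(int32_t* output, unsigned subpattern)
    {
        output[subpattern << 1] = -1;   // a start of -1 marks "did not match"
    }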
 
-        AlternativeBacktrackRecord(DataLabelPtr dataLabel, Label backtrackLocation)
-            : dataLabel(dataLabel)
-            , backtrackLocation(backtrackLocation)
-        {
-        }
-    };
+    void clearMatches(unsigned subpattern, unsigned lastSubpattern)
+    {
+        for (; subpattern <= lastSubpattern; subpattern++)
+            clearSubpatternStart(subpattern);
+    }
 
-    struct ParenthesesTail;
-    struct TermGenerationState;
+    // We use one of three different strategies to track the start of the current match,
+    // while matching.
+    // 1) If the pattern has a fixed size, do nothing! - we calculate the value lazily
+    //    at the end of matching. This is irrespective of compileMode, and in this case
+    //    these methods should never be called.
+    // 2) If we're compiling IncludeSubpatterns, 'output' contains a pointer to an output
+    //    vector, store the match start in the output vector.
+    // 3) If we're compiling MatchOnly, 'output' is unused, store the match start directly
+    //    in this register.
+    void setMatchStart(RegisterID reg)
+    {
+        ASSERT(!m_pattern.m_body->m_hasFixedSize);
+        if (compileMode == IncludeSubpatterns)
+            store32(reg, output);
+        else
+            move(reg, output);
+    }
+    void getMatchStart(RegisterID reg)
+    {
+        ASSERT(!m_pattern.m_body->m_hasFixedSize);
+        if (compileMode == IncludeSubpatterns)
+            load32(output, reg);
+        else
+            move(output, reg);
+    }
 
-    struct GenerationState {
-        typedef HashMap<int, IndirectJumpEntry*, WTF::IntHash<uint32_t>, UnsignedWithZeroKeyHashTraits<uint32_t> > IndirectJumpHashMap;
+    enum YarrOpCode {
+        // These nodes wrap body alternatives - those in the main disjunction,
+        // rather than subpatterns or assertions. These are chained together in
+        // a doubly linked list, with a 'begin' node for the first alternative,
+        // a 'next' node for each subsequent alternative, and an 'end' node at
+        // the end. In the case of repeating alternatives, the 'end' node also
+        // has a reference back to 'begin'.
+        OpBodyAlternativeBegin,
+        OpBodyAlternativeNext,
+        OpBodyAlternativeEnd,
+        // Similar to the body alternatives, but used for subpatterns with two
+        // or more alternatives.
+        OpNestedAlternativeBegin,
+        OpNestedAlternativeNext,
+        OpNestedAlternativeEnd,
+        // Used for alternatives in subpatterns where there is only a single
+        // alternative (backtracking is easier in these cases), or for alternatives
+        // which never need to be backtracked (those in parenthetical assertions,
+        // terminal subpatterns).
+        OpSimpleNestedAlternativeBegin,
+        OpSimpleNestedAlternativeNext,
+        OpSimpleNestedAlternativeEnd,
+        // Used to wrap 'Once' subpattern matches (quantityMaxCount == 1).
+        OpParenthesesSubpatternOnceBegin,
+        OpParenthesesSubpatternOnceEnd,
+        // Used to wrap 'Terminal' subpattern matches (at the end of the regexp).
+        OpParenthesesSubpatternTerminalBegin,
+        OpParenthesesSubpatternTerminalEnd,
+        // Used to wrap generic captured matches
+        OpParenthesesSubpatternBegin,
+        OpParenthesesSubpatternEnd,
+        // Used to wrap parenthetical assertions.
+        OpParentheticalAssertionBegin,
+        OpParentheticalAssertionEnd,
+        // Wraps all simple terms (pattern characters, character classes).
+        OpTerm,
+        // Where an expression contains only 'once through' body alternatives
+        // and no repeating ones, this op is used to return match failure.
+        OpMatchFailed
+    };
 
-        GenerationState()
-            : m_parenNestingLevel(0)
+    // This structure is used to hold the compiled opcode information,
+    // including reference back to the original PatternTerm/PatternAlternatives,
+    // and JIT compilation data structures.
+    struct YarrOp {
+        explicit YarrOp(PatternTerm* term)
+            : m_op(OpTerm)
+            , m_term(term)
+            , m_isDeadCode(false)
         {
         }
 
-        void addIndirectJumpEntry(int32_t stackOffset, Jump jump)
+        explicit YarrOp(YarrOpCode op)
+            : m_op(op)
+            , m_isDeadCode(false)
         {
-            IndirectJumpHashMap::iterator result = m_indirectJumpMap.find(stackOffset);
-
-            ASSERT(stackOffset >= 0);
+        }
 
-            uint32_t offset = static_cast<uint32_t>(stackOffset);
+        // The operation, as a YarrOpCode, and also a reference to the PatternTerm.
+        YarrOpCode m_op;
+        PatternTerm* m_term;
 
-            if (result == m_indirectJumpMap.end())
-                m_indirectJumpMap.add(offset, new IndirectJumpEntry(stackOffset, jump));
-            else
-                result->second->addJump(jump);
-        }
+        // For alternatives, this holds the PatternAlternative and doubly linked
+        // references to this alternative's siblings. In the case of the
+        // OpBodyAlternativeEnd node at the end of a section of repeating nodes,
+        // m_nextOp will reference the OpBodyAlternativeBegin node of the first
+        // repeating alternative.
+        PatternAlternative* m_alternative;
+        size_t m_previousOp;
+        size_t m_nextOp;
 
-        void addIndirectJumpEntry(int32_t stackOffset, JumpList jumps)
-        {
-            JumpList::JumpVector jumpVector = jumps.jumps();
-            size_t size = jumpVector.size();
-            for (size_t i = 0; i < size; ++i)
-                addIndirectJumpEntry(stackOffset, jumpVector[i]);
+        // Used to record a set of Jumps out of the generated code, typically
+        // used for jumps out to backtracking code, and a single reentry back
+        // into the code for a node (likely where a backtrack will trigger
+        // rematching).
+        Label m_reentry;
+        JumpList m_jumps;
 
-            jumps.empty();
-        }
+        // Used for backtracking when the prior alternative did not consume any
+        // characters but matched.
+        Jump m_zeroLengthMatch;
 
-        void addIndirectJumpEntry(int32_t stackOffset, DataLabelPtr dataLabel)
-        {
-            IndirectJumpHashMap::iterator result = m_indirectJumpMap.find(stackOffset);
+        // This flag is used to null out the second pattern character, when
+        // two are fused to match a pair together.
+        bool m_isDeadCode;
 
-            ASSERT(stackOffset >= 0);
+        // Currently used in the case of some of the more complex management of
+        // 'm_checkedOffset', to cache the offset used in this alternative, to avoid
+        // recalculating it.
+        Checked<unsigned> m_checkAdjust;
 
-            uint32_t offset = static_cast<uint32_t>(stackOffset);
+        // Used by OpNestedAlternativeNext/End to hold the pointer to the
+        // value that will be pushed into the pattern's frame to return to,
+        // upon backtracking back into the disjunction.
+        DataLabelPtr m_returnAddress;
+    };
 
-            if (result == m_indirectJumpMap.end())
-                m_indirectJumpMap.add(offset, new IndirectJumpEntry(stackOffset, dataLabel));
-            else
-                result->second->addDataLabel(dataLabel);
+    // BacktrackingState
+    // This class encapsulates information about the state of code generation
+    // whilst generating the code for backtracking, when a term fails to match.
+    // Upon entry to code generation of the backtracking code for a given node,
+    // the Backtracking state will hold references to all control flow sources
+    // that are outputs in need of further backtracking from the prior node
+    // generated (which is the subsequent operation in the regular expression,
+    // and in the m_ops Vector, since we generated backtracking backwards).
+    // These references to control flow take the form of:
+    //  - A jump list of jumps, to be linked to code that will backtrack them
+    //    further.
+    //  - A set of DataLabelPtr values, to be populated with values to be
+    //    treated effectively as return addresses backtracking into complex
+    //    subpatterns.
+    //  - A flag indicating that the current sequence of generated code up to
+    //    this point requires backtracking.
+    class BacktrackingState {
+    public:
+        BacktrackingState()
+            : m_pendingFallthrough(false)
+        {
         }
 
-        void emitIndirectJumpTable(MacroAssembler* masm)
+        // Add a jump or jumps, a return address, or set the flag indicating
+        // that the current 'fallthrough' control flow requires backtracking.
+        void append(const Jump& jump)
         {
-            for (IndirectJumpHashMap::iterator iter = m_indirectJumpMap.begin(); iter != m_indirectJumpMap.end(); ++iter) {
-                IndirectJumpEntry* indJumpEntry = iter->second;
-                size_t size = indJumpEntry->m_dataLabelPtrVector.size();
-                if (size) {
-                    // Link any associated DataLabelPtr's with indirect jump via label
-                    Label hereLabel = masm->label();
-                    for (size_t i = 0; i < size; ++i)
-                        m_backtrackRecords.append(AlternativeBacktrackRecord(indJumpEntry->m_dataLabelPtrVector[i], hereLabel));
-                }
-                indJumpEntry->m_relJumps.link(masm);
-                masm->jump(Address(stackPointerRegister, indJumpEntry->m_stackOffset));
-                delete indJumpEntry;
-            }
+            m_laterFailures.append(jump);
         }
-
-        void incrementParenNestingLevel()
+        void append(JumpList& jumpList)
         {
-            ++m_parenNestingLevel;
+            m_laterFailures.append(jumpList);
         }
-
-        void decrementParenNestingLevel()
+        void append(const DataLabelPtr& returnAddress)
         {
-            --m_parenNestingLevel;
+            m_pendingReturns.append(returnAddress);
         }
-
-        ParenthesesTail* addParenthesesTail(PatternTerm& term, JumpList* jumpListToPriorParen)
+        void fallthrough()
         {
-            OwnPtr<ParenthesesTail> tail = adoptPtr(new ParenthesesTail(term, m_parenNestingLevel, jumpListToPriorParen));
-            ParenthesesTail* rawTail = tail.get();
-
-            m_parenTails.append(tail.release());
-            m_parenTailsForIteration.append(rawTail);
-
-            return rawTail;
+            ASSERT(!m_pendingFallthrough);
+            m_pendingFallthrough = true;
         }
 
-        void emitParenthesesTail(YarrGenerator* generator)
+        // These methods clear the backtracking state, either linking to the
+        // current location, a provided label, or copying the backtracking out
+        // to a JumpList. All actions may require code generation to take place,
+        // and as such are passed a pointer to the assembler.
+        void link(MacroAssembler* assembler)
         {
-            unsigned vectorSize = m_parenTails.size();
-            bool priorBacktrackFallThrough = false;
-
-            // Emit in reverse order so parentTail N can fall through to N-1
-            for (unsigned index = vectorSize; index > 0; --index) {
-                JumpList jumpsToNext;
-                priorBacktrackFallThrough = m_parenTails[index-1].get()->generateCode(generator, jumpsToNext, priorBacktrackFallThrough, index > 1);
-                if (index > 1)
-                    jumpsToNext.linkTo(generator->label(), generator);
-                else
-                    addJumpsToNextInteration(jumpsToNext);
+            if (m_pendingReturns.size()) {
+                Label here(assembler);
+                for (unsigned i = 0; i < m_pendingReturns.size(); ++i)
+                    m_backtrackRecords.append(ReturnAddressRecord(m_pendingReturns[i], here));
+                m_pendingReturns.clear();
             }
-            m_parenTails.clear();
+            m_laterFailures.link(assembler);
+            m_laterFailures.clear();
+            m_pendingFallthrough = false;
         }
-
-        void addJumpToNextInteration(Jump jump)
+        void linkTo(Label label, MacroAssembler* assembler)
         {
-            m_jumpsToNextInteration.append(jump);
+            if (m_pendingReturns.size()) {
+                for (unsigned i = 0; i < m_pendingReturns.size(); ++i)
+                    m_backtrackRecords.append(ReturnAddressRecord(m_pendingReturns[i], label));
+                m_pendingReturns.clear();
+            }
+            if (m_pendingFallthrough)
+                assembler->jump(label);
+            m_laterFailures.linkTo(label, assembler);
+            m_laterFailures.clear();
+            m_pendingFallthrough = false;
+        }
+        void takeBacktracksToJumpList(JumpList& jumpList, MacroAssembler* assembler)
+        {
+            if (m_pendingReturns.size()) {
+                Label here(assembler);
+                for (unsigned i = 0; i < m_pendingReturns.size(); ++i)
+                    m_backtrackRecords.append(ReturnAddressRecord(m_pendingReturns[i], here));
+                m_pendingReturns.clear();
+                m_pendingFallthrough = true;
+            }
+            if (m_pendingFallthrough)
+                jumpList.append(assembler->jump());
+            jumpList.append(m_laterFailures);
+            m_laterFailures.clear();
+            m_pendingFallthrough = false;
         }
 
-        void addJumpsToNextInteration(JumpList jumps)
+        bool isEmpty()
         {
-            m_jumpsToNextInteration.append(jumps);
+            return m_laterFailures.empty() && m_pendingReturns.isEmpty() && !m_pendingFallthrough;
         }
 
-        void addDataLabelToNextIteration(DataLabelPtr dataLabel)
+        // Called at the end of code generation to link all return addresses.
+        void linkDataLabels(LinkBuffer& linkBuffer, YarrCodeBlock& codeBlock)
         {
-            m_dataPtrsToNextIteration.append(dataLabel);
+            ASSERT(isEmpty());
+            for (unsigned i = 0; i < m_backtrackRecords.size(); ++i)
+                linkBuffer.patch(m_backtrackRecords[i].m_dataLabel, linkBuffer.locationOf(m_backtrackRecords[i].m_backtrackLocation, ptrTag(YarrBacktrackPtrTag, &codeBlock)));
         }
 
-        void linkToNextIteration(Label label)
-        {
-            m_nextIteration = label;
+    private:
+        struct ReturnAddressRecord {
+            ReturnAddressRecord(DataLabelPtr dataLabel, Label backtrackLocation)
+                : m_dataLabel(dataLabel)
+                , m_backtrackLocation(backtrackLocation)
+            {
+            }
 
-            for (unsigned i = 0; i < m_dataPtrsToNextIteration.size(); ++i)
-                m_backtrackRecords.append(AlternativeBacktrackRecord(m_dataPtrsToNextIteration[i], m_nextIteration));
+            DataLabelPtr m_dataLabel;
+            Label m_backtrackLocation;
+        };
 
-            m_dataPtrsToNextIteration.clear();
+        JumpList m_laterFailures;
+        bool m_pendingFallthrough;
+        Vector<DataLabelPtr, 4> m_pendingReturns;
+        Vector<ReturnAddressRecord, 4> m_backtrackRecords;
+    };
 
-            for (unsigned i = 0; i < m_parenTailsForIteration.size(); ++i)
-                m_parenTailsForIteration[i]->setNextIteration(m_nextIteration);
+    // Generation methods:
+    // ===================
 
-            m_parenTailsForIteration.clear();
-        }
+    // This method provides a default implementation of backtracking common
+    // to many terms; terms commonly jump out of the forwards matching path
+    // on any failed conditions, and add these jumps to the m_jumps list. If
+    // no special handling is required we can often just backtrack to m_jumps.
+    void backtrackTermDefault(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        m_backtrackingState.append(op.m_jumps);
+    }
 
-        void linkToNextIteration(YarrGenerator* generator)
-        {
-            m_jumpsToNextInteration.linkTo(m_nextIteration, generator);
-        }
+    void generateAssertionBOL(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-        int m_parenNestingLevel;
-        Vector<AlternativeBacktrackRecord> m_backtrackRecords;
-        IndirectJumpHashMap m_indirectJumpMap;
-        Label m_nextIteration;
-        Vector<OwnPtr<ParenthesesTail> > m_parenTails;
-        JumpList m_jumpsToNextInteration;
-        Vector<DataLabelPtr> m_dataPtrsToNextIteration;
-        Vector<ParenthesesTail*> m_parenTailsForIteration;
-    };
+        if (m_pattern.multiline()) {
+            const RegisterID character = regT0;
 
-    struct BacktrackDestination {
-        typedef enum {
-            NoBacktrack,
-            BacktrackLabel,
-            BacktrackStackOffset,
-            BacktrackJumpList,
-            BacktrackLinked
-        } BacktrackType;
-
-        BacktrackDestination()
-            : m_backtrackType(NoBacktrack)
-            , m_backtrackToLabel(0)
-            , m_subDataLabelPtr(0)
-            , m_nextBacktrack(0)
-            , m_backtrackSourceLabel(0)
-            , m_backtrackSourceJumps(0)
-        {
-        }
+            JumpList matchDest;
+            if (!term->inputPosition)
+                matchDest.append(branch32(Equal, index, Imm32(m_checkedOffset.unsafeGet())));
 
-        BacktrackDestination(int32_t stackOffset)
-            : m_backtrackType(BacktrackStackOffset)
-            , m_backtrackStackOffset(stackOffset)
-            , m_backtrackToLabel(0)
-            , m_subDataLabelPtr(0)
-            , m_nextBacktrack(0)
-            , m_backtrackSourceLabel(0)
-            , m_backtrackSourceJumps(0)
-        {
-        }
+            readCharacter(m_checkedOffset - term->inputPosition + 1, character);
+            matchCharacterClass(character, matchDest, m_pattern.newlineCharacterClass());
+            op.m_jumps.append(jump());
 
-        BacktrackDestination(Label label)
-            : m_backtrackType(BacktrackLabel)
-            , m_backtrackLabel(label)
-            , m_backtrackToLabel(0)
-            , m_subDataLabelPtr(0)
-            , m_nextBacktrack(0)
-            , m_backtrackSourceLabel(0)
-            , m_backtrackSourceJumps(0)
-        {
+            matchDest.link(this);
+        } else {
+            // Erk, really should poison out these alternatives early. :-/
+            if (term->inputPosition)
+                op.m_jumps.append(jump());
+            else
+                op.m_jumps.append(branch32(NotEqual, index, Imm32(m_checkedOffset.unsafeGet())));
         }
+    }
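
Semantically, the ^ assertion generated above always passes at position 0, and in multiline mode it also passes when the preceding character is a newline; everything else backtracks. A scalar model (isNewline stands in for the pattern's newline character class):

    // Scalar model of the ^ assertion.
    bool bolAssertionHolds(const char16_t* input, unsigned pos, bool multiline,
                           bool (*isNewline)(char16_t))
    {
        if (!pos)
            return true;                          // at the very start of the input
        return multiline && isNewline(input[pos - 1]);
    }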
+    void backtrackAssertionBOL(size_t opIndex)
+    {
+        backtrackTermDefault(opIndex);
+    }
 
-        void clear(bool doDataLabelClear = true)
-        {
-            m_backtrackType = NoBacktrack;
-            if (doDataLabelClear)
-                clearDataLabel();
-            m_nextBacktrack = 0;
-        }
+    void generateAssertionEOL(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-        void clearDataLabel()
-        {
-            m_dataLabelPtr = DataLabelPtr();
-        }
+        if (m_pattern.multiline()) {
+            const RegisterID character = regT0;
 
-        bool hasDestination()
-        {
-            return (m_backtrackType != NoBacktrack);
-        }
+            JumpList matchDest;
+            if (term->inputPosition == m_checkedOffset.unsafeGet())
+                matchDest.append(atEndOfInput());
 
-        bool isStackOffset()
-        {
-            return (m_backtrackType == BacktrackStackOffset);
-        }
+            readCharacter(m_checkedOffset - term->inputPosition, character);
+            matchCharacterClass(character, matchDest, m_pattern.newlineCharacterClass());
+            op.m_jumps.append(jump());
 
-        bool isLabel()
-        {
-            return (m_backtrackType == BacktrackLabel);
+            matchDest.link(this);
+        } else {
+            if (term->inputPosition == m_checkedOffset.unsafeGet())
+                op.m_jumps.append(notAtEndOfInput());
+            // Erk, really should poison out these alternatives early. :-/
+            else
+                op.m_jumps.append(jump());
         }
+    }
+    void backtrackAssertionEOL(size_t opIndex)
+    {
+        backtrackTermDefault(opIndex);
+    }
 
-        bool isJumpList()
-        {
-            return (m_backtrackType == BacktrackJumpList);
-        }
+    // Also falls through on nextIsNotWordChar.
+    void matchAssertionWordchar(size_t opIndex, JumpList& nextIsWordChar, JumpList& nextIsNotWordChar)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-        bool hasDataLabel()
-        {
-            return m_dataLabelPtr.isSet();
-        }
+        const RegisterID character = regT0;
 
-        void copyTarget(BacktrackDestination& rhs, bool copyDataLabel = true)
-        {
-            m_backtrackType = rhs.m_backtrackType;
-            if (m_backtrackType == BacktrackStackOffset)
-                m_backtrackStackOffset = rhs.m_backtrackStackOffset;
-            else if (m_backtrackType == BacktrackLabel)
-                m_backtrackLabel = rhs.m_backtrackLabel;
-            if (copyDataLabel)
-                m_dataLabelPtr = rhs.m_dataLabelPtr;
-            m_backtrackSourceJumps = rhs.m_backtrackSourceJumps;
-            m_backtrackSourceLabel = rhs.m_backtrackSourceLabel;
-        }
-
-        void copyTo(BacktrackDestination& lhs)
-        {
-            lhs.m_backtrackType = m_backtrackType;
-            if (m_backtrackType == BacktrackStackOffset)
-                lhs.m_backtrackStackOffset = m_backtrackStackOffset;
-            else if (m_backtrackType == BacktrackLabel)
-                lhs.m_backtrackLabel = m_backtrackLabel;
-            lhs.m_backtrackSourceJumps = m_backtrackSourceJumps;
-            lhs.m_backtrackSourceLabel = m_backtrackSourceLabel;
-            lhs.m_dataLabelPtr = m_dataLabelPtr;
-            lhs.m_backTrackJumps = m_backTrackJumps;
-        }
-
-        void addBacktrackJump(Jump jump)
-        {
-            m_backTrackJumps.append(jump);
-        }
+        if (term->inputPosition == m_checkedOffset.unsafeGet())
+            nextIsNotWordChar.append(atEndOfInput());
 
-        void setStackOffset(int32_t stackOffset)
-        {
-            m_backtrackType = BacktrackStackOffset;
-            m_backtrackStackOffset = stackOffset;
-        }
+        readCharacter(m_checkedOffset - term->inputPosition, character);
 
-        void setLabel(Label label)
-        {
-            m_backtrackType = BacktrackLabel;
-            m_backtrackLabel = label;
-        }
+        CharacterClass* wordcharCharacterClass;
 
-        void setNextBacktrackLabel(Label label)
-        {
-            if (m_nextBacktrack)
-                m_nextBacktrack->setLabel(label);
-        }
+        if (m_unicodeIgnoreCase)
+            wordcharCharacterClass = m_pattern.wordUnicodeIgnoreCaseCharCharacterClass();
+        else
+            wordcharCharacterClass = m_pattern.wordcharCharacterClass();
 
-        void propagateBacktrackToLabel(const BacktrackDestination& rhs)
-        {
-            if (!m_backtrackToLabel && rhs.m_backtrackToLabel)
-                m_backtrackToLabel = rhs.m_backtrackToLabel;
-        }
+        matchCharacterClass(character, nextIsWordChar, wordcharCharacterClass);
+    }
 
-        void setBacktrackToLabel(Label* backtrackToLabel)
-        {
-            if (!m_backtrackToLabel)
-                m_backtrackToLabel = backtrackToLabel;
-        }
+    void generateAssertionWordBoundary(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-        bool hasBacktrackToLabel()
-        {
-            return m_backtrackToLabel;
-        }
+        const RegisterID character = regT0;
 
-        void setBacktrackJumpList(JumpList* jumpList)
-        {
-            m_backtrackType = BacktrackJumpList;
-            m_backtrackSourceJumps = jumpList;
-        }
+        Jump atBegin;
+        JumpList matchDest;
+        if (!term->inputPosition)
+            atBegin = branch32(Equal, index, Imm32(m_checkedOffset.unsafeGet()));
+        readCharacter(m_checkedOffset - term->inputPosition + 1, character);
 
-        void setBacktrackSourceLabel(Label* backtrackSourceLabel)
-        {
-            m_backtrackSourceLabel = backtrackSourceLabel;
-        }
+        CharacterClass* wordcharCharacterClass;
 
-        void setDataLabel(DataLabelPtr dp)
-        {
-            if (m_subDataLabelPtr) {
-                *m_subDataLabelPtr = dp;
-                m_subDataLabelPtr = 0;
-            } else {
-                ASSERT(!hasDataLabel());
-                m_dataLabelPtr = dp;
-            }
-        }
+        if (m_unicodeIgnoreCase)
+            wordcharCharacterClass = m_pattern.wordUnicodeIgnoreCaseCharCharacterClass();
+        else
+            wordcharCharacterClass = m_pattern.wordcharCharacterClass();
 
-        void clearSubDataLabelPtr()
-        {
-            m_subDataLabelPtr = 0;
-        }
+        matchCharacterClass(character, matchDest, wordcharCharacterClass);
+        if (!term->inputPosition)
+            atBegin.link(this);
 
-        void setSubDataLabelPtr(DataLabelPtr* subDataLabelPtr)
-        {
-            m_subDataLabelPtr = subDataLabelPtr;
+        // We fall through to here if the last character was not a wordchar.
+        JumpList nonWordCharThenWordChar;
+        JumpList nonWordCharThenNonWordChar;
+        if (term->invert()) {
+            matchAssertionWordchar(opIndex, nonWordCharThenNonWordChar, nonWordCharThenWordChar);
+            nonWordCharThenWordChar.append(jump());
+        } else {
+            matchAssertionWordchar(opIndex, nonWordCharThenWordChar, nonWordCharThenNonWordChar);
+            nonWordCharThenNonWordChar.append(jump());
         }
+        op.m_jumps.append(nonWordCharThenNonWordChar);
 
-        void linkToNextBacktrack(BacktrackDestination* nextBacktrack)
-        {
-            m_nextBacktrack = nextBacktrack;
+        // We jump here if the last character was a wordchar.
+        matchDest.link(this);
+        JumpList wordCharThenWordChar;
+        JumpList wordCharThenNonWordChar;
+        if (term->invert()) {
+            matchAssertionWordchar(opIndex, wordCharThenNonWordChar, wordCharThenWordChar);
+            wordCharThenWordChar.append(jump());
+        } else {
+            matchAssertionWordchar(opIndex, wordCharThenWordChar, wordCharThenNonWordChar);
+            // This can fall through!
         }
 
-        int32_t getStackOffset()
-        {
-            ASSERT(m_backtrackType == BacktrackStackOffset);
-            return m_backtrackStackOffset;
-        }
+        op.m_jumps.append(wordCharThenWordChar);
 
-        Label getLabel()
-        {
-            ASSERT(m_backtrackType == BacktrackLabel);
-            return m_backtrackLabel;
-        }
+        nonWordCharThenWordChar.link(this);
+        wordCharThenNonWordChar.link(this);
+    }
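
generateAssertionWordBoundary() implements \b (and, inverted, \B) by comparing the word-character-ness of the previous character with that of the character at the current position, treating positions outside the input as non-word. A scalar model of the same decision (isWordChar stands in for the wordchar character class):

    // Scalar model of the \b / \B assertion.
    bool wordBoundaryHolds(const char16_t* input, unsigned length, unsigned pos,
                           bool invert, bool (*isWordChar)(char16_t))
    {
        bool prevIsWord = pos > 0 && isWordChar(input[pos - 1]);
        bool nextIsWord = pos < length && isWordChar(input[pos]);
        bool boundary = prevIsWord != nextIsWord;
        return invert ? !boundary : boundary;
    }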
+    void backtrackAssertionWordBoundary(size_t opIndex)
+    {
+        backtrackTermDefault(opIndex);
+    }
 
-        JumpList& getBacktrackJumps()
-        {
-            return m_backTrackJumps;
-        }
+    void generatePatternCharacterOnce(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
 
-        DataLabelPtr& getDataLabel()
-        {
-            return m_dataLabelPtr;
-        }
+        if (op.m_isDeadCode)
+            return;
+        
+        // m_ops always ends with an OpBodyAlternativeEnd or OpMatchFailed
+        // node, so there must always be at least one more node.
+        ASSERT(opIndex + 1 < m_ops.size());
+        YarrOp* nextOp = &m_ops[opIndex + 1];
 
-        void jumpToBacktrack(MacroAssembler* masm)
-        {
-            if (isJumpList()) {
-                if (m_backtrackSourceLabel && (m_backtrackSourceLabel->isSet()))
-                    masm->jump().linkTo(*m_backtrackSourceLabel, masm);
-                else
-                    m_backtrackSourceJumps->append(masm->jump());
-            } else if (isStackOffset())
-                masm->jump(Address(stackPointerRegister, m_backtrackStackOffset));
-            else if (isLabel())
-                masm->jump().linkTo(m_backtrackLabel, masm);
-            else
-                m_backTrackJumps.append(masm->jump());
-        }
+        PatternTerm* term = op.m_term;
+        UChar32 ch = term->patternCharacter;
 
-        void jumpToBacktrack(YarrGenerator* generator, Jump jump)
-        {
-            if (isJumpList()) {
-                if (m_backtrackSourceLabel && (m_backtrackSourceLabel->isSet()))
-                    jump.linkTo(*m_backtrackSourceLabel, generator);
-                else
-                    m_backtrackSourceJumps->append(jump);
-            } else if (isStackOffset())
-                generator->m_expressionState.addIndirectJumpEntry(getStackOffset(), jump);
-            else if (isLabel())
-                jump.linkTo(getLabel(), generator);
-            else
-                m_backTrackJumps.append(jump);
+        if ((ch > 0xff) && (m_charSize == Char8)) {
+            // We have a 16-bit pattern character and an 8-bit string - short circuit.
+            op.m_jumps.append(jump());
+            return;
         }
 
-        void jumpToBacktrack(YarrGenerator* generator, JumpList& jumps)
-        {
-            if (isJumpList()) {
-                if (m_backtrackSourceLabel && (m_backtrackSourceLabel->isSet()))
-                    jumps.linkTo(*m_backtrackSourceLabel, generator);
-                else
-                    m_backtrackSourceJumps->append(jumps);
-            } else if (isStackOffset())
-                generator->m_expressionState.addIndirectJumpEntry(getStackOffset(), jumps);
-            else if (isLabel())
-                jumps.linkTo(getLabel(), generator);
-            else
-                m_backTrackJumps.append(jumps);
-        }
+        const RegisterID character = regT0;
+        unsigned maxCharactersAtOnce = m_charSize == Char8 ? 4 : 2;
+        unsigned ignoreCaseMask = 0;
+#if CPU(BIG_ENDIAN)
+        int allCharacters = ch << (m_charSize == Char8 ? 24 : 16);
+#else
+        int allCharacters = ch;
+#endif
+        unsigned numberCharacters;
+        unsigned startTermPosition = term->inputPosition;
+
+        // For case-insensitive compares, non-ASCII characters that have different
+        // upper & lower case representations are converted to a character class.
+        ASSERT(!m_pattern.ignoreCase() || isASCIIAlpha(ch) || isCanonicallyUnique(ch, m_canonicalMode));
+
+        if (m_pattern.ignoreCase() && isASCIIAlpha(ch))
+#if CPU(BIG_ENDIAN)
+            ignoreCaseMask |= 32 << (m_charSize == Char8 ? 24 : 16);
+#else
+            ignoreCaseMask |= 32;
+#endif
 
-        bool plantJumpToBacktrackIfExists(YarrGenerator* generator)
-        {
-            if (isJumpList()) {
-                if (m_backtrackSourceLabel && (m_backtrackSourceLabel->isSet()))
-                    generator->jump(*m_backtrackSourceLabel);
-                else
-                    m_backtrackSourceJumps->append(generator->jump());
+        for (numberCharacters = 1; numberCharacters < maxCharactersAtOnce && nextOp->m_op == OpTerm; ++numberCharacters, nextOp = &m_ops[opIndex + numberCharacters]) {
+            PatternTerm* nextTerm = nextOp->m_term;
+            
+            if (nextTerm->type != PatternTerm::TypePatternCharacter
+                || nextTerm->quantityType != QuantifierFixedCount
+                || nextTerm->quantityMaxCount != 1
+                || nextTerm->inputPosition != (startTermPosition + numberCharacters)
+                || (U16_LENGTH(nextTerm->patternCharacter) != 1 && m_decodeSurrogatePairs))
+                break;
 
-                return true;
-            }
+            nextOp->m_isDeadCode = true;
 
-            if (isStackOffset()) {
-                generator->jump(Address(stackPointerRegister, getStackOffset()));
-                return true;
-            }
+#if CPU(BIG_ENDIAN)
+            int shiftAmount = (m_charSize == Char8 ? 24 : 16) - ((m_charSize == Char8 ? 8 : 16) * numberCharacters);
+#else
+            int shiftAmount = (m_charSize == Char8 ? 8 : 16) * numberCharacters;
+#endif
 
-            if (isLabel()) {
-                generator->jump(getLabel());
-                if (hasDataLabel()) {
-                    generator->m_expressionState.m_backtrackRecords.append(AlternativeBacktrackRecord(getDataLabel(), getLabel()));
-                    clearDataLabel();
-                }
-                return true;
+            UChar32 currentCharacter = nextTerm->patternCharacter;
+
+            if ((currentCharacter > 0xff) && (m_charSize == Char8)) {
+                // We have a 16-bit pattern character and an 8-bit string - short circuit.
+                op.m_jumps.append(jump());
+                return;
             }
 
-            return false;
-        }
+            // For case-insensitive compares, non-ASCII characters that have different
+            // upper & lower case representations are converted to a character class.
+            ASSERT(!m_pattern.ignoreCase() || isASCIIAlpha(currentCharacter) || isCanonicallyUnique(currentCharacter, m_canonicalMode));
 
-        void linkBacktrackToLabel(Label backtrackLabel)
-        {
-            if (m_backtrackToLabel)
-                *m_backtrackToLabel = backtrackLabel;
-        }
+            allCharacters |= (currentCharacter << shiftAmount);
 
-        void linkAlternativeBacktracks(YarrGenerator* generator, bool nextIteration = false)
-        {
-            Label hereLabel = generator->label();
+            if ((m_pattern.ignoreCase()) && (isASCIIAlpha(currentCharacter)))
+                ignoreCaseMask |= 32 << shiftAmount;                    
+        }
 
-            if (m_backtrackToLabel) {
-                *m_backtrackToLabel = hereLabel;
-                m_backtrackToLabel = 0;
+        if (m_charSize == Char8) {
+            switch (numberCharacters) {
+            case 1:
+                op.m_jumps.append(jumpIfCharNotEquals(ch, m_checkedOffset - startTermPosition, character));
+                return;
+            case 2: {
+                load16Unaligned(negativeOffsetIndexedAddress(m_checkedOffset - startTermPosition, character), character);
+                break;
             }
+            case 3: {
+                load16Unaligned(negativeOffsetIndexedAddress(m_checkedOffset - startTermPosition, character), character);
+                if (ignoreCaseMask)
+                    or32(Imm32(ignoreCaseMask), character);
+                op.m_jumps.append(branch32(NotEqual, character, Imm32((allCharacters & 0xffff) | ignoreCaseMask)));
+                op.m_jumps.append(jumpIfCharNotEquals(allCharacters >> 16, m_checkedOffset - startTermPosition - 2, character));
+                return;
+            }
+            case 4: {
+                load32WithUnalignedHalfWords(negativeOffsetIndexedAddress(m_checkedOffset - startTermPosition, character), character);
+                break;
+            }
+            }
+        } else {
+            switch (numberCharacters) {
+            case 1:
+                op.m_jumps.append(jumpIfCharNotEquals(ch, m_checkedOffset - term->inputPosition, character));
+                return;
+            case 2:
+                load32WithUnalignedHalfWords(negativeOffsetIndexedAddress(m_checkedOffset - term->inputPosition, character), character);
+                break;
+            }
+        }
 
-            m_backTrackJumps.link(generator);
-
-            if (nextIteration)
-                generator->m_expressionState.linkToNextIteration(hereLabel);
+        if (ignoreCaseMask)
+            or32(Imm32(ignoreCaseMask), character);
+        op.m_jumps.append(branch32(NotEqual, character, Imm32(allCharacters | ignoreCaseMask)));
+        return;
+    }
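generatePatternCharacterOnce coalesces up to four adjacent fixed-count 8-bit pattern characters (two for 16-bit strings) into one load and compare, OR'ing 0x20 into both sides for ASCII letters when matching case-insensitively. A rough sketch of the little-endian 8-bit case; the function and its names are invented for illustration and assume the pattern characters fit in single bytes:

#include <cctype>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Compare up to four pattern bytes at once against input + pos. For
// case-insensitive matching, ASCII letters are folded by OR'ing 0x20 into
// both the packed pattern and the loaded input word.
static bool matchPackedChars(const char* input, std::size_t pos,
    const char* pattern, unsigned count, bool ignoreCase)
{
    uint32_t packed = 0;
    uint32_t caseMask = 0;
    for (unsigned i = 0; i < count; ++i) {
        packed |= static_cast<uint32_t>(static_cast<uint8_t>(pattern[i])) << (8 * i);
        if (ignoreCase && std::isalpha(static_cast<unsigned char>(pattern[i])))
            caseMask |= 0x20u << (8 * i);
    }

    uint32_t loaded = 0;
    std::memcpy(&loaded, input + pos, count); // unaligned little-endian load
    return (loaded | caseMask) == (packed | caseMask);
}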
+    void backtrackPatternCharacterOnce(size_t opIndex)
+    {
+        backtrackTermDefault(opIndex);
+    }
 
-            if (hasDataLabel()) {
-                generator->m_expressionState.m_backtrackRecords.append(AlternativeBacktrackRecord(getDataLabel(), hereLabel));
-                // data label cleared as a result of the clear() below
-            }
+    void generatePatternCharacterFixed(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
+        UChar32 ch = term->patternCharacter;
 
-            clear();
-        }
+        const RegisterID character = regT0;
+        const RegisterID countRegister = regT1;
 
-        void linkAlternativeBacktracksTo(YarrGenerator* generator, Label label, bool nextIteration = false)
-        {
-            m_backTrackJumps.linkTo(label, generator);
+        move(index, countRegister);
+        Checked<unsigned> scaledMaxCount = term->quantityMaxCount;
+        scaledMaxCount *= U_IS_BMP(ch) ? 1 : 2;
+        sub32(Imm32(scaledMaxCount.unsafeGet()), countRegister);
 
-            if (nextIteration)
-                generator->m_expressionState.linkToNextIteration(label);
+        Label loop(this);
+        readCharacter(m_checkedOffset - term->inputPosition - scaledMaxCount, character, countRegister);
+        // For case-insensitive compares, non-ASCII characters that have different
+        // upper & lower case representations are converted to a character class.
+        ASSERT(!m_pattern.ignoreCase() || isASCIIAlpha(ch) || isCanonicallyUnique(ch, m_canonicalMode));
+        if (m_pattern.ignoreCase() && isASCIIAlpha(ch)) {
+            or32(TrustedImm32(0x20), character);
+            ch |= 0x20;
+        }
+
+        op.m_jumps.append(branch32(NotEqual, character, Imm32(ch)));
+#ifdef JIT_UNICODE_EXPRESSIONS
+        if (m_decodeSurrogatePairs && !U_IS_BMP(ch))
+            add32(TrustedImm32(2), countRegister);
+        else
+#endif
+            add32(TrustedImm32(1), countRegister);
+        branch32(NotEqual, countRegister, index).linkTo(loop, this);
+    }
+    void backtrackPatternCharacterFixed(size_t opIndex)
+    {
+        backtrackTermDefault(opIndex);
+    }
 
-            if (hasDataLabel()) {
-                generator->m_expressionState.m_backtrackRecords.append(AlternativeBacktrackRecord(getDataLabel(), label));
-                clearDataLabel();
-            }
-        }
+    void generatePatternCharacterGreedy(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
+        UChar32 ch = term->patternCharacter;
 
-    private:
-        BacktrackType m_backtrackType;
-        int32_t m_backtrackStackOffset;
-        Label m_backtrackLabel;
-        DataLabelPtr m_dataLabelPtr;
-        Label* m_backtrackToLabel;
-        DataLabelPtr* m_subDataLabelPtr;
-        BacktrackDestination* m_nextBacktrack;
-        Label* m_backtrackSourceLabel;
-        JumpList* m_backtrackSourceJumps;
-        JumpList m_backTrackJumps;
-    };
+        const RegisterID character = regT0;
+        const RegisterID countRegister = regT1;
 
-    struct TermGenerationState {
-        TermGenerationState(PatternDisjunction* disjunction, unsigned checkedTotal)
-            : disjunction(disjunction)
-            , checkedTotal(checkedTotal)
-            , m_subParenNum(0)
-            , m_linkedBacktrack(0)
-            , m_jumpList(0)
-        {
-        }
+        move(TrustedImm32(0), countRegister);
 
-        void resetAlternative()
-        {
-            m_backtrack.clear();
-            alt = 0;
-        }
-        bool alternativeValid()
-        {
-            return alt < disjunction->m_alternatives.size();
-        }
-        void nextAlternative()
-        {
-            ++alt;
-        }
-        PatternAlternative* alternative()
-        {
-            return disjunction->m_alternatives[alt];
-        }
-        bool isLastAlternative()
-        {
-            return (alt + 1) == disjunction->m_alternatives.size();
-        }
+        // Unless we have a 16-bit pattern character and an 8-bit string, in which case we short circuit.
+        if (!((ch > 0xff) && (m_charSize == Char8))) {
+            JumpList failures;
+            Label loop(this);
+            failures.append(atEndOfInput());
+            failures.append(jumpIfCharNotEquals(ch, m_checkedOffset - term->inputPosition, character));
+
+            add32(TrustedImm32(1), index);
+#ifdef JIT_UNICODE_EXPRESSIONS
+            if (m_decodeSurrogatePairs && !U_IS_BMP(ch)) {
+                Jump surrogatePairOk = notAtEndOfInput();
+                sub32(TrustedImm32(1), index);
+                failures.append(jump());
+                surrogatePairOk.link(this);
+                add32(TrustedImm32(1), index);
+            }
+#endif
+            add32(TrustedImm32(1), countRegister);
 
-        void resetTerm()
-        {
-            ASSERT(alternativeValid());
-            t = 0;
-            m_subParenNum = 0;
-        }
-        bool termValid()
-        {
-            ASSERT(alternativeValid());
-            return t < alternative()->m_terms.size();
-        }
-        void nextTerm()
-        {
-            ASSERT(alternativeValid());
-            ++t;
-        }
-        PatternTerm& term()
-        {
-            ASSERT(alternativeValid());
-            return alternative()->m_terms[t];
-        }
-        bool isLastTerm()
-        {
-            ASSERT(alternativeValid());
-            return (t + 1) == alternative()->m_terms.size();
-        }
-        unsigned getSubParenNum()
-        {
-            return m_subParenNum++;
-        }
-        bool isMainDisjunction()
-        {
-            return !disjunction->m_parent;
-        }
+            if (term->quantityMaxCount == quantifyInfinite)
+                jump(loop);
+            else
+                branch32(NotEqual, countRegister, Imm32(term->quantityMaxCount.unsafeGet())).linkTo(loop, this);
 
-        void setJumpListToPriorParen(JumpList* jumpList)
-        {
-            m_jumpList = jumpList;
+            failures.link(this);
         }
+        op.m_reentry = label();
 
-        JumpList* getJumpListToPriorParen()
-        {
-            return m_jumpList;
-        }
+        storeToFrame(countRegister, term->frameLocation + BackTrackInfoPatternCharacter::matchAmountIndex());
+    }
+    void backtrackPatternCharacterGreedy(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-        PatternTerm& lookaheadTerm()
-        {
-            ASSERT(alternativeValid());
-            ASSERT((t + 1) < alternative()->m_terms.size());
-            return alternative()->m_terms[t + 1];
-        }
-        bool isSinglePatternCharacterLookaheadTerm()
-        {
-            ASSERT(alternativeValid());
-            return ((t + 1) < alternative()->m_terms.size())
-                && (lookaheadTerm().type == PatternTerm::TypePatternCharacter)
-                && (lookaheadTerm().quantityType == QuantifierFixedCount)
-                && (lookaheadTerm().quantityCount == 1);
-        }
+        const RegisterID countRegister = regT1;
 
-        int inputOffset()
-        {
-            return term().inputPosition - checkedTotal;
-        }
+        m_backtrackingState.link(this);
 
-        void clearBacktrack()
-        {
-            m_backtrack.clear(false);
-            m_linkedBacktrack = 0;
-        }
+        loadFromFrame(term->frameLocation + BackTrackInfoPatternCharacter::matchAmountIndex(), countRegister);
+        m_backtrackingState.append(branchTest32(Zero, countRegister));
+        sub32(TrustedImm32(1), countRegister);
+        if (!m_decodeSurrogatePairs || U_IS_BMP(term->patternCharacter))
+            sub32(TrustedImm32(1), index);
+        else
+            sub32(TrustedImm32(2), index);
+        jump(op.m_reentry);
+    }
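Taken together, the greedy generate/backtrack pair above matches as many copies of the character as the quantifier allows, records how many were consumed in the term's frame slot, and on backtracking releases one character at a time until none remain. An interpreter-style sketch with invented names (single-character, no surrogate handling):

#include <cstddef>
#include <string>

struct GreedyCharState {
    std::size_t matchAmount = 0; // the count the JIT stores in the frame slot
};

// Forward pass: consume as many copies of 'ch' as allowed.
static void greedyMatch(const std::string& input, std::size_t& index, char ch,
    std::size_t maxCount, GreedyCharState& state)
{
    while (state.matchAmount < maxCount && index < input.size() && input[index] == ch) {
        ++index;
        ++state.matchAmount;
    }
}

// Backtrack: if nothing is left to give back, fail further backwards;
// otherwise release one character and let the rest of the pattern retry.
static bool greedyBacktrack(std::size_t& index, GreedyCharState& state)
{
    if (!state.matchAmount)
        return false;
    --state.matchAmount;
    --index;
    return true;
}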
 
-        void jumpToBacktrack(MacroAssembler* masm)
-        {
-            m_backtrack.jumpToBacktrack(masm);
-        }
+    void generatePatternCharacterNonGreedy(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-        void jumpToBacktrack(YarrGenerator* generator, Jump jump)
-        {
-            m_backtrack.jumpToBacktrack(generator, jump);
-        }
+        const RegisterID countRegister = regT1;
 
-        void jumpToBacktrack(YarrGenerator* generator, JumpList& jumps)
-        {
-            m_backtrack.jumpToBacktrack(generator, jumps);
-        }
+        move(TrustedImm32(0), countRegister);
+        op.m_reentry = label();
+        storeToFrame(countRegister, term->frameLocation + BackTrackInfoPatternCharacter::matchAmountIndex());
+    }
+    void backtrackPatternCharacterNonGreedy(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
+        UChar32 ch = term->patternCharacter;
 
-        bool plantJumpToBacktrackIfExists(YarrGenerator* generator)
-        {
-            return m_backtrack.plantJumpToBacktrackIfExists(generator);
-        }
+        const RegisterID character = regT0;
+        const RegisterID countRegister = regT1;
 
-        void linkDataLabelToBacktrackIfExists(YarrGenerator* generator, DataLabelPtr dataLabel)
-        {
-            // If we have a stack offset backtrack destination, use it directly
-            if (m_backtrack.isStackOffset()) {
-                generator->m_expressionState.addIndirectJumpEntry(m_backtrack.getStackOffset(), dataLabel);
-                m_backtrack.clearSubDataLabelPtr();
-            } else {
-                // If we have a backtrack label, connect the datalabel to it directly.
-                if (m_backtrack.isLabel())
-                    generator->m_expressionState.m_backtrackRecords.append(AlternativeBacktrackRecord(dataLabel, m_backtrack.getLabel()));
-                else
-                    setBacktrackDataLabel(dataLabel);
+        m_backtrackingState.link(this);
+
+        loadFromFrame(term->frameLocation + BackTrackInfoPatternCharacter::matchAmountIndex(), countRegister);
+
+        // Unless we have a 16-bit pattern character and an 8-bit string, in which case we short circuit.
+        if (!((ch > 0xff) && (m_charSize == Char8))) {
+            JumpList nonGreedyFailures;
+            nonGreedyFailures.append(atEndOfInput());
+            if (term->quantityMaxCount != quantifyInfinite)
+                nonGreedyFailures.append(branch32(Equal, countRegister, Imm32(term->quantityMaxCount.unsafeGet())));
+            nonGreedyFailures.append(jumpIfCharNotEquals(ch, m_checkedOffset - term->inputPosition, character));
+
+            add32(TrustedImm32(1), index);
+#ifdef JIT_UNICODE_EXPRESSIONS
+            if (m_decodeSurrogatePairs && !U_IS_BMP(ch)) {
+                Jump surrogatePairOk = notAtEndOfInput();
+                sub32(TrustedImm32(1), index);
+                nonGreedyFailures.append(jump());
+                surrogatePairOk.link(this);
+                add32(TrustedImm32(1), index);
             }
-        }
+#endif
+            add32(TrustedImm32(1), countRegister);
 
-        void addBacktrackJump(Jump jump)
-        {
-            m_backtrack.addBacktrackJump(jump);
+            jump(op.m_reentry);
+            nonGreedyFailures.link(this);
         }
 
-        void setBacktrackDataLabel(DataLabelPtr dp)
-        {
-            m_backtrack.setDataLabel(dp);
+        if (m_decodeSurrogatePairs && !U_IS_BMP(ch)) {
+            // Each non-BMP character occupied two code units, so double the count before rewinding index.
+            lshift32(TrustedImm32(1), countRegister);
         }
 
-        void setBackTrackStackOffset(int32_t stackOffset)
-        {
-            m_backtrack.setStackOffset(stackOffset);
-        }
+        sub32(countRegister, index);
+        m_backtrackingState.fallthrough();
+    }
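The non-greedy variant is the mirror image: it initially consumes nothing, and each backtrack tries to consume one more character (bounded by the quantifier and the end of input) before re-entering the code after the term; once no extension is possible, the input position is rewound past everything the term consumed. A simplified sketch with invented names, again ignoring surrogate pairs:

#include <cstddef>
#include <string>

struct NonGreedyCharState {
    std::size_t matchAmount = 0;
};

// Called on backtrack: extend the match by one character if possible and
// report success, so the caller re-enters the code after the term.
static bool nonGreedyExtend(const std::string& input, std::size_t& index, char ch,
    std::size_t maxCount, NonGreedyCharState& state)
{
    if (index < input.size() && state.matchAmount < maxCount && input[index] == ch) {
        ++index;
        ++state.matchAmount;
        return true;
    }
    // No extension possible: rewind past everything this term consumed and
    // propagate the failure further back.
    index -= state.matchAmount;
    return false;
}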
 
-        void setBacktrackLabel(Label label)
-        {
-            m_backtrack.setLabel(label);
-        }
+    void generateCharacterClassOnce(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-        void linkAlternativeBacktracks(YarrGenerator* generator, bool nextIteration = false)
-        {
-            m_backtrack.linkAlternativeBacktracks(generator, nextIteration);
-            m_linkedBacktrack = 0;
-        }
+        const RegisterID character = regT0;
 
-        void linkAlternativeBacktracksTo(YarrGenerator* generator, Label label, bool nextIteration = false)
-        {
-            m_backtrack.linkAlternativeBacktracksTo(generator, label, nextIteration);
-        }
+        if (m_decodeSurrogatePairs)
+            storeToFrame(index, term->frameLocation + BackTrackInfoCharacterClass::beginIndex());
 
-        void setBacktrackLink(BacktrackDestination* linkedBacktrack)
-        {
-            m_linkedBacktrack = linkedBacktrack;
+        JumpList matchDest;
+        readCharacter(m_checkedOffset - term->inputPosition, character);
+        // If we are matching the "any character" builtin class we only need to read the
+        // character and don't need to match as it will always succeed.
+        if (term->invert() || !term->characterClass->m_anyCharacter) {
+            matchCharacterClass(character, matchDest, term->characterClass);
+
+            if (term->invert())
+                op.m_jumps.append(matchDest);
+            else {
+                op.m_jumps.append(jump());
+                matchDest.link(this);
+            }
         }
-
-        void chainBacktracks(BacktrackDestination* followonBacktrack)
-        {
-            if (m_linkedBacktrack)
-                m_linkedBacktrack->linkToNextBacktrack(followonBacktrack);
+#ifdef JIT_UNICODE_EXPRESSIONS
+        if (m_decodeSurrogatePairs) {
+            Jump isBMPChar = branch32(LessThan, character, supplementaryPlanesBase);
+            add32(TrustedImm32(1), index);
+            isBMPChar.link(this);
         }
+#endif
+    }
+    void backtrackCharacterClassOnce(size_t opIndex)
+    {
+#ifdef JIT_UNICODE_EXPRESSIONS
+        if (m_decodeSurrogatePairs) {
+            YarrOp& op = m_ops[opIndex];
+            PatternTerm* term = op.m_term;
 
-        BacktrackDestination& getBacktrackDestination()
-        {
-            return m_backtrack;
+            m_backtrackingState.link(this);
+            loadFromFrame(term->frameLocation + BackTrackInfoCharacterClass::beginIndex(), index);
+            m_backtrackingState.fallthrough();
         }
+#endif
+        backtrackTermDefault(opIndex);
+    }
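The decodeSurrogatePairs paths above advance (and rewind) the index by 2 rather than 1 because a non-BMP code point occupies two UTF-16 code units. A standalone sketch of that decoding, independent of the JIT's readCharacter helper:

#include <cstddef>
#include <cstdint>
#include <vector>

// Read one code point from UTF-16 text at 'index' and report how many
// 16-bit units it occupies (1 for BMP, 2 for a valid surrogate pair).
static uint32_t readCodePoint(const std::vector<uint16_t>& units, std::size_t index,
    std::size_t& unitsConsumed)
{
    uint32_t lead = units[index];
    unitsConsumed = 1;
    if (lead >= 0xD800 && lead <= 0xDBFF && index + 1 < units.size()) {
        uint32_t trail = units[index + 1];
        if (trail >= 0xDC00 && trail <= 0xDFFF) {
            unitsConsumed = 2;
            return 0x10000 + ((lead - 0xD800) << 10) + (trail - 0xDC00);
        }
    }
    return lead; // BMP character or unpaired surrogate: one unit
}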
 
-        void propagateBacktrackingFrom(YarrGenerator* generator, BacktrackDestination& backtrack, bool doJump = true)
-        {
-            if (doJump)
-                m_backtrack.jumpToBacktrack(generator, backtrack.getBacktrackJumps());
+    void generateCharacterClassFixed(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-            if (m_backtrack.isLabel() && backtrack.hasBacktrackToLabel())
-                backtrack.linkBacktrackToLabel(m_backtrack.getLabel());
+        const RegisterID character = regT0;
+        const RegisterID countRegister = regT1;
 
-            if (backtrack.hasDestination()) {
-                if (m_backtrack.hasDataLabel())
-                    generator->m_expressionState.addDataLabelToNextIteration(m_backtrack.getDataLabel());
+        move(index, countRegister);
+        sub32(Imm32(term->quantityMaxCount.unsafeGet()), countRegister);
 
-                m_backtrack.copyTarget(backtrack, doJump);
+        Label loop(this);
+        JumpList matchDest;
+        readCharacter(m_checkedOffset - term->inputPosition - term->quantityMaxCount, character, countRegister);
+        // If we are matching the "any character" builtin class we only need to read the
+        // character and don't need to match as it will always succeed.
+        if (term->invert() || !term->characterClass->m_anyCharacter) {
+            matchCharacterClass(character, matchDest, term->characterClass);
+
+            if (term->invert())
+                op.m_jumps.append(matchDest);
+            else {
+                op.m_jumps.append(jump());
+                matchDest.link(this);
             }
         }
 
-        PatternDisjunction* disjunction;
-        int checkedTotal;
-    private:
-        unsigned alt;
-        unsigned t;
-        unsigned m_subParenNum;
-        BacktrackDestination m_backtrack;
-        BacktrackDestination* m_linkedBacktrack;
-        JumpList* m_jumpList;
-    };
-
-    struct ParenthesesTail {
-        ParenthesesTail(PatternTerm& term, int nestingLevel, JumpList* jumpListToPriorParen)
-            : m_term(term)
-            , m_nestingLevel(nestingLevel)
-            , m_subParenIndex(0)
-            , m_jumpListToPriorParen(jumpListToPriorParen)
-        {
+        add32(TrustedImm32(1), countRegister);
+#ifdef JIT_UNICODE_EXPRESSIONS
+        if (m_decodeSurrogatePairs) {
+            Jump isBMPChar = branch32(LessThan, character, supplementaryPlanesBase);
+            op.m_jumps.append(atEndOfInput());
+            add32(TrustedImm32(1), countRegister);
+            add32(TrustedImm32(1), index);
+            isBMPChar.link(this);
         }
+#endif
+        branch32(NotEqual, countRegister, index).linkTo(loop, this);
+    }
+    void backtrackCharacterClassFixed(size_t opIndex)
+    {
+        backtrackTermDefault(opIndex);
+    }
 
-        void processBacktracks(YarrGenerator* generator, TermGenerationState& state, TermGenerationState& parenthesesState, Label nonGreedyTryParentheses, Label fallThrough)
-        {
-            m_nonGreedyTryParentheses = nonGreedyTryParentheses;
-            m_fallThrough = fallThrough;
+    void generateCharacterClassGreedy(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-            m_subParenIndex = state.getSubParenNum();
-            parenthesesState.getBacktrackDestination().copyTo(m_parenBacktrack);
-            state.chainBacktracks(&m_backtrack);
-            BacktrackDestination& stateBacktrack = state.getBacktrackDestination();
-            stateBacktrack.copyTo(m_backtrack);
-            stateBacktrack.setBacktrackToLabel(&m_backtrackToLabel);
-            state.setBacktrackLink(&m_backtrack);
-            stateBacktrack.setSubDataLabelPtr(&m_dataAfterLabelPtr);
+        const RegisterID character = regT0;
+        const RegisterID countRegister = regT1;
 
-            m_doDirectBacktrack = m_parenBacktrack.hasDestination();
+        if (m_decodeSurrogatePairs)
+            storeToFrame(index, term->frameLocation + BackTrackInfoCharacterClass::beginIndex());
+        move(TrustedImm32(0), countRegister);
 
-            if ((m_term.quantityType == QuantifierGreedy) || (m_term.quantityType == QuantifierNonGreedy))
-                m_doDirectBacktrack = false;
+        JumpList failures;
+        Label loop(this);
+        failures.append(atEndOfInput());
 
-            if (m_doDirectBacktrack)
-                state.propagateBacktrackingFrom(generator, m_parenBacktrack, false);
-            else {
-                stateBacktrack.setBacktrackJumpList(&m_afterBacktrackJumps);
-                stateBacktrack.setBacktrackSourceLabel(&m_backtrackFromAfterParens);
+        if (term->invert()) {
+            readCharacter(m_checkedOffset - term->inputPosition, character);
+            matchCharacterClass(character, failures, term->characterClass);
+        } else {
+            JumpList matchDest;
+            readCharacter(m_checkedOffset - term->inputPosition, character);
+            // If we are matching the "any character" builtin class we only need to read the
+            // character and don't need to match as it will always succeed.
+            if (!term->characterClass->m_anyCharacter) {
+                matchCharacterClass(character, matchDest, term->characterClass);
+                failures.append(jump());
             }
+            matchDest.link(this);
         }
 
-        void setNextIteration(Label nextIteration)
-        {
-            if (!m_nestingLevel && !m_backtrackToLabel.isSet())
-                m_backtrackToLabel = nextIteration;
-        }
-
-        void addAfterParenJump(Jump jump)
-        {
-            m_afterBacktrackJumps.append(jump);
+        add32(TrustedImm32(1), index);
+#ifdef JIT_UNICODE_EXPRESSIONS
+        if (m_decodeSurrogatePairs) {
+            failures.append(atEndOfInput());
+            Jump isBMPChar = branch32(LessThan, character, supplementaryPlanesBase);
+            add32(TrustedImm32(1), index);
+            isBMPChar.link(this);
         }
+#endif
+        add32(TrustedImm32(1), countRegister);
 
-        bool generateCode(YarrGenerator* generator, JumpList& jumpsToNext, bool priorBackTrackFallThrough, bool nextBacktrackFallThrough)
-        {
-            const RegisterID indexTemporary = regT0;
-            unsigned parenthesesFrameLocation = m_term.frameLocation;
-            Jump fromPriorBacktrack;
-            bool needJumpForPriorParenTail = false;
-
-            if (priorBackTrackFallThrough
-                && ((m_term.quantityType == QuantifierGreedy)
-                 || (m_term.quantityType == QuantifierNonGreedy)
-                 || (!m_doDirectBacktrack && m_parenBacktrack.hasDestination()))) {
-                // If the prior paren tail code assumed that it could fall through,
-                // but we need to generate after paren backtrack code, then provide
-                // a jump around that code for the prior paren tail code.
-                // A regular expressing like ((xxx)...)? needs this.
-                fromPriorBacktrack = generator->jump();
-                needJumpForPriorParenTail = true;
-            }
+        if (term->quantityMaxCount != quantifyInfinite) {
+            branch32(NotEqual, countRegister, Imm32(term->quantityMaxCount.unsafeGet())).linkTo(loop, this);
+            failures.append(jump());
+        } else
+            jump(loop);
 
-            if (!m_backtrack.hasDestination()) {
-                if (m_backtrackToLabel.isSet()) {
-                    m_backtrack.setLabel(m_backtrackToLabel);
-                    nextBacktrackFallThrough = false;
-                } else if (m_jumpListToPriorParen) {
-                    // If we don't have a destination, go back to either the prior paren or the next outer paren.
-                    m_backtrack.setBacktrackJumpList(m_jumpListToPriorParen);
-                    nextBacktrackFallThrough = false;
-                } else
-                    m_backtrack.setBacktrackJumpList(&jumpsToNext);
-            } else
-                nextBacktrackFallThrough = false;
+        failures.link(this);
+        op.m_reentry = label();
 
-            // A failure AFTER the parens jumps here - Backtrack to this paren
-            m_backtrackFromAfterParens = generator->label();
+        storeToFrame(countRegister, term->frameLocation + BackTrackInfoCharacterClass::matchAmountIndex());
+    }
+    void backtrackCharacterClassGreedy(size_t opIndex)
+    {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-            if (m_dataAfterLabelPtr.isSet())
-                generator->m_expressionState.m_backtrackRecords.append(AlternativeBacktrackRecord(m_dataAfterLabelPtr, m_backtrackFromAfterParens));
+        const RegisterID countRegister = regT1;
 
-            m_afterBacktrackJumps.link(generator);
+        m_backtrackingState.link(this);
 
-            if (m_term.quantityType == QuantifierGreedy) {
-                // If this is -1 we have now tested with both with and without the parens.
-                generator->loadFromFrame(parenthesesFrameLocation, indexTemporary);
-                m_backtrack.jumpToBacktrack(generator, generator->branch32(Equal, indexTemporary, TrustedImm32(-1)));
-            } else if (m_term.quantityType == QuantifierNonGreedy) {
-                // If this is -1 we have now tested with both with and without the parens.
-                generator->loadFromFrame(parenthesesFrameLocation, indexTemporary);
-                generator->branch32(Equal, indexTemporary, TrustedImm32(-1)).linkTo(m_nonGreedyTryParentheses, generator);
-            }
+        loadFromFrame(term->frameLocation + BackTrackInfoCharacterClass::matchAmountIndex(), countRegister);
+        m_backtrackingState.append(branchTest32(Zero, countRegister));
+        sub32(TrustedImm32(1), countRegister);
+        if (!m_decodeSurrogatePairs)
+            sub32(TrustedImm32(1), index);
+        else {
+            const RegisterID character = regT0;
 
-            if (!m_doDirectBacktrack)
-                m_parenBacktrack.plantJumpToBacktrackIfExists(generator);
-
-            // A failure WITHIN the parens jumps here
-            if (needJumpForPriorParenTail)
-                fromPriorBacktrack.link(generator);
-            m_parenBacktrack.linkAlternativeBacktracks(generator);
-            m_withinBacktrackJumps.link(generator);
-
-            if (m_term.capture())
-                generator->store32(TrustedImm32(-1), Address(output, (m_term.parentheses.subpatternId << 1) * sizeof(int)));
-
-            if (m_term.quantityType == QuantifierGreedy) {
-                generator->storeToFrame(TrustedImm32(-1), parenthesesFrameLocation);
-                generator->jump().linkTo(m_fallThrough, generator);
-                nextBacktrackFallThrough = false;
-            } else if (!nextBacktrackFallThrough)
-                m_backtrack.jumpToBacktrack(generator);
-
-            if (!m_doDirectBacktrack)
-                m_backtrack.setNextBacktrackLabel(m_backtrackFromAfterParens);
-
-            return nextBacktrackFallThrough;
-        }
-
-        PatternTerm& m_term;
-        int m_nestingLevel;
-        unsigned m_subParenIndex;
-        JumpList* m_jumpListToPriorParen;
-        Label m_nonGreedyTryParentheses;
-        Label m_fallThrough;
-        Label m_backtrackToLabel;
-        Label m_backtrackFromAfterParens;
-        DataLabelPtr m_dataAfterLabelPtr;
-        JumpList m_withinBacktrackJumps;
-        JumpList m_afterBacktrackJumps;
-        BacktrackDestination m_parenBacktrack;
-        BacktrackDestination m_backtrack;
-        bool m_doDirectBacktrack;
-    };
+            loadFromFrame(term->frameLocation + BackTrackInfoCharacterClass::beginIndex(), index);
+            // Rematch one fewer character.
+            storeToFrame(countRegister, term->frameLocation + BackTrackInfoCharacterClass::matchAmountIndex());
 
-    void generateAssertionBOL(TermGenerationState& state)
-    {
-        PatternTerm& term = state.term();
+            Label rematchLoop(this);
+            readCharacter(m_checkedOffset - term->inputPosition, character);
 
-        if (m_pattern.m_multiline) {
-            const RegisterID character = regT0;
+            sub32(TrustedImm32(1), countRegister);
+            add32(TrustedImm32(1), index);
 
-            JumpList matchDest;
-            if (!term.inputPosition)
-                matchDest.append(branch32(Equal, index, Imm32(state.checkedTotal)));
+#ifdef JIT_UNICODE_EXPRESSIONS
+            Jump isBMPChar = branch32(LessThan, character, supplementaryPlanesBase);
+            add32(TrustedImm32(1), index);
+            isBMPChar.link(this);
+#endif
 
-            readCharacter(state.inputOffset() - 1, character);
-            matchCharacterClass(character, matchDest, m_pattern.newlineCharacterClass());
-            state.jumpToBacktrack(this);
+            branchTest32(Zero, countRegister).linkTo(rematchLoop, this);
 
-            matchDest.link(this);
-        } else {
-            // Erk, really should poison out these alternatives early. :-/
-            if (term.inputPosition)
-                state.jumpToBacktrack(this);
-            else
-                state.jumpToBacktrack(this, branch32(NotEqual, index, Imm32(state.checkedTotal)));
+            loadFromFrame(term->frameLocation + BackTrackInfoCharacterClass::matchAmountIndex(), countRegister);
         }
+        jump(op.m_reentry);
     }
 
-    void generateAssertionEOL(TermGenerationState& state)
+    void generateCharacterClassNonGreedy(size_t opIndex)
     {
-        PatternTerm& term = state.term();
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-        if (m_pattern.m_multiline) {
-            const RegisterID character = regT0;
-
-            JumpList matchDest;
-            if (term.inputPosition == state.checkedTotal)
-                matchDest.append(atEndOfInput());
-
-            readCharacter(state.inputOffset(), character);
-            matchCharacterClass(character, matchDest, m_pattern.newlineCharacterClass());
-            state.jumpToBacktrack(this);
+        const RegisterID countRegister = regT1;
 
-            matchDest.link(this);
-        } else {
-            if (term.inputPosition == state.checkedTotal)
-                state.jumpToBacktrack(this, notAtEndOfInput());
-            // Erk, really should poison out these alternatives early. :-/
-            else
-                state.jumpToBacktrack(this);
-        }
+        move(TrustedImm32(0), countRegister);
+        op.m_reentry = label();
+        if (m_decodeSurrogatePairs)
+            storeToFrame(index, term->frameLocation + BackTrackInfoCharacterClass::beginIndex());
+        storeToFrame(countRegister, term->frameLocation + BackTrackInfoCharacterClass::matchAmountIndex());
     }
 
-    // Also falls though on nextIsNotWordChar.
-    void matchAssertionWordchar(TermGenerationState& state, JumpList& nextIsWordChar, JumpList& nextIsNotWordChar)
+    void backtrackCharacterClassNonGreedy(size_t opIndex)
     {
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
+
         const RegisterID character = regT0;
-        PatternTerm& term = state.term();
+        const RegisterID countRegister = regT1;
 
-        if (term.inputPosition == state.checkedTotal)
-            nextIsNotWordChar.append(atEndOfInput());
+        JumpList nonGreedyFailures;
 
-        readCharacter(state.inputOffset(), character);
-        matchCharacterClass(character, nextIsWordChar, m_pattern.wordcharCharacterClass());
-    }
+        m_backtrackingState.link(this);
 
-    void generateAssertionWordBoundary(TermGenerationState& state)
-    {
-        const RegisterID character = regT0;
-        PatternTerm& term = state.term();
+        if (m_decodeSurrogatePairs)
+            loadFromFrame(term->frameLocation + BackTrackInfoCharacterClass::beginIndex(), index);
+        loadFromFrame(term->frameLocation + BackTrackInfoCharacterClass::matchAmountIndex(), countRegister);
 
-        Jump atBegin;
-        JumpList matchDest;
-        if (!term.inputPosition)
-            atBegin = branch32(Equal, index, Imm32(state.checkedTotal));
-        readCharacter(state.inputOffset() - 1, character);
-        matchCharacterClass(character, matchDest, m_pattern.wordcharCharacterClass());
-        if (!term.inputPosition)
-            atBegin.link(this);
+        nonGreedyFailures.append(atEndOfInput());
+        nonGreedyFailures.append(branch32(Equal, countRegister, Imm32(term->quantityMaxCount.unsafeGet())));
 
-        // We fall through to here if the last character was not a wordchar.
-        JumpList nonWordCharThenWordChar;
-        JumpList nonWordCharThenNonWordChar;
-        if (term.invert()) {
-            matchAssertionWordchar(state, nonWordCharThenNonWordChar, nonWordCharThenWordChar);
-            nonWordCharThenWordChar.append(jump());
-        } else {
-            matchAssertionWordchar(state, nonWordCharThenWordChar, nonWordCharThenNonWordChar);
-            nonWordCharThenNonWordChar.append(jump());
+        JumpList matchDest;
+        readCharacter(m_checkedOffset - term->inputPosition, character);
+        // If we are matching the "any character" builtin class we only need to read the
+        // character and don't need to match as it will always succeed.
+        if (term->invert() || !term->characterClass->m_anyCharacter) {
+            matchCharacterClass(character, matchDest, term->characterClass);
+
+            if (term->invert())
+                nonGreedyFailures.append(matchDest);
+            else {
+                nonGreedyFailures.append(jump());
+                matchDest.link(this);
+            }
         }
-        state.jumpToBacktrack(this, nonWordCharThenNonWordChar);
 
-        // We jump here if the last character was a wordchar.
-        matchDest.link(this);
-        JumpList wordCharThenWordChar;
-        JumpList wordCharThenNonWordChar;
-        if (term.invert()) {
-            matchAssertionWordchar(state, wordCharThenNonWordChar, wordCharThenWordChar);
-            wordCharThenWordChar.append(jump());
-        } else {
-            matchAssertionWordchar(state, wordCharThenWordChar, wordCharThenNonWordChar);
-            // This can fall-though!
+        add32(TrustedImm32(1), index);
+#ifdef JIT_UNICODE_EXPRESSIONS
+        if (m_decodeSurrogatePairs) {
+            nonGreedyFailures.append(atEndOfInput());
+            Jump isBMPChar = branch32(LessThan, character, supplementaryPlanesBase);
+            add32(TrustedImm32(1), index);
+            isBMPChar.link(this);
         }
+#endif
+        add32(TrustedImm32(1), countRegister);
 
-        state.jumpToBacktrack(this, wordCharThenWordChar);
+        jump(op.m_reentry);
 
-        nonWordCharThenWordChar.link(this);
-        wordCharThenNonWordChar.link(this);
+        nonGreedyFailures.link(this);
+        sub32(countRegister, index);
+        m_backtrackingState.fallthrough();
     }
 
-    void generatePatternCharacterSingle(TermGenerationState& state)
+    void generateDotStarEnclosure(size_t opIndex)
     {
-        const RegisterID character = regT0;
-        UChar ch = state.term().patternCharacter;
-
-        if (m_pattern.m_ignoreCase && isASCIIAlpha(ch)) {
-            readCharacter(state.inputOffset(), character);
-            or32(TrustedImm32(32), character);
-            state.jumpToBacktrack(this, branch32(NotEqual, character, Imm32(Unicode::toLower(ch))));
-        } else {
-            ASSERT(!m_pattern.m_ignoreCase || (Unicode::toLower(ch) == Unicode::toUpper(ch)));
-            state.jumpToBacktrack(this, jumpIfCharNotEquals(ch, state.inputOffset()));
-        }
-    }
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-    void generatePatternCharacterPair(TermGenerationState& state)
-    {
         const RegisterID character = regT0;
-        UChar ch1 = state.term().patternCharacter;
-        UChar ch2 = state.lookaheadTerm().patternCharacter;
+        const RegisterID matchPos = regT1;
+#ifndef HAVE_INITIAL_START_REG
+        const RegisterID initialStart = character;
+#endif
 
-        int mask = 0;
-        int chPair = ch1 | (ch2 << 16);
+        JumpList foundBeginningNewLine;
+        JumpList saveStartIndex;
+        JumpList foundEndingNewLine;
 
-        if (m_pattern.m_ignoreCase) {
-            if (isASCIIAlpha(ch1))
-                mask |= 32;
-            if (isASCIIAlpha(ch2))
-                mask |= 32 << 16;
+        if (m_pattern.dotAll()) {
+            move(TrustedImm32(0), matchPos);
+            setMatchStart(matchPos);
+            move(length, index);
+            return;
         }
 
-        if (mask) {
-            load32WithUnalignedHalfWords(BaseIndex(input, index, TimesTwo, state.inputOffset() * sizeof(UChar)), character);
-            or32(Imm32(mask), character);
-            state.jumpToBacktrack(this, branch32(NotEqual, character, Imm32(chPair | mask)));
-        } else
-            state.jumpToBacktrack(this, branch32WithUnalignedHalfWords(NotEqual, BaseIndex(input, index, TimesTwo, state.inputOffset() * sizeof(UChar)), Imm32(chPair)));
-    }
+        ASSERT(!m_pattern.m_body->m_hasFixedSize);
+        getMatchStart(matchPos);
 
-    void generatePatternCharacterFixed(TermGenerationState& state)
-    {
-        const RegisterID character = regT0;
-        const RegisterID countRegister = regT1;
-        PatternTerm& term = state.term();
-        UChar ch = term.patternCharacter;
+#ifndef HAVE_INITIAL_START_REG
+        loadFromFrame(m_pattern.m_initialStartValueFrameLocation, initialStart);
+#endif
+        saveStartIndex.append(branch32(BelowOrEqual, matchPos, initialStart));
+        Label findBOLLoop(this);
+        sub32(TrustedImm32(1), matchPos);
+        if (m_charSize == Char8)
+            load8(BaseIndex(input, matchPos, TimesOne, 0), character);
+        else
+            load16(BaseIndex(input, matchPos, TimesTwo, 0), character);
+        matchCharacterClass(character, foundBeginningNewLine, m_pattern.newlineCharacterClass());
+
+#ifndef HAVE_INITIAL_START_REG
+        loadFromFrame(m_pattern.m_initialStartValueFrameLocation, initialStart);
+#endif
+        branch32(Above, matchPos, initialStart).linkTo(findBOLLoop, this);
+        saveStartIndex.append(jump());
 
-        move(index, countRegister);
-        sub32(Imm32(term.quantityCount), countRegister);
+        foundBeginningNewLine.link(this);
+        add32(TrustedImm32(1), matchPos); // Advance past newline
+        saveStartIndex.link(this);
 
-        Label loop(this);
-        if (m_pattern.m_ignoreCase && isASCIIAlpha(ch)) {
-            load16(BaseIndex(input, countRegister, TimesTwo, (state.inputOffset() + term.quantityCount) * sizeof(UChar)), character);
-            or32(TrustedImm32(32), character);
-            state.jumpToBacktrack(this, branch32(NotEqual, character, Imm32(Unicode::toLower(ch))));
-        } else {
-            ASSERT(!m_pattern.m_ignoreCase || (Unicode::toLower(ch) == Unicode::toUpper(ch)));
-            state.jumpToBacktrack(this, branch16(NotEqual, BaseIndex(input, countRegister, TimesTwo, (state.inputOffset() + term.quantityCount) * sizeof(UChar)), Imm32(ch)));
-        }
-        add32(TrustedImm32(1), countRegister);
-        branch32(NotEqual, countRegister, index).linkTo(loop, this);
-    }
+        if (!m_pattern.multiline() && term->anchors.bolAnchor)
+            op.m_jumps.append(branchTest32(NonZero, matchPos));
 
-    void generatePatternCharacterGreedy(TermGenerationState& state)
-    {
-        const RegisterID character = regT0;
-        const RegisterID countRegister = regT1;
-        PatternTerm& term = state.term();
-        UChar ch = term.patternCharacter;
+        ASSERT(!m_pattern.m_body->m_hasFixedSize);
+        setMatchStart(matchPos);
 
-        move(TrustedImm32(0), countRegister);
+        move(index, matchPos);
 
-        JumpList failures;
-        Label loop(this);
-        failures.append(atEndOfInput());
-        if (m_pattern.m_ignoreCase && isASCIIAlpha(ch)) {
-            readCharacter(state.inputOffset(), character);
-            or32(TrustedImm32(32), character);
-            failures.append(branch32(NotEqual, character, Imm32(Unicode::toLower(ch))));
-        } else {
-            ASSERT(!m_pattern.m_ignoreCase || (Unicode::toLower(ch) == Unicode::toUpper(ch)));
-            failures.append(jumpIfCharNotEquals(ch, state.inputOffset()));
-        }
-
-        add32(TrustedImm32(1), countRegister);
-        add32(TrustedImm32(1), index);
-        if (term.quantityCount != quantifyInfinite) {
-            branch32(NotEqual, countRegister, Imm32(term.quantityCount)).linkTo(loop, this);
-            failures.append(jump());
-        } else
-            jump(loop);
-
-        Label backtrackBegin(this);
-        loadFromFrame(term.frameLocation, countRegister);
-        state.jumpToBacktrack(this, branchTest32(Zero, countRegister));
-        sub32(TrustedImm32(1), countRegister);
-        sub32(TrustedImm32(1), index);
+        Label findEOLLoop(this);        
+        foundEndingNewLine.append(branch32(Equal, matchPos, length));
+        if (m_charSize == Char8)
+            load8(BaseIndex(input, matchPos, TimesOne, 0), character);
+        else
+            load16(BaseIndex(input, matchPos, TimesTwo, 0), character);
+        matchCharacterClass(character, foundEndingNewLine, m_pattern.newlineCharacterClass());
+        add32(TrustedImm32(1), matchPos);
+        jump(findEOLLoop);
 
-        failures.link(this);
+        foundEndingNewLine.link(this);
 
-        storeToFrame(countRegister, term.frameLocation);
+        if (!m_pattern.multiline() && term->anchors.eolAnchor)
+            op.m_jumps.append(branch32(NotEqual, matchPos, length));
 
-        state.setBacktrackLabel(backtrackBegin);
+        move(matchPos, index);
     }
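generateDotStarEnclosure handles patterns of the form /.*body.*/: starting from where the body matched, it scans backwards to just past the previous newline to fix the match start, then forwards to the next newline (or the end of input) to fix the match end, and with dotAll it simply takes the whole input. A simplified sketch that ignores the BOL/EOL anchor checks and treats only '\n' as a newline; the function name is invented for illustration:

#include <cstddef>
#include <string>
#include <utility>

// Expand an inner-body match spanning [bodyStart, bodyEnd) out to the
// enclosing line, approximating what the dot-star enclosure computes.
static std::pair<std::size_t, std::size_t> expandDotStarEnclosure(
    const std::string& input, std::size_t bodyStart, std::size_t bodyEnd, bool dotAll)
{
    if (dotAll)
        return { 0, input.size() }; // '.' matches newlines: take everything

    std::size_t start = bodyStart;
    while (start > 0 && input[start - 1] != '\n')
        --start; // stop just past the previous newline (or at the start)

    std::size_t end = bodyEnd;
    while (end < input.size() && input[end] != '\n')
        ++end; // stop at the next newline (or the end of input)

    return { start, end };
}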
 
-    void generatePatternCharacterNonGreedy(TermGenerationState& state)
+    void backtrackDotStarEnclosure(size_t opIndex)
     {
-        const RegisterID character = regT0;
-        const RegisterID countRegister = regT1;
-        PatternTerm& term = state.term();
-        UChar ch = term.patternCharacter;
-
-        move(TrustedImm32(0), countRegister);
-
-        Jump firstTimeDoNothing = jump();
-
-        Label hardFail(this);
-        sub32(countRegister, index);
-        state.jumpToBacktrack(this);
-
-        Label backtrackBegin(this);
-        loadFromFrame(term.frameLocation, countRegister);
-
-        atEndOfInput().linkTo(hardFail, this);
-        if (term.quantityCount != quantifyInfinite)
-            branch32(Equal, countRegister, Imm32(term.quantityCount), hardFail);
-        if (m_pattern.m_ignoreCase && isASCIIAlpha(ch)) {
-            readCharacter(state.inputOffset(), character);
-            or32(TrustedImm32(32), character);
-            branch32(NotEqual, character, Imm32(Unicode::toLower(ch))).linkTo(hardFail, this);
-        } else {
-            ASSERT(!m_pattern.m_ignoreCase || (Unicode::toLower(ch) == Unicode::toUpper(ch)));
-            jumpIfCharNotEquals(ch, state.inputOffset()).linkTo(hardFail, this);
-        }
-
-        add32(TrustedImm32(1), countRegister);
-        add32(TrustedImm32(1), index);
-
-        firstTimeDoNothing.link(this);
-        storeToFrame(countRegister, term.frameLocation);
-
-        state.setBacktrackLabel(backtrackBegin);
+        backtrackTermDefault(opIndex);
     }
-
-    void generateCharacterClassSingle(TermGenerationState& state)
+    
+    // Code generation/backtracking for simple terms
+    // (pattern characters, character classes, and assertions).
+    // These methods farm out work to the set of functions above.
+    void generateTerm(size_t opIndex)
     {
-        const RegisterID character = regT0;
-        PatternTerm& term = state.term();
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-        JumpList matchDest;
-        readCharacter(state.inputOffset(), character);
-        matchCharacterClass(character, matchDest, term.characterClass);
+        switch (term->type) {
+        case PatternTerm::TypePatternCharacter:
+            switch (term->quantityType) {
+            case QuantifierFixedCount:
+                if (term->quantityMaxCount == 1)
+                    generatePatternCharacterOnce(opIndex);
+                else
+                    generatePatternCharacterFixed(opIndex);
+                break;
+            case QuantifierGreedy:
+                generatePatternCharacterGreedy(opIndex);
+                break;
+            case QuantifierNonGreedy:
+                generatePatternCharacterNonGreedy(opIndex);
+                break;
+            }
+            break;
 
-        if (term.invert())
-            state.jumpToBacktrack(this, matchDest);
-        else {
-            state.jumpToBacktrack(this);
-            matchDest.link(this);
-        }
-    }
+        case PatternTerm::TypeCharacterClass:
+            switch (term->quantityType) {
+            case QuantifierFixedCount:
+                if (term->quantityMaxCount == 1)
+                    generateCharacterClassOnce(opIndex);
+                else
+                    generateCharacterClassFixed(opIndex);
+                break;
+            case QuantifierGreedy:
+                generateCharacterClassGreedy(opIndex);
+                break;
+            case QuantifierNonGreedy:
+                generateCharacterClassNonGreedy(opIndex);
+                break;
+            }
+            break;
 
-    void generateCharacterClassFixed(TermGenerationState& state)
-    {
-        const RegisterID character = regT0;
-        const RegisterID countRegister = regT1;
-        PatternTerm& term = state.term();
+        case PatternTerm::TypeAssertionBOL:
+            generateAssertionBOL(opIndex);
+            break;
 
-        move(index, countRegister);
-        sub32(Imm32(term.quantityCount), countRegister);
+        case PatternTerm::TypeAssertionEOL:
+            generateAssertionEOL(opIndex);
+            break;
 
-        Label loop(this);
-        JumpList matchDest;
-        load16(BaseIndex(input, countRegister, TimesTwo, (state.inputOffset() + term.quantityCount) * sizeof(UChar)), character);
-        matchCharacterClass(character, matchDest, term.characterClass);
+        case PatternTerm::TypeAssertionWordBoundary:
+            generateAssertionWordBoundary(opIndex);
+            break;
 
-        if (term.invert())
-            state.jumpToBacktrack(this, matchDest);
-        else {
-            state.jumpToBacktrack(this);
-            matchDest.link(this);
-        }
+        case PatternTerm::TypeForwardReference:
+            break;
 
-        add32(TrustedImm32(1), countRegister);
-        branch32(NotEqual, countRegister, index).linkTo(loop, this);
+        case PatternTerm::TypeParenthesesSubpattern:
+        case PatternTerm::TypeParentheticalAssertion:
+            RELEASE_ASSERT_NOT_REACHED();
+        case PatternTerm::TypeBackReference:
+            m_failureReason = JITFailureReason::BackReference;
+            break;
+        case PatternTerm::TypeDotStarEnclosure:
+            generateDotStarEnclosure(opIndex);
+            break;
+        }
     }
-
-    void generateCharacterClassGreedy(TermGenerationState& state)
+    void backtrackTerm(size_t opIndex)
     {
-        const RegisterID character = regT0;
-        const RegisterID countRegister = regT1;
-        PatternTerm& term = state.term();
+        YarrOp& op = m_ops[opIndex];
+        PatternTerm* term = op.m_term;
 
-        move(TrustedImm32(0), countRegister);
+        switch (term->type) {
+        case PatternTerm::TypePatternCharacter:
+            switch (term->quantityType) {
+            case QuantifierFixedCount:
+                if (term->quantityMaxCount == 1)
+                    backtrackPatternCharacterOnce(opIndex);
+                else
+                    backtrackPatternCharacterFixed(opIndex);
+                break;
+            case QuantifierGreedy:
+                backtrackPatternCharacterGreedy(opIndex);
+                break;
+            case QuantifierNonGreedy:
+                backtrackPatternCharacterNonGreedy(opIndex);
+                break;
+            }
+            break;
 
-        JumpList failures;
-        Label loop(this);
-        failures.append(atEndOfInput());
+        case PatternTerm::TypeCharacterClass:
+            switch (term->quantityType) {
+            case QuantifierFixedCount:
+                if (term->quantityMaxCount == 1)
+                    backtrackCharacterClassOnce(opIndex);
+                else
+                    backtrackCharacterClassFixed(opIndex);
+                break;
+            case QuantifierGreedy:
+                backtrackCharacterClassGreedy(opIndex);
+                break;
+            case QuantifierNonGreedy:
+                backtrackCharacterClassNonGreedy(opIndex);
+                break;
+            }
+            break;
 
-        if (term.invert()) {
-            readCharacter(state.inputOffset(), character);
-            matchCharacterClass(character, failures, term.characterClass);
-        } else {
-            JumpList matchDest;
-            readCharacter(state.inputOffset(), character);
-            matchCharacterClass(character, matchDest, term.characterClass);
-            failures.append(jump());
-            matchDest.link(this);
-        }
+        case PatternTerm::TypeAssertionBOL:
+            backtrackAssertionBOL(opIndex);
+            break;
 
-        add32(TrustedImm32(1), countRegister);
-        add32(TrustedImm32(1), index);
-        if (term.quantityCount != quantifyInfinite) {
-            branch32(NotEqual, countRegister, Imm32(term.quantityCount)).linkTo(loop, this);
-            failures.append(jump());
-        } else
-            jump(loop);
+        case PatternTerm::TypeAssertionEOL:
+            backtrackAssertionEOL(opIndex);
+            break;
 
-        Label backtrackBegin(this);
-        loadFromFrame(term.frameLocation, countRegister);
-        state.jumpToBacktrack(this, branchTest32(Zero, countRegister));
-        sub32(TrustedImm32(1), countRegister);
-        sub32(TrustedImm32(1), index);
+        case PatternTerm::TypeAssertionWordBoundary:
+            backtrackAssertionWordBoundary(opIndex);
+            break;
 
-        failures.link(this);
+        case PatternTerm::TypeForwardReference:
+            break;
+
+        case PatternTerm::TypeParenthesesSubpattern:
+        case PatternTerm::TypeParentheticalAssertion:
+            RELEASE_ASSERT_NOT_REACHED();
 
-        storeToFrame(countRegister, term.frameLocation);
+        case PatternTerm::TypeDotStarEnclosure:
+            backtrackDotStarEnclosure(opIndex);
+            break;
 
-        state.setBacktrackLabel(backtrackBegin);
+        case PatternTerm::TypeBackReference:
+            m_failureReason = JITFailureReason::BackReference;
+            break;
+        }
     }
 
-    void generateCharacterClassNonGreedy(TermGenerationState& state)
+    void generate()
     {
-        const RegisterID character = regT0;
-        const RegisterID countRegister = regT1;
-        PatternTerm& term = state.term();
-
-        move(TrustedImm32(0), countRegister);
-
-        Jump firstTimeDoNothing = jump();
+        // Forwards generate the matching code.
+        ASSERT(m_ops.size());
+        size_t opIndex = 0;
 
-        Label hardFail(this);
-        sub32(countRegister, index);
-        state.jumpToBacktrack(this);
+        do {
+            YarrOp& op = m_ops[opIndex];
+            switch (op.m_op) {
 
-        Label backtrackBegin(this);
-        loadFromFrame(term.frameLocation, countRegister);
+            case OpTerm:
+                generateTerm(opIndex);
+                break;
 
-        atEndOfInput().linkTo(hardFail, this);
-        branch32(Equal, countRegister, Imm32(term.quantityCount), hardFail);
+            // OpBodyAlternativeBegin/Next/End
+            //
+            // These nodes wrap the set of alternatives in the body of the regular expression.
+            // There may be either one or two chains of OpBodyAlternative nodes, one representing
+            // the 'once through' sequence of alternatives (if any exist), and one representing
+            // the repeating alternatives (again, if any exist).
+            //
+            // Upon normal entry to the Begin alternative, we will check that input is available.
+            // Reentry to the Begin alternative will take place after the check has taken place,
+            // and will assume that the input position has already been progressed as appropriate.
+            //
+            // Entry to subsequent Next/End alternatives occurs when the prior alternative has
+            // successfully completed a match - at that point we return a success state from JIT code.
+            //
+            // A Next alternative allows for reentry optimized to suit backtracking from its
+            // preceding alternative. It expects the input position to still be set to a position
+            // appropriate to its predecessor, and it will only perform an input check if the
+            // predecessor had a minimum size less than its own.
+            //
+            // In the case of 'once through' expressions, the End node will also have a reentry
+            // point to jump to when the last alternative fails. Again, this expects the input
+            // position to still reflect that expected by the prior alternative.
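+            //
+            // For example, a body disjunction such as /foo|ba/ is represented by a Begin
+            // node for the 'foo' alternative, a Next node for 'ba', and an End node. Since
+            // 'ba' has a smaller minimum size than 'foo', re-entry at the Next node only
+            // rewinds the input position; an additional availability check is only planted
+            // when an alternative has a larger minimum size than its predecessor.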
+            case OpBodyAlternativeBegin: {
+                PatternAlternative* alternative = op.m_alternative;
+
+                // Upon entry at the head of the set of alternatives, check if input is available
+                // to run the first alternative. (This progresses the input position).
+                op.m_jumps.append(jumpIfNoAvailableInput(alternative->m_minimumSize));
+                // We will reenter after the check, and assume the input position to have been
+                // set as appropriate to this alternative.
+                op.m_reentry = label();
+
+                m_checkedOffset += alternative->m_minimumSize;
+                break;
+            }
+            case OpBodyAlternativeNext:
+            case OpBodyAlternativeEnd: {
+                PatternAlternative* priorAlternative = m_ops[op.m_previousOp].m_alternative;
+                PatternAlternative* alternative = op.m_alternative;
+
+                // If we get here, the prior alternative matched - return success.
+                
+                // Adjust the stack pointer to remove the pattern's frame.
+                removeCallFrame();
+
+                // Load appropriate values into the return register and the first output
+                // slot, and return. In the case of a pattern with a fixed size, we will
+                // not have yet set the value in the first output slot.
+                ASSERT(index != returnRegister);
+                if (m_pattern.m_body->m_hasFixedSize) {
+                    move(index, returnRegister);
+                    if (priorAlternative->m_minimumSize)
+                        sub32(Imm32(priorAlternative->m_minimumSize), returnRegister);
+                    if (compileMode == IncludeSubpatterns)
+                        store32(returnRegister, output);
+                } else
+                    getMatchStart(returnRegister);
+                if (compileMode == IncludeSubpatterns)
+                    store32(index, Address(output, 4));
+                move(index, returnRegister2);
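+                // returnRegister and returnRegister2 now hold the match start and end; with
+                // IncludeSubpatterns the end index has also just been written to the second
+                // output slot (the start was stored above, or earlier via setMatchStart).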
+
+                generateReturn();
+
+                // This is the divide between the tail of the prior alternative, above, and
+                // the head of the subsequent alternative, below.
+
+                if (op.m_op == OpBodyAlternativeNext) {
+                    // This is the reentry point for the Next alternative. We expect any code
+                    // that jumps here to do so with the input position matching that of the
+                    // PRIOR alternative, and we will only check input availability if we
+                    // need to progress it forwards.
+                    op.m_reentry = label();
+                    if (alternative->m_minimumSize > priorAlternative->m_minimumSize) {
+                        add32(Imm32(alternative->m_minimumSize - priorAlternative->m_minimumSize), index);
+                        op.m_jumps.append(jumpIfNoAvailableInput());
+                    } else if (priorAlternative->m_minimumSize > alternative->m_minimumSize)
+                        sub32(Imm32(priorAlternative->m_minimumSize - alternative->m_minimumSize), index);
+                } else if (op.m_nextOp == notFound) {
+                    // This is the reentry point for the End of 'once through' alternatives,
+                    // jumped to when the last alternative fails to match.
+                    op.m_reentry = label();
+                    sub32(Imm32(priorAlternative->m_minimumSize), index);
+                }
 
-        JumpList matchDest;
-        readCharacter(state.inputOffset(), character);
-        matchCharacterClass(character, matchDest, term.characterClass);
+                if (op.m_op == OpBodyAlternativeNext)
+                    m_checkedOffset += alternative->m_minimumSize;
+                m_checkedOffset -= priorAlternative->m_minimumSize;
+                break;
+            }
 
-        if (term.invert())
-            matchDest.linkTo(hardFail, this);
-        else {
-            jump(hardFail);
-            matchDest.link(this);
-        }
+            // OpSimpleNestedAlternativeBegin/Next/End
+            // OpNestedAlternativeBegin/Next/End
+            //
+            // These nodes are used to handle sets of alternatives that are nested within
+            // subpatterns and parenthetical assertions. The 'simple' forms are used where
+            // we do not need to be able to backtrack back into any alternative other than
+            // the last, the normal forms allow backtracking into any alternative.
+            //
+            // Each Begin/Next node is responsible for planting an input check to ensure
+            // sufficient input is available on entry. Next nodes additionally need to
+            // jump to the end - Next nodes use the End node's m_jumps list to hold this
+            // set of jumps.
+            //
+            // In the non-simple forms, successful alternative matches must store a
+            // 'return address' using a DataLabelPtr, used to store the address to jump
+            // to when backtracking, to get to the code for the appropriate alternative.
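+            //
+            // Concretely, when a non-simple alternative matches, storeToFrameWithPatch
+            // (below) plants a to-be-patched pointer in the term's return address frame
+            // slot; the backwards (backtracking) pass later patches it so that backtracking
+            // from beyond the parentheses can jump straight back into the code for the
+            // alternative that matched.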
+            case OpSimpleNestedAlternativeBegin:
+            case OpNestedAlternativeBegin: {
+                PatternTerm* term = op.m_term;
+                PatternAlternative* alternative = op.m_alternative;
+                PatternDisjunction* disjunction = term->parentheses.disjunction;
+
+                // Calculate how much input we need to check for, and if non-zero check.
+                op.m_checkAdjust = Checked<unsigned>(alternative->m_minimumSize);
+                if ((term->quantityType == QuantifierFixedCount) && (term->type != PatternTerm::TypeParentheticalAssertion))
+                    op.m_checkAdjust -= disjunction->m_minimumSize;
+                if (op.m_checkAdjust)
+                    op.m_jumps.append(jumpIfNoAvailableInput(op.m_checkAdjust.unsafeGet()));
+
+                m_checkedOffset += op.m_checkAdjust;
+                break;
+            }
+            case OpSimpleNestedAlternativeNext:
+            case OpNestedAlternativeNext: {
+                PatternTerm* term = op.m_term;
+                PatternAlternative* alternative = op.m_alternative;
+                PatternDisjunction* disjunction = term->parentheses.disjunction;
+
+                // In the non-simple case, store a 'return address' so we can backtrack correctly.
+                if (op.m_op == OpNestedAlternativeNext) {
+                    unsigned parenthesesFrameLocation = term->frameLocation;
+                    op.m_returnAddress = storeToFrameWithPatch(parenthesesFrameLocation + BackTrackInfoParentheses::returnAddressIndex());
+                }
 
-        add32(TrustedImm32(1), countRegister);
-        add32(TrustedImm32(1), index);
+                if (term->quantityType != QuantifierFixedCount && !m_ops[op.m_previousOp].m_alternative->m_minimumSize) {
+                    // If the previous alternative matched without consuming characters then
+                    // backtrack to try to match while consuming some input.
+                    op.m_zeroLengthMatch = branch32(Equal, index, Address(stackPointerRegister, term->frameLocation * sizeof(void*)));
+                }
 
-        firstTimeDoNothing.link(this);
-        storeToFrame(countRegister, term.frameLocation);
+                // If we reach here then the prior alternative has matched - jump to the
+                // End node, to skip over any further alternatives.
+                //
+                // FIXME: this is logically O(N^2) (though N can be expected to be very
+                // small). We could avoid this either by adding an extra jump to the JIT
+                // data structures, or by making the backtracking code that jumps to Next
+                // alternatives responsible for checking that input is available (if
+                // we didn't need to plant the input checks, then m_jumps would be free).
+                YarrOp* endOp = &m_ops[op.m_nextOp];
+                while (endOp->m_nextOp != notFound) {
+                    ASSERT(endOp->m_op == OpSimpleNestedAlternativeNext || endOp->m_op == OpNestedAlternativeNext);
+                    endOp = &m_ops[endOp->m_nextOp];
+                }
+                ASSERT(endOp->m_op == OpSimpleNestedAlternativeEnd || endOp->m_op == OpNestedAlternativeEnd);
+                endOp->m_jumps.append(jump());
+
+                // This is the entry point for the next alternative.
+                op.m_reentry = label();
+
+                // Calculate how much input we need to check for, and if non-zero check.
+                op.m_checkAdjust = alternative->m_minimumSize;
+                if ((term->quantityType == QuantifierFixedCount) && (term->type != PatternTerm::TypeParentheticalAssertion))
+                    op.m_checkAdjust -= disjunction->m_minimumSize;
+                if (op.m_checkAdjust)
+                    op.m_jumps.append(jumpIfNoAvailableInput(op.m_checkAdjust.unsafeGet()));
+
+                YarrOp& lastOp = m_ops[op.m_previousOp];
+                m_checkedOffset -= lastOp.m_checkAdjust;
+                m_checkedOffset += op.m_checkAdjust;
+                break;
+            }
+            case OpSimpleNestedAlternativeEnd:
+            case OpNestedAlternativeEnd: {
+                PatternTerm* term = op.m_term;
+
+                // In the non-simple case, store a 'return address' so we can backtrack correctly.
+                if (op.m_op == OpNestedAlternativeEnd) {
+                    unsigned parenthesesFrameLocation = term->frameLocation;
+                    op.m_returnAddress = storeToFrameWithPatch(parenthesesFrameLocation + BackTrackInfoParentheses::returnAddressIndex());
+                }
 
-        state.setBacktrackLabel(backtrackBegin);
-    }
+                if (term->quantityType != QuantifierFixedCount && !m_ops[op.m_previousOp].m_alternative->m_minimumSize) {
+                    // If the previous alternative matched without consuming characters then
+                    // backtrack to try to match while consuming some input.
+                    op.m_zeroLengthMatch = branch32(Equal, index, Address(stackPointerRegister, term->frameLocation * sizeof(void*)));
+                }
 
-    void generateParenthesesDisjunction(PatternTerm& parenthesesTerm, TermGenerationState& state, unsigned alternativeFrameLocation)
-    {
-        ASSERT((parenthesesTerm.type == PatternTerm::TypeParenthesesSubpattern) || (parenthesesTerm.type == PatternTerm::TypeParentheticalAssertion));
-        ASSERT(parenthesesTerm.quantityCount == 1);
+                // If this set of alternatives contains more than one alternative,
+                // then the Next nodes will have planted jumps to the End, and added
+                // them to this node's m_jumps list.
+                op.m_jumps.link(this);
+                op.m_jumps.clear();
 
-        PatternDisjunction* disjunction = parenthesesTerm.parentheses.disjunction;
-        unsigned preCheckedCount = ((parenthesesTerm.quantityType == QuantifierFixedCount) && (parenthesesTerm.type != PatternTerm::TypeParentheticalAssertion)) ? disjunction->m_minimumSize : 0;
+                YarrOp& lastOp = m_ops[op.m_previousOp];
+                m_checkedOffset -= lastOp.m_checkAdjust;
+                break;
+            }
 
-        if (disjunction->m_alternatives.size() == 1) {
-            state.resetAlternative();
-            ASSERT(state.alternativeValid());
-            PatternAlternative* alternative = state.alternative();
-            optimizeAlternative(alternative);
+            // OpParenthesesSubpatternOnceBegin/End
+            //
+            // These nodes support (optionally) capturing subpatterns that have a
+            // quantity count of 1 (this covers fixed once, and ?/?? quantifiers).
+            case OpParenthesesSubpatternOnceBegin: {
+                PatternTerm* term = op.m_term;
+                unsigned parenthesesFrameLocation = term->frameLocation;
+                const RegisterID indexTemporary = regT0;
+                ASSERT(term->quantityMaxCount == 1);
+
+                // Upon entry to a Greedy quantified set of parentheses, store the index.
+                // We'll use this for two purposes:
+                //  - To indicate which iteration we are on of matching the remainder of
+                //    the expression after the parentheses - the first, including the
+                //    match within the parentheses, or the second having skipped over them.
+                //  - To check for empty matches, which must be rejected.
+                //
+                // At the head of a NonGreedy set of parentheses we'll immediately set the
+                // value on the stack to -1 (indicating a match skipping the subpattern),
+                // and plant a jump to the end. We'll also plant a label to backtrack to
+                // to reenter the subpattern later, with a store to set up index on the
+                // second iteration.
+                //
+                // FIXME: for capturing parens, could use the index in the capture array?
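+                //
+                // For example, for /(ab)?c/ the group is Greedy with a maximum count of 1:
+                // the stored entry index tells us, after the parentheses, whether we are on
+                // the iteration that matched the group or the one that skipped it, and (for
+                // groups whose body can match empty) allows an empty match to be rejected.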
+                if (term->quantityType == QuantifierGreedy)
+                    storeToFrame(index, parenthesesFrameLocation + BackTrackInfoParenthesesOnce::beginIndex());
+                else if (term->quantityType == QuantifierNonGreedy) {
+                    storeToFrame(TrustedImm32(-1), parenthesesFrameLocation + BackTrackInfoParenthesesOnce::beginIndex());
+                    op.m_jumps.append(jump());
+                    op.m_reentry = label();
+                    storeToFrame(index, parenthesesFrameLocation + BackTrackInfoParenthesesOnce::beginIndex());
+                }
 
-            int countToCheck = alternative->m_minimumSize - preCheckedCount;
-            if (countToCheck) {
-                ASSERT((parenthesesTerm.type == PatternTerm::TypeParentheticalAssertion) || (parenthesesTerm.quantityType != QuantifierFixedCount));
+                // If the parentheses are capturing, store the starting index value to the
+                // captures array, offsetting as necessary.
+                //
+                // FIXME: could avoid offsetting this value in JIT code, apply
+                // offsets only afterwards, at the point the results array is
+                // being accessed.
+                if (term->capture() && compileMode == IncludeSubpatterns) {
+                    unsigned inputOffset = (m_checkedOffset - term->inputPosition).unsafeGet();
+                    if (term->quantityType == QuantifierFixedCount)
+                        inputOffset += term->parentheses.disjunction->m_minimumSize;
+                    if (inputOffset) {
+                        move(index, indexTemporary);
+                        sub32(Imm32(inputOffset), indexTemporary);
+                        setSubpatternStart(indexTemporary, term->parentheses.subpatternId);
+                    } else
+                        setSubpatternStart(index, term->parentheses.subpatternId);
+                }
+                break;
+            }
+            case OpParenthesesSubpatternOnceEnd: {
+                PatternTerm* term = op.m_term;
+                const RegisterID indexTemporary = regT0;
+                ASSERT(term->quantityMaxCount == 1);
+
+                // Runtime ASSERT to make sure that the nested alternative handled the
+                // "no input consumed" check.
+                if (!ASSERT_DISABLED && term->quantityType != QuantifierFixedCount && !term->parentheses.disjunction->m_minimumSize) {
+                    Jump pastBreakpoint;
+                    pastBreakpoint = branch32(NotEqual, index, Address(stackPointerRegister, term->frameLocation * sizeof(void*)));
+                    abortWithReason(YARRNoInputConsumed);
+                    pastBreakpoint.link(this);
+                }
 
-                // FIXME: This is quite horrible.  The call to 'plantJumpToBacktrackIfExists'
-                // will be forced to always trampoline into here, just to decrement the index.
-                // Ick. 
-                Jump skip = jump();
+                // If the parentheses are capturing, store the ending index value to the
+                // captures array, offsetting as necessary.
+                //
+                // FIXME: could avoid offsetting this value in JIT code, apply
+                // offsets only afterwards, at the point the results array is
+                // being accessed.
+                if (term->capture() && compileMode == IncludeSubpatterns) {
+                    unsigned inputOffset = (m_checkedOffset - term->inputPosition).unsafeGet();
+                    if (inputOffset) {
+                        move(index, indexTemporary);
+                        sub32(Imm32(inputOffset), indexTemporary);
+                        setSubpatternEnd(indexTemporary, term->parentheses.subpatternId);
+                    } else
+                        setSubpatternEnd(index, term->parentheses.subpatternId);
+                }
 
-                Label backtrackBegin(this);
-                sub32(Imm32(countToCheck), index);
-                state.addBacktrackJump(jump());
+                // If the parentheses are quantified Greedy then add a label to jump back
+                // to if we get a failed match after the parentheses. For NonGreedy
+                // parentheses, link the jump from before the subpattern to here.
+                if (term->quantityType == QuantifierGreedy)
+                    op.m_reentry = label();
+                else if (term->quantityType == QuantifierNonGreedy) {
+                    YarrOp& beginOp = m_ops[op.m_previousOp];
+                    beginOp.m_jumps.link(this);
+                }
+                break;
+            }
 
-                skip.link(this);
+            // OpParenthesesSubpatternTerminalBegin/End
+            case OpParenthesesSubpatternTerminalBegin: {
+                PatternTerm* term = op.m_term;
+                ASSERT(term->quantityType == QuantifierGreedy);
+                ASSERT(term->quantityMaxCount == quantifyInfinite);
+                ASSERT(!term->capture());
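+
+                // A 'terminal' subpattern is a non-capturing, unbounded greedy group ending
+                // the pattern (e.g. the trailing group in /ab(?:cd)*/): nothing can follow a
+                // match of it, so we never need to backtrack back into it and can simply
+                // loop for as long as it keeps matching.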
 
-                state.setBacktrackLabel(backtrackBegin);
+                // Upon entry set a label to loop back to.
+                op.m_reentry = label();
 
-                state.jumpToBacktrack(this, jumpIfNoAvailableInput(countToCheck));
-                state.checkedTotal += countToCheck;
+                // Store the start index of the current match; we need to reject zero
+                // length matches.
+                storeToFrame(index, term->frameLocation + BackTrackInfoParenthesesTerminal::beginIndex());
+                break;
             }
+            case OpParenthesesSubpatternTerminalEnd: {
+                YarrOp& beginOp = m_ops[op.m_previousOp];
+                if (!ASSERT_DISABLED) {
+                    PatternTerm* term = op.m_term;
+                    
+                    // Runtime ASSERT to make sure that the nested alternative handled the
+                    // "no input consumed" check.
+                    Jump pastBreakpoint;
+                    pastBreakpoint = branch32(NotEqual, index, Address(stackPointerRegister, term->frameLocation * sizeof(void*)));
+                    abortWithReason(YARRNoInputConsumed);
+                    pastBreakpoint.link(this);
+                }
 
-            for (state.resetTerm(); state.termValid(); state.nextTerm())
-                generateTerm(state);
-
-            state.checkedTotal -= countToCheck;
-        } else {
-            JumpList successes;
-            bool propogateBacktrack = false;
+                // We know that the match is non-zero, we can accept it and
+                // loop back up to the head of the subpattern.
+                jump(beginOp.m_reentry);
 
-            // Save current state's paren jump list for use with each alternative 
-            JumpList* outerJumpList = state.getJumpListToPriorParen();
+                // This is the entry point to jump to when we stop matching - we will
+                // do so once the subpattern cannot match any more.
+                op.m_reentry = label();
+                break;
+            }
 
-            for (state.resetAlternative(); state.alternativeValid(); state.nextAlternative(), state.setJumpListToPriorParen(outerJumpList)) {
-                PatternAlternative* alternative = state.alternative();
-                optimizeAlternative(alternative);
+            // OpParenthesesSubpatternBegin/End
+            //
+            // These nodes support generic subpatterns.
+            case OpParenthesesSubpatternBegin: {
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+                PatternTerm* term = op.m_term;
+                unsigned parenthesesFrameLocation = term->frameLocation;
+
+                // Upon entry to a Greedy quantified set of parentheses, store the index.
+                // We'll use this for two purposes:
+                //  - To indicate which iteration we are on of matching the remainder of
+                //    the expression after the parentheses - the first, including the
+                //    match within the parentheses, or the second having skipped over them.
+                //  - To check for empty matches, which must be rejected.
+                //
+                // At the head of a NonGreedy set of parentheses we'll immediately set the
+                // value on the stack to -1 (indicating a match skipping the subpattern),
+                // and plant a jump to the end. We'll also plant a label to backtrack to
+                // to reenter the subpattern later, with a store to set up index on the
+                // second iteration.
+                //
+                // FIXME: for capturing parens, could use the index in the capture array?
+                if (term->quantityType == QuantifierGreedy || term->quantityType == QuantifierNonGreedy) {
+                    storeToFrame(TrustedImm32(0), parenthesesFrameLocation + BackTrackInfoParentheses::matchAmountIndex());
+                    storeToFrame(TrustedImmPtr(nullptr), parenthesesFrameLocation + BackTrackInfoParentheses::parenContextHeadIndex());
+
+                    if (term->quantityType == QuantifierNonGreedy) {
+                        storeToFrame(TrustedImm32(-1), parenthesesFrameLocation + BackTrackInfoParentheses::beginIndex());
+                        op.m_jumps.append(jump());
+                    }
+                    
+                    op.m_reentry = label();
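+                    // Each iteration of the group allocates a fresh ParenContext and pushes
+                    // it onto a singly linked list whose head lives in the frame;
+                    // saveParenContext snapshots the relevant capture and frame state so
+                    // that backtracking can later restore the previous iteration's state.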
+                    RegisterID currParenContextReg = regT0;
+                    RegisterID newParenContextReg = regT1;
+
+                    loadFromFrame(parenthesesFrameLocation + BackTrackInfoParentheses::parenContextHeadIndex(), currParenContextReg);
+                    allocateParenContext(newParenContextReg);
+                    storePtr(currParenContextReg, newParenContextReg);
+                    storeToFrame(newParenContextReg, parenthesesFrameLocation + BackTrackInfoParentheses::parenContextHeadIndex());
+                    saveParenContext(newParenContextReg, regT2, term->parentheses.subpatternId, term->parentheses.lastSubpatternId, parenthesesFrameLocation);
+                    storeToFrame(index, parenthesesFrameLocation + BackTrackInfoParentheses::beginIndex());
+                }
 
-                ASSERT(alternative->m_minimumSize >= preCheckedCount);
-                int countToCheck = alternative->m_minimumSize - preCheckedCount;
-                if (countToCheck) {
-                    state.addBacktrackJump(jumpIfNoAvailableInput(countToCheck));
-                    state.checkedTotal += countToCheck;
+                // If the parentheses are capturing, store the starting index value to the
+                // captures array, offsetting as necessary.
+                //
+                // FIXME: could avoid offsetting this value in JIT code, apply
+                // offsets only afterwards, at the point the results array is
+                // being accessed.
+                if (term->capture() && compileMode == IncludeSubpatterns) {
+                    const RegisterID indexTemporary = regT0;
+                    unsigned inputOffset = (m_checkedOffset - term->inputPosition).unsafeGet();
+                    if (term->quantityType == QuantifierFixedCount)
+                        inputOffset += term->parentheses.disjunction->m_minimumSize;
+                    if (inputOffset) {
+                        move(index, indexTemporary);
+                        sub32(Imm32(inputOffset), indexTemporary);
+                        setSubpatternStart(indexTemporary, term->parentheses.subpatternId);
+                    } else
+                        setSubpatternStart(index, term->parentheses.subpatternId);
+                }
+#else // !YARR_JIT_ALL_PARENS_EXPRESSIONS
+                RELEASE_ASSERT_NOT_REACHED();
+#endif
+                break;
+            }
+            case OpParenthesesSubpatternEnd: {
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+                PatternTerm* term = op.m_term;
+                unsigned parenthesesFrameLocation = term->frameLocation;
+
+                // Runtime ASSERT to make sure that the nested alternative handled the
+                // "no input consumed" check.
+                if (!ASSERT_DISABLED && term->quantityType != QuantifierFixedCount && !term->parentheses.disjunction->m_minimumSize) {
+                    Jump pastBreakpoint;
+                    pastBreakpoint = branch32(NotEqual, index, Address(stackPointerRegister, parenthesesFrameLocation * sizeof(void*)));
+                    abortWithReason(YARRNoInputConsumed);
+                    pastBreakpoint.link(this);
                 }
 
-                for (state.resetTerm(); state.termValid(); state.nextTerm())
-                    generateTerm(state);
+                const RegisterID countTemporary = regT1;
+
+                YarrOp& beginOp = m_ops[op.m_previousOp];
+                loadFromFrame(parenthesesFrameLocation + BackTrackInfoParentheses::matchAmountIndex(), countTemporary);
+                add32(TrustedImm32(1), countTemporary);
+                storeToFrame(countTemporary, parenthesesFrameLocation + BackTrackInfoParentheses::matchAmountIndex());
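+                // countTemporary now holds the number of completed iterations of the group;
+                // for a bounded Greedy quantifier it is compared against the maximum count
+                // below before deciding whether to loop back for another iteration.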
+
+                // If the parentheses are capturing, store the ending index value to the
+                // captures array, offsetting as necessary.
+                //
+                // FIXME: could avoid offsetting this value in JIT code, apply
+                // offsets only afterwards, at the point the results array is
+                // being accessed.
+                if (term->capture() && compileMode == IncludeSubpatterns) {
+                    const RegisterID indexTemporary = regT0;
+                    
+                    unsigned inputOffset = (m_checkedOffset - term->inputPosition).unsafeGet();
+                    if (inputOffset) {
+                        move(index, indexTemporary);
+                        sub32(Imm32(inputOffset), indexTemporary);
+                        setSubpatternEnd(indexTemporary, term->parentheses.subpatternId);
+                    } else
+                        setSubpatternEnd(index, term->parentheses.subpatternId);
+                }
 
-                // Matched an alternative.
-                DataLabelPtr dataLabel = storeToFrameWithPatch(alternativeFrameLocation);
+                // If the parentheses are quantified Greedy then add a label to jump back
+                // to if we get a failed match after the parentheses. For NonGreedy
+                // parentheses, link the jump from before the subpattern to here.
+                if (term->quantityType == QuantifierGreedy) {
+                    if (term->quantityMaxCount != quantifyInfinite)
+                        branch32(Below, countTemporary, Imm32(term->quantityMaxCount.unsafeGet())).linkTo(beginOp.m_reentry, this);
+                    else
+                        jump(beginOp.m_reentry);
+                    
+                    op.m_reentry = label();
+                } else if (term->quantityType == QuantifierNonGreedy) {
+                    YarrOp& beginOp = m_ops[op.m_previousOp];
+                    beginOp.m_jumps.link(this);
+                }
+#else // !YARR_JIT_ALL_PARENS_EXPRESSIONS
+                RELEASE_ASSERT_NOT_REACHED();
+#endif
+                break;
+            }
 
-                if (!state.isLastAlternative() || countToCheck)
-                    successes.append(jump());
+            // OpParentheticalAssertionBegin/End
+            case OpParentheticalAssertionBegin: {
+                PatternTerm* term = op.m_term;
 
-                // Alternative did not match.
+                // Store the current index - assertions should not update index, so
+                // we will need to restore it upon a successful match.
+                unsigned parenthesesFrameLocation = term->frameLocation;
+                storeToFrame(index, parenthesesFrameLocation + BackTrackInfoParentheticalAssertion::beginIndex());
 
-                // Do we have a backtrack destination?
-                //    if so, link the data label to it.
-                state.linkDataLabelToBacktrackIfExists(this, dataLabel);
+                // Rewind the input position to the point the assertion should be matched from.
+                op.m_checkAdjust = m_checkedOffset - term->inputPosition;
+                if (op.m_checkAdjust)
+                    sub32(Imm32(op.m_checkAdjust.unsafeGet()), index);
 
-                if (!state.isLastAlternative() || countToCheck)
-                    state.linkAlternativeBacktracks(this);
+                m_checkedOffset -= op.m_checkAdjust;
+                break;
+            }
+            case OpParentheticalAssertionEnd: {
+                PatternTerm* term = op.m_term;
+
+                // Restore the input index value.
+                unsigned parenthesesFrameLocation = term->frameLocation;
+                loadFromFrame(parenthesesFrameLocation + BackTrackInfoParentheticalAssertion::beginIndex(), index);
+
+                // If inverted, a successful match of the assertion must be treated
+                // as a failure, so jump to backtracking.
+                if (term->invert()) {
+                    op.m_jumps.append(jump());
+                    op.m_reentry = label();
+                }
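+                // For example, in /(?!ab)c/ a successful match of 'ab' inside the assertion
+                // must fail the assertion term, so the jump planted above is routed into the
+                // backtracking chain; the reentry label gives the backtracking pass a place
+                // to send a failed inner match, which for an inverted assertion is a success.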
 
-                if (countToCheck) {
-                    sub32(Imm32(countToCheck), index);
-                    state.checkedTotal -= countToCheck;
-                } else if (state.isLastAlternative())
-                    propogateBacktrack = true;
+                YarrOp& lastOp = m_ops[op.m_previousOp];
+                m_checkedOffset += lastOp.m_checkAdjust;
+                break;
             }
-            // We fall through to here when the last alternative fails.
-            // Add a backtrack out of here for the parenthese handling code to link up.
-            if (!propogateBacktrack)
-                state.addBacktrackJump(jump());
 
-            // Save address on stack for the parens code to backtrack to, to retry the
-            // next alternative.
-            state.setBackTrackStackOffset(alternativeFrameLocation * sizeof(void*));
+            case OpMatchFailed:
+                removeCallFrame();
+                generateFailReturn();
+                break;
+            }
 
-            successes.link(this);
-        }
+            ++opIndex;
+        } while (opIndex < m_ops.size());
     }
 
-    void generateParenthesesSingle(TermGenerationState& state)
+    void backtrack()
     {
-        const RegisterID indexTemporary = regT0;
-        PatternTerm& term = state.term();
-        PatternDisjunction* disjunction = term.parentheses.disjunction;
-        ASSERT(term.quantityCount == 1);
+        // Backwards generate the backtracking code.
+        size_t opIndex = m_ops.size();
+        ASSERT(opIndex);
 
-        unsigned preCheckedCount = (term.quantityType == QuantifierFixedCount) ? disjunction->m_minimumSize : 0;
-
-        unsigned parenthesesFrameLocation = term.frameLocation;
-        unsigned alternativeFrameLocation = parenthesesFrameLocation;
-        if (term.quantityType != QuantifierFixedCount)
-            alternativeFrameLocation += YarrStackSpaceForBackTrackInfoParenthesesOnce;
+        do {
+            --opIndex;
+            YarrOp& op = m_ops[opIndex];
+            switch (op.m_op) {
 
-        // optimized case - no capture & no quantifier can be handled in a light-weight manner.
-        if (!term.capture() && (term.quantityType == QuantifierFixedCount)) {
-            m_expressionState.incrementParenNestingLevel();
+            case OpTerm:
+                backtrackTerm(opIndex);
+                break;
 
-            TermGenerationState parenthesesState(disjunction, state.checkedTotal);
+            // OpBodyAlternativeBegin/Next/End
+            //
+            // For each Begin/Next node representing an alternative, we need to decide what to do
+            // in two circumstances:
+            //  - If we backtrack back into this node, from within the alternative.
+            //  - If the input check at the head of the alternative fails (if this exists).
+            //
+            // We treat these two cases differently since in the former case we have slightly
+            // more information - since we are backtracking out of a prior alternative we know
+            // that at least enough input was available to run it. For example, given the regular
+            // expression /a|b/, if we backtrack out of the first alternative (a failed pattern
+            // character match of 'a'), then we need not perform an additional input availability
+            // check before running the second alternative.
+            //
+            // Backtracking required differs for the last alternative, which in the case of the
+            // repeating set of alternatives must loop. The code generated for the last alternative
+            // will also be used to handle all input check failures from any prior alternatives -
+            // these require similar functionality, in seeking the next available alternative for
+            // which there is sufficient input.
+            //
+            // Since backtracking of all other alternatives simply requires us to link backtracks
+            // to the reentry point for the subsequent alternative, we will only be generating any
+            // code when backtracking the last alternative.
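+            //
+            // For example, for /ab|c/ a failure of the final alternative 'c' at some start
+            // position falls into the code generated here: the start position is advanced
+            // (adjusting for the difference in alternative minimum sizes), input
+            // availability is re-checked, and on success we loop back to re-enter the first
+            // alternative; if insufficient input remains we return failure.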
+            case OpBodyAlternativeBegin:
+            case OpBodyAlternativeNext: {
+                PatternAlternative* alternative = op.m_alternative;
+
+                if (op.m_op == OpBodyAlternativeNext) {
+                    PatternAlternative* priorAlternative = m_ops[op.m_previousOp].m_alternative;
+                    m_checkedOffset += priorAlternative->m_minimumSize;
+                }
+                m_checkedOffset -= alternative->m_minimumSize;
 
-            // Use the current state's jump list for the nested parentheses.
-            parenthesesState.setJumpListToPriorParen(state.getJumpListToPriorParen());
+                // Is this the last alternative? If not, then if we backtrack to this point we just
+                // need to jump to try to match the next alternative.
+                if (m_ops[op.m_nextOp].m_op != OpBodyAlternativeEnd) {
+                    m_backtrackingState.linkTo(m_ops[op.m_nextOp].m_reentry, this);
+                    break;
+                }
+                YarrOp& endOp = m_ops[op.m_nextOp];
 
-            generateParenthesesDisjunction(state.term(), parenthesesState, alternativeFrameLocation);
-            // this expects that any backtracks back out of the parentheses will be in the
-            // parenthesesState's m_backTrackJumps vector, and that if they need backtracking
-            // they will have set an entry point on the parenthesesState's m_backtrackLabel.
-            BacktrackDestination& parenthesesBacktrack = parenthesesState.getBacktrackDestination();
-            BacktrackDestination& stateBacktrack = state.getBacktrackDestination();
+                YarrOp* beginOp = &op;
+                while (beginOp->m_op != OpBodyAlternativeBegin) {
+                    ASSERT(beginOp->m_op == OpBodyAlternativeNext);
+                    beginOp = &m_ops[beginOp->m_previousOp];
+                }
 
-            state.propagateBacktrackingFrom(this, parenthesesBacktrack);
-            stateBacktrack.propagateBacktrackToLabel(parenthesesBacktrack);
+                bool onceThrough = endOp.m_nextOp == notFound;
+                
+                JumpList lastStickyAlternativeFailures;
+
+                // First, generate code to handle cases where we backtrack out of an attempted match
+                // of the last alternative. If this is a 'once through' set of alternatives then we
+                // have nothing to do - link this straight through to the End.
+                if (onceThrough)
+                    m_backtrackingState.linkTo(endOp.m_reentry, this);
+                else {
+                    // If we don't need to move the input position, and the pattern has a fixed size
+                    // (in which case we omit the store of the start index until the pattern has matched)
+                    // then we can just link the backtrack out of the last alternative straight to the
+                    // head of the first alternative.
+                    if (m_pattern.m_body->m_hasFixedSize
+                        && (alternative->m_minimumSize > beginOp->m_alternative->m_minimumSize)
+                        && (alternative->m_minimumSize - beginOp->m_alternative->m_minimumSize == 1))
+                        m_backtrackingState.linkTo(beginOp->m_reentry, this);
+                    else if (m_pattern.sticky() && m_ops[op.m_nextOp].m_op == OpBodyAlternativeEnd) {
+                        // It is a sticky pattern and the last alternative failed, jump to the end.
+                        m_backtrackingState.takeBacktracksToJumpList(lastStickyAlternativeFailures, this);
+                    } else {
+                        // We need to generate a trampoline of code to execute before looping back
+                        // around to the first alternative.
+                        m_backtrackingState.link(this);
+
+                        // No need to advance and retry for a sticky pattern.
+                        if (!m_pattern.sticky()) {
+                            // If the pattern size is not fixed, then store the start index for use if we match.
+                            if (!m_pattern.m_body->m_hasFixedSize) {
+                                if (alternative->m_minimumSize == 1)
+                                    setMatchStart(index);
+                                else {
+                                    move(index, regT0);
+                                    if (alternative->m_minimumSize)
+                                        sub32(Imm32(alternative->m_minimumSize - 1), regT0);
+                                    else
+                                        add32(TrustedImm32(1), regT0);
+                                    setMatchStart(regT0);
+                                }
+                            }
+
+                            // Generate code to loop. Check whether the last alternative is longer than the
+                            // first (e.g. /a|xy/ or /a|xyz/).
+                            if (alternative->m_minimumSize > beginOp->m_alternative->m_minimumSize) {
+                                // We want to loop, and increment input position. If the delta is 1, it is
+                                // already correctly incremented, if more than one then decrement as appropriate.
+                                unsigned delta = alternative->m_minimumSize - beginOp->m_alternative->m_minimumSize;
+                                ASSERT(delta);
+                                if (delta != 1)
+                                    sub32(Imm32(delta - 1), index);
+                                jump(beginOp->m_reentry);
+                            } else {
+                                // If the first alternative has minimum size 0xFFFFFFFFu, then there cannot
+                                // be sufficent input available to handle this, so just fall through.
+                                unsigned delta = beginOp->m_alternative->m_minimumSize - alternative->m_minimumSize;
+                                if (delta != 0xFFFFFFFFu) {
+                                    // We need to check input because we are incrementing the input.
+                                    add32(Imm32(delta + 1), index);
+                                    checkInput().linkTo(beginOp->m_reentry, this);
+                                }
+                            }
+                        }
+                    }
+                }
 
-            state.setJumpListToPriorParen(parenthesesState.getJumpListToPriorParen());
+                // We can reach this point in the code in two ways:
+                //  - Fallthrough from the code above (a repeating alternative backtracked out of its
+                //    last alternative, and did not have sufficient input to run the first).
+                //  - We will loop back up to the following label when a repeating alternative loops,
+                //    following a failed input check.
+                //
+                // Either way, we have just failed the input check for the first alternative.
+                Label firstInputCheckFailed(this);
+
+                // Generate code to handle input check failures from alternatives except the last.
+                // prevOp is the alternative we're handling a bail out from (initially Begin), and
+                // nextOp is the alternative we will be attempting to reenter into.
+                // 
+                // We will link input check failures from the forwards matching path back to the code
+                // that can handle them.
+                YarrOp* prevOp = beginOp;
+                YarrOp* nextOp = &m_ops[beginOp->m_nextOp];
+                while (nextOp->m_op != OpBodyAlternativeEnd) {
+                    prevOp->m_jumps.link(this);
+
+                    // We only get here if an input check fails; it is only worth checking again
+                    // if the next alternative has a minimum size less than the last.
+                    if (prevOp->m_alternative->m_minimumSize > nextOp->m_alternative->m_minimumSize) {
+                        // FIXME: if we added an extra label to YarrOp, we could avoid needing to
+                        // subtract delta back out, and reduce this code. Should performance test
+                        // the benefit of this.
+                        unsigned delta = prevOp->m_alternative->m_minimumSize - nextOp->m_alternative->m_minimumSize;
+                        sub32(Imm32(delta), index);
+                        Jump fail = jumpIfNoAvailableInput();
+                        add32(Imm32(delta), index);
+                        jump(nextOp->m_reentry);
+                        fail.link(this);
+                    } else if (prevOp->m_alternative->m_minimumSize < nextOp->m_alternative->m_minimumSize)
+                        add32(Imm32(nextOp->m_alternative->m_minimumSize - prevOp->m_alternative->m_minimumSize), index);
+                    prevOp = nextOp;
+                    nextOp = &m_ops[nextOp->m_nextOp];
+                }
 
-            m_expressionState.decrementParenNestingLevel();
-        } else {
-            Jump nonGreedySkipParentheses;
-            Label nonGreedyTryParentheses;
-            if (term.quantityType == QuantifierGreedy)
-                storeToFrame(index, parenthesesFrameLocation);
-            else if (term.quantityType == QuantifierNonGreedy) {
-                storeToFrame(TrustedImm32(-1), parenthesesFrameLocation);
-                nonGreedySkipParentheses = jump();
-                nonGreedyTryParentheses = label();
-                storeToFrame(index, parenthesesFrameLocation);
-            }
+                // We fall through to here if there is insufficient input to run the last alternative.
 
-            // store the match start index
-            if (term.capture()) {
-                int inputOffset = state.inputOffset() - preCheckedCount;
-                if (inputOffset) {
-                    move(index, indexTemporary);
-                    add32(Imm32(inputOffset), indexTemporary);
-                    store32(indexTemporary, Address(output, (term.parentheses.subpatternId << 1) * sizeof(int)));
-                } else
-                    store32(index, Address(output, (term.parentheses.subpatternId << 1) * sizeof(int)));
-            }
+                // If there is insufficient input to run the last alternative, then for 'once through'
+                // alternatives we are done - just jump back up into the forwards matching path at the End.
+                if (onceThrough) {
+                    op.m_jumps.linkTo(endOp.m_reentry, this);
+                    jump(endOp.m_reentry);
+                    break;
+                }
 
-            ParenthesesTail* parenthesesTail = m_expressionState.addParenthesesTail(term, state.getJumpListToPriorParen());
+                // For repeating alternatives, link any input check failure from the last alternative to
+                // this point.
+                op.m_jumps.link(this);
 
-            m_expressionState.incrementParenNestingLevel();
+                bool needsToUpdateMatchStart = !m_pattern.m_body->m_hasFixedSize;
 
-            TermGenerationState parenthesesState(disjunction, state.checkedTotal);
+                // Check for cases where input position is already incremented by 1 for the last
+                // alternative (this is particularly useful where the minimum size of the body
+                // disjunction is 0, e.g. /a*|b/).
+                if (needsToUpdateMatchStart && alternative->m_minimumSize == 1) {
+                    // index is already incremented by 1, so just store it now!
+                    setMatchStart(index);
+                    needsToUpdateMatchStart = false;
+                }
 
-            // Save the parenthesesTail for backtracking from nested parens to this one.
-            parenthesesState.setJumpListToPriorParen(&parenthesesTail->m_withinBacktrackJumps);
+                if (!m_pattern.sticky()) {
+                    // Check whether there is sufficient input to loop. Increment the input position by
+                    // one, and check. Also add in the minimum disjunction size before checking - there
+                    // is no point in looping if we're just going to fail all the input checks around
+                    // the next iteration.
+                    ASSERT(alternative->m_minimumSize >= m_pattern.m_body->m_minimumSize);
+                    if (alternative->m_minimumSize == m_pattern.m_body->m_minimumSize) {
+                        // If the last alternative had the same minimum size as the disjunction,
+                        // just simply increment input pos by 1, no adjustment based on minimum size.
+                        add32(TrustedImm32(1), index);
+                    } else {
+                        // If the minimum for the last alternative was one greater than that
+                        // for the disjunction, we've already progressed by 1, nothing to do!
+                        unsigned delta = (alternative->m_minimumSize - m_pattern.m_body->m_minimumSize) - 1;
+                        if (delta)
+                            sub32(Imm32(delta), index);
+                    }
+                    Jump matchFailed = jumpIfNoAvailableInput();
+
+                    if (needsToUpdateMatchStart) {
+                        if (!m_pattern.m_body->m_minimumSize)
+                            setMatchStart(index);
+                        else {
+                            move(index, regT0);
+                            sub32(Imm32(m_pattern.m_body->m_minimumSize), regT0);
+                            setMatchStart(regT0);
+                        }
+                    }
 
-            // generate the body of the parentheses
-            generateParenthesesDisjunction(state.term(), parenthesesState, alternativeFrameLocation);
+                    // Calculate how much more input the first alternative requires than the minimum
+                    // for the body as a whole. If no more is needed then we don't need an additional
+                    // input check here - jump straight back up to the start of the first alternative.
+                    if (beginOp->m_alternative->m_minimumSize == m_pattern.m_body->m_minimumSize)
+                        jump(beginOp->m_reentry);
+                    else {
+                        if (beginOp->m_alternative->m_minimumSize > m_pattern.m_body->m_minimumSize)
+                            add32(Imm32(beginOp->m_alternative->m_minimumSize - m_pattern.m_body->m_minimumSize), index);
+                        else
+                            sub32(Imm32(m_pattern.m_body->m_minimumSize - beginOp->m_alternative->m_minimumSize), index);
+                        checkInput().linkTo(beginOp->m_reentry, this);
+                        jump(firstInputCheckFailed);
+                    }
 
-            // For non-fixed counts, backtrack if we didn't match anything.
-            if (term.quantityType != QuantifierFixedCount)
-                parenthesesTail->addAfterParenJump(branch32(Equal, index, Address(stackPointerRegister, (parenthesesFrameLocation * sizeof(void*)))));
+                    // We jump to here if we iterate to the point that there is insufficient input to
+                    // run any matches, and need to return a failure state from JIT code.
+                    matchFailed.link(this);
+                }
 
-            // store the match end index
-            if (term.capture()) {
-                int inputOffset = state.inputOffset();
-                if (inputOffset) {
-                    move(index, indexTemporary);
-                    add32(Imm32(state.inputOffset()), indexTemporary);
-                    store32(indexTemporary, Address(output, ((term.parentheses.subpatternId << 1) + 1) * sizeof(int)));
-                } else
-                    store32(index, Address(output, ((term.parentheses.subpatternId << 1) + 1) * sizeof(int)));
+                lastStickyAlternativeFailures.link(this);
+                removeCallFrame();
+                generateFailReturn();
+                break;
             }
+            case OpBodyAlternativeEnd: {
+                // We should never backtrack back into a body disjunction.
+                ASSERT(m_backtrackingState.isEmpty());
 
-            m_expressionState.decrementParenNestingLevel();
-
-            parenthesesTail->processBacktracks(this, state, parenthesesState, nonGreedyTryParentheses, label());
-
-            state.setJumpListToPriorParen(&parenthesesTail->m_afterBacktrackJumps);
-            
-            parenthesesState.getBacktrackDestination().clear();
-
-            if (term.quantityType == QuantifierNonGreedy)
-                nonGreedySkipParentheses.link(this);
-        }
-    }
-
-    void generateParenthesesGreedyNoBacktrack(TermGenerationState& state)
-    {
-        PatternTerm& parenthesesTerm = state.term();
-        PatternDisjunction* disjunction = parenthesesTerm.parentheses.disjunction;
-        ASSERT(parenthesesTerm.type == PatternTerm::TypeParenthesesSubpattern);
-        ASSERT(parenthesesTerm.quantityCount != 1); // Handled by generateParenthesesSingle.
+                PatternAlternative* priorAlternative = m_ops[op.m_previousOp].m_alternative;
+                m_checkedOffset += priorAlternative->m_minimumSize;
+                break;
+            }
 
-        TermGenerationState parenthesesState(disjunction, state.checkedTotal);
+            // OpSimpleNestedAlternativeBegin/Next/End
+            // OpNestedAlternativeBegin/Next/End
+            //
+            // Generate code for when we backtrack back out of an alternative into
+            // a Begin or Next node, or when the entry input count check fails. If
+            // there are more alternatives we need to jump to the next alternative,
+            // if not we backtrack back out of the current set of parentheses.
+            //
+            // In the case of non-simple nested assertions we need to also link the
+            // 'return address' appropriately to backtrack back out into the correct
+            // alternative.
+            case OpSimpleNestedAlternativeBegin:
+            case OpSimpleNestedAlternativeNext:
+            case OpNestedAlternativeBegin:
+            case OpNestedAlternativeNext: {
+                YarrOp& nextOp = m_ops[op.m_nextOp];
+                bool isBegin = op.m_previousOp == notFound;
+                bool isLastAlternative = nextOp.m_nextOp == notFound;
+                ASSERT(isBegin == (op.m_op == OpSimpleNestedAlternativeBegin || op.m_op == OpNestedAlternativeBegin));
+                ASSERT(isLastAlternative == (nextOp.m_op == OpSimpleNestedAlternativeEnd || nextOp.m_op == OpNestedAlternativeEnd));
+
+                // Treat an input check failure the same as a failed match.
+                m_backtrackingState.append(op.m_jumps);
+
+                // Set the backtracks to jump to the appropriate place. We may need
+                // to link the backtracks in one of three different ways depending on
+                // the type of alternative we are dealing with:
+                //  - A single alternative, with no siblings.
+                //  - The last alternative of a set of two or more.
+                //  - An alternative other than the last of a set of two or more.
+                //
+                // In the case of a single alternative on its own, we don't need to
+                // jump anywhere - if the alternative fails to match we can just
+                // continue to backtrack out of the parentheses without jumping.
+                //
+                // In the case of the last alternative in a set of more than one, we
+                // need to jump back out to the beginning. We'll do so by
+                // adding a jump to the End node's m_jumps list, and linking this
+                // when we come to generate the Begin node. For alternatives other
+                // than the last, we need to jump to the next alternative.
+                //
+                // If the alternative had adjusted the input position we must link
+                // the backtracks to here, correct the input position, and then jump
+                // on. If not we can link the backtracks directly to their destinations.
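+                //
+                // As a rough illustration (the pattern and alternative names are
+                // purely for the example), for a nested disjunction /(?:ab|cd|e)/:
+                // backtracks out of 'ab' are linked to jump to the re-entry point
+                // of 'cd', backtracks out of 'cd' to the re-entry point of 'e',
+                // and backtracks out of 'e' - the last alternative - are appended
+                // to the End node's m_jumps list, to be linked when we generate
+                // the Begin node.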
+                if (op.m_checkAdjust) {
+                    // Handle the cases where we need to link the backtracks here.
+                    m_backtrackingState.link(this);
+                    sub32(Imm32(op.m_checkAdjust.unsafeGet()), index);
+                    if (!isLastAlternative) {
+                        // An alternative that is not the last should jump to its successor.
+                        jump(nextOp.m_reentry);
+                    } else if (!isBegin) {
+                        // The last of two or more alternatives must jump back to the beginning.
+                        nextOp.m_jumps.append(jump());
+                    } else {
+                        // A single alternative on its own can fall through.
+                        m_backtrackingState.fallthrough();
+                    }
+                } else {
+                    // Handle the cases where we can link the backtracks directly to their destinations.
+                    if (!isLastAlternative) {
+                        // An alternative that is not the last should jump to its successor.
+                        m_backtrackingState.linkTo(nextOp.m_reentry, this);
+                    } else if (!isBegin) {
+                        // The last of two or more alternatives must jump back to the beginning.
+                        m_backtrackingState.takeBacktracksToJumpList(nextOp.m_jumps, this);
+                    }
+                    // In the case of a single alternative on its own do nothing - it can fall through.
+                }
 
-        Label matchAgain(this);
+                // If there is a backtrack jump from a zero length match link it here.
+                if (op.m_zeroLengthMatch.isSet())
+                    m_backtrackingState.append(op.m_zeroLengthMatch);
+
+                // At this point we've handled the backtracking back into this node.
+                // Now link any backtracks that need to jump to here.
+
+                // For non-simple alternatives, link the alternative's 'return address'
+                // so that we backtrack back out into the previous alternative.
+                if (op.m_op == OpNestedAlternativeNext)
+                    m_backtrackingState.append(op.m_returnAddress);
+
+                // If there is more than one alternative, then the last alternative will
+                // have planted a jump to be linked to the end. This jump was added to the
+                // End node's m_jumps list. If we are back at the beginning, link it here.
+                if (isBegin) {
+                    YarrOp* endOp = &m_ops[op.m_nextOp];
+                    while (endOp->m_nextOp != notFound) {
+                        ASSERT(endOp->m_op == OpSimpleNestedAlternativeNext || endOp->m_op == OpNestedAlternativeNext);
+                        endOp = &m_ops[endOp->m_nextOp];
+                    }
+                    ASSERT(endOp->m_op == OpSimpleNestedAlternativeEnd || endOp->m_op == OpNestedAlternativeEnd);
+                    m_backtrackingState.append(endOp->m_jumps);
+                }
 
-        storeToFrame(index, parenthesesTerm.frameLocation); // Save the current index to check for zero len matches later.
+                if (!isBegin) {
+                    YarrOp& lastOp = m_ops[op.m_previousOp];
+                    m_checkedOffset += lastOp.m_checkAdjust;
+                }
+                m_checkedOffset -= op.m_checkAdjust;
+                break;
+            }
+            case OpSimpleNestedAlternativeEnd:
+            case OpNestedAlternativeEnd: {
+                PatternTerm* term = op.m_term;
+
+                // If there is a backtrack jump from a zero length match link it here.
+                if (op.m_zeroLengthMatch.isSet())
+                    m_backtrackingState.append(op.m_zeroLengthMatch);
+
+                // If we backtrack into the end of a simple subpattern do nothing;
+                // just continue through into the last alternative. If we backtrack
+                // into the end of a non-simple set of alternatives we need to jump
+                // to the backtracking return address set up during generation.
+                if (op.m_op == OpNestedAlternativeEnd) {
+                    m_backtrackingState.link(this);
+
+                    // Plant a jump to the return address.
+                    unsigned parenthesesFrameLocation = term->frameLocation;
+                    loadFromFrameAndJump(parenthesesFrameLocation + BackTrackInfoParentheses::returnAddressIndex());
+
+                    // Link the DataLabelPtr associated with the end of the last
+                    // alternative to this point.
+                    m_backtrackingState.append(op.m_returnAddress);
+                }
 
-        for (parenthesesState.resetAlternative(); parenthesesState.alternativeValid(); parenthesesState.nextAlternative()) {
+                YarrOp& lastOp = m_ops[op.m_previousOp];
+                m_checkedOffset += lastOp.m_checkAdjust;
+                break;
+            }
 
-            PatternAlternative* alternative = parenthesesState.alternative();
-            optimizeAlternative(alternative);
+            // OpParenthesesSubpatternOnceBegin/End
+            //
+            // When we are backtracking back out of a capturing subpattern we need
+            // to clear the start index in the matches output array, to record that
+            // this subpattern has not been captured.
+            //
+            // When backtracking back out of a Greedy quantified subpattern we need
+            // to catch this, and try running the remainder of the alternative after
+            // the subpattern again, skipping the parentheses.
+            //
+            // Upon backtracking back into a quantified set of parentheses we need to
+            // check whether we were currently skipping the subpattern. If not, we
+            // can backtrack into them; if we were, we need to either backtrack back
+            // out of the start of the parentheses, or jump back to the forwards
+            // matching start, depending on whether the match is Greedy or NonGreedy.
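+            //
+            // A sketch of the skip protocol used below (the -1 value is the one
+            // the generated code stores): when a Greedy 'Once' subpattern decides
+            // to skip itself on backtracking, it writes -1 into the
+            // BackTrackInfoParenthesesOnce::beginIndex() slot of its frame and
+            // jumps to just after the parentheses; when we later backtrack into
+            // the End node we compare that slot against -1 to tell whether the
+            // subpattern was run or skipped.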
+            case OpParenthesesSubpatternOnceBegin: {
+                PatternTerm* term = op.m_term;
+                ASSERT(term->quantityMaxCount == 1);
+
+                // We only need to backtrack to this point if capturing or greedy.
+                if ((term->capture() && compileMode == IncludeSubpatterns) || term->quantityType == QuantifierGreedy) {
+                    m_backtrackingState.link(this);
+
+                    // If capturing, clear the capture (we only need to reset start).
+                    if (term->capture() && compileMode == IncludeSubpatterns)
+                        clearSubpatternStart(term->parentheses.subpatternId);
+
+                    // If Greedy, jump to the end.
+                    if (term->quantityType == QuantifierGreedy) {
+                        // Clear the flag in the stackframe indicating we ran through the subpattern.
+                        unsigned parenthesesFrameLocation = term->frameLocation;
+                        storeToFrame(TrustedImm32(-1), parenthesesFrameLocation + BackTrackInfoParenthesesOnce::beginIndex());
+                        // Jump to after the parentheses, skipping the subpattern.
+                        jump(m_ops[op.m_nextOp].m_reentry);
+                        // A backtrack from after the parentheses, when skipping the subpattern,
+                        // will jump back to here.
+                        op.m_jumps.link(this);
+                    }
 
-            int countToCheck = alternative->m_minimumSize;
-            if (countToCheck) {
-                parenthesesState.addBacktrackJump(jumpIfNoAvailableInput(countToCheck));
-                parenthesesState.checkedTotal += countToCheck;
+                    m_backtrackingState.fallthrough();
+                }
+                break;
             }
+            case OpParenthesesSubpatternOnceEnd: {
+                PatternTerm* term = op.m_term;
+
+                if (term->quantityType != QuantifierFixedCount) {
+                    m_backtrackingState.link(this);
+
+                    // Check whether we should backtrack back into the parentheses, or if we
+                    // are currently in a state where we had skipped over the subpattern
+                    // (in which case the flag value on the stack will be -1).
+                    unsigned parenthesesFrameLocation = term->frameLocation;
+                    Jump hadSkipped = branch32(Equal, Address(stackPointerRegister, (parenthesesFrameLocation + BackTrackInfoParenthesesOnce::beginIndex()) * sizeof(void*)), TrustedImm32(-1));
+
+                    if (term->quantityType == QuantifierGreedy) {
+                        // For Greedy parentheses, we skip after having already tried going
+                        // through the subpattern, so if we get here we're done.
+                        YarrOp& beginOp = m_ops[op.m_previousOp];
+                        beginOp.m_jumps.append(hadSkipped);
+                    } else {
+                        // For NonGreedy parentheses, we try skipping the subpattern first,
+                        // so if we get here we need to try running through the subpattern
+                        // next. Jump back to the start of the parentheses in the forwards
+                        // matching path.
+                        ASSERT(term->quantityType == QuantifierNonGreedy);
+                        YarrOp& beginOp = m_ops[op.m_previousOp];
+                        hadSkipped.linkTo(beginOp.m_reentry, this);
+                    }
 
-            for (parenthesesState.resetTerm(); parenthesesState.termValid(); parenthesesState.nextTerm())
-                generateTerm(parenthesesState);
+                    m_backtrackingState.fallthrough();
+                }
 
-            // If we get here, we matched! If the index advanced then try to match more since limit isn't supported yet.
-            branch32(NotEqual, index, Address(stackPointerRegister, (parenthesesTerm.frameLocation * sizeof(void*))), matchAgain);
+                m_backtrackingState.append(op.m_jumps);
+                break;
+            }
 
-            // If we get here we matched, but we matched "" - cannot accept this alternative as is, so either backtrack,
-            // or fall through to try the next alternative if no backtrack is available.
-            parenthesesState.plantJumpToBacktrackIfExists(this);
+            // OpParenthesesSubpatternTerminalBegin/End
+            //
+            // Terminal subpatterns will always match - there is nothing after them to
+            // force a backtrack, and they have a minimum count of 0, and as such will
+            // always produce an acceptable result.
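+            //
+            // (Here 'terminal' means the kind of group exemplified by a trailing
+            // /(?:x)*/ at the end of a pattern: non-capturing, greedy, with a
+            // minimum count of zero, and with nothing following it that could fail.)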
+            case OpParenthesesSubpatternTerminalBegin: {
+                // We will backtrack to this point once the subpattern cannot match any
+                // more. Since matching nothing is accepted as a successful match (we are
+                // Greedy quantified with a minimum of zero) jump back to the forwards
+                // matching path at the end.
+                YarrOp& endOp = m_ops[op.m_nextOp];
+                m_backtrackingState.linkTo(endOp.m_reentry, this);
+                break;
+            }
+            case OpParenthesesSubpatternTerminalEnd:
+                // We should never be backtracking to here (hence the 'terminal' in the name).
+                ASSERT(m_backtrackingState.isEmpty());
+                m_backtrackingState.append(op.m_jumps);
+                break;
 
-            parenthesesState.linkAlternativeBacktracks(this);
+            // OpParenthesesSubpatternBegin/End
+            //
+            // When we are backtracking back out of a capturing subpattern we need
+            // to clear the start index in the matches output array, to record that
+            // this subpattern has not been captured.
+            //
+            // When backtracking back out of a Greedy quantified subpattern we need
+            // to catch this, and try running the remainder of the alternative after
+            // the subpattern again, skipping the parentheses.
+            //
+            // Upon backtracking back into a quantified set of parentheses we need to
+            // check whether we were currently skipping the subpattern. If not, we
+            // can backtrack into them; if we were, we need to either backtrack back
+            // out of the start of the parentheses, or jump back to the forwards
+            // matching start, depending on whether the match is Greedy or NonGreedy.
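+            //
+            // For these generic parentheses the backtracking code below (guarded
+            // by YARR_JIT_ALL_PARENS_EXPRESSIONS) roughly does the following:
+            // pop the most recent paren context off the head of the term's
+            // context list, restore the saved captures and frame state from it,
+            // return it to the free list, and decrement the match count; if the
+            // match count is already zero, flag the subpattern as not having run
+            // (-1) and continue after the parentheses instead.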
+            case OpParenthesesSubpatternBegin: {
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+                PatternTerm* term = op.m_term;
+                unsigned parenthesesFrameLocation = term->frameLocation;
 
-            // We get here if the alternative fails to match - fall through to the next iteration, or out of the loop.
+                if (term->quantityType != QuantifierFixedCount) {
+                    m_backtrackingState.link(this);
 
-            if (countToCheck) {
-                sub32(Imm32(countToCheck), index);
-                parenthesesState.checkedTotal -= countToCheck;
-            }
-        }
+                    if (term->quantityType == QuantifierGreedy) {
+                        RegisterID currParenContextReg = regT0;
+                        RegisterID newParenContextReg = regT1;
 
-        // If the last alternative falls through to here, we have a failed match...
-        // Which means that we match whatever we have matched up to this point (even if nothing).
-    }
+                        loadFromFrame(parenthesesFrameLocation + BackTrackInfoParentheses::parenContextHeadIndex(), currParenContextReg);
 
-    void generateParentheticalAssertion(TermGenerationState& state)
-    {
-        PatternTerm& term = state.term();
-        PatternDisjunction* disjunction = term.parentheses.disjunction;
-        ASSERT(term.quantityCount == 1);
-        ASSERT(term.quantityType == QuantifierFixedCount);
+                        restoreParenContext(currParenContextReg, regT2, term->parentheses.subpatternId, term->parentheses.lastSubpatternId, parenthesesFrameLocation);
 
-        unsigned parenthesesFrameLocation = term.frameLocation;
-        unsigned alternativeFrameLocation = parenthesesFrameLocation + YarrStackSpaceForBackTrackInfoParentheticalAssertion;
+                        freeParenContext(currParenContextReg, newParenContextReg);
+                        storeToFrame(newParenContextReg, parenthesesFrameLocation + BackTrackInfoParentheses::parenContextHeadIndex());
+                        const RegisterID countTemporary = regT0;
+                        loadFromFrame(parenthesesFrameLocation + BackTrackInfoParentheses::matchAmountIndex(), countTemporary);
+                        Jump zeroLengthMatch = branchTest32(Zero, countTemporary);
 
-        int countCheckedAfterAssertion = state.checkedTotal - term.inputPosition;
+                        sub32(TrustedImm32(1), countTemporary);
+                        storeToFrame(countTemporary, parenthesesFrameLocation + BackTrackInfoParentheses::matchAmountIndex());
 
-        if (term.invert()) {
-            // Inverted case
-            storeToFrame(index, parenthesesFrameLocation);
+                        jump(m_ops[op.m_nextOp].m_reentry);
 
-            state.checkedTotal -= countCheckedAfterAssertion;
-            if (countCheckedAfterAssertion)
-                sub32(Imm32(countCheckedAfterAssertion), index);
+                        zeroLengthMatch.link(this);
 
-            TermGenerationState parenthesesState(disjunction, state.checkedTotal);
-            generateParenthesesDisjunction(state.term(), parenthesesState, alternativeFrameLocation);
-            // Success! - which means - Fail!
-            loadFromFrame(parenthesesFrameLocation, index);
-            state.jumpToBacktrack(this);
+                        // Clear the flag in the stackframe indicating we didn't run through the subpattern.
+                        storeToFrame(TrustedImm32(-1), parenthesesFrameLocation + BackTrackInfoParentheses::beginIndex());
 
-            // And fail means success.
-            parenthesesState.linkAlternativeBacktracks(this);
+                        jump(m_ops[op.m_nextOp].m_reentry);
+                    }
 
-            loadFromFrame(parenthesesFrameLocation, index);
+                    // If Greedy, link the backtrack taken when the subpattern was skipped.
+                    if (term->quantityType == QuantifierGreedy) {
+                        // A backtrack from after the parentheses, when skipping the subpattern,
+                        // will jump back to here.
+                        op.m_jumps.link(this);
+                    }
 
-            state.checkedTotal += countCheckedAfterAssertion;
-        } else {
-            // Normal case
-            storeToFrame(index, parenthesesFrameLocation);
+                    m_backtrackingState.fallthrough();
+                }
+#else // !YARR_JIT_ALL_PARENS_EXPRESSIONS
+                RELEASE_ASSERT_NOT_REACHED();
+#endif
+                break;
+            }
+            case OpParenthesesSubpatternEnd: {
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+                PatternTerm* term = op.m_term;
+
+                if (term->quantityType != QuantifierFixedCount) {
+                    m_backtrackingState.link(this);
+
+                    // Check whether we should backtrack back into the parentheses, or if we
+                    // are currently in a state where we had skipped over the subpattern
+                    // (in which case the flag value on the stack will be -1).
+                    unsigned parenthesesFrameLocation = term->frameLocation;
+                    Jump hadSkipped = branch32(Equal, Address(stackPointerRegister, (parenthesesFrameLocation  + BackTrackInfoParentheses::beginIndex()) * sizeof(void*)), TrustedImm32(-1));
+
+                    if (term->quantityType == QuantifierGreedy) {
+                        // For Greedy parentheses, we skip after having already tried going
+                        // through the subpattern, so if we get here we're done.
+                        YarrOp& beginOp = m_ops[op.m_previousOp];
+                        beginOp.m_jumps.append(hadSkipped);
+                    } else {
+                        // For NonGreedy parentheses, we try skipping the subpattern first,
+                        // so if we get here we need to try running through the subpattern
+                        // next. Jump back to the start of the parentheses in the forwards
+                        // matching path.
+                        ASSERT(term->quantityType == QuantifierNonGreedy);
+                        YarrOp& beginOp = m_ops[op.m_previousOp];
+                        hadSkipped.linkTo(beginOp.m_reentry, this);
+                    }
 
-            state.checkedTotal -= countCheckedAfterAssertion;
-            if (countCheckedAfterAssertion)
-                sub32(Imm32(countCheckedAfterAssertion), index);
+                    m_backtrackingState.fallthrough();
+                }
 
-            TermGenerationState parenthesesState(disjunction, state.checkedTotal);
-            generateParenthesesDisjunction(state.term(), parenthesesState, alternativeFrameLocation);
-            // Success! - which means - Success!
-            loadFromFrame(parenthesesFrameLocation, index);
-            Jump success = jump();
+                m_backtrackingState.append(op.m_jumps);
+#else // !YARR_JIT_ALL_PARENS_EXPRESSIONS
+                RELEASE_ASSERT_NOT_REACHED();
+#endif
+                break;
+            }
 
-            parenthesesState.linkAlternativeBacktracks(this);
+            // OpParentheticalAssertionBegin/End
+            case OpParentheticalAssertionBegin: {
+                PatternTerm* term = op.m_term;
+                YarrOp& endOp = m_ops[op.m_nextOp];
 
-            loadFromFrame(parenthesesFrameLocation, index);
-            state.jumpToBacktrack(this);
+                // We need to handle the backtracks upon backtracking back out
+                // of a parenthetical assertion if either we need to correct
+                // the input index, or the assertion was inverted.
+                if (op.m_checkAdjust || term->invert()) {
+                    m_backtrackingState.link(this);
 
-            success.link(this);
+                    if (op.m_checkAdjust)
+                        add32(Imm32(op.m_checkAdjust.unsafeGet()), index);
 
-            state.checkedTotal += countCheckedAfterAssertion;
-        }
-    }
+                    // In an inverted assertion, failure to match the subpattern
+                    // is treated as a successful match - jump to the end of the
+                    // subpattern. We have already adjusted the input position
+                    // back to what it was before the assertion, which is correct.
+                    if (term->invert())
+                        jump(endOp.m_reentry);
 
-    void generateTerm(TermGenerationState& state)
-    {
-        PatternTerm& term = state.term();
+                    m_backtrackingState.fallthrough();
+                }
 
-        switch (term.type) {
-        case PatternTerm::TypeAssertionBOL:
-            generateAssertionBOL(state);
-            break;
+                // The End node's jump list will contain any backtracks into
+                // the end of the assertion. Also, if inverted, we will have
+                // added the failure caused by a successful match to this.
+                m_backtrackingState.append(endOp.m_jumps);
 
-        case PatternTerm::TypeAssertionEOL:
-            generateAssertionEOL(state);
-            break;
+                m_checkedOffset += op.m_checkAdjust;
+                break;
+            }
+            case OpParentheticalAssertionEnd: {
+                // FIXME: We should really be clearing any nested subpattern
+                // matches on bailing out from after the pattern. Firefox has
+                // this bug too (presumably because they use YARR!)
 
-        case PatternTerm::TypeAssertionWordBoundary:
-            generateAssertionWordBoundary(state);
-            break;
+                // Never backtrack into an assertion; later failures bail to before the begin.
+                m_backtrackingState.takeBacktracksToJumpList(op.m_jumps, this);
 
-        case PatternTerm::TypePatternCharacter:
-            switch (term.quantityType) {
-            case QuantifierFixedCount:
-                if (term.quantityCount == 1) {
-                    if (state.isSinglePatternCharacterLookaheadTerm() && (state.lookaheadTerm().inputPosition == (term.inputPosition + 1))) {
-                        generatePatternCharacterPair(state);
-                        state.nextTerm();
-                    } else
-                        generatePatternCharacterSingle(state);
-                } else
-                    generatePatternCharacterFixed(state);
-                break;
-            case QuantifierGreedy:
-                generatePatternCharacterGreedy(state);
-                break;
-            case QuantifierNonGreedy:
-                generatePatternCharacterNonGreedy(state);
+                YarrOp& lastOp = m_ops[op.m_previousOp];
+                m_checkedOffset -= lastOp.m_checkAdjust;
                 break;
             }
-            break;
 
-        case PatternTerm::TypeCharacterClass:
-            switch (term.quantityType) {
-            case QuantifierFixedCount:
-                if (term.quantityCount == 1)
-                    generateCharacterClassSingle(state);
-                else
-                    generateCharacterClassFixed(state);
-                break;
-            case QuantifierGreedy:
-                generateCharacterClassGreedy(state);
-                break;
-            case QuantifierNonGreedy:
-                generateCharacterClassNonGreedy(state);
+            case OpMatchFailed:
                 break;
             }
-            break;
-
-        case PatternTerm::TypeBackReference:
-            m_shouldFallBack = true;
-            break;
 
-        case PatternTerm::TypeForwardReference:
-            break;
-
-        case PatternTerm::TypeParenthesesSubpattern:
-            if (term.quantityCount == 1 && !term.parentheses.isCopy)
-                generateParenthesesSingle(state);
-            else if (term.parentheses.isTerminal)
-                generateParenthesesGreedyNoBacktrack(state);
-            else
-                m_shouldFallBack = true;
-            break;
-
-        case PatternTerm::TypeParentheticalAssertion:
-            generateParentheticalAssertion(state);
-            break;
-        }
+        } while (opIndex);
     }
 
-    void generateDisjunction(PatternDisjunction* disjunction)
+    // Compilation methods:
+    // ====================
+
+    // opCompileParenthesesSubpattern
+    // Emits ops for a subpattern (set of parentheses). These consist
+    // of a set of alternatives wrapped in an outer set of nodes for
+    // the parentheses.
+    // Supported types of parentheses are 'Once' (quantityMaxCount == 1),
+    // 'Terminal' (non-capturing parentheses quantified as greedy
+    // and infinite), and greedy quantified parentheses with a minimum count of zero.
+    // Alternatives will use the 'Simple' set of ops if either the
+    // subpattern is terminal (in which case we will never need to
+    // backtrack), or if the subpattern only contains one alternative.
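+    // As an illustrative example (the pattern is arbitrary), a term for
+    // /(a|b)/ - quantityMaxCount == 1, two alternatives - is emitted as:
+    //   OpParenthesesSubpatternOnceBegin,
+    //   OpNestedAlternativeBegin, <terms for 'a'>,
+    //   OpNestedAlternativeNext, <terms for 'b'>,
+    //   OpNestedAlternativeEnd,
+    //   OpParenthesesSubpatternOnceEnd
+    // with the alternative nodes chained via m_previousOp/m_nextOp and the
+    // two parentheses nodes linked to each other.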
+    void opCompileParenthesesSubpattern(PatternTerm* term)
     {
-        TermGenerationState state(disjunction, 0);
-        state.resetAlternative();
+        YarrOpCode parenthesesBeginOpCode;
+        YarrOpCode parenthesesEndOpCode;
+        YarrOpCode alternativeBeginOpCode = OpSimpleNestedAlternativeBegin;
+        YarrOpCode alternativeNextOpCode = OpSimpleNestedAlternativeNext;
+        YarrOpCode alternativeEndOpCode = OpSimpleNestedAlternativeEnd;
+
+        // We can currently only compile quantity 1 subpatterns that are
+        // not copies. We generate a copy in the case of a range quantifier,
+    // e.g. /(?:x){3,9}/, or /(?:x)+/ (these are effectively expanded to
+    // /(?:x){3,3}(?:x){0,6}/ and /(?:x)(?:x)*/ respectively). The problem
+        // comes where the subpattern is capturing, in which case we would
+        // need to restore the capture from the first subpattern upon a
+        // failure in the second.
+        if (term->quantityMinCount && term->quantityMinCount != term->quantityMaxCount) {
+            m_failureReason = JITFailureReason::VariableCountedParenthesisWithNonZeroMinimum;
+            return;
+        }
+
+        if (term->quantityMaxCount == 1 && !term->parentheses.isCopy) {
+            // Select the 'Once' nodes.
+            parenthesesBeginOpCode = OpParenthesesSubpatternOnceBegin;
+            parenthesesEndOpCode = OpParenthesesSubpatternOnceEnd;
+
+            // If there is more than one alternative we cannot use the 'simple' nodes.
+            if (term->parentheses.disjunction->m_alternatives.size() != 1) {
+                alternativeBeginOpCode = OpNestedAlternativeBegin;
+                alternativeNextOpCode = OpNestedAlternativeNext;
+                alternativeEndOpCode = OpNestedAlternativeEnd;
+            }
+        } else if (term->parentheses.isTerminal) {
+            // Select the 'Terminal' nodes.
+            parenthesesBeginOpCode = OpParenthesesSubpatternTerminalBegin;
+            parenthesesEndOpCode = OpParenthesesSubpatternTerminalEnd;
+        } else {
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+            // We only handle generic parentheses with greedy counts.
+            if (term->quantityType != QuantifierGreedy) {
+                // This subpattern is not supported by the JIT.
+                m_failureReason = JITFailureReason::NonGreedyParenthesizedSubpattern;
+                return;
+            }
 
-        // check availability for the next alternative
-        int countCheckedForCurrentAlternative = 0;
-        int countToCheckForFirstAlternative = 0;
-        bool hasShorterAlternatives = false;
-        bool setRepeatAlternativeLabels = false;
-        JumpList notEnoughInputForPreviousAlternative;
-        Label firstAlternative;
-        Label firstAlternativeInputChecked;
+            m_containsNestedSubpatterns = true;
 
-        // The label 'firstAlternative' is used to plant a check to see if there is 
-        // sufficient input available to run the first repeating alternative.
-        // The label 'firstAlternativeInputChecked' will jump directly to matching 
-        // the first repeating alternative having skipped this check.
+            // Select the 'Generic' nodes.
+            parenthesesBeginOpCode = OpParenthesesSubpatternBegin;
+            parenthesesEndOpCode = OpParenthesesSubpatternEnd;
 
-        if (state.alternativeValid()) {
-            PatternAlternative* alternative = state.alternative();
-            if (!alternative->onceThrough()) {
-                firstAlternative = Label(this);
-                setRepeatAlternativeLabels = true;
+            // If there is more than one alternative we cannot use the 'simple' nodes.
+            if (term->parentheses.disjunction->m_alternatives.size() != 1) {
+                alternativeBeginOpCode = OpNestedAlternativeBegin;
+                alternativeNextOpCode = OpNestedAlternativeNext;
+                alternativeEndOpCode = OpNestedAlternativeEnd;
             }
-            countToCheckForFirstAlternative = alternative->m_minimumSize;
-            state.checkedTotal += countToCheckForFirstAlternative;
-            if (countToCheckForFirstAlternative)
-                notEnoughInputForPreviousAlternative.append(jumpIfNoAvailableInput(countToCheckForFirstAlternative));
-            countCheckedForCurrentAlternative = countToCheckForFirstAlternative;
+#else
+            // This subpattern is not supported by the JIT.
+            m_failureReason = JITFailureReason::ParenthesizedSubpattern;
+            return;
+#endif
         }
 
-        if (setRepeatAlternativeLabels)
-            firstAlternativeInputChecked = Label(this);
+        size_t parenBegin = m_ops.size();
+        m_ops.append(parenthesesBeginOpCode);
 
-        while (state.alternativeValid()) {
-            PatternAlternative* alternative = state.alternative();
-            optimizeAlternative(alternative);
+        m_ops.append(alternativeBeginOpCode);
+        m_ops.last().m_previousOp = notFound;
+        m_ops.last().m_term = term;
+        Vector<std::unique_ptr<PatternAlternative>>& alternatives = term->parentheses.disjunction->m_alternatives;
+        for (unsigned i = 0; i < alternatives.size(); ++i) {
+            size_t lastOpIndex = m_ops.size() - 1;
 
-            // Track whether any alternatives are shorter than the first one.
-            if (!alternative->onceThrough())
-                hasShorterAlternatives = hasShorterAlternatives || (countCheckedForCurrentAlternative < countToCheckForFirstAlternative);
+            PatternAlternative* nestedAlternative = alternatives[i].get();
+            opCompileAlternative(nestedAlternative);
 
-            for (state.resetTerm(); state.termValid(); state.nextTerm())
-                generateTerm(state);
+            size_t thisOpIndex = m_ops.size();
+            m_ops.append(YarrOp(alternativeNextOpCode));
 
-            // If we get here, the alternative matched.
-            if (m_pattern.m_body->m_callFrameSize)
-                addPtr(Imm32(m_pattern.m_body->m_callFrameSize * sizeof(void*)), stackPointerRegister);
+            YarrOp& lastOp = m_ops[lastOpIndex];
+            YarrOp& thisOp = m_ops[thisOpIndex];
 
-            ASSERT(index != returnRegister);
-            if (m_pattern.m_body->m_hasFixedSize) {
-                move(index, returnRegister);
-                if (alternative->m_minimumSize)
-                    sub32(Imm32(alternative->m_minimumSize), returnRegister);
+            lastOp.m_alternative = nestedAlternative;
+            lastOp.m_nextOp = thisOpIndex;
+            thisOp.m_previousOp = lastOpIndex;
+            thisOp.m_term = term;
+        }
+        YarrOp& lastOp = m_ops.last();
+        ASSERT(lastOp.m_op == alternativeNextOpCode);
+        lastOp.m_op = alternativeEndOpCode;
+        lastOp.m_alternative = 0;
+        lastOp.m_nextOp = notFound;
 
-                store32(returnRegister, output);
-            } else
-                load32(Address(output), returnRegister);
+        size_t parenEnd = m_ops.size();
+        m_ops.append(parenthesesEndOpCode);
 
-            store32(index, Address(output, 4));
+        m_ops[parenBegin].m_term = term;
+        m_ops[parenBegin].m_previousOp = notFound;
+        m_ops[parenBegin].m_nextOp = parenEnd;
+        m_ops[parenEnd].m_term = term;
+        m_ops[parenEnd].m_previousOp = parenBegin;
+        m_ops[parenEnd].m_nextOp = notFound;
+    }
 
-            generateReturn();
+    // opCompileParentheticalAssertion
+    // Emits ops for a parenthetical assertion. These consist of an
+    // OpSimpleNestedAlternativeBegin/Next/End set of nodes wrapping
+    // the alternatives, with these wrapped by an outer pair of
+    // OpParentheticalAssertionBegin/End nodes.
+    // We can always use the OpSimpleNestedAlternative nodes in the
+    // case of parenthetical assertions since these only ever match
+    // once, and will never backtrack back into the assertion.
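+    // For example (purely illustrative), /(?=ab)/ is emitted as:
+    //   OpParentheticalAssertionBegin,
+    //   OpSimpleNestedAlternativeBegin, <terms for 'ab'>,
+    //   OpSimpleNestedAlternativeEnd,
+    //   OpParentheticalAssertionEnd
+    // linked in the same doubly linked fashion as the subpattern nodes above.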
+    void opCompileParentheticalAssertion(PatternTerm* term)
+    {
+        size_t parenBegin = m_ops.size();
+        m_ops.append(OpParentheticalAssertionBegin);
+
+        m_ops.append(OpSimpleNestedAlternativeBegin);
+        m_ops.last().m_previousOp = notFound;
+        m_ops.last().m_term = term;
+        Vector<std::unique_ptr<PatternAlternative>>& alternatives =  term->parentheses.disjunction->m_alternatives;
+        for (unsigned i = 0; i < alternatives.size(); ++i) {
+            size_t lastOpIndex = m_ops.size() - 1;
+
+            PatternAlternative* nestedAlternative = alternatives[i].get();
+            opCompileAlternative(nestedAlternative);
+
+            size_t thisOpIndex = m_ops.size();
+            m_ops.append(YarrOp(OpSimpleNestedAlternativeNext));
+
+            YarrOp& lastOp = m_ops[lastOpIndex];
+            YarrOp& thisOp = m_ops[thisOpIndex];
+
+            lastOp.m_alternative = nestedAlternative;
+            lastOp.m_nextOp = thisOpIndex;
+            thisOp.m_previousOp = lastOpIndex;
+            thisOp.m_term = term;
+        }
+        YarrOp& lastOp = m_ops.last();
+        ASSERT(lastOp.m_op == OpSimpleNestedAlternativeNext);
+        lastOp.m_op = OpSimpleNestedAlternativeEnd;
+        lastOp.m_alternative = 0;
+        lastOp.m_nextOp = notFound;
+
+        size_t parenEnd = m_ops.size();
+        m_ops.append(OpParentheticalAssertionEnd);
+
+        m_ops[parenBegin].m_term = term;
+        m_ops[parenBegin].m_previousOp = notFound;
+        m_ops[parenBegin].m_nextOp = parenEnd;
+        m_ops[parenEnd].m_term = term;
+        m_ops[parenEnd].m_previousOp = parenBegin;
+        m_ops[parenEnd].m_nextOp = notFound;
+    }
 
-            state.nextAlternative();
-            if (alternative->onceThrough() && state.alternativeValid())
-                state.clearBacktrack();
+    // opCompileAlternative
+    // Called to emit nodes for all terms in an alternative.
+    void opCompileAlternative(PatternAlternative* alternative)
+    {
+        optimizeAlternative(alternative);
 
-            // if there are any more alternatives, plant the check for input before looping.
-            if (state.alternativeValid()) {
-                state.setJumpListToPriorParen(0);
-                PatternAlternative* nextAlternative = state.alternative();
-                if (!setRepeatAlternativeLabels && !nextAlternative->onceThrough()) {
-                    // We have handled non-repeating alternatives, jump to next iteration 
-                    // and loop over repeating alternatives.
-                    state.jumpToBacktrack(this);
+        for (unsigned i = 0; i < alternative->m_terms.size(); ++i) {
+            PatternTerm* term = &alternative->m_terms[i];
 
-                    countToCheckForFirstAlternative = nextAlternative->m_minimumSize;
+            switch (term->type) {
+            case PatternTerm::TypeParenthesesSubpattern:
+                opCompileParenthesesSubpattern(term);
+                break;
 
-                    // If we get here, there the last input checked failed.
-                    notEnoughInputForPreviousAlternative.link(this);
+            case PatternTerm::TypeParentheticalAssertion:
+                opCompileParentheticalAssertion(term);
+                break;
 
-                    state.linkAlternativeBacktracks(this);
+            default:
+                m_ops.append(term);
+            }
+        }
+    }
 
-                    // Back up to start the looping alternatives.
-                    if (countCheckedForCurrentAlternative)
-                        sub32(Imm32(countCheckedForCurrentAlternative), index);
+    // opCompileBody
+    // This method compiles the body disjunction of the regular expression.
+    // The body consists of two sets of alternatives - zero or more 'once
+    // through' (BOL anchored) alternatives, followed by zero or more
+    // repeated alternatives.
+    // For each of these two sets of alternatives, if not empty, they will be
+    // wrapped in a set of OpBodyAlternativeBegin/Next/End nodes (with the
+    // 'begin' node referencing the first alternative, and 'next' nodes
+    // referencing any further alternatives). The begin/next/end nodes are
+    // linked together in a doubly linked list. In the case of repeating
+    // alternatives, the end node is also linked back to the beginning.
+    // If no repeating alternatives exist, then an OpMatchFailed node exists
+    // to return the failing result.
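+    // For example (an illustrative pattern, not one treated specially),
+    // /^foo|bar|baz/ emits a once-through Begin/End pair wrapping the '^foo'
+    // alternative, followed by a repeating Begin/Next/End set wrapping 'bar'
+    // and 'baz', with the repeating End node's m_nextOp pointing back at its
+    // Begin node.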
+    void opCompileBody(PatternDisjunction* disjunction)
+    {
+        Vector<std::unique_ptr<PatternAlternative>>& alternatives = disjunction->m_alternatives;
+        size_t currentAlternativeIndex = 0;
 
-                    firstAlternative = Label(this);
+        // Emit the 'once through' alternatives.
+        if (alternatives.size() && alternatives[0]->onceThrough()) {
+            m_ops.append(YarrOp(OpBodyAlternativeBegin));
+            m_ops.last().m_previousOp = notFound;
 
-                    state.checkedTotal = countToCheckForFirstAlternative;
-                    if (countToCheckForFirstAlternative)
-                        notEnoughInputForPreviousAlternative.append(jumpIfNoAvailableInput(countToCheckForFirstAlternative));
+            do {
+                size_t lastOpIndex = m_ops.size() - 1;
+                PatternAlternative* alternative = alternatives[currentAlternativeIndex].get();
+                opCompileAlternative(alternative);
 
-                    countCheckedForCurrentAlternative = countToCheckForFirstAlternative;
+                size_t thisOpIndex = m_ops.size();
+                m_ops.append(YarrOp(OpBodyAlternativeNext));
 
-                    firstAlternativeInputChecked = Label(this);
+                YarrOp& lastOp = m_ops[lastOpIndex];
+                YarrOp& thisOp = m_ops[thisOpIndex];
 
-                    setRepeatAlternativeLabels = true;
-                } else {
-                    int countToCheckForNextAlternative = nextAlternative->m_minimumSize;
-
-                    if (countCheckedForCurrentAlternative > countToCheckForNextAlternative) { // CASE 1: current alternative was longer than the next one.
-                        // If we get here, then the last input checked failed.
-                        notEnoughInputForPreviousAlternative.link(this);
-
-                        // Check if sufficent input available to run the next alternative 
-                        notEnoughInputForPreviousAlternative.append(jumpIfNoAvailableInput(countToCheckForNextAlternative - countCheckedForCurrentAlternative));
-                        // We are now in the correct state to enter the next alternative; this add is only required
-                        // to mirror and revert operation of the sub32, just below.
-                        add32(Imm32(countCheckedForCurrentAlternative - countToCheckForNextAlternative), index);
-
-                        // If we get here, then the last input checked passed.
-                        state.linkAlternativeBacktracks(this);
-
-                        // No need to check if we can run the next alternative, since it is shorter -
-                        // just update index.
-                        sub32(Imm32(countCheckedForCurrentAlternative - countToCheckForNextAlternative), index);
-                    } else if (countCheckedForCurrentAlternative < countToCheckForNextAlternative) { // CASE 2: next alternative is longer than the current one.
-                        // If we get here, then the last input checked failed.
-                        // If there is insufficient input to run the current alternative, and the next alternative is longer,
-                        // then there is definitely not enough input to run it - don't even check. Just adjust index, as if
-                        // we had checked.
-                        notEnoughInputForPreviousAlternative.link(this);
-                        add32(Imm32(countToCheckForNextAlternative - countCheckedForCurrentAlternative), index);
-                        notEnoughInputForPreviousAlternative.append(jump());
-
-                        // The next alternative is longer than the current one; check the difference.
-                        state.linkAlternativeBacktracks(this);
-
-                        notEnoughInputForPreviousAlternative.append(jumpIfNoAvailableInput(countToCheckForNextAlternative - countCheckedForCurrentAlternative));
-                    } else { // CASE 3: Both alternatives are the same length.
-                        ASSERT(countCheckedForCurrentAlternative == countToCheckForNextAlternative);
-
-                        // If the next alterative is the same length as this one, then no need to check the input -
-                        // if there was sufficent input to run the current alternative then there is sufficient
-                        // input to run the next one; if not, there isn't.
-                        state.linkAlternativeBacktracks(this);
-                    }
-                    state.checkedTotal -= countCheckedForCurrentAlternative;
-                    countCheckedForCurrentAlternative = countToCheckForNextAlternative;
-                    state.checkedTotal += countCheckedForCurrentAlternative;
-                }
-            }
+                lastOp.m_alternative = alternative;
+                lastOp.m_nextOp = thisOpIndex;
+                thisOp.m_previousOp = lastOpIndex;
+                
+                ++currentAlternativeIndex;
+            } while (currentAlternativeIndex < alternatives.size() && alternatives[currentAlternativeIndex]->onceThrough());
+
+            YarrOp& lastOp = m_ops.last();
+
+            ASSERT(lastOp.m_op == OpBodyAlternativeNext);
+            lastOp.m_op = OpBodyAlternativeEnd;
+            lastOp.m_alternative = 0;
+            lastOp.m_nextOp = notFound;
         }
 
-        // If we get here, all Alternatives failed...
+        if (currentAlternativeIndex == alternatives.size()) {
+            m_ops.append(YarrOp(OpMatchFailed));
+            return;
+        }
 
-        state.checkedTotal -= countCheckedForCurrentAlternative;
+        // Emit the repeated alternatives.
+        size_t repeatLoop = m_ops.size();
+        m_ops.append(YarrOp(OpBodyAlternativeBegin));
+        m_ops.last().m_previousOp = notFound;
+        do {
+            size_t lastOpIndex = m_ops.size() - 1;
+            PatternAlternative* alternative = alternatives[currentAlternativeIndex].get();
+            ASSERT(!alternative->onceThrough());
+            opCompileAlternative(alternative);
 
-        if (!setRepeatAlternativeLabels) {
-            // If there are no alternatives that need repeating (all are marked 'onceThrough') then just link
-            // the match failures to this point, and fall through to the return below.
-            state.linkAlternativeBacktracks(this, true);
+            size_t thisOpIndex = m_ops.size();
+            m_ops.append(YarrOp(OpBodyAlternativeNext));
 
-            notEnoughInputForPreviousAlternative.link(this);
-        } else {
-            // How much more input need there be to be able to retry from the first alternative?
-            // examples:
-            //   /yarr_jit/ or /wrec|pcre/
-            //     In these examples we need check for one more input before looping.
-            //   /yarr_jit|pcre/
-            //     In this case we need check for 5 more input to loop (+4 to allow for the first alterative
-            //     being four longer than the last alternative checked, and another +1 to effectively move
-            //     the start position along by one).
-            //   /yarr|rules/ or /wrec|notsomuch/
-            //     In these examples, provided that there was sufficient input to have just been matching for
-            //     the second alternative we can loop without checking for available input (since the second
-            //     alternative is longer than the first).  In the latter example we need to decrement index
-            //     (by 4) so the start position is only progressed by 1 from the last iteration.
-            int incrementForNextIter = (countToCheckForFirstAlternative - countCheckedForCurrentAlternative) + 1;
-
-            // First, deal with the cases where there was sufficient input to try the last alternative.
-            if (incrementForNextIter > 0) // We need to check for more input anyway, fall through to the checking below.
-                state.linkAlternativeBacktracks(this, true);
-            else if (m_pattern.m_body->m_hasFixedSize && !incrementForNextIter) // No need to update anything, link these backtracks straight to the to pof the loop!
-                state.linkAlternativeBacktracksTo(this, firstAlternativeInputChecked, true);
-            else { // no need to check the input, but we do have some bookkeeping to do first.
-                state.linkAlternativeBacktracks(this, true);
-
-                // Where necessary update our preserved start position.
-                if (!m_pattern.m_body->m_hasFixedSize) {
-                    move(index, regT0);
-                    sub32(Imm32(countCheckedForCurrentAlternative - 1), regT0);
-                    store32(regT0, Address(output));
-                }
+            YarrOp& lastOp = m_ops[lastOpIndex];
+            YarrOp& thisOp = m_ops[thisOpIndex];
 
-                // Update index if necessary, and loop (without checking).
-                if (incrementForNextIter)
-                    add32(Imm32(incrementForNextIter), index);
-                jump().linkTo(firstAlternativeInputChecked, this);
-            }
+            lastOp.m_alternative = alternative;
+            lastOp.m_nextOp = thisOpIndex;
+            thisOp.m_previousOp = lastOpIndex;
+            
+            ++currentAlternativeIndex;
+        } while (currentAlternativeIndex < alternatives.size());
+        YarrOp& lastOp = m_ops.last();
+        ASSERT(lastOp.m_op == OpBodyAlternativeNext);
+        lastOp.m_op = OpBodyAlternativeEnd;
+        lastOp.m_alternative = 0;
+        lastOp.m_nextOp = repeatLoop;
+    }
 
-            notEnoughInputForPreviousAlternative.link(this);
-            // Update our idea of the start position, if we're tracking this.
-            if (!m_pattern.m_body->m_hasFixedSize) {
-                if (countCheckedForCurrentAlternative - 1) {
-                    move(index, regT0);
-                    sub32(Imm32(countCheckedForCurrentAlternative - 1), regT0);
-                    store32(regT0, Address(output));
-                } else
-                    store32(index, Address(output));
-            }
+    void generateTryReadUnicodeCharacterHelper()
+    {
+#ifdef JIT_UNICODE_EXPRESSIONS
+        if (m_tryReadUnicodeCharacterCalls.isEmpty())
+            return;
 
-            // Check if there is sufficent input to run the first alternative again.
-            jumpIfAvailableInput(incrementForNextIter).linkTo(firstAlternativeInputChecked, this);
-            // No - insufficent input to run the first alteranative, are there any other alternatives we
-            // might need to check?  If so, the last check will have left the index incremented by
-            // (countToCheckForFirstAlternative + 1), so we need test whether countToCheckForFirstAlternative
-            // LESS input is available, to have the effect of just progressing the start position by 1
-            // from the last iteration.  If this check passes we can just jump up to the check associated
-            // with the first alternative in the loop.  This is a bit sad, since we'll end up trying the
-            // first alternative again, and this check will fail (otherwise the check planted just above
-            // here would have passed).  This is a bit sad, however it saves trying to do something more
-            // complex here in compilation, and in the common case we should end up coallescing the checks.
-            //
-            // FIXME: a nice improvement here may be to stop trying to match sooner, based on the least
-            // of the minimum-alternative-lengths.  E.g. if I have two alternatives of length 200 and 150,
-            // and a string of length 100, we'll end up looping index from 0 to 100, checking whether there
-            // is sufficient input to run either alternative (constantly failing).  If there had been only
-            // one alternative, or if the shorter alternative had come first, we would have terminated
-            // immediately. :-/
-            if (hasShorterAlternatives)
-                jumpIfAvailableInput(-countToCheckForFirstAlternative).linkTo(firstAlternative, this);
-            // index will now be a bit garbled (depending on whether 'hasShorterAlternatives' is true,
-            // it has either been incremented by 1 or by (countToCheckForFirstAlternative + 1) ... 
-            // but since we're about to return a failure this doesn't really matter!)
-        }
+        ASSERT(m_decodeSurrogatePairs);
 
-        if (m_pattern.m_body->m_callFrameSize)
-            addPtr(Imm32(m_pattern.m_body->m_callFrameSize * sizeof(void*)), stackPointerRegister);
+        m_tryReadUnicodeCharacterEntry = label();
 
-        move(TrustedImm32(-1), returnRegister);
+#if CPU(ARM64)
+        tagPtr(linkRegister, stackPointerRegister);
+#endif
 
-        generateReturn();
+        tryReadUnicodeCharImpl(regT0);
 
-        m_expressionState.emitParenthesesTail(this);
-        m_expressionState.emitIndirectJumpTable(this);
-        m_expressionState.linkToNextIteration(this);
+        ret();
+#endif
     }
 
     void generateEnter()
@@ -2166,7 +3297,37 @@ class YarrGenerator : private MacroAssembler {
 #if CPU(X86_64)
         push(X86Registers::ebp);
         move(stackPointerRegister, X86Registers::ebp);
-        push(X86Registers::ebx);
+
+        if (m_pattern.m_saveInitialStartValue)
+            push(X86Registers::ebx);
+
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+        if (m_containsNestedSubpatterns) {
+#if OS(WINDOWS)
+            push(X86Registers::edi);
+            push(X86Registers::esi);
+#endif
+            push(X86Registers::r12);
+        }
+#endif
+
+        if (m_decodeSurrogatePairs) {
+            push(X86Registers::r13);
+            push(X86Registers::r14);
+            push(X86Registers::r15);
+
+            move(TrustedImm32(0xd800), leadingSurrogateTag);
+            move(TrustedImm32(0xdc00), trailingSurrogateTag);
+        }
+        // The ABI doesn't guarantee the upper bits are zero on unsigned arguments, so clear them ourselves.
+        zeroExtend32ToPtr(index, index);
+        zeroExtend32ToPtr(length, length);
+#if OS(WINDOWS)
+        if (compileMode == IncludeSubpatterns)
+            loadPtr(Address(X86Registers::ebp, 6 * sizeof(void*)), output);
+        // rcx is the pointer to the allocated space for result in x64 Windows.
+        push(X86Registers::ecx);
+#endif
 #elif CPU(X86)
         push(X86Registers::ebp);
         move(stackPointerRegister, X86Registers::ebp);
@@ -2179,46 +3340,81 @@ class YarrGenerator : private MacroAssembler {
         loadPtr(Address(X86Registers::ebp, 2 * sizeof(void*)), input);
         loadPtr(Address(X86Registers::ebp, 3 * sizeof(void*)), index);
         loadPtr(Address(X86Registers::ebp, 4 * sizeof(void*)), length);
-        loadPtr(Address(X86Registers::ebp, 5 * sizeof(void*)), output);
+        if (compileMode == IncludeSubpatterns)
+            loadPtr(Address(X86Registers::ebp, 5 * sizeof(void*)), output);
     #else
-        loadPtr(Address(X86Registers::ebp, 2 * sizeof(void*)), output);
+        if (compileMode == IncludeSubpatterns)
+            loadPtr(Address(X86Registers::ebp, 2 * sizeof(void*)), output);
     #endif
+#elif CPU(ARM64)
+        tagPtr(linkRegister, stackPointerRegister);
+        if (m_decodeSurrogatePairs) {
+            pushPair(framePointerRegister, linkRegister);
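+            // These immediates feed the surrogate-pair handling on the Unicode
+            // matching paths: (c & surrogateTagMask) == leadingSurrogateTag
+            // identifies a leading surrogate, == trailingSurrogateTag a trailing
+            // one, and a decoded pair maps into the supplementary planes, which
+            // start at supplementaryPlanesBase (0x10000).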
+            move(TrustedImm32(0x10000), supplementaryPlanesBase);
+            move(TrustedImm32(0xfffffc00), surrogateTagMask);
+            move(TrustedImm32(0xd800), leadingSurrogateTag);
+            move(TrustedImm32(0xdc00), trailingSurrogateTag);
+        }
+
+        // The ABI doesn't guarantee the upper bits are zero on unsigned arguments, so clear them ourselves.
+        zeroExtend32ToPtr(index, index);
+        zeroExtend32ToPtr(length, length);
 #elif CPU(ARM)
         push(ARMRegisters::r4);
         push(ARMRegisters::r5);
         push(ARMRegisters::r6);
-#if CPU(ARM_TRADITIONAL)
-        push(ARMRegisters::r8); // scratch register
-#endif
-        move(ARMRegisters::r3, output);
-#elif CPU(SH4)
-        push(SH4Registers::r11);
-        push(SH4Registers::r13);
+        push(ARMRegisters::r8);
 #elif CPU(MIPS)
         // Do nothing.
 #endif
+
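+        // Flag the VM as executing inside RegExp JIT code; generateReturn()
+        // stores 0 back to the same flag before returning to the caller.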
+        store8(TrustedImm32(1), &m_vm->isExecutingInRegExpJIT);
     }
 
     void generateReturn()
     {
+        store8(TrustedImm32(0), &m_vm->isExecutingInRegExpJIT);
+
 #if CPU(X86_64)
-        pop(X86Registers::ebx);
+#if OS(WINDOWS)
+        // Store the return value in the allocated space pointed to by rcx.
+        pop(X86Registers::ecx);
+        store64(returnRegister, Address(X86Registers::ecx));
+        store64(returnRegister2, Address(X86Registers::ecx, sizeof(void*)));
+        move(X86Registers::ecx, returnRegister);
+#endif
+        if (m_decodeSurrogatePairs) {
+            pop(X86Registers::r15);
+            pop(X86Registers::r14);
+            pop(X86Registers::r13);
+        }
+
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+        if (m_containsNestedSubpatterns) {
+            pop(X86Registers::r12);
+#if OS(WINDOWS)
+            pop(X86Registers::esi);
+            pop(X86Registers::edi);
+#endif
+        }
+#endif
+
+        if (m_pattern.m_saveInitialStartValue)
+            pop(X86Registers::ebx);
         pop(X86Registers::ebp);
 #elif CPU(X86)
         pop(X86Registers::esi);
         pop(X86Registers::edi);
         pop(X86Registers::ebx);
         pop(X86Registers::ebp);
+#elif CPU(ARM64)
+        if (m_decodeSurrogatePairs)
+            popPair(framePointerRegister, linkRegister);
 #elif CPU(ARM)
-#if CPU(ARM_TRADITIONAL)
-        pop(ARMRegisters::r8); // scratch register
-#endif
+        pop(ARMRegisters::r8);
         pop(ARMRegisters::r6);
         pop(ARMRegisters::r5);
         pop(ARMRegisters::r4);
-#elif CPU(SH4)
-        pop(SH4Registers::r13);
-        pop(SH4Registers::r11);
 #elif CPU(MIPS)
         // Do nothing
 #endif
@@ -2226,52 +3422,195 @@ class YarrGenerator : private MacroAssembler {
     }
 
 public:
-    YarrGenerator(YarrPattern& pattern)
-        : m_pattern(pattern)
-        , m_shouldFallBack(false)
+    YarrGenerator(VM* vm, YarrPattern& pattern, YarrCodeBlock& codeBlock, YarrCharSize charSize)
+        : m_vm(vm)
+        , m_pattern(pattern)
+        , m_codeBlock(codeBlock)
+        , m_charSize(charSize)
+        , m_decodeSurrogatePairs(m_charSize == Char16 && m_pattern.unicode())
+        , m_unicodeIgnoreCase(m_pattern.unicode() && m_pattern.ignoreCase())
+        , m_canonicalMode(m_pattern.unicode() ? CanonicalMode::Unicode : CanonicalMode::UCS2)
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+        , m_containsNestedSubpatterns(false)
+        , m_parenContextSizes(compileMode == IncludeSubpatterns ? m_pattern.m_numSubpatterns : 0, m_pattern.m_body->m_callFrameSize)
+#endif
     {
     }
 
-    void generate()
+    void compile()
     {
+        YarrCodeBlock& codeBlock = m_codeBlock;
+
+#ifndef JIT_UNICODE_EXPRESSIONS
+        if (m_decodeSurrogatePairs) {
+            codeBlock.setFallBackWithFailureReason(JITFailureReason::DecodeSurrogatePair);
+            return;
+        }
+#endif
+
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+        if (m_containsNestedSubpatterns)
+            codeBlock.setUsesPaternContextBuffer();
+#endif
+
+        // We need to compile the pattern into the m_ops list before generating code, since
+        // compilation sets flags that are consulted during generation.
+        opCompileBody(m_pattern.m_body);
+        
+        if (m_failureReason) {
+            codeBlock.setFallBackWithFailureReason(*m_failureReason);
+            return;
+        }
+        
         generateEnter();
 
+        Jump hasInput = checkInput();
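+        // If the start index is past the end of the input, fall through to the failure return.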
+        generateFailReturn();
+        hasInput.link(this);
+
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+        if (m_containsNestedSubpatterns)
+            move(TrustedImm32(matchLimit), remainingMatchCount);
+#endif
+
+        if (compileMode == IncludeSubpatterns) {
+            for (unsigned i = 0; i < m_pattern.m_numSubpatterns + 1; ++i)
+                store32(TrustedImm32(-1), Address(output, (i << 1) * sizeof(int)));
+        }
+
         if (!m_pattern.m_body->m_hasFixedSize)
-            store32(index, Address(output));
+            setMatchStart(index);
 
-        if (m_pattern.m_body->m_callFrameSize)
-            subPtr(Imm32(m_pattern.m_body->m_callFrameSize * sizeof(void*)), stackPointerRegister);
+        initCallFrame();
 
-        generateDisjunction(m_pattern.m_body);
-    }
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+        if (m_containsNestedSubpatterns)
+            initParenContextFreeList();
+#endif
+        
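+        // Remember the initial start index, either in a dedicated register or in a frame slot, depending on the target.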
+        if (m_pattern.m_saveInitialStartValue) {
+#ifdef HAVE_INITIAL_START_REG
+            move(index, initialStart);
+#else
+            storeToFrame(index, m_pattern.m_initialStartValueFrameLocation);
+#endif
+        }
 
-    void compile(JSGlobalData* globalData, YarrCodeBlock& jitObject)
-    {
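+        // Emit the code for matching the expression, followed by the backtracking code.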
         generate();
+        backtrack();
+
+        generateTryReadUnicodeCharacterHelper();
+
+        generateJITFailReturn();
 
-        LinkBuffer patchBuffer(this, globalData->regexAllocator.poolForSize(size()), 0);
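+        // Executable memory allocation can fail; when it does, fall back to the interpreter.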
+        LinkBuffer linkBuffer(*this, REGEXP_CODE_ID, JITCompilationCanFail);
+        if (linkBuffer.didFailToAllocate()) {
+            codeBlock.setFallBackWithFailureReason(JITFailureReason::ExecutableMemoryAllocationFailure);
+            return;
+        }
+
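+        // Link all near calls emitted for Unicode character reads to the shared tryReadUnicodeCharacter helper.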
+        if (!m_tryReadUnicodeCharacterCalls.isEmpty()) {
+            CodeLocationLabel tryReadUnicodeCharacterHelper = linkBuffer.locationOf(m_tryReadUnicodeCharacterEntry, NearCallPtrTag);
+
+            for (auto call : m_tryReadUnicodeCharacterCalls)
+                linkBuffer.link(call, tryReadUnicodeCharacterHelper);
+        }
 
-        for (unsigned i = 0; i < m_expressionState.m_backtrackRecords.size(); ++i)
-            patchBuffer.patch(m_expressionState.m_backtrackRecords[i].dataLabel, patchBuffer.locationOf(m_expressionState.m_backtrackRecords[i].backtrackLocation));
+        m_backtrackingState.linkDataLabels(linkBuffer, codeBlock);
 
-        jitObject.set(patchBuffer.finalizeCode());
-        jitObject.setFallBack(m_shouldFallBack);
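+        // Hand the finalized code to the YarrCodeBlock, tagged according to compile mode and character size.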
+        if (compileMode == MatchOnly) {
+            if (m_charSize == Char8)
+                codeBlock.set8BitCodeMatchOnly(FINALIZE_CODE(linkBuffer, ptrTag(YarrMatchOnly8BitPtrTag, &codeBlock), "Match-only 8-bit regular expression"));
+            else
+                codeBlock.set16BitCodeMatchOnly(FINALIZE_CODE(linkBuffer, ptrTag(YarrMatchOnly16BitPtrTag, &codeBlock), "Match-only 16-bit regular expression"));
+        } else {
+            if (m_charSize == Char8)
+                codeBlock.set8BitCode(FINALIZE_CODE(linkBuffer, ptrTag(Yarr8BitPtrTag, &codeBlock), "8-bit regular expression"));
+            else
+                codeBlock.set16BitCode(FINALIZE_CODE(linkBuffer, ptrTag(Yarr16BitPtrTag, &codeBlock), "16-bit regular expression"));
+        }
+        if (m_failureReason)
+            codeBlock.setFallBackWithFailureReason(*m_failureReason);
     }
 
 private:
+    VM* m_vm;
+
     YarrPattern& m_pattern;
-    bool m_shouldFallBack;
-    GenerationState m_expressionState;
+
+    YarrCodeBlock& m_codeBlock;
+    YarrCharSize m_charSize;
+
+    // Used to detect regular expression constructs that are not currently
+    // supported in the JIT; fall back to the interpreter when this is detected.
+    std::optional<JITFailureReason> m_failureReason;
+
+    bool m_decodeSurrogatePairs;
+    bool m_unicodeIgnoreCase;
+    CanonicalMode m_canonicalMode;
+#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
+    bool m_containsNestedSubpatterns;
+    ParenContextSizes m_parenContextSizes;
+#endif
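+    // Failure exits collected during generation (see generateJITFailReturn()) and the calls to
+    // the out-of-line tryReadUnicodeCharacter helper, linked up in compile().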
+    JumpList m_abortExecution;
+    JumpList m_hitMatchLimit;
+    Vector<Call> m_tryReadUnicodeCharacterCalls;
+    Label m_tryReadUnicodeCharacterEntry;
+
+    // The regular expression expressed as a linear sequence of operations.
+    Vector<YarrOp, 128> m_ops;
+
+    // This records the current input offset being applied due to the current
+    // set of alternatives we are nested within. E.g. when matching the
+    // character 'b' within the regular expression /abc/, we will know that
+    // the minimum size for the alternative is 3, checked upon entry to the
+    // alternative, and that 'b' is at offset 1 from the start, and as such
+    // when matching 'b' we need to apply an offset of -2 to the load.
+    //
+    // FIXME: This should go away. Rather than tracking this value throughout
+    // code generation, we should gather this information up front & store it
+    // on the YarrOp structure.
+    Checked<unsigned> m_checkedOffset;
+
+    // This class records state whilst generating the backtracking path of code.
+    BacktrackingState m_backtrackingState;
 };
 
-void jitCompile(YarrPattern& pattern, JSGlobalData* globalData, YarrCodeBlock& jitObject)
+static void dumpCompileFailure(JITFailureReason failure)
 {
-    YarrGenerator(pattern).compile(globalData, jitObject);
+    switch (failure) {
+    case JITFailureReason::DecodeSurrogatePair:
+        dataLog("Can't JIT a pattern decoding surrogate pairs\n");
+        break;
+    case JITFailureReason::BackReference:
+        dataLog("Can't JIT a pattern containing back references\n");
+        break;
+    case JITFailureReason::VariableCountedParenthesisWithNonZeroMinimum:
+        dataLog("Can't JIT a pattern containing a variable counted parenthesis with a non-zero minimum\n");
+        break;
+    case JITFailureReason::ParenthesizedSubpattern:
+        dataLog("Can't JIT a pattern containing parenthesized subpatterns\n");
+        break;
+    case JITFailureReason::NonGreedyParenthesizedSubpattern:
+        dataLog("Can't JIT a pattern containing non-greedy parenthesized subpatterns\n");
+        break;
+    case JITFailureReason::ExecutableMemoryAllocationFailure:
+        dataLog("Can't JIT because of failure of allocation of executable memory\n");
+        break;
+    }
 }
 
-int execute(YarrCodeBlock& jitObject, const UChar* input, unsigned start, unsigned length, int* output)
+void jitCompile(YarrPattern& pattern, YarrCharSize charSize, VM* vm, YarrCodeBlock& codeBlock, YarrJITCompileMode mode)
 {
-    return jitObject.execute(input, start, length, output);
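+    // The compile mode is a template parameter of YarrGenerator, so select the matching instantiation here.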
+    if (mode == MatchOnly)
+        YarrGenerator<MatchOnly>(vm, pattern, codeBlock, charSize).compile();
+    else
+        YarrGenerator<IncludeSubpatterns>(vm, pattern, codeBlock, charSize).compile();
+
+    if (auto failureReason = codeBlock.failureReason()) {
+        if (Options::dumpCompiledRegExpPatterns())
+            dumpCompileFailure(*failureReason);
+    }
 }
 
 }}